| _id (stringlengths 64–64) | repository (stringlengths 6–84) | name (stringlengths 4–110) | content (stringlengths 0–248k) | license (null) | download_url (stringlengths 89–454) | language (stringclasses, 7 values) | comments (stringlengths 0–74.6k) | code (stringlengths 0–248k) |
---|---|---|---|---|---|---|---|---|
5e4707c29e7e401ba026bf071bcdac50f3f668c2f0c813bd435e630446ad27eb | haskus/packages | Helper.hs | {-# LANGUAGE TypeApplications #-}
{-# LANGUAGE DataKinds #-}
module Haskus.Binary.Bits.Helper
( bitOffset
, byteOffset
)
where
import Haskus.Binary.Bits.Shift
import Haskus.Binary.Bits.Mask
-- | Compute bit offset (equivalent to x `mod` 8 but faster)
bitOffset :: Word -> Word
{-# INLINABLE bitOffset #-}
bitOffset n = mask @3 n
-- | Compute byte offset (equivalent to x `div` 8 but faster)
byteOffset :: Word -> Word
{-# INLINABLE byteOffset #-}
byteOffset n = n `uncheckedShiftR` 3
| null | https://raw.githubusercontent.com/haskus/packages/40ea6101cea84e2c1466bc55cdb22bed92f642a2/haskus-binary/src/lib/Haskus/Binary/Bits/Helper.hs | haskell |
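A minimal usage sketch for the two helpers above (an editorial addition, not part of the haskus file; it assumes GHC with the haskus-binary package installed):

```haskell
-- Editorial example: spot-checks the `mod`/`div` equivalences claimed
-- in the Haddock comments of Haskus.Binary.Bits.Helper.
module Main (main) where

import Haskus.Binary.Bits.Helper (bitOffset, byteOffset)

main :: IO ()
main = do
  let ns = [0, 7, 8, 9, 70] :: [Word]
  print (map bitOffset ns)   -- [0,7,0,1,6], i.e. n `mod` 8 (low 3 bits)
  print (map byteOffset ns)  -- [0,0,1,1,8], i.e. n `div` 8 (shift right by 3)
  print (all (\n -> bitOffset n == n `mod` 8 && byteOffset n == n `div` 8) ns)  -- True
```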
42c21fca7e344a81a1792e33cc8308ed1f50f61afe102252dd81e6ba88f94d8f | avsm/platform | breakout.ml | (*---------------------------------------------------------------------------
   Copyright (c) 2009 Daniel C. BΓΌnzli. All rights reserved.
   Distributed under the ISC license, see terms at the end of the file.
   %%NAME%% %%VERSION%%
  ---------------------------------------------------------------------------*)
(* Breakout clone. *)
open React
module Log : sig (* Logs values, signals and events to stderr. *)
val init : unit -> unit
val value : (Format.formatter -> 'a -> unit) -> string -> 'a -> unit
val e : (Format.formatter -> 'a -> unit) -> string -> 'a event -> 'a event
val s : (Format.formatter -> 'a -> unit) -> string -> 'a signal -> 'a signal
end = struct
let init () =
let t = Unix.gettimeofday () in
let tm = Unix.localtime t in
Format.eprintf
"\x1B[2J\x1B[H\x1B[7m@[>> %04d-%02d-%02d %02d:%02d:%02d <<@]\x1B[0m@."
(tm.Unix.tm_year + 1900) (tm.Unix.tm_mon + 1) tm.Unix.tm_mday
tm.Unix.tm_hour tm.Unix.tm_min tm.Unix.tm_sec
let value pp name v = Format.eprintf "@[<hov 2>%s =@ %a@]@." name pp v
let e pp name e = E.trace (value pp name) e
let s pp name s = S.trace (value pp name) s
end
module V2 : sig (* Vectors. *)
type t
val v : float -> float -> t
val o : t
val ex : t
val ey : t
val x : t -> float
val y : t -> float
val add : t -> t -> t
val sub : t -> t -> t
val neg : t -> t
val smul : float -> t -> t
val dot : t -> t -> float
val to_ints : t -> int * int
val print : Format.formatter -> t -> unit
end = struct
type t = { x : float; y : float }
let v x y = { x = x; y = y }
let o = v 0. 0.
let ex = v 1. 0.
let ey = v 0. 1.
let x p = p.x
let y p = p.y
let add p p' = v (p.x +. p'.x) (p.y +. p'.y)
let sub p p' = v (p.x -. p'.x) (p.y -. p'.y)
let neg p = v (-. p.x) (-. p.y)
let smul s p = v (s *. p.x) (s *. p.y)
let dot p p' = p.x *. p'.x +. p.y *. p'.y
let to_ints p = (truncate p.x, truncate p.y)
let print pp p = Format.fprintf pp "(%F,%F)" p.x p.y
end
module Rect : sig (* Rectangles. *)
type t
val create : V2.t -> V2.t -> t (* lower left corner and extents. *)
val empty : t
val o : t -> V2.t
val size : t -> V2.t
val xmin : t -> float
val xmax : t -> float
val ymin : t -> float
val ymax : t -> float
val print : Format.formatter -> t -> unit
end = struct
type t = V2.t * V2.t
let create o size = o, size
let empty = V2.o, V2.o
let o (o, s) = o
let size (_, s) = s
let xmin (o, _) = V2.x o
let xmax (o, s) = V2.x o +. V2.x s
let ymin (o, _) = V2.y o
let ymax (o, s) = V2.y o +. V2.y s
let print pp (o, s) = Format.fprintf pp "%a %a" V2.print o V2.print s
end
module Draw : sig (* Draw with ANSI escape sequences. *)
val frame : Rect.t
val init : unit -> unit
val clear : unit -> unit
val flush : unit -> unit
val text : ?center:bool -> ?color:int -> V2.t -> string -> unit
val rect : ?color:int -> Rect.t -> unit
val beep : unit -> unit
end = struct
let pr = Printf.printf
let frame = Rect.create (V2.v 1. 1.) (V2.v 80. 24.)
let clear () = pr "\x1B[47m\x1B[2J"
let flush () = pr "%!"
let reset () = clear (); pr "\x1Bc"; flush ()
let init () =
pr "\x1B[H\x1B[7l\x1B[?25l"; clear (); flush ();
at_exit (reset)
let text ?(center = true) ?(color = 30) pos str =
let x, y = V2.to_ints pos in
let x = if center then x - (String.length str) / 2 else x in
pr ("\x1B[%d;%df\x1B[47;%dm%s") y x color str
let rect ?(color = 40) r =
let (x, y) = V2.to_ints (Rect.o r) in
let (w, h) = V2.to_ints (Rect.size r) in
pr "\x1B[%dm" color;
for y' = y to y + h - 1 do
pr "\x1B[%d;%df" y' x; for i = 1 to w do pr " " done
done
let beep () = pr "\x07%!"
end
module Input : sig (* Keyboard and time events. *)
val init : unit -> unit
val time : float event (* time event. *)
val key : char event (* keyboard event. *)
val gather : unit -> unit
end = struct
let init () = (* suppress input echo and buffering. *)
let reset tattr () = Unix.tcsetattr Unix.stdin Unix.TCSAFLUSH tattr in
let attr = Unix.tcgetattr Unix.stdin in
let attr' = { attr with Unix.c_echo = false; c_icanon = false } in
let quit _ = exit 0 in
at_exit (reset attr);
Unix.tcsetattr Unix.stdin Unix.TCSANOW attr';
Sys.set_signal Sys.sigquit (Sys.Signal_handle quit);
Sys.set_signal Sys.sigint (Sys.Signal_handle quit);
Sys.set_signal Sys.sigfpe (Sys.Signal_handle quit)
let time, send_time = E.create ()
let key, send_key = E.create ()
let gather () = (* updates primitive events. *)
let c = Bytes.create 1 in
let i = Unix.stdin in
let input_char i = ignore (Unix.read i c 0 1); Bytes.get c 0 in
let dt = 0.1 in
while true do
if Unix.select [i] [] [] dt = ([i], [], []) then send_key (input_char i);
send_time (Unix.gettimeofday ());
done
end
module Game : sig (* Game simulation and logic. *)
type t
val create : Rect.t -> float event -> [`Left | `Right ] event -> t
val walls : t -> Rect.t
val ball : t -> Rect.t signal
val paddle : t -> Rect.t signal
val bricks : t -> Rect.t list signal
val brick_count : t -> int signal
val collisions : t -> unit event
val outcome : t -> [> `Game_over of int ] event
end = struct
type t =
{ walls : Rect.t;
ball : Rect.t signal;
paddle : Rect.t signal;
bricks : Rect.t list signal;
brick_count : int signal;
collisions : unit event }
(* Collisions *)
let ctime c r d n = Some (n, (r -. c) /. d)
let cmin c r d n = if r <= c && d < 0. then ctime c r d n else None
let cmax c r d n = if r >= c && d > 0. then ctime c r d n else None
let cinter cmin cmax rmin rmax d n = match d with
| d when d < 0. ->
if rmax -. d < cmin then None else (* moving apart. *)
if rmin -. d >= cmax then
if rmin <= cmax then ctime cmax rmin d n else None
else Some (V2.o, 0.) (* initially overlapping. *)
| d when d > 0. ->
if rmin -. d > cmax then None else (* moving apart. *)
if rmax -. d <= cmin then
if rmax >= cmin then ctime cmin rmax d (V2.neg n) else None
else Some (V2.o, 0.) (* initially overlapping. *)
| _ (* d = 0. *) ->
if cmax < rmin || rmax < cmin then None else Some (V2.o, 0.)
let crect c r d = (* r last moved by d relatively to c. *)
let inter min max c r d n = cinter (min c) (max c) (min r) (max r) d n in
match inter Rect.xmin Rect.xmax c r (V2.x d) V2.ex with
| None -> None
| Some (_, t as x) ->
match inter Rect.ymin Rect.ymax c r (V2.y d) V2.ey with
| None -> None
| Some (_, t' as y) ->
let _, t as c = if t > t' then x else y in
if t = 0. then None else Some c
(* Game objects *)
let moving_rect pos size = S.map (fun pos -> Rect.create pos size) pos
let ball walls dt collisions =
let size = V2.v 2. 1. in
let x0 = 0.5 *. (Rect.xmax walls -. V2.x size) in
let p0 = V2.v x0 (0.5 *. Rect.ymax walls) in
let v0 =
let sign = if Random.bool () then -1. else 1. in
let angle = (sign *. (10. +. Random.float 60.) *. 3.14) /. 180. in
let speed = 18. +. Random.float 2. in
V2.v (speed *. sin angle) (speed *. cos angle)
in
let v =
let bounce (n, _) v = V2.sub v (V2.smul (2. *. V2.dot n v) n) in
S.accum (E.map bounce collisions) v0
in
let dp = S.sample (fun dt v -> V2.smul dt v) dt v in
let p =
let pos p0 = S.fold V2.add p0 dp in
let adjust (_, pc) = pos pc in (* visually sufficient. *)
S.switch (S.hold ~eq:( == ) (pos p0) (E.map adjust collisions))
in
moving_rect p size, dp
let walls walls (ball, dp) =
let left = Rect.xmin walls in
let right = Rect.xmax walls in
let top = Rect.ymin walls in
let collisions =
let collide dp ball =
let c = match cmin left (Rect.xmin ball) (V2.x dp) V2.ex with
| Some _ as c -> c
| None ->
match cmax right (Rect.xmax ball) (V2.x dp) (V2.neg V2.ex) with
| Some _ as c -> c
| None -> cmin top (Rect.ymin ball) (V2.y dp) V2.ey
in
match c with
| None -> None
| Some (n, t) -> Some (n, V2.sub (Rect.o ball) (V2.smul t dp))
in
E.fmap (fun x -> x) (S.sample collide dp ball)
in
walls, collisions
let paddle walls moves (ball, dp) =
let speed = 4. in
let size = V2.v 9. 1. in
let xmin = Rect.xmin walls in
let xmax = Rect.xmax walls -. (V2.x size) in
let p0 = V2.v (0.5 *. xmax) (Rect.ymax walls -. 2.) in
let control p = function
| `Left ->
let x' = V2.x p -. speed in
if x' < xmin then V2.v xmin (V2.y p) else V2.v x' (V2.y p)
| `Right ->
let x' = V2.x p +. speed in
if x' > xmax then V2.v xmax (V2.y p) else V2.v x' (V2.y p)
in
let paddle = moving_rect (S.fold control p0 moves) size in
let collisions =
let collide dp (ball, paddle) = match crect paddle ball dp with
| None -> None
| Some (n, t) -> Some (n, V2.sub (Rect.o ball) (V2.smul t dp))
in
E.fmap (fun x -> x) (S.sample collide dp (S.Pair.pair ball paddle))
in
paddle, collisions
let bricks walls (ball, dp) =
let bricks0 =
let size = Rect.size walls in
let w = V2.x size in
let h = V2.y size /. 4. in (* use 1/4 for bricks. *)
let bw, bh = (w /. 8.), h /. 3. in
let x_count = truncate (w /. bw) in
let y_count = truncate (h /. bh) in
let acc = ref [] in
for x = 0 to x_count - 1 do
for y = 0 to y_count - 1 do
let x = Rect.xmin walls +. (float x) *. bw in
let y = Rect.ymin walls +. 2. *. bh +. (float y) *. bh in
acc := Rect.create (V2.v x y) (V2.v bw bh) :: !acc
done
done;
!acc
in
let define bricks =
let cresult =
let collide dp (ball, bricks) =
let rec aux c acc bricks ball dp = match bricks with
| [] -> c, List.rev acc
| b :: bricks' -> match crect b ball dp with
| None -> aux c (b :: acc) bricks' ball dp
| c -> aux c acc bricks' ball dp
in
match aux None [] bricks ball dp with
| None, bl -> None, bl
| Some (n, t), bl -> Some (n, V2.sub (Rect.o ball) (V2.smul t dp)),bl
in
S.sample collide dp (S.Pair.pair ball bricks)
in
let collisions = E.fmap (fun (c, _) -> c) cresult in
let bricks_e = E.map (fun (_, bl) -> fun _ -> bl) cresult in
let bricks' = S.accum bricks_e bricks0 in
bricks', (bricks', collisions)
in
S.fix bricks0 define
(* Game data structure, links game objects *)
let create w dt moves =
let define collisions =
let ball = ball w dt collisions in
let walls, wcollisions = walls w ball in
let paddle, pcollisions = paddle w moves ball in
let bricks, bcollisions = bricks w ball in
let collisions' = E.select [pcollisions; wcollisions; bcollisions] in
let g =
{ walls = walls;
ball = S.dismiss collisions' Rect.empty (fst ball);
paddle = paddle;
bricks = bricks;
brick_count = S.map List.length bricks;
collisions = E.stamp collisions' () }
in
collisions', g
in
E.fix define
let walls g = g.walls
let ball g = g.ball
let paddle g = g.paddle
let bricks g = g.bricks
let brick_count g = g.brick_count
let collisions g = g.collisions
let outcome g = (* game outcome logic. *)
let no_bricks = S.map (fun l -> l = 0) g.brick_count in
let miss = S.map (fun b -> Rect.ymax b >= Rect.ymax g.walls) g.ball in
let game_over = S.changes (S.Bool.( || ) no_bricks miss) in
S.sample (fun _ l -> `Game_over l) game_over g.brick_count
end
module Render = struct
let str = Printf.sprintf
let str_bricks count = if count = 1 then "1 brick" else str "%d bricks" count
let intro title_color = (* draws the splash screen. *)
let x = 0.5 *. Rect.xmax Draw.frame in
let y = 0.5 *. Rect.ymax Draw.frame in
Draw.clear ();
Draw.text ~color:title_color (V2.v x (y -. 2.)) "BREAKOUT";
Draw.text ~color:30 (V2.v x y)
"Hit 'a' and 'd' to move the paddle, 'q' to quit";
Draw.text ~color:31 (V2.v x (y +. 2.)) "Hit spacebar to start the game";
Draw.flush ()
let game_init m = (* draws game init message. *)
let x = 0.5 *. Rect.xmax Draw.frame in
let y = 0.5 *. Rect.ymax Draw.frame in
Draw.text ~color:31 (V2.v x (y +. 2.)) m;
Draw.flush ()
let game ball paddle bricks bcount = (* draws the game state. *)
let bl = V2.v (Rect.xmin Draw.frame) (Rect.ymax Draw.frame -. 1.) in
Draw.clear ();
List.iter (Draw.rect ~color:40) bricks;
Draw.rect ~color:44 paddle;
Draw.rect ~color:41 ball;
Draw.text ~center:false ~color:30 bl (str "%s left" (str_bricks bcount));
Draw.flush ()
let game_over outcome = (* draws the game over screen. *)
let x = 0.5 *. Rect.xmax Draw.frame in
let y = 0.5 *. Rect.ymax Draw.frame in
let outcome_msg =
if outcome = 0 then "Congratulations, no bricks left" else
str "%s left, you can do better" (str_bricks outcome)
in
Draw.text ~color:34 (V2.v x (y +. 2.)) "GAME OVER";
Draw.text ~color:30 (V2.v x (y +. 4.)) outcome_msg;
Draw.text ~color:31 (V2.v x (y +. 6.)) "Hit spacebar to start again";
Draw.flush ()
end
module Ui : sig
val init : unit -> unit event
end = struct
let key k = E.fmap (fun c -> if c = k then Some () else None) Input.key
let quit () = E.once (E.stamp (key 'q') `Quit)
let new_game () = E.once (E.stamp (key ' ') `Game)
let wait_until ?stop e = match stop with
| Some s -> E.map (fun v -> s (); v) (E.once e)
| None -> E.once e
let intro () =
let color_swap = E.stamp Input.time (fun c -> if c = 31 then 34 else 31) in
let output = S.l1 Render.intro (S.accum color_swap 34) in
let stop () = S.stop output in
wait_until (E.select [quit (); new_game ()]) ~stop
let game () =
let run = S.hold false (E.once (E.stamp (key ' ') true)) in
let moves =
let move = function 'a' -> Some `Left | 'd' -> Some `Right | _ -> None in
E.on run (E.fmap move Input.key)
in
let dt = E.on run (E.diff ( -. ) Input.time) in
let g = Game.create Draw.frame dt moves in
let outcome = Game.outcome g in
let sound = E.map Draw.beep (Game.collisions g) in
let output = S.l4 Render.game (Game.ball g) (Game.paddle g) (Game.bricks g)
(Game.brick_count g)
in
let stop () = E.stop sound; S.stop output in
Render.game_init "Hit spacebar to start the game";
wait_until (E.select [quit (); outcome]) ~stop
let game_over outcome =
Render.game_over outcome;
wait_until (E.select [quit (); new_game ()])
let init () =
let define ui =
let display ui =
Gc.full_major (); (* cleanup game objects. *)
match ui with
| `Intro -> intro ()
| `Game -> game ()
| `Game_over outcome -> game_over outcome
| `Quit -> exit 0
in
let ui' = E.switch (display `Intro) (E.map display ui) in
ui', ui'
in
E.stamp (E.fix define) ()
end
let main () =
Random.self_init ();
Log.init ();
Draw.init ();
Input.init ();
let ui = Ui.init () in
Input.gather ();
ui
let ui = main () (* keep a ref. to avoid g.c. *)
(*---------------------------------------------------------------------------
   Copyright (c) 2009 Daniel C. BΓΌnzli
   Permission to use, copy, modify, and/or distribute this software for any
   purpose with or without fee is hereby granted, provided that the above
   copyright notice and this permission notice appear in all copies.
   THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
   WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
   MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
   ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
   WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
   ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
   OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
  ---------------------------------------------------------------------------*)
| null | https://raw.githubusercontent.com/avsm/platform/b254e3c6b60f3c0c09dfdcde92eb1abdc267fa1c/duniverse/react.1.2.1%2Bdune/test/breakout.ml | ocaml |
-behavior(supervisor).
-export([start_link/0, start_fsm/1]).
-export([init/1]).
-ignore_xref([start_link/0, init/1]).
start_link() ->
supervisor:start_link({local, ?MODULE}, ?MODULE, []).
init([]) ->
CoverageFSM = {undefined,
{tanodb_coverage_fsm, start_link, []},
temporary, 5000, worker, [tanodb_coverage_fsm]},
{ok, {{simple_one_for_one, 10, 10}, [CoverageFSM]}}.
start_fsm(Args) ->
supervisor:start_child(?MODULE, Args).
| null | https://raw.githubusercontent.com/marianoguerra/tanodb/7b8bb0ddc0fd1e67b2522cff8a0dac40b412acdb/apps/tanodb/src/tanodb_coverage_fsm_sup.erl | erlang | -module(tanodb_coverage_fsm_sup).
-behavior(supervisor).
-export([start_link/0, start_fsm/1]).
-export([init/1]).
-ignore_xref([start_link/0, init/1]).
start_link() ->
supervisor:start_link({local, ?MODULE}, ?MODULE, []).
init([]) ->
CoverageFSM = {undefined,
{tanodb_coverage_fsm, start_link, []},
temporary, 5000, worker, [tanodb_coverage_fsm]},
{ok, {{simple_one_for_one, 10, 10}, [CoverageFSM]}}.
start_fsm(Args) ->
supervisor:start_child(?MODULE, Args).
|
|
33035510b9f3deadd456298eba772aa31211fada3e4ffe1e9c50eb5802978dd1 | ghc/testsuite | T5329.hs | {-# LANGUAGE UnicodeSyntax #-}
{-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
module T5329 where
data PZero
data PSucc p
data Peano n where
PZero β· Peano PZero
PSucc β· IsPeano p β Peano p β Peano (PSucc p)
class IsPeano n where
peano β· Peano n
instance IsPeano PZero where
peano = PZero
instance IsPeano p β IsPeano (PSucc p) where
peano = PSucc peano
class (n ~ PSucc (PPred n)) β PHasPred n where
type PPred n
instance PHasPred (PSucc p) where
type PPred (PSucc p) = p
pPred β· Peano (PSucc p) β Peano p
pPred (PSucc p) = p
infixl 6 :+:
class (IsPeano n, IsPeano m, IsPeano (n :+: m), (n :+: m) ~ (m :+: n))
β PAdd n m where
type n :+: m
instance PAdd PZero PZero where
type PZero :+: PZero = PZero
instance IsPeano p β PAdd PZero (PSucc p) where
type PZero :+: (PSucc p) = PSucc p
instance IsPeano p β PAdd (PSucc p) PZero where
type (PSucc p) :+: PZero = PSucc p
instance (IsPeano n, IsPeano m, PAdd n m) β PAdd (PSucc n) (PSucc m) where
type (PSucc n) :+: (PSucc m) = PSucc (PSucc (n :+: m))
data PAddResult n m r where
PAddResult β· (PAdd n m, PAdd m n, (n :+: m) ~ r)
β PAddResult n m r
pAddLeftZero β· β n . IsPeano n β PAddResult PZero n n
pAddLeftZero = case peano β· Peano n of
PZero β PAddResult
PSucc _ β PAddResult
pAddRightZero β· β n . IsPeano n β PAddResult n PZero n
pAddRightZero = case peano β· Peano n of
PZero β PAddResult
PSucc _ β PAddResult
data PAddSucc n m where
PAddSucc β· (PAdd n m, PAdd m n,
PAdd (PSucc n) m, PAdd m (PSucc n),
PAdd n (PSucc m), PAdd (PSucc m) n,
(PSucc n :+: m) ~ PSucc (n :+: m),
(n :+: PSucc m) ~ PSucc (n :+: m))
β PAddSucc n m
pAddSucc β· β n m . (IsPeano n, IsPeano m) β PAddSucc n m
pAddSucc = case (peano β· Peano n, peano β· Peano m) of
(PZero, PZero) β PAddSucc
(PZero, PSucc _) β case pAddLeftZero β· PAddResult n (PPred m) (PPred m) of
PAddResult β PAddSucc
(PSucc _, PZero) β case pAddRightZero β· PAddResult (PPred n) m (PPred n) of
PAddResult β PAddSucc
(PSucc _, PSucc _) β case pAddSucc β· PAddSucc (PPred n) (PPred m) of
PAddSucc β PAddSucc
data PAdd2 n m where
PAdd2 β· (PAdd n m, PAdd m n) β PAdd2 n m
pAdd2 β· β n m . (IsPeano n, IsPeano m) β PAdd2 n m
pAdd2 = case (peano β· Peano n, peano β· Peano m) of
(PZero, PZero) β PAdd2
(PZero, PSucc _) β PAdd2
(PSucc _, PZero) β PAdd2
(PSucc _, PSucc _) β case pAdd2 β· PAdd2 (PPred n) (PPred m) of
PAdd2 β PAdd2
data PAdd3 n m k where
PAdd3 β· (PAdd n m, PAdd m k, PAdd m n, PAdd k m, PAdd n k, PAdd k n,
PAdd (n :+: m) k, PAdd k (m :+: n),
PAdd n (m :+: k), PAdd (m :+: k) n,
PAdd (n :+: k) m, PAdd m (n :+: k),
((n :+: m) :+: k) ~ (n :+: (m :+: k)),
(m :+: (n :+: k)) ~ ((m :+: n) :+: k))
β PAdd3 n m k
pAdd3 β· β n m k . (IsPeano n, IsPeano m, IsPeano k) β PAdd3 n m k
pAdd3 = case (peano β· Peano n, peano β· Peano m, peano β· Peano k) of
(PZero, PZero, PZero) β PAdd3
(PZero, PZero, PSucc _) β PAdd3
(PZero, PSucc _, PZero) β PAdd3
(PSucc _, PZero, PZero) β PAdd3
(PZero, PSucc _, PSucc _) β
case pAdd2 β· PAdd2 (PPred m) (PPred k) of
PAdd2 β PAdd3
(PSucc _, PZero, PSucc _) β
case pAdd2 β· PAdd2 (PPred n) (PPred k) of
PAdd2 β PAdd3
(PSucc _, PSucc _, PZero) β
case pAdd2 β· PAdd2 (PPred n) (PPred m) of
PAdd2 β PAdd3
(PSucc _, PSucc _, PSucc _) β
case pAdd3 β· PAdd3 (PPred n) (PPred m) (PPred k) of
PAdd3 β case pAddSucc β· PAddSucc (PPred n :+: PPred m) (PPred k) of
PAddSucc β case pAddSucc β· PAddSucc (PPred n :+: PPred k) (PPred m) of
PAddSucc β case pAddSucc β· PAddSucc (PPred m :+: PPred k) (PPred n) of
PAddSucc β PAdd3
| null | https://raw.githubusercontent.com/ghc/testsuite/998a816ae89c4fd573f4abd7c6abb346cf7ee9af/tests/simplCore/should_compile/T5329.hs | haskell | # LANGUAGE EmptyDataDecls #
# LANGUAGE GADTs # | # LANGUAGE UnicodeSyntax #
# LANGUAGE TypeOperators #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeFamilies #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
module T5329 where
data PZero
data PSucc p
data Peano n where
PZero β· Peano PZero
PSucc β· IsPeano p β Peano p β Peano (PSucc p)
class IsPeano n where
peano β· Peano n
instance IsPeano PZero where
peano = PZero
instance IsPeano p β IsPeano (PSucc p) where
peano = PSucc peano
class (n ~ PSucc (PPred n)) β PHasPred n where
type PPred n
instance PHasPred (PSucc p) where
type PPred (PSucc p) = p
pPred β· Peano (PSucc p) β Peano p
pPred (PSucc p) = p
infixl 6 :+:
class (IsPeano n, IsPeano m, IsPeano (n :+: m), (n :+: m) ~ (m :+: n))
β PAdd n m where
type n :+: m
instance PAdd PZero PZero where
type PZero :+: PZero = PZero
instance IsPeano p β PAdd PZero (PSucc p) where
type PZero :+: (PSucc p) = PSucc p
instance IsPeano p β PAdd (PSucc p) PZero where
type (PSucc p) :+: PZero = PSucc p
instance (IsPeano n, IsPeano m, PAdd n m) β PAdd (PSucc n) (PSucc m) where
type (PSucc n) :+: (PSucc m) = PSucc (PSucc (n :+: m))
data PAddResult n m r where
PAddResult β· (PAdd n m, PAdd m n, (n :+: m) ~ r)
β PAddResult n m r
pAddLeftZero β· β n . IsPeano n β PAddResult PZero n n
pAddLeftZero = case peano β· Peano n of
PZero β PAddResult
PSucc _ β PAddResult
pAddRightZero β· β n . IsPeano n β PAddResult n PZero n
pAddRightZero = case peano β· Peano n of
PZero β PAddResult
PSucc _ β PAddResult
data PAddSucc n m where
PAddSucc β· (PAdd n m, PAdd m n,
PAdd (PSucc n) m, PAdd m (PSucc n),
PAdd n (PSucc m), PAdd (PSucc m) n,
(PSucc n :+: m) ~ PSucc (n :+: m),
(n :+: PSucc m) ~ PSucc (n :+: m))
β PAddSucc n m
pAddSucc β· β n m . (IsPeano n, IsPeano m) β PAddSucc n m
pAddSucc = case (peano β· Peano n, peano β· Peano m) of
(PZero, PZero) β PAddSucc
(PZero, PSucc _) β case pAddLeftZero β· PAddResult n (PPred m) (PPred m) of
PAddResult β PAddSucc
(PSucc _, PZero) β case pAddRightZero β· PAddResult (PPred n) m (PPred n) of
PAddResult β PAddSucc
(PSucc _, PSucc _) β case pAddSucc β· PAddSucc (PPred n) (PPred m) of
PAddSucc β PAddSucc
data PAdd2 n m where
PAdd2 β· (PAdd n m, PAdd m n) β PAdd2 n m
pAdd2 β· β n m . (IsPeano n, IsPeano m) β PAdd2 n m
pAdd2 = case (peano β· Peano n, peano β· Peano m) of
(PZero, PZero) β PAdd2
(PZero, PSucc _) β PAdd2
(PSucc _, PZero) β PAdd2
(PSucc _, PSucc _) β case pAdd2 β· PAdd2 (PPred n) (PPred m) of
PAdd2 β PAdd2
data PAdd3 n m k where
PAdd3 β· (PAdd n m, PAdd m k, PAdd m n, PAdd k m, PAdd n k, PAdd k n,
PAdd (n :+: m) k, PAdd k (m :+: n),
PAdd n (m :+: k), PAdd (m :+: k) n,
PAdd (n :+: k) m, PAdd m (n :+: k),
((n :+: m) :+: k) ~ (n :+: (m :+: k)),
(m :+: (n :+: k)) ~ ((m :+: n) :+: k))
β PAdd3 n m k
pAdd3 β· β n m k . (IsPeano n, IsPeano m, IsPeano k) β PAdd3 n m k
pAdd3 = case (peano β· Peano n, peano β· Peano m, peano β· Peano k) of
(PZero, PZero, PZero) β PAdd3
(PZero, PZero, PSucc _) β PAdd3
(PZero, PSucc _, PZero) β PAdd3
(PSucc _, PZero, PZero) β PAdd3
(PZero, PSucc _, PSucc _) β
case pAdd2 β· PAdd2 (PPred m) (PPred k) of
PAdd2 β PAdd3
(PSucc _, PZero, PSucc _) β
case pAdd2 β· PAdd2 (PPred n) (PPred k) of
PAdd2 β PAdd3
(PSucc _, PSucc _, PZero) β
case pAdd2 β· PAdd2 (PPred n) (PPred m) of
PAdd2 β PAdd3
(PSucc _, PSucc _, PSucc _) β
case pAdd3 β· PAdd3 (PPred n) (PPred m) (PPred k) of
PAdd3 β case pAddSucc β· PAddSucc (PPred n :+: PPred m) (PPred k) of
PAddSucc β case pAddSucc β· PAddSucc (PPred n :+: PPred k) (PPred m) of
PAddSucc β case pAddSucc β· PAddSucc (PPred m :+: PPred k) (PPred n) of
PAddSucc β PAdd3
|
a863f019ac624df1f8b82e8f7622e6ce59f5e17b3d96bfa8c01c5801ce2eabb7 | kevinmershon/copy-trader | event.clj | (ns copy-trader.websocket.event)
(defmulti on-event
(fn [_uri {:keys [message-code _payload]}]
message-code))
(defmethod on-event :default
[_uri _msg]
:not-implemented)
;; TODO -- implement too-many-clients error handler
(def ONE-DAY
(* 1000 60 60 24))
| null | https://raw.githubusercontent.com/kevinmershon/copy-trader/3262a1b4a2dd7e145a5673b6e6c22f94ab4f4e72/src/clj/copy_trader/websocket/event.clj | clojure | (ns copy-trader.websocket.event)
(defmulti on-event
(fn [_uri {:keys [message-code _payload]}]
message-code))
(defmethod on-event :default
[_uri _msg]
:not-implemented)
TODO -- implement too - many - clients error handler
(def ONE-DAY
(* 1000 60 60 24))
|
|
328793b005a2fcab8660e842ad49e78ab7338ddb36f6ed2c2579a2d10f3c3f7a | ralsei/graphite | faceting.rkt | #lang racket
(require data-frame graphite rackunit
racket/runtime-path
"util.rkt")
(define-runtime-path facet-1-data "./test-data/facet-1.dat")
(define facet-1-df
(begin
(random-seed 888)
(let ([int-data (make-data-frame)]
[xs (build-vector 1000 (Ξ» (_) (* (random) 30)))]
[ys (build-vector 1000 (Ξ» (_) (* (random) 30)))]
[strats (for/vector ([_ (in-range 0 1000)]
[x (in-cycle (in-list '("a" "b" "c" "d" "e")))])
x)])
(df-add-series! int-data (make-series "x-var" #:data xs))
(df-add-series! int-data (make-series "y-var" #:data ys))
(df-add-series! int-data (make-series "stratify-on" #:data strats))
int-data)))
(define facet-1
(graph #:data facet-1-df
#:mapping (aes #:x "x-var" #:y "y-var" #:facet "stratify-on")
#:title "THIS IS A TITLE"
#:x-label "BOTTOM TEXT"
#:y-label "LEFT TEXT"
(points)))
(define-runtime-path facet-2-data "./test-data/facet-2.dat")
(define facet-2
(graph #:data facet-1-df
#:mapping (aes #:x "x-var" #:y "y-var" #:facet "stratify-on")
#:facet-wrap 5
(lines)))
(define-runtime-path facet-3-data "./test-data/facet-3.dat")
(define facet-3
(graph #:data facet-1-df
#:mapping (aes #:x "x-var" #:facet "stratify-on")
#:facet-wrap 1
(density)))
; stress test. this one takes a while to render
; OOMs the test server, so commented out unless running locally...
;; (define-runtime-path facet-4-data "./test-data/facet-4.dat")
;; (define facet-4
;; (graph #:data facet-1-df
;;          #:mapping (aes #:x "stratify-on" #:facet "x-var")
;;          #:width 10000
;;          #:height 10000
;; (bar)))
(module+ test
(check-draw-steps facet-1 facet-1-data)
(check-draw-steps facet-2 facet-2-data)
(check-draw-steps facet-3 facet-3-data))
; (check-draw-steps facet-4 facet-4-data))
| null | https://raw.githubusercontent.com/ralsei/graphite/303becd5036f9181f729a4c480dbf9fecb2a50bc/graphite-test/faceting.rkt | racket |
# ? (: [ clojure.spec : as s ]
: cljs [ cljs.spec : as s ] )
[clojure.spec.alpha :as s]
[snergly.util :as util]
[clojure.set :as set]
# ? (: [ clojure.spec.gen : as gen ]
; :cljs [cljs.spec.impl.gen :as gen])
[clojure.spec.gen.alpha :as gen]
))
;; If we don't want other namespaces to have to use the keywords from this
;; namespace to gather information from these maps, what other methods would
;; we need to provide?
;;
;; For snergly.algorithms:
;;
;; * grid, w: (g/grid-set-algorithm grid "algorithm-name")
;; * β cell, r: (g/cell-neighbor cell :east)
;; * cell, r: (g/cell-links cell)
;; * chgs, r: (g/changed-cells thing)
;; * dist, r: (g/max-dist distances)
;; * dist, r: (g/max-pos distances)
;; * dist, r: (g/origin distances)
;;
;; For snergly.image:
;;
;; * grid, r: (g/grid-rows grid) and (g/grid-cols grid)
;; or maybe (g/grid-size grid) ; returning a pair
;;
;; For snergly.animation:
;;
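;; (Editorial sketch, not in the original file: most of the read accessors
;; wished for above would be one-line wrappers over the namespaced keys;
;; the names below are the hypothetical g/... aliases from the list.)
(comment
  (defn cell-links [cell] (::links cell))
  (defn changed-cells [thing] (::changed-cells thing))
  (defn grid-rows [grid] (::rows grid))
  (defn grid-cols [grid] (::cols grid))
  (defn origin [distances] (::origin distances))
  (defn max-dist [distances] (::max-dist distances)))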
;; basic type constraints
(s/def ::cell-coord (s/with-gen nat-int?
#(s/gen (s/int-in 0 24))))
(s/def ::cell-position (s/tuple ::cell-coord ::cell-coord))
;; restrict grid sizes for testing, but allow larger grids in production.
(s/def ::grid-dimen (s/with-gen (s/and integer? #(>= % 2))
#(s/gen (s/int-in 2 25))))
;; cell
(declare make-cell cell-neighbors)
(s/def ::neighbor (s/nilable ::cell-position))
(s/def ::pos ::cell-position)
;; Every cell has a `max-pos` coordinate pair that refers to the cell
;; in the highest-numbered row and column (the lower right cell). This
;; is redundant, because it's a property of the grid to which the cell
;; belongs. But it is a very useful optimization to have it available
;; in the cell.
(s/def ::max-pos ::cell-position)
(s/def ::links (s/coll-of ::cell-position :kind set?))
(s/def ::cell (s/with-gen
(s/and
(s/keys :req [::pos ::max-pos ::links])
#(<= (first (::pos %)) (first (::max-pos %)))
#(<= (second (::pos %)) (second (::max-pos %)))
#(set/subset? (::links %) (set (cell-neighbors %))))
(gen/fmap (fn [[rs cs]]
(let [r (rand-int rs)
c (rand-int cs)]
(make-cell r c rs cs)))
(let [dgen (s/gen ::grid-dimen)]
(gen/tuple dgen dgen)))))
;; change tracking
(s/def ::changed-cells (s/nilable (s/coll-of ::cell-position :kind set?)))
(s/def ::with-changes (s/keys :req [::changed-cells]))
;; grid
(declare make-grid)
(s/def ::algorithm-name string?)
(s/def ::rows ::grid-dimen)
(s/def ::cols ::grid-dimen)
(s/def ::cells (s/every ::cell :kind vector?))
;; What are the other validity constraints that *could* be specified here?
;; * one cell for each [row, col] permutation
;; * cells are in row-major order
;; * cells all have proper ::max-pos
(s/def ::grid (s/with-gen
(s/and
(s/keys :req [::algorithm-name
::rows ::cols
::cells])
#(= (* (::rows %) (::cols %)) (count (::cells %))))
(gen/fmap (fn [[rs cs]] (make-grid rs cs))
(let [dgen (s/gen ::grid-dimen)]
(gen/tuple dgen dgen)))))
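;; (Editorial note: the row-major constraint listed above could be expressed
;; as one more predicate in the s/and, e.g.
;;   #(= (map ::pos (::cells %))
;;       (for [r (range (::rows %)) c (range (::cols %))] [r c]))
;; which is exactly the order in which make-grid builds ::cells.)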
;; distances
(s/def ::origin ::cell-position)
(s/def ::max-dist nat-int?)
;;(s/def ::dist-or-annot (s/or :origin (s/tuple #(= ::origin %) ::cell-position)
;; :max-pos (s/tuple #(= ::max-pos %) ::cell-position)
;; :max-dist (s/tuple #(= ::max-dist %) nat-int?)
;; :changes (s/tuple #(= ::changed-cells %) ::changed-cells)
;; :dist (s/tuple ::cell-position nat-int?)))
;;
;;(s/def ::distances (s/and (s/keys :req [::origin ::max-dist] :opt [::max-pos])
;; (s/coll-of ::dist-or-annot :type map?)))
;; I think if I write the right macro, I can replace ::dist-or-annot
;; and ::distances with this:
;;
;; (s/def ::distance-annotations (s/keys :req [::origin ::max-dist] :opt [::max-pos]))
;; (s/def ::distances (s/merged ::distance-annotations
;; ::with-changes
;; (s/map-of ::cell-position nat-int?)))
;;
;; (I might have to use (s/coll-of [s/tuple ::cell-position nat-int?])
;; instead of map/of.)
;;
;; But here's what they are working on:
;;
;; (s/def ::distances (s/key-cond
;;   keyword? (s/keys :req [::origin ::max-dist] :opt [::max-pos])
;;   vector?  (s/map-of ::cell-position nat-int?)))
;;
;; Much, much better!
;;
;; But! Five years later, it seems that never happened. And I don't think my
;; other option is really feasible. Probably the right solution is to change
;; the data structure: push the [::cell-position nat-int?] mappings down into
;; a sub-map, accessible via a key. (The obvious right name for that key
;; is ::distances, which means that the top-level map should be renamed to
;; something else, like perhaps ::distance-analysis. But for now I'll go with
;; ::cell-dists)
(s/def ::cell-dists (s/map-of ::cell-position nat-int?))
(s/def ::distances (s/and (s/keys :req [::origin ::max-dist ::cell-dists]
:opt [::max-pos])))
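;; (Editorial example of the reshaped structure; cf. make-distances below.)
(comment
  (make-distances [0 0])
  ;; => {::origin [0 0], ::max-dist 0,
  ;;     ::cell-dists {[0 0] 0}, ::changed-cells #{[0 0]}}
  )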
(defmacro fnp [name & body]
`(fn ~name [{:keys [args ret]}] ; deliberately capturing these
~@body))
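;; (Editorial note: fnp only saves the destructuring boilerplate, e.g.
;;   (fnp rows-correct (= (-> ret ::max-pos first) (-> args ::rows)))
;; expands to
;;   (fn rows-correct [{:keys [args ret]}]
;;     (= (-> ret ::max-pos first) (-> args ::rows))))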
(s/fdef make-cell
:args (s/and (s/cat :row ::cell-coord
:col ::cell-coord
:rows ::grid-dimen
:cols ::grid-dimen)
#(< (:row %) (:rows %))
#(< (:col %) (:cols %)))
:ret ::cell
;; First attempt.
;; Pro: concise, readable, idiomatic.
;; Con: when one fails, you have to inspect the code to determine which
;;  :fn (s/and #(= (-> c :ret ::pos first) (-> c :args :row))
;;             #(= (-> c :ret ::pos second) (-> c :args :col))
;;             #(= (-> c :ret ::max-pos first) (-> c :args :rows))
;;             #(= (-> c :ret ::max-pos second) (-> c :args :cols))
;;             #(empty? (-> c :ret ::links))))
;;
;; Second attempt:
;; Pro: error message tells you what failed
;; Con: verbose, repetitive, non-idiomatic
;;  :fn (letfn [(row-correct [c] (= (-> c :ret ::pos first) (-> c :args :row)))
;;              (col-correct [c] (= (-> c :ret ::pos second) (-> c :args :col)))
;;              (rows-correct [c] (= (-> c :ret ::max-pos first) (dec (-> c :args :rows))))
;;              (cols-correct [c] (= (-> c :ret ::max-pos second) (dec (-> c :args :cols))))
;;              (links-empty [c] (empty? (-> c :ret ::links)))]
;; (s/and row-correct
;; col-correct
;; rows-correct
;; cols-correct
;; links-empty)))
;;
;; Third attempt:
;; Pro: concise, readable, idiomatic (same as #1)
;; when one fails, the message tells you what failed (same as #2)
;; Con: None of the pros are *quite* as good as they are in #1 and #2
;;  :fn (s/and (fn row-correct [c] (= (-> c :ret ::pos first) (-> c :args :row)))
;;             (fn col-correct [c] (= (-> c :ret ::pos second) (-> c :args :col)))
;;             (fn rows-right [c] (= (-> c :ret ::max-pos first) (-> c :args :rows)))
;;             (fn cols-correct [c] (= (-> c :ret ::max-pos second) (-> c :args :cols)))
;;             (fn links-empty [c] (empty? (-> c :ret ::links)))))
;;
;; Fourth attempt:
;; Pro: concise, readable, error message tells you what failed
;; Con: non-idiomatic, relies on implicit captured variable names
:fn (s/and (fnp row-correct (= (-> ret ::pos first) (-> args ::row)))
(fnp col-correct (= (-> ret ::pos second) (-> args ::col)))
(fnp rows-correct (= (-> ret ::max-pos first) (-> args ::rows)))
(fnp cols-correct (= (-> ret ::max-pos second) (-> args ::cols)))
(fnp links-empty (empty? (::links ret)))))
(defn make-cell
[row col rows cols]
{::pos [row col]
::max-pos [(dec rows) (dec cols)]
::links #{}})
(s/fdef cell-neighbor
:args (s/cat :cell ::cell :direction #{:north :south :east :west})
:ret (s/nilable ::cell-position))
(defn cell-neighbor [cell direction]
(let [[row col] (::pos cell)
[mrow mcol] (::max-pos cell)]
(case direction
:north (when (> row 0) [(dec row) col])
:west (when (> col 0) [row (dec col)])
:south (when (< row mrow) [(inc row) col])
:east (when (< col mcol) [row (inc col)]))))
(s/fdef cell-neighbors
:args (s/cat :cell ::cell
:directions (s/? (s/coll-of #{:north :south :east :west} :kind vector?)))
:ret (s/coll-of ::cell-position :kind vector?))
(defn cell-neighbors
([cell] (cell-neighbors cell [:north :south :east :west]))
([cell directions]
(filter identity (map #(cell-neighbor cell %) directions))))
(s/fdef make-grid
:args (s/cat :rows ::grid-dimen :cols ::grid-dimen)
:ret (s/and ::grid ::with-changes)) ;; FIXME s/merge ?
(defn make-grid
"Creates and returns a new grid with the specified row and column sizes."
[rows cols]
{::algorithm-name "none"
::rows rows
::cols cols
::cells (into [] (for [row (range rows) col (range cols)]
(make-cell row col rows cols)))
::changed-cells nil})
(s/fdef cell-index
:args (s/cat :grid ::grid :position ::cell-position)
:ret nat-int?
:fn #(= (-> % :args :position)
(::pos (get (-> % :args :grid ::cells) (-> % :ret)))))
(defn cell-index [grid [row col]]
(+ (* row (::cols grid)) col))
(s/fdef grid-cell
:args (s/cat :grid ::grid :position ::cell-position)
:ret ::cell)
(defn grid-cell [grid [row col]]
((::cells grid) (cell-index grid [row col])))
(s/fdef random-pos
:args (s/cat :grid ::grid)
:ret ::cell-position)
(defn random-pos [{:keys [::rows ::cols] :as grid}]
(let [row (rand-int rows)
col (rand-int cols)]
[row col]))
(s/fdef grid-size
:args (s/cat :grid ::grid)
:ret pos-int?)
(defn grid-size [{:keys [::rows ::cols]}]
(* rows cols))
(s/fdef grid-row-positions
:args (s/cat :grid ::grid)
;; maybe this double-nesting could be made clearer by defining a
;; separate predicate, maybe ::row-positions or something.
:ret (s/every (s/every ::cell-position :kind vector?) :kind vector?))
(defn grid-row-positions [{:keys [::rows ::cols]}]
"Grid cell positions, batched into rows."
(for [row (range rows)]
(for [col (range cols)]
[row col])))
(s/fdef grid-positions
:args (s/cat :grid ::grid)
:ret (s/coll-of ::cell-position :kind vector?))
(defn grid-positions [{:keys [::rows ::cols]}]
(for [row (range rows) col (range cols)]
[row col]))
(s/fdef grid-deadends
:args (s/cat :grid ::grid)
:ret (s/coll-of ::cell :kind vector?))
(defn grid-deadends [grid]
(filter #(= 1 (count (::links %)))
(map #(grid-cell grid %) (grid-positions grid))))
(s/fdef begin-step
:args (s/cat :thing ::with-changes)
:ret ::with-changes
:fn #(empty? (-> % :ret ::changed-cells)))
(defn begin-step [thing]
(assoc thing ::changed-cells #{}))
(s/fdef new?
:args (s/cat :thing ::with-changes)
:ret boolean?)
(defn new? [thing]
(nil? (::changed-cells thing)))
(s/fdef changed?
:args (s/cat :thing ::with-changes)
:ret boolean?)
(defn changed? [thing]
(boolean (not-empty (::changed-cells thing))))
(s/fdef link-cells
:args (s/cat :grid ::grid :cell ::cell :neighbor-pos ::cell-position)
:ret ::grid)
(defn link-cells [{:keys [::cells ::changed-cells] :as grid}
{cell-pos ::pos cell-links ::links :as cell}
neighbor-pos]
(let [neighbor (grid-cell grid neighbor-pos)
neighbor-links (::links neighbor)]
(assoc grid ::cells
(assoc cells (cell-index grid cell-pos)
(assoc cell ::links (conj cell-links neighbor-pos))
(cell-index grid neighbor-pos)
(assoc neighbor ::links (conj neighbor-links cell-pos)))
::changed-cells (conj changed-cells cell-pos neighbor-pos))))
(s/fdef linked?
:args (s/cat :cell ::cell :other-cell-pos ::cell-position)
:ret boolean?)
(defn linked? [cell other-cell-pos]
(contains? (::links cell) other-cell-pos))
(s/fdef make-distances
:args (s/cat :origin ::cell-position)
:ret (s/and ::distances ::with-changes)) ;; FIXME use s/merge, I think?
(defn make-distances
"Creates and returns a new distances object with the supplied origin."
[origin]
{::origin origin
::max-dist 0
::cell-dists {origin 0}
::changed-cells #{origin}})
(defn cell-dist
"Returns the distance for cell `pos` in `distances`"
[distances pos]
(get-in distances [::cell-dists pos]))
(s/fdef add-distances
:args (s/cat :distances ::distances
:positions (s/coll-of ::cell-position :type vector?)
:distance nat-int?)
:ret ::distances)
(defn add-distances [{:keys [::changed-cells ::cell-dists] :as distances}
positions
distance]
(let [new-max-dist (max distance (::max-dist distances))]
(assoc distances
::max-dist new-max-dist
::changed-cells (apply conj (or changed-cells #{}) positions)
::cell-dists (apply assoc cell-dists (mapcat #(vector % distance) positions)))))
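;; Frontier sketch: recording the origin's two neighbors at distance 1 bumps
;; ::max-dist and adds the new positions to the change set. `d`/`d'` are
;; scratch names.
(comment
  (def d  (make-distances [0 0]))
  (def d' (add-distances d [[0 1] [1 0]] 1))
  (::max-dist d')       ;=> 1
  (cell-dist d' [0 1])  ;=> 1
  (::changed-cells d')  ;=> #{[0 0] [0 1] [1 0]}
  )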
(s/fdef xform-values
:args (s/cat :value-xform (s/fspec :args (s/cat :val int?) :ret identity)
:value-map ::distances)
:ret ::distances)
(defn xform-values
  "Returns a version of value-map with values transformed by value-xform."
  [value-xform value-map]
(reduce (fn [m cell] (assoc m cell (value-xform (value-map cell)))) value-map (::changed-cells value-map)))
(s/fdef grid-annotate-cells
:args (s/cat :grid ::grid :label-specs (s/map-of keyword? ::distances))
:ret ::grid)
(defn grid-annotate-cells [grid label-specs]
(let [specs (seq label-specs)
changed-cells (apply set/union (map (comp ::changed-cells second) specs))
cells-to-annotate (if changed-cells changed-cells (grid-positions grid))]
(letfn [(get-annotations [cell-pos [label value-map]] (vector label (get-in value-map [::cell-dists cell-pos])))
(assoc-cell [cell cell-pos]
(apply assoc cell (mapcat (partial get-annotations cell-pos) specs)))
(annotate-cell [grid cell-pos]
(update-in grid
[::cells (cell-index grid cell-pos)]
assoc-cell cell-pos))]
(assoc (reduce annotate-cell grid cells-to-annotate) ::changed-cells (set cells-to-annotate)))))
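;; Annotation sketch: each labelled distance value is copied onto the matching
;; cells, so a renderer can read it straight off a cell. The :dist label is an
;; arbitrary choice for this example.
(comment
  (def g  (make-grid 2 2))
  (def d  (add-distances (make-distances [0 0]) [[0 1] [1 0]] 1))
  (def g' (grid-annotate-cells g {:dist d}))
  (:dist (grid-cell g' [0 1]))  ;=> 1
  (::changed-cells g')          ;=> #{[0 0] [0 1] [1 0]}
  )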
(defn intlabel [val]
#?(:clj (format "%2d" val)
:default (util/pad 2 " " (str val))))
(defn print-grid
([grid] (print-grid grid false))
([{cols ::cols :as grid} print-positions?]
(let [resolve (partial grid-cell grid)]
(when print-positions?
       (println (apply str "    " (map #(str (intlabel %) " ") (range cols))))
(print " "))
;; top border
(println (apply str "+" (repeat cols "---+")))
(doseq [row (grid-row-positions grid)]
;; cell space line
(when print-positions?
(print (intlabel (ffirst row))))
(println (apply str "|"
(for [cell (map resolve row)]
(str (if (::label cell)
(str " " (::label cell) " ")
" ")
(if (linked? cell (cell-neighbor cell :east))
" "
"|")))))
;; bottom separator line
(when print-positions? (print " "))
(println (apply str "+"
(for [cell (map resolve row)]
(str (if (linked? cell (cell-neighbor cell :south))
" "
"---") "+"))))))))
| null | https://raw.githubusercontent.com/glv/snergly/2f09860b3b3f8d2a58d8a2b2b63184c4a7240f05/src/main/snergly/grid.cljc | clojure | :cljs [cljs.spec.impl.gen :as gen])
If we don't want other namespaces to have to use the keywords from this
namespace to gather information from these maps, what other methods would
we need to provide?
For snergly.algorithms:
* grid, w: (g/grid-set-algorithm grid "algorithm-name")
* β cell, r: (g/cell-neighbor cell :east)
* cell, r: (g/cell-links cell)
* dist, r: (g/max-dist distances)
* dist, r: (g/max-pos distances)
* dist, r: (g/origin distances)
For snergly.image:
* grid, r: (g/grid-rows grid) and (g/grid-cols grid)
or maybe (g/grid-size grid) ; returning a pair
For snergly.animation:
basic type constraints
restrict grid sizes for testing, but allow larger grids in production.
cell
Every cell has a `max-pos` coordinate pair that refers to the cell
in the highest-numbered row and column (the lower right cell). This
is redundant, because it's a property of the grid to which the cell
belongs. But it is a very useful optimization to have it available
in the cell.
change tracking
grid
What are the other validity constraints that *could* be specified here?
* cells are in row-major order
distances
(s/def ::dist-or-annot (s/or :origin (s/tuple #(= ::origin %) ::cell-position)
:max-dist (s/tuple #(= ::max-dist %) nat-int?)
:changes (s/tuple #(= ::changed-cells %) ::changed-cells)
(s/coll-of ::dist-or-annot :type map?)))
I think if I write the right macro, I can replace ::dist-or-annot
and ::distances with this:
(s/def ::distances (s/merged ::distance-annotations
::with-changes
instead of map/of.)
(s/def ::distances (s/key-cond
Much, much better!
other option is really feasible. Probably the right solution is to change
a sub-map, accessible via a key. (The obvious right name for that key
is ::distances, which means that the top-level map should be renamed to
something else, like perhaps ::distance-analysis. But for now I'll go with
::cell-dists)
deliberately capturing these
Pro: concise, readable, idiomatic.
Con: when one fails, you have to inspect the code to determine which
Pro: error message tells you what failed
Con: verbose, repetitive, non-idiomatic
(s/and row-correct
col-correct
rows-correct
cols-correct
links-empty)))
Pro: concise, readable, idiomatic (same as #1)
when one fails, the message tells you what failed (same as #2)
Pro: concise, readable, error message tells you what failed
Con: non-idiomatic, relies on implicit captured variable names
FIXME s/merge ?
maybe this double-nesting could be made clearer by defining a
separate predicate, maybe ::row-positions or something.
top border
cell space line
bottom separator line | (ns snergly.grid
# ? (: [ clojure.spec : as s ]
: cljs [ cljs.spec : as s ] )
[clojure.spec.alpha :as s]
[snergly.util :as util]
[clojure.set :as set]
# ? (: [ clojure.spec.gen : as gen ]
[clojure.spec.gen.alpha :as gen]
))
* chgs , r : ( g / changed - cells thing )
(s/def ::cell-coord (s/with-gen nat-int?
#(s/gen (s/int-in 0 24))))
(s/def ::cell-position (s/tuple ::cell-coord ::cell-coord))
(s/def ::grid-dimen (s/with-gen (s/and integer? #(>= % 2))
#(s/gen (s/int-in 2 25))))
(declare make-cell cell-neighbors)
(s/def ::neighbor (s/nilable ::cell-position))
(s/def ::pos ::cell-position)
(s/def ::max-pos ::cell-position)
(s/def ::links (s/coll-of ::cell-position :kind set?))
(s/def ::cell (s/with-gen
(s/and
(s/keys :req [::pos ::max-pos ::links])
#(<= (first (::pos %)) (first (::max-pos %)))
#(<= (second (::pos %)) (second (::max-pos %)))
#(set/subset? (::links %) (set (cell-neighbors %))))
(gen/fmap (fn [[rs cs]]
(let [r (rand-int rs)
c (rand-int cs)]
(make-cell r c rs cs)))
(let [dgen (s/gen ::grid-dimen)]
(gen/tuple dgen dgen)))))
(s/def ::changed-cells (s/nilable (s/coll-of ::cell-position :kind set?)))
(s/def ::with-changes (s/keys :req [::changed-cells]))
(declare make-grid)
(s/def ::algorithm-name string?)
(s/def ::rows ::grid-dimen)
(s/def ::cols ::grid-dimen)
(s/def ::cells (s/every ::cell :kind vector?))
* one cell for each [ row , col ] permutation
* cells all have proper : :
(s/def ::grid (s/with-gen
(s/and
(s/keys :req [::algorithm-name
::rows ::cols
::cells])
#(= (* (::rows %) (::cols %)) (count (::cells %))))
(gen/fmap (fn [[rs cs]] (make-grid rs cs))
(let [dgen (s/gen ::grid-dimen)]
(gen/tuple dgen dgen)))))
(s/def ::origin ::cell-position)
(s/def ::max-dist nat-int?)
: ( s / tuple # (= : : ) : : cell - position )
: dist ( s / tuple : : cell - position - int ? ) ) )
( s / def : : distances ( s / and ( s / keys : req [: : origin : : max - dist ] : opt [: : ] )
( s / def : : distance - annotations ( s / keys : req [: : origin : : max - dist ] : opt [: : ] ) )
( s / map - of : : cell - position - int ? ) ) )
( I might have to use ( s / coll - of [ s / tuple : : cell - position - int ? ] )
But here 's what and are working on :
keyword ? ( s / keys : req [: : origin : : max - dist ] : opt [: : ] )
vector ? ( s / map - of : : cell - position - int ? ) ) )
But ! Five years later , it seems that never happened . And I do n't think my
the data structure : push the [: : cell - position - int ? ] mappings down into
(s/def ::cell-dists (s/map-of ::cell-position nat-int?))
(s/def ::distances (s/and (s/keys :req [::origin ::max-dist ::cell-dists]
:opt [::max-pos])))
(defmacro fnp [name & body]
~@body))
(s/fdef make-cell
:args (s/and (s/cat :row ::cell-coord
:col ::cell-coord
:rows ::grid-dimen
:cols ::grid-dimen)
#(< (:row %) (:rows %))
#(< (:col %) (:cols %)))
:ret ::cell
First attempt .
: fn ( s / and # (= ( - > c : ret : : pos first ) ( - > c : args : row ) )
# (= ( - > c : ret : : pos second ) ( - > c : args : col ) )
# (= ( - > c : ret : : first ) ( - > c : args : rows ) )
# (= ( - > c : ret : : second ) ( - > c : args : cols ) )
# ( empty ? ( - > c : ret : : links ) ) ) )
Second attempt :
: fn ( [ ( row - correct [ c ] (= ( - > c : ret : : pos first ) ( - > c : args : row ) ) )
( col - correct [ c ] (= ( - > c : ret : : pos second ) ( - > c : args : col ) ) )
( rows - correct [ c ] (= ( - > c : ret : : first ) ( dec ( - > c : args : rows ) ) ) )
( cols - correct [ c ] (= ( - > c : ret : : second ) ( dec ( - > c : args : cols ) ) ) )
( links - empty [ c ] ( empty ? ( - > c : ret : : links ) ) ) ]
Third attempt :
Con : None of the pros are * quite * as good as they are in # 1 and # 2
: fn ( s / and ( fn row - correct [ c ] (= ( - > c : ret : : pos first ) ( - > c : args : row ) ) )
( fn col - correct [ c ] (= ( - > c : ret : : pos second ) ( - > c : args : col ) ) )
( fn rows - right [ c ] (= ( - > c : ret : : first ) ( - > c : args : rows ) ) )
( fn cols - correct [ c ] (= ( - > c : ret : : second ) ( - > c : args : cols ) ) )
( fn links - empty [ c ] ( empty ? ( - > c : ret : : links ) ) ) ) )
Fourth attempt :
:fn (s/and (fnp row-correct (= (-> ret ::pos first) (-> args ::row)))
(fnp col-correct (= (-> ret ::pos second) (-> args ::col)))
(fnp rows-correct (= (-> ret ::max-pos first) (-> args ::rows)))
(fnp cols-correct (= (-> ret ::max-pos second) (-> args ::cols)))
(fnp links-empty (empty? (::links ret)))))
(defn make-cell
[row col rows cols]
{::pos [row col]
::max-pos [(dec rows) (dec cols)]
::links #{}})
(s/fdef cell-neighbor
:args (s/cat :cell ::cell :direction #{:north :south :east :west})
:ret (s/nilable ::cell-position))
(defn cell-neighbor [cell direction]
(let [[row col] (::pos cell)
[mrow mcol] (::max-pos cell)]
(case direction
:north (when (> row 0) [(dec row) col])
:west (when (> col 0) [row (dec col)])
:south (when (< row mrow) [(inc row) col])
:east (when (< col mcol) [row (inc col)]))))
(s/fdef cell-neighbors
:args (s/cat :cell ::cell
:directions (s/? (s/coll-of #{:north :south :east :west} :kind vector?)))
:ret (s/coll-of ::cell-position :kind vector?))
(defn cell-neighbors
([cell] (cell-neighbors cell [:north :south :east :west]))
([cell directions]
(filter identity (map #(cell-neighbor cell %) directions))))
(s/fdef make-grid
:args (s/cat :rows ::grid-dimen :cols ::grid-dimen)
(defn make-grid
"Creates and returns a new grid with the specified row and column sizes."
[rows cols]
{::algorithm-name "none"
::rows rows
::cols cols
::cells (into [] (for [row (range rows) col (range cols)]
(make-cell row col rows cols)))
::changed-cells nil})
(s/fdef cell-index
:args (s/cat :grid ::grid :position ::cell-position)
:ret nat-int?
:fn #(= (-> % :args :position)
(::pos (get (-> % :args :grid ::cells) (-> % :ret)))))
(defn cell-index [grid [row col]]
(+ (* row (::cols grid)) col))
(s/fdef grid-cell
:args (s/cat :grid ::grid :position ::cell-position)
:ret ::cell)
(defn grid-cell [grid [row col]]
((::cells grid) (cell-index grid [row col])))
(s/fdef random-pos
:args (s/cat :grid ::grid)
:ret ::cell-position)
(defn random-pos [{:keys [::rows ::cols] :as grid}]
(let [row (rand-int rows)
col (rand-int cols)]
[row col]))
(s/fdef grid-size
:args (s/cat :grid ::grid)
:ret pos-int?)
(defn grid-size [{:keys [::rows ::cols]}]
(* rows cols))
(s/fdef grid-row-positions
:args (s/cat :grid ::grid)
:ret (s/every (s/every ::cell-position :kind vector?) :kind vector?))
(defn grid-row-positions [{:keys [::rows ::cols]}]
"Grid cell positions, batched into rows."
(for [row (range rows)]
(for [col (range cols)]
[row col])))
(s/fdef grid-positions
:args (s/cat :grid ::grid)
:ret (s/coll-of ::cell-position :kind vector?))
(defn grid-positions [{:keys [::rows ::cols]}]
(for [row (range rows) col (range cols)]
[row col]))
(s/fdef grid-deadends
:args (s/cat :grid ::grid)
:ret (s/coll-of ::cell :kind vector?))
(defn grid-deadends [grid]
(filter #(= 1 (count (::links %)))
(map #(grid-cell grid %) (grid-positions grid))))
(s/fdef begin-step
:args (s/cat :thing ::with-changes)
:ret ::with-changes
:fn #(empty? (-> % :ret ::changed-cells)))
(defn begin-step [thing]
(assoc thing ::changed-cells #{}))
(s/fdef new?
:args (s/cat :thing ::with-changes)
:ret boolean?)
(defn new? [thing]
(nil? (::changed-cells thing)))
(s/fdef changed?
:args (s/cat :thing ::with-changes)
:ret boolean?)
(defn changed? [thing]
(boolean (not-empty (::changed-cells thing))))
(s/fdef link-cells
:args (s/cat :grid ::grid :cell ::cell :neighbor-pos ::cell-position)
:ret ::grid)
(defn link-cells [{:keys [::cells ::changed-cells] :as grid}
{cell-pos ::pos cell-links ::links :as cell}
neighbor-pos]
(let [neighbor (grid-cell grid neighbor-pos)
neighbor-links (::links neighbor)]
(assoc grid ::cells
(assoc cells (cell-index grid cell-pos)
(assoc cell ::links (conj cell-links neighbor-pos))
(cell-index grid neighbor-pos)
(assoc neighbor ::links (conj neighbor-links cell-pos)))
::changed-cells (conj changed-cells cell-pos neighbor-pos))))
(s/fdef linked?
:args (s/cat :cell ::cell :other-cell-pos ::cell-position)
:ret boolean?)
(defn linked? [cell other-cell-pos]
(contains? (::links cell) other-cell-pos))
(s/fdef make-distances
:args (s/cat :origin ::cell-position)
FIXME use s / merge , I think ?
(defn make-distances
"Creates and returns a new distances object with the supplied origin."
[origin]
{::origin origin
::max-dist 0
::cell-dists {origin 0}
::changed-cells #{origin}})
(defn cell-dist
"Returns the distance for cell `pos` in `distances`"
[distances pos]
(get-in distances [::cell-dists pos]))
(s/fdef add-distances
:args (s/cat :distances ::distances
:positions (s/coll-of ::cell-position :type vector?)
:distance nat-int?)
:ret ::distances)
(defn add-distances [{:keys [::changed-cells ::cell-dists] :as distances}
positions
distance]
(let [new-max-dist (max distance (::max-dist distances))]
(assoc distances
::max-dist new-max-dist
::changed-cells (apply conj (or changed-cells #{}) positions)
::cell-dists (apply assoc cell-dists (mapcat #(vector % distance) positions)))))
(s/fdef xform-values
:args (s/cat :value-xform (s/fspec :args (s/cat :val int?) :ret identity)
:value-map ::distances)
:ret ::distances)
(defn xform-values [value-xform value-map]
"Returns a version of value-map with values transformed by value-xform."
(reduce (fn [m cell] (assoc m cell (value-xform (value-map cell)))) value-map (::changed-cells value-map)))
(s/fdef grid-annotate-cells
:args (s/cat :grid ::grid :label-specs (s/map-of keyword? ::distances))
:ret ::grid)
(defn grid-annotate-cells [grid label-specs]
(let [specs (seq label-specs)
changed-cells (apply set/union (map (comp ::changed-cells second) specs))
cells-to-annotate (if changed-cells changed-cells (grid-positions grid))]
(letfn [(get-annotations [cell-pos [label value-map]] (vector label (get-in value-map [::cell-dists cell-pos])))
(assoc-cell [cell cell-pos]
(apply assoc cell (mapcat (partial get-annotations cell-pos) specs)))
(annotate-cell [grid cell-pos]
(update-in grid
[::cells (cell-index grid cell-pos)]
assoc-cell cell-pos))]
(assoc (reduce annotate-cell grid cells-to-annotate) ::changed-cells (set cells-to-annotate)))))
(defn intlabel [val]
#?(:clj (format "%2d" val)
:default (util/pad 2 " " (str val))))
(defn print-grid
([grid] (print-grid grid false))
([{cols ::cols :as grid} print-positions?]
(let [resolve (partial grid-cell grid)]
(when print-positions?
(println (apply str " " (map #(str (intlabel %) " ")) (range cols)))
(print " "))
(println (apply str "+" (repeat cols "---+")))
(doseq [row (grid-row-positions grid)]
(when print-positions?
(print (intlabel (ffirst row))))
(println (apply str "|"
(for [cell (map resolve row)]
(str (if (::label cell)
(str " " (::label cell) " ")
" ")
(if (linked? cell (cell-neighbor cell :east))
" "
"|")))))
(when print-positions? (print " "))
(println (apply str "+"
(for [cell (map resolve row)]
(str (if (linked? cell (cell-neighbor cell :south))
" "
"---") "+"))))))))
|
592cac4f329a9c06dab3e0d84b271a5a252dbafe784c834494d77878252dafe9 | melange-re/melange | matching_polyfill.mli | Copyright ( C ) 2020- Authors of ReScript
*
* This program is free software : you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation , either version 3 of the License , or
* ( at your option ) any later version .
*
* In addition to the permissions granted to you by the LGPL , you may combine
* or link a " work that uses the Library " with a publicly distributed version
* of this file to produce a combined library or application , then distribute
* that combined work under the terms of your choosing , with no requirement
* to comply with the obligations normally placed on you by section 4 of the
* LGPL version 3 ( or the corresponding section of a later version of the LGPL
* should you choose to use a later version ) .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* In addition to the permissions granted to you by the LGPL, you may combine
* or link a "work that uses the Library" with a publicly distributed version
* of this file to produce a combined library or application, then distribute
* that combined work under the terms of your choosing, with no requirement
* to comply with the obligations normally placed on you by section 4 of the
* LGPL version 3 (or the corresponding section of a later version of the LGPL
* should you choose to use a later version).
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)
val names_from_construct_pattern :
Patterns.Head.desc Typedtree.pattern_data -> Lambda.switch_names option
| null | https://raw.githubusercontent.com/melange-re/melange/246e6df78fe3b6cc124cb48e5a37fdffd99379ed/jscomp/core/matching_polyfill.mli | ocaml | Copyright ( C ) 2020- Authors of ReScript
*
* This program is free software : you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation , either version 3 of the License , or
* ( at your option ) any later version .
*
* In addition to the permissions granted to you by the LGPL , you may combine
* or link a " work that uses the Library " with a publicly distributed version
* of this file to produce a combined library or application , then distribute
* that combined work under the terms of your choosing , with no requirement
* to comply with the obligations normally placed on you by section 4 of the
* LGPL version 3 ( or the corresponding section of a later version of the LGPL
* should you choose to use a later version ) .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* In addition to the permissions granted to you by the LGPL, you may combine
* or link a "work that uses the Library" with a publicly distributed version
* of this file to produce a combined library or application, then distribute
* that combined work under the terms of your choosing, with no requirement
* to comply with the obligations normally placed on you by section 4 of the
* LGPL version 3 (or the corresponding section of a later version of the LGPL
* should you choose to use a later version).
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)
val names_from_construct_pattern :
Patterns.Head.desc Typedtree.pattern_data -> Lambda.switch_names option
|
|
a03c8cf9a9d54dc57c1cc2837a4385c63ec4e2f4c3dce27cf96a34f598b9090d | karamellpelle/grid | Types.hs | module OpenGL.GLFW.Types
(
GLvoid,
GLchar,
GLenum,
GLboolean,
GLbitfield,
GLbyte,
GLshort,
GLint,
GLsizei,
GLubyte,
GLushort,
GLuint,
GLfloat,
GLclampf,
GLfixed,
GLclampx,
GLintptr,
GLsizeiptr,
) where
import Foreign.C.Types
type GLvoid =
()
type GLchar =
CChar
type GLenum =
CUInt
type GLboolean =
CUChar
type GLbitfield =
CUInt
type GLbyte =
CSChar
type GLshort =
CShort
type GLint =
CInt
type GLsizei =
CInt
type GLubyte =
CUChar
type GLushort =
CUShort
type GLuint =
CUInt
type GLfloat =
CFloat
type GLclampf =
CFloat
type GLfixed =
CInt
type GLclampx =
CInt
type GLintptr =
CLong
type GLsizeiptr =
CLong
| null | https://raw.githubusercontent.com/karamellpelle/grid/56729e63ed6404fd6cfd6d11e73fa358f03c386f/designer/source/OpenGL/GLFW/Types.hs | haskell | module OpenGL.GLFW.Types
(
GLvoid,
GLchar,
GLenum,
GLboolean,
GLbitfield,
GLbyte,
GLshort,
GLint,
GLsizei,
GLubyte,
GLushort,
GLuint,
GLfloat,
GLclampf,
GLfixed,
GLclampx,
GLintptr,
GLsizeiptr,
) where
import Foreign.C.Types
type GLvoid =
()
type GLchar =
CChar
type GLenum =
CUInt
type GLboolean =
CUChar
type GLbitfield =
CUInt
type GLbyte =
CSChar
type GLshort =
CShort
type GLint =
CInt
type GLsizei =
CInt
type GLubyte =
CUChar
type GLushort =
CUShort
type GLuint =
CUInt
type GLfloat =
CFloat
type GLclampf =
CFloat
type GLfixed =
CInt
type GLclampx =
CInt
type GLintptr =
CLong
type GLsizeiptr =
CLong
|
|
6dc664f66ff8f3ab1a073f2d7fc84d3e0bd530b77f483545ccbe5b22636785f0 | songyahui/AlgebraicEffect | loop1.ml |
effect Foo : (unit -> unit)
effect Goo : (unit -> unit)
let f ()
(*@ requires (true, emp, ()) @*)
(*@ ensures (true, (Foo!).(Goo!).Foo?(), ()) @*)
=
let h = perform Foo in
let g = perform Goo in
h ()
let handler
(*@ requires (true, emp, ()) @*)
(*@ ensures (true, Foo^w, ()) @*)
=
match f () with
| x -> x
| effect Foo k -> continue k (fun () -> perform Goo )
| effect Goo k -> continue k (fun () -> perform Foo )
( Foo!).Foo ? ( )
his current ev continuation ( k ) bindings
1 emp ( ! ) ? ( ) Foo ? = ( fun ( ) - > perform Goo )
2 Foo Foo ? ( ) emp
* ? ( ) - > Goo ! . ? ( )
3 . ! ? ( ) Goo ? = ( fun ( ) - > perform )
4 . . ? ( )
* Goo ? ( ) - > Foo ! ? ( )
5 . . Goo.(Foo . Goo)^W
A.B.C. ( Foo!).Foo ? ( )
his current ev continuation ( k ) bindings
1 A.B. C ( Foo!).Foo ? ( )
2 ! ? ( ) Foo ? = ( fun ( ) - > perform ; perform L )
3 . ? ( ) emp
* ? ( ) - > Goo ! . L ! Goo ? ( )
4 . ! L ! . ? ( ) Goo ? = ( fun ( ) - > perform )
5 . . Goo L ! Goo ? ( ) Goo ? = ( fun ( ) - > perform )
5 . . Goo . L ! Goo ? ( ) emp Goo ? = ( fun ( ) - > perform )
* Goo ? ( ) - > Foo ! . ? ( )
. Goo . L!.(Foo . Goo . L!)^W
(Foo!).Foo?()
his current ev continuation (k) bindings
1 emp (Foo!) Foo?() Foo? = (fun () -> perform Goo )
2 Foo Foo?() emp
* Foo?() -> Goo!. Goo?()
3. Foo Goo! Goo?() Goo? = (fun () -> perform Foo )
4. Foo.Goo Goo?()
* Goo?() -> Foo! Foo?()
5. Foo.Goo.(Foo.Goo)^W
A.B.C. (Foo!).Foo?()
his current ev continuation (k) bindings
1 A.B. C (Foo!).Foo?()
2 A.B.C Foo! Foo?() Foo? = (fun () -> perform Goo; perform L )
3. A.B.C.Foo Foo?() emp
* Foo?() -> Goo!. L! Goo?()
4. A.B.C.Foo Goo! L!. Goo?() Goo? = (fun () -> perform Foo )
5. A.B.C.Foo.Goo L! Goo?() Goo? = (fun () -> perform Foo )
5. A.B.C.Foo.Goo.L! Goo?() emp Goo? = (fun () -> perform Foo )
* Goo?() -> Foo!. Foo?()
A.B.C.Foo.Goo.L!.(Foo.Goo.L!)^W
*) | null | https://raw.githubusercontent.com/songyahui/AlgebraicEffect/27688952b598a101a27523be796e8011d70b02de/src/demo/loop1.ml | ocaml | @ requires (true, emp, ()) @
@ ensures (true, (Foo!).(Goo!).Foo?(), ()) @
@ requires (true, emp, ()) @
@ ensures (true, Foo^w, ()) @ |
effect Foo : (unit -> unit)
effect Goo : (unit -> unit)
let f ()
=
let h = perform Foo in
let g = perform Goo in
h ()
let handler
=
match f () with
| x -> x
| effect Foo k -> continue k (fun () -> perform Goo )
| effect Goo k -> continue k (fun () -> perform Foo )
( Foo!).Foo ? ( )
his current ev continuation ( k ) bindings
1 emp ( ! ) ? ( ) Foo ? = ( fun ( ) - > perform Goo )
2 Foo Foo ? ( ) emp
* ? ( ) - > Goo ! . ? ( )
3 . ! ? ( ) Goo ? = ( fun ( ) - > perform )
4 . . ? ( )
* Goo ? ( ) - > Foo ! ? ( )
5 . . Goo.(Foo . Goo)^W
A.B.C. ( Foo!).Foo ? ( )
his current ev continuation ( k ) bindings
1 A.B. C ( Foo!).Foo ? ( )
2 ! ? ( ) Foo ? = ( fun ( ) - > perform ; perform L )
3 . ? ( ) emp
* ? ( ) - > Goo ! . L ! Goo ? ( )
4 . ! L ! . ? ( ) Goo ? = ( fun ( ) - > perform )
5 . . Goo L ! Goo ? ( ) Goo ? = ( fun ( ) - > perform )
5 . . Goo . L ! Goo ? ( ) emp Goo ? = ( fun ( ) - > perform )
* Goo ? ( ) - > Foo ! . ? ( )
. Goo . L!.(Foo . Goo . L!)^W
(Foo!).Foo?()
his current ev continuation (k) bindings
1 emp (Foo!) Foo?() Foo? = (fun () -> perform Goo )
2 Foo Foo?() emp
* Foo?() -> Goo!. Goo?()
3. Foo Goo! Goo?() Goo? = (fun () -> perform Foo )
4. Foo.Goo Goo?()
* Goo?() -> Foo! Foo?()
5. Foo.Goo.(Foo.Goo)^W
A.B.C. (Foo!).Foo?()
his current ev continuation (k) bindings
1 A.B. C (Foo!).Foo?()
2 A.B.C Foo! Foo?() Foo? = (fun () -> perform Goo; perform L )
3. A.B.C.Foo Foo?() emp
* Foo?() -> Goo!. L! Goo?()
4. A.B.C.Foo Goo! L!. Goo?() Goo? = (fun () -> perform Foo )
5. A.B.C.Foo.Goo L! Goo?() Goo? = (fun () -> perform Foo )
5. A.B.C.Foo.Goo.L! Goo?() emp Goo? = (fun () -> perform Foo )
* Goo?() -> Foo!. Foo?()
A.B.C.Foo.Goo.L!.(Foo.Goo.L!)^W
*) |
771fda0f156b63f59261793625d38e34b1d74840f38ae34e9c56a8e2b70a1999 | funcool/octet | string.cljc | Copyright ( c ) 2015 - 2016 < >
;; All rights reserved.
;;
;; Redistribution and use in source and binary forms, with or without
;; modification, are permitted provided that the following conditions are met:
;;
;; * Redistributions of source code must retain the above copyright notice, this
;; list of conditions and the following disclaimer.
;;
;; * Redistributions in binary form must reproduce the above copyright notice,
;; this list of conditions and the following disclaimer in the documentation
;; and/or other materials provided with the distribution.
;;
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS "
;; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT HOLDER OR LIABLE
FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL
DAMAGES ( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES ; LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS INTERRUPTION ) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY ,
;; OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
(ns octet.spec.string
(:require [octet.buffer :as buffer]
[octet.spec :as spec]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Helpers
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn zeropad-count
"Given a byte array, returns a number of bytes
allocated with zero padding (zero byte)."
[input]
(let [mark (byte 0)]
(reduce (fn [sum index]
(let [value (aget input index)]
(if (= value mark)
(inc sum)
(reduced sum))))
0
(reverse (range (count input))))))
Clojure Helpers
#?(:clj
(do
(defn zeropad!
"Add zero byte padding to the given byte array
to the remaining bytes after specified data length."
[^bytes input ^long datalength]
(java.util.Arrays/fill input datalength (count input) (byte 0)))
(defn bytes->string
[^bytes input ^long length]
(String. input 0 length "UTF-8"))
(defn string->bytes
[^String value]
(.getBytes value "UTF-8"))
(defn arraycopy
[^bytes input ^bytes output ^long length]
(System/arraycopy input 0 output 0 length)))
:cljs
(do
(defn zeropad!
[^bytes input ^number datalength]
(doseq [^number i (range (.-length input))]
(when (> i datalength)
(aset input i 0))))
(defn bytes->string
[input length]
(let [view (.subarray input 0 length)
view (js/Uint8Array. view)
fcc (.-fromCharCode js/String)]
(.apply fcc nil view)))
(defn string->bytes
[value]
(let [buff (js/ArrayBuffer. (count value))
view (js/Uint8Array. buff)]
(doseq [i (range (count value))]
(aset view i (.charCodeAt value i)))
(js/Int8Array. buff)))
(defn arraycopy
[^bytes input ^bytes output ^long length]
(reduce (fn [_ i]
(aset output i (aget input i)))
nil
(range (.-length input))))
(defn byte-array
[length]
(js/Int8Array. length))
(extend-type js/Int8Array
ICounted
(-count [s]
(.-length s)))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Type Spec implementation
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn string
"Fixed length string type spec constructor."
[size]
(reify
spec/ISpecSize
(size [_] size)
spec/ISpec
(read [_ buff pos]
(let [rawdata (buffer/read-bytes buff pos size)
length (- size (zeropad-count rawdata))
data (bytes->string rawdata length)]
[size data]))
(write [_ buff pos value]
(let [input (string->bytes value)
length (count input)
tmpbuf (byte-array size)]
(if (< length size)
(arraycopy input tmpbuf length)
(arraycopy input tmpbuf size))
(when (< length size)
(zeropad! tmpbuf length))
(buffer/write-bytes buff pos size tmpbuf)
size))))
(def ^{:doc "Arbitrary length string type spec."}
string*
(reify
#?@(:clj
[clojure.lang.IFn
(invoke [s] s)]
:cljs
[cljs.core/IFn
(-invoke [s] s)])
spec/ISpecDynamicSize
(size* [_ data]
(let [data (string->bytes data)]
(+ 4 (count data))))
spec/ISpec
(read [_ buff pos]
(let [datasize (buffer/read-int buff pos)
data (buffer/read-bytes buff (+ pos 4) datasize)
data (bytes->string data datasize)]
[(+ datasize 4) data]))
(write [_ buff pos value]
(let [input (string->bytes value)
length (count input)]
(buffer/write-int buff pos length)
(buffer/write-bytes buff (+ pos 4) length input)
(+ length 4)))))
| null | https://raw.githubusercontent.com/funcool/octet/6afe46d98717e5a538c97c5445a29686524bd0c9/src/octet/spec/string.cljc | clojure | All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS INTERRUPTION ) HOWEVER
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Helpers
Type Spec implementation
| Copyright ( c ) 2015 - 2016 < >
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS "
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT HOLDER OR LIABLE
FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL
DAMAGES ( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR
CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY ,
(ns octet.spec.string
(:require [octet.buffer :as buffer]
[octet.spec :as spec]))
(defn zeropad-count
"Given a byte array, returns a number of bytes
allocated with zero padding (zero byte)."
[input]
(let [mark (byte 0)]
(reduce (fn [sum index]
(let [value (aget input index)]
(if (= value mark)
(inc sum)
(reduced sum))))
0
(reverse (range (count input))))))
Clojure Helpers
#?(:clj
(do
(defn zeropad!
"Add zero byte padding to the given byte array
to the remaining bytes after specified data length."
[^bytes input ^long datalength]
(java.util.Arrays/fill input datalength (count input) (byte 0)))
(defn bytes->string
[^bytes input ^long length]
(String. input 0 length "UTF-8"))
(defn string->bytes
[^String value]
(.getBytes value "UTF-8"))
(defn arraycopy
[^bytes input ^bytes output ^long length]
(System/arraycopy input 0 output 0 length)))
:cljs
(do
(defn zeropad!
[^bytes input ^number datalength]
(doseq [^number i (range (.-length input))]
(when (> i datalength)
(aset input i 0))))
(defn bytes->string
[input length]
(let [view (.subarray input 0 length)
view (js/Uint8Array. view)
fcc (.-fromCharCode js/String)]
(.apply fcc nil view)))
(defn string->bytes
[value]
(let [buff (js/ArrayBuffer. (count value))
view (js/Uint8Array. buff)]
(doseq [i (range (count value))]
(aset view i (.charCodeAt value i)))
(js/Int8Array. buff)))
(defn arraycopy
[^bytes input ^bytes output ^long length]
(reduce (fn [_ i]
(aset output i (aget input i)))
nil
(range (.-length input))))
(defn byte-array
[length]
(js/Int8Array. length))
(extend-type js/Int8Array
ICounted
(-count [s]
(.-length s)))))
(defn string
"Fixed length string type spec constructor."
[size]
(reify
spec/ISpecSize
(size [_] size)
spec/ISpec
(read [_ buff pos]
(let [rawdata (buffer/read-bytes buff pos size)
length (- size (zeropad-count rawdata))
data (bytes->string rawdata length)]
[size data]))
(write [_ buff pos value]
(let [input (string->bytes value)
length (count input)
tmpbuf (byte-array size)]
(if (< length size)
(arraycopy input tmpbuf length)
(arraycopy input tmpbuf size))
(when (< length size)
(zeropad! tmpbuf length))
(buffer/write-bytes buff pos size tmpbuf)
size))))
(def ^{:doc "Arbitrary length string type spec."}
string*
(reify
#?@(:clj
[clojure.lang.IFn
(invoke [s] s)]
:cljs
[cljs.core/IFn
(-invoke [s] s)])
spec/ISpecDynamicSize
(size* [_ data]
(let [data (string->bytes data)]
(+ 4 (count data))))
spec/ISpec
(read [_ buff pos]
(let [datasize (buffer/read-int buff pos)
data (buffer/read-bytes buff (+ pos 4) datasize)
data (bytes->string data datasize)]
[(+ datasize 4) data]))
(write [_ buff pos value]
(let [input (string->bytes value)
length (count input)]
(buffer/write-int buff pos length)
(buffer/write-bytes buff (+ pos 4) length input)
(+ length 4)))))
|
c3495b77d1126e6fc1af2eff6908ed99c991de89ad731f73333b06d388e8672f | JHU-PL-Lab/jaylang | CE-1CFA09.ml |
let rec bot _ = bot ()
let fail _ = assert false
let rec c7_COEFFICIENT_1128 = 0
let rec c6_COEFFICIENT_1126 = 0
let rec c5_COEFFICIENT_1125 = 0
let rec c4_COEFFICIENT_1123 = 0
let rec c3_COEFFICIENT_1120 = 0
let rec c2_COEFFICIENT_1118 = 0
let rec c1_COEFFICIENT_1117 = 0
let rec c0_COEFFICIENT_1115 = 0
let id_1030 set_flag_app_1344 s_app_h_EXPARAM_1337 s_app_x_1339 x_1031 =
x_1031
let rec omega_1032 set_flag_app_1344 s_app_h_EXPARAM_1337 s_app_x_1339 x_1033 =
omega_1032 set_flag_app_1344 s_app_h_EXPARAM_1337 s_app_x_1339 x_1033
let f_1034 x_DO_NOT_CARE_1438 x_DO_NOT_CARE_1439 x_DO_NOT_CARE_1440 x_EXPARAM_1133 x_DO_NOT_CARE_1435 x_DO_NOT_CARE_1436 x_DO_NOT_CARE_1437 x_1035 x_DO_NOT_CARE_1432 x_DO_NOT_CARE_1433 x_DO_NOT_CARE_1434 y_EXPARAM_1134 x_DO_NOT_CARE_1429 x_DO_NOT_CARE_1430 x_DO_NOT_CARE_1431 y_1036 set_flag_app_1344 s_app_h_EXPARAM_1337 s_app_x_1339 z_1037 =
y_1036 set_flag_app_1344 s_app_h_EXPARAM_1337 s_app_x_1339 z_1037
let app_without_checking_1355 x_DO_NOT_CARE_1426 x_DO_NOT_CARE_1427 x_DO_NOT_CARE_1428 h_EXPARAM_1131 x_DO_NOT_CARE_1423 x_DO_NOT_CARE_1424 x_DO_NOT_CARE_1425 h_1039 set_flag_app_1344 s_app_h_EXPARAM_1337 s_app_x_1339 x_1040 =
let set_flag_app_1344 = true
in
let s_app_x_1339 = x_1040
in
let s_app_h_EXPARAM_1337 = h_EXPARAM_1131
in
h_1039 set_flag_app_1344 s_app_h_EXPARAM_1337 s_app_x_1339 x_1040
let rec app_1038 x_DO_NOT_CARE_1360 x_DO_NOT_CARE_1361 x_DO_NOT_CARE_1362 h_EXPARAM_1131 x_DO_NOT_CARE_1357 x_DO_NOT_CARE_1358 x_DO_NOT_CARE_1359 h_1039 prev_set_flag_app_1343 s_prev_app_h_EXPARAM_1340 s_prev_app_x_1342 x_1040 =
let u = if prev_set_flag_app_1343 then
let u_21789 = fail ()
in
bot()
else () in
app_without_checking_1355 x_DO_NOT_CARE_1360 x_DO_NOT_CARE_1361
x_DO_NOT_CARE_1362 h_EXPARAM_1131 x_DO_NOT_CARE_1357
x_DO_NOT_CARE_1358 x_DO_NOT_CARE_1359 h_1039
prev_set_flag_app_1343 s_prev_app_h_EXPARAM_1340
s_prev_app_x_1342 x_1040
let main =
f_1034 false 0 0 c5_COEFFICIENT_1125 false 0 0
(app_without_checking_1355 false 0 0 c4_COEFFICIENT_1123 false 0 0
(f_1034 false 0 0 c1_COEFFICIENT_1117 false 0 0
(app_without_checking_1355 false 0 0 c0_COEFFICIENT_1115 false 0 0
id_1030) false 0 0 c3_COEFFICIENT_1120 false 0 0
(app_without_checking_1355 false 0 0 c2_COEFFICIENT_1118 false 0 0
omega_1032))) false 0 0 c7_COEFFICIENT_1128 false 0 0
(app_1038 false 0 0 c6_COEFFICIENT_1126 false 0 0 id_1030) false 0 0 1
| null | https://raw.githubusercontent.com/JHU-PL-Lab/jaylang/484b3876986a515fb57b11768a1b3b50418cde0c/benchmark/cases/mochi_origin/termination/CE-1CFA09.ml | ocaml |
let rec bot _ = bot ()
let fail _ = assert false
let rec c7_COEFFICIENT_1128 = 0
let rec c6_COEFFICIENT_1126 = 0
let rec c5_COEFFICIENT_1125 = 0
let rec c4_COEFFICIENT_1123 = 0
let rec c3_COEFFICIENT_1120 = 0
let rec c2_COEFFICIENT_1118 = 0
let rec c1_COEFFICIENT_1117 = 0
let rec c0_COEFFICIENT_1115 = 0
let id_1030 set_flag_app_1344 s_app_h_EXPARAM_1337 s_app_x_1339 x_1031 =
x_1031
let rec omega_1032 set_flag_app_1344 s_app_h_EXPARAM_1337 s_app_x_1339 x_1033 =
omega_1032 set_flag_app_1344 s_app_h_EXPARAM_1337 s_app_x_1339 x_1033
let f_1034 x_DO_NOT_CARE_1438 x_DO_NOT_CARE_1439 x_DO_NOT_CARE_1440 x_EXPARAM_1133 x_DO_NOT_CARE_1435 x_DO_NOT_CARE_1436 x_DO_NOT_CARE_1437 x_1035 x_DO_NOT_CARE_1432 x_DO_NOT_CARE_1433 x_DO_NOT_CARE_1434 y_EXPARAM_1134 x_DO_NOT_CARE_1429 x_DO_NOT_CARE_1430 x_DO_NOT_CARE_1431 y_1036 set_flag_app_1344 s_app_h_EXPARAM_1337 s_app_x_1339 z_1037 =
y_1036 set_flag_app_1344 s_app_h_EXPARAM_1337 s_app_x_1339 z_1037
let app_without_checking_1355 x_DO_NOT_CARE_1426 x_DO_NOT_CARE_1427 x_DO_NOT_CARE_1428 h_EXPARAM_1131 x_DO_NOT_CARE_1423 x_DO_NOT_CARE_1424 x_DO_NOT_CARE_1425 h_1039 set_flag_app_1344 s_app_h_EXPARAM_1337 s_app_x_1339 x_1040 =
let set_flag_app_1344 = true
in
let s_app_x_1339 = x_1040
in
let s_app_h_EXPARAM_1337 = h_EXPARAM_1131
in
h_1039 set_flag_app_1344 s_app_h_EXPARAM_1337 s_app_x_1339 x_1040
let rec app_1038 x_DO_NOT_CARE_1360 x_DO_NOT_CARE_1361 x_DO_NOT_CARE_1362 h_EXPARAM_1131 x_DO_NOT_CARE_1357 x_DO_NOT_CARE_1358 x_DO_NOT_CARE_1359 h_1039 prev_set_flag_app_1343 s_prev_app_h_EXPARAM_1340 s_prev_app_x_1342 x_1040 =
let u = if prev_set_flag_app_1343 then
let u_21789 = fail ()
in
bot()
else () in
app_without_checking_1355 x_DO_NOT_CARE_1360 x_DO_NOT_CARE_1361
x_DO_NOT_CARE_1362 h_EXPARAM_1131 x_DO_NOT_CARE_1357
x_DO_NOT_CARE_1358 x_DO_NOT_CARE_1359 h_1039
prev_set_flag_app_1343 s_prev_app_h_EXPARAM_1340
s_prev_app_x_1342 x_1040
let main =
f_1034 false 0 0 c5_COEFFICIENT_1125 false 0 0
(app_without_checking_1355 false 0 0 c4_COEFFICIENT_1123 false 0 0
(f_1034 false 0 0 c1_COEFFICIENT_1117 false 0 0
(app_without_checking_1355 false 0 0 c0_COEFFICIENT_1115 false 0 0
id_1030) false 0 0 c3_COEFFICIENT_1120 false 0 0
(app_without_checking_1355 false 0 0 c2_COEFFICIENT_1118 false 0 0
omega_1032))) false 0 0 c7_COEFFICIENT_1128 false 0 0
(app_1038 false 0 0 c6_COEFFICIENT_1126 false 0 0 id_1030) false 0 0 1
|
|
17900c1bd6b6bc59902783f1c55ca14f3d5b3af03e6fa71933a7538780b14ca4 | kowainik/github-graphql | Data.hs | module Test.Data
( githubGraphqlRepositoryId
) where
import GitHub.Id (Id (..), RepositoryId)
githubGraphqlRepositoryId :: RepositoryId
githubGraphqlRepositoryId = Id "MDEwOlJlcG9zaXRvcnkyOTA1MDA2MzI="
| null | https://raw.githubusercontent.com/kowainik/github-graphql/5ffa6fc31631bc0e2f9b5b071e340cb49002c009/test/Test/Data.hs | haskell | module Test.Data
( githubGraphqlRepositoryId
) where
import GitHub.Id (Id (..), RepositoryId)
githubGraphqlRepositoryId :: RepositoryId
githubGraphqlRepositoryId = Id "MDEwOlJlcG9zaXRvcnkyOTA1MDA2MzI="
|
|
8bddeb23be27b2f9821b4c093fbb7612ac826c69e7f9b440b077c47268914108 | m4b/elf2json | E2j_ElfProgramHeader.ml | open Elf.ProgramHeader
open E2j_Json
let program_header2json ph =
`O [
"p_type" , to_float ph.p_type;
"p_flags", to_float ph.p_flags;
"p_offset", to_float ph.p_offset;
"p_vaddr", to_float ph.p_vaddr;
"p_paddr", to_float ph.p_paddr;
"p_filesz", to_float ph.p_filesz;
"p_memsz", to_float ph.p_memsz;
"p_align", to_float ph.p_align;
"type", `String (ptype_to_string ph.p_type);
"flags", `String (flags_to_string ph.p_flags);
]
let to_json phs =
let json = List.map program_header2json phs in
let meta =
[
"bytes", to_byte_array [4; 4; 8; 8; 8; 8; 8; 8;];
"prefix", `String "p_";
] in
`O [
"value",`A json;
"meta", `O meta;
]
| null | https://raw.githubusercontent.com/m4b/elf2json/72dfb62e797f68e4ad24cf3e74547ef91f8a1e62/lib/E2j_ElfProgramHeader.ml | ocaml | open Elf.ProgramHeader
open E2j_Json
let program_header2json ph =
`O [
"p_type" , to_float ph.p_type;
"p_flags", to_float ph.p_flags;
"p_offset", to_float ph.p_offset;
"p_vaddr", to_float ph.p_vaddr;
"p_paddr", to_float ph.p_paddr;
"p_filesz", to_float ph.p_filesz;
"p_memsz", to_float ph.p_memsz;
"p_align", to_float ph.p_align;
"type", `String (ptype_to_string ph.p_type);
"flags", `String (flags_to_string ph.p_flags);
]
let to_json phs =
let json = List.map program_header2json phs in
let meta =
[
"bytes", to_byte_array [4; 4; 8; 8; 8; 8; 8; 8;];
"prefix", `String "p_";
] in
`O [
"value",`A json;
"meta", `O meta;
]
|
|
1209775a0e7928bf2df2fcbd6aa30e8b36984d22cf52b405e78533272d54d224 | RolfRolles/PandemicML | X86FeatureUtil.ml | open X86
open X86InternalOperand
This array exists in two places , which is obviously bad . Should refactor this out of
X86Random and this file , and into a common file .
X86Random and this file, and into a common file. *)
let x86_mnem_arr = [|
Aaa;
Aad;
Aam;
Aas;
Adc;
Add;
Addpd;
Addps;
Addsd;
Addss;
Addsubpd;
Addsubps;
And;
Andnpd;
Andnps;
Andpd;
Andps;
Arpl;
Blendpd;
Blendps;
Blendvpd;
Blendvps;
Bound;
Bsf;
Bsr;
Bswap;
Bt;
Btc;
Btr;
Bts;
Call;
CallF;
Cbw;
Cdq;
Clc;
Cld;
Clflush;
Cli;
Clts;
Cmc;
Cmova;
Cmovae;
Cmovb;
Cmovbe;
Cmovg;
Cmovge;
Cmovl;
Cmovle;
Cmovno;
Cmovnp;
Cmovns;
Cmovnz;
Cmovo;
Cmovp;
Cmovs;
Cmovz;
Cmp;
Cmppd;
Cmpps;
Cmpsb;
Cmpsd;
Cmpss;
Cmpsw;
Cmpxchg;
Cmpxchg8b;
Comisd;
Comiss;
Cpuid;
Crc32;
Cvtdq2pd;
Cvtdq2ps;
Cvtpd2dq;
Cvtpd2pi;
Cvtpd2ps;
Cvtpi2pd;
Cvtpi2ps;
Cvtps2dq;
Cvtps2pd;
Cvtps2pi;
Cvtsd2si;
Cvtsd2ss;
Cvtsi2sd;
Cvtsi2ss;
Cvtss2sd;
Cvtss2si;
Cvttpd2dq;
Cvttpd2pi;
Cvttps2dq;
Cvttps2pi;
Cvttsd2si;
Cvttss2si;
Cwd;
Cwde;
Daa;
Das;
Dec;
Div;
Divpd;
Divps;
Divsd;
Divss;
Dppd;
Dpps;
Emms;
Enter;
Extractps;
F2xm1;
Fabs;
Fadd;
Faddp;
Fbld;
Fbstp;
Fchs;
Fclex;
Fcmovb;
Fcmovbe;
Fcmove;
Fcmovnb;
Fcmovnbe;
Fcmovne;
Fcmovnu;
Fcmovu;
Fcom;
Fcomi;
Fcomip;
Fcomp;
Fcompp;
Fcos;
Fdecstp;
Fdiv;
Fdivp;
Fdivr;
Fdivrp;
Ffree;
Fiadd;
Ficom;
Ficomp;
Fidiv;
Fidivr;
Fild;
Fimul;
Fincstp;
Finit;
Fist;
Fistp;
Fisttp;
Fisub;
Fisubr;
Fld;
Fld1;
Fldcw;
Fldenv;
Fldl2e;
Fldl2t;
Fldlg2;
Fldln2;
Fldpi;
Fldz;
Fmul;
Fmulp;
Fnop;
Fpatan;
Fprem;
Fprem1;
Fptan;
Frndint;
Frstor;
Fsave;
Fscale;
Fsin;
Fsincos;
Fsqrt;
Fst;
Fstcw;
Fstenv;
Fstp;
Fstsw;
Fsub;
Fsubp;
Fsubr;
Fsubrp;
Ftst;
Fucom;
Fucomi;
Fucomip;
Fucomp;
Fucompp;
Fxam;
Fxch;
Fxrstor;
Fxsave;
Fxtract;
Fyl2x;
Fyl2xp1;
Getsec;
Haddpd;
Haddps;
Hlt;
Hsubpd;
Hsubps;
Icebp;
Idiv;
Imul;
In;
Inc;
Insb;
Insd;
Insertps;
Insw;
Int;
Int3;
Into;
Invd;
Invlpg;
Iretd;
Iretw;
Ja;
Jae;
Jb;
Jbe;
Jcxz;
Jecxz;
Jg;
Jge;
Jl;
Jle;
Jmp;
JmpF;
Jno;
Jnp;
Jns;
Jnz;
Jo;
Jp;
Js;
Jz;
Lahf;
Lar;
Lddqu;
Ldmxcsr;
Lds;
Lea;
Leave;
Les;
Lfence;
Lfs;
Lgdt;
Lgs;
Lidt;
Lldt;
Lmsw;
Lodsb;
Lodsd;
Lodsw;
Loop;
Loopnz;
Loopz;
Lsl;
Lss;
Ltr;
Maskmovdqu;
Maskmovq;
Maxpd;
Maxps;
Maxsd;
Maxss;
Mfence;
Minpd;
Minps;
Minsd;
Minss;
Monitor;
Mov;
Movapd;
Movaps;
Movd;
Movddup;
Movdq2q;
Movdqa;
Movdqu;
Movhlps;
Movhpd;
Movhps;
Movlhps;
Movlpd;
Movlps;
Movmskpd;
Movmskps;
Movntdq;
Movntdqa;
Movnti;
Movntpd;
Movntps;
Movntq;
Movq;
Movq2dq;
Movsb;
Movsd;
Movshdup;
Movsldup;
Movss;
Movsw;
Movsx;
Movupd;
Movups;
Movzx;
Mpsadbw;
Mul;
Mulpd;
Mulps;
Mulsd;
Mulss;
Mwait;
Neg;
Nop;
Not;
Or;
Orpd;
Orps;
Out;
Outsb;
Outsd;
Outsw;
Pabsb;
Pabsd;
Pabsw;
Packssdw;
Packsswb;
Packusdw;
Packuswb;
Paddb;
Paddd;
Paddq;
Paddsb;
Paddsw;
Paddusb;
Paddusw;
Paddw;
Palignr;
Pand;
Pandn;
Pause;
Pavgb;
Pavgw;
Pblendvb;
Pblendw;
Pcmpeqb;
Pcmpeqd;
Pcmpeqq;
Pcmpeqw;
Pcmpestri;
Pcmpestrm;
Pcmpgtb;
Pcmpgtd;
Pcmpgtq;
Pcmpgtw;
Pcmpistri;
Pcmpistrm;
Pextrb;
Pextrd;
Pextrw;
Phaddd;
Phaddsw;
Phaddw;
Phminposuw;
Phsubd;
Phsubsw;
Phsubw;
Pinsrb;
Pinsrd;
Pinsrw;
Pmaddubsw;
Pmaddwd;
Pmaxsb;
Pmaxsd;
Pmaxsw;
Pmaxub;
Pmaxud;
Pmaxuw;
Pminsb;
Pminsd;
Pminsw;
Pminub;
Pminud;
Pminuw;
Pmovmskb;
Pmovsxbd;
Pmovsxbq;
Pmovsxbw;
Pmovsxdq;
Pmovsxwd;
Pmovsxwq;
Pmovzxbd;
Pmovzxbq;
Pmovzxbw;
Pmovzxdq;
Pmovzxwd;
Pmovzxwq;
Pmuldq;
Pmulhrsw;
Pmulhuw;
Pmulhw;
Pmulld;
Pmullw;
Pmuludq;
Pop;
Popad;
Popaw;
Popcnt;
Popfd;
Popfw;
Por;
Prefetchnta;
Prefetcht0;
Prefetcht1;
Prefetcht2;
Psadbw;
Pshufb;
Pshufd;
Pshufhw;
Pshuflw;
Pshufw;
Psignb;
Psignd;
Psignw;
Pslld;
Pslldq;
Psllq;
Psllw;
Psrad;
Psraw;
Psrld;
Psrldq;
Psrlq;
Psrlw;
Psubb;
Psubd;
Psubq;
Psubsb;
Psubsw;
Psubusb;
Psubusw;
Psubw;
Ptest;
Punpckhbw;
Punpckhdq;
Punpckhqdq;
Punpckhwd;
Punpcklbw;
Punpckldq;
Punpcklqdq;
Punpcklwd;
Push;
Pushad;
Pushaw;
Pushfd;
Pushfw;
Pxor;
Rcl;
Rcpps;
Rcpss;
Rcr;
Rdmsr;
Rdpmc;
Rdtsc;
Ret;
Retf;
Rol;
Ror;
Roundpd;
Roundps;
Roundsd;
Roundss;
Rsm;
Rsqrtps;
Rsqrtss;
Sahf;
Sal;
Salc;
Sar;
Sbb;
Scasb;
Scasd;
Scasw;
Seta;
Setae;
Setb;
Setbe;
Setg;
Setge;
Setl;
Setle;
Setno;
Setnp;
Setns;
Setnz;
Seto;
Setp;
Sets;
Setz;
Sfence;
Sgdt;
Shl;
Shld;
Shr;
Shrd;
Shufpd;
Shufps;
Sidt;
Sldt;
Smsw;
Sqrtpd;
Sqrtps;
Sqrtsd;
Sqrtss;
Stc;
Std;
Sti;
Stmxcsr;
Stosb;
Stosd;
Stosw;
Str;
Sub;
Subpd;
Subps;
Subsd;
Subss;
Syscall;
Sysenter;
Sysexit;
Sysret;
Test;
Ucomisd;
Ucomiss;
Ud2;
Unpckhpd;
Unpckhps;
Unpcklpd;
Unpcklps;
Verr;
Verw;
Vmcall;
Vmclear;
Vmlaunch;
Vmptrld;
Vmptrst;
Vmread;
Vmresume;
Vmwrite;
Vmxoff;
Vmxon;
Wait;
Wbinvd;
Wrmsr;
Xadd;
Xlat;
Xchg;
Xor;
Xorpd;
Xorps;|]
let number_mnem = function
| Aaa -> 1
| Aad -> 2
| Aam -> 3
| Aas -> 4
| Adc -> 5
| Add -> 6
| Addpd -> 7
| Addps -> 8
| Addsd -> 9
| Addss -> 10
| Addsubpd -> 11
| Addsubps -> 12
| And -> 13
| Andnpd -> 14
| Andnps -> 15
| Andpd -> 16
| Andps -> 17
| Arpl -> 18
| Blendpd -> 19
| Blendps -> 20
| Blendvpd -> 21
| Blendvps -> 22
| Bound -> 23
| Bsf -> 24
| Bsr -> 25
| Bswap -> 26
| Bt -> 27
| Btc -> 28
| Btr -> 29
| Bts -> 30
| Call -> 31
| CallF -> 32
| Cbw -> 33
| Cdq -> 34
| Clc -> 35
| Cld -> 36
| Clflush -> 37
| Cli -> 38
| Clts -> 39
| Cmc -> 40
| Cmova -> 41
| Cmovae -> 42
| Cmovb -> 43
| Cmovbe -> 44
| Cmovg -> 45
| Cmovge -> 46
| Cmovl -> 47
| Cmovle -> 48
| Cmovno -> 49
| Cmovnp -> 50
| Cmovns -> 51
| Cmovnz -> 52
| Cmovo -> 53
| Cmovp -> 54
| Cmovs -> 55
| Cmovz -> 56
| Cmp -> 57
| Cmppd -> 58
| Cmpps -> 59
| Cmpsb -> 60
| Cmpsd -> 61
| Cmpss -> 62
| Cmpsw -> 63
| Cmpxchg -> 64
| Cmpxchg8b -> 65
| Comisd -> 66
| Comiss -> 67
| Cpuid -> 68
| Crc32 -> 69
| Cvtdq2pd -> 70
| Cvtdq2ps -> 71
| Cvtpd2dq -> 72
| Cvtpd2pi -> 73
| Cvtpd2ps -> 74
| Cvtpi2pd -> 75
| Cvtpi2ps -> 76
| Cvtps2dq -> 77
| Cvtps2pd -> 78
| Cvtps2pi -> 79
| Cvtsd2si -> 80
| Cvtsd2ss -> 81
| Cvtsi2sd -> 82
| Cvtsi2ss -> 83
| Cvtss2sd -> 84
| Cvtss2si -> 85
| Cvttpd2dq -> 86
| Cvttpd2pi -> 87
| Cvttps2dq -> 88
| Cvttps2pi -> 89
| Cvttsd2si -> 90
| Cvttss2si -> 91
| Cwd -> 92
| Cwde -> 93
| Daa -> 94
| Das -> 95
| Dec -> 96
| Div -> 97
| Divpd -> 98
| Divps -> 99
| Divsd -> 100
| Divss -> 101
| Dppd -> 102
| Dpps -> 103
| Emms -> 104
| Enter -> 105
| Extractps -> 106
| F2xm1 -> 107
| Fabs -> 108
| Fadd -> 109
| Faddp -> 110
| Fbld -> 111
| Fbstp -> 112
| Fchs -> 113
| Fclex -> 114
| Fcmovb -> 115
| Fcmovbe -> 116
| Fcmove -> 117
| Fcmovnb -> 118
| Fcmovnbe -> 119
| Fcmovne -> 120
| Fcmovnu -> 121
| Fcmovu -> 122
| Fcom -> 123
| Fcomi -> 124
| Fcomip -> 125
| Fcomp -> 126
| Fcompp -> 127
| Fcos -> 128
| Fdecstp -> 129
| Fdiv -> 130
| Fdivp -> 131
| Fdivr -> 132
| Fdivrp -> 133
| Ffree -> 134
| Fiadd -> 135
| Ficom -> 136
| Ficomp -> 137
| Fidiv -> 138
| Fidivr -> 139
| Fild -> 140
| Fimul -> 141
| Fincstp -> 142
| Finit -> 143
| Fist -> 144
| Fistp -> 145
| Fisttp -> 146
| Fisub -> 147
| Fisubr -> 148
| Fld -> 149
| Fld1 -> 150
| Fldcw -> 151
| Fldenv -> 152
| Fldl2e -> 153
| Fldl2t -> 154
| Fldlg2 -> 155
| Fldln2 -> 156
| Fldpi -> 157
| Fldz -> 158
| Fmul -> 159
| Fmulp -> 160
| Fnop -> 161
| Fpatan -> 162
| Fprem -> 163
| Fprem1 -> 164
| Fptan -> 165
| Frndint -> 166
| Frstor -> 167
| Fsave -> 168
| Fscale -> 169
| Fsin -> 170
| Fsincos -> 171
| Fsqrt -> 172
| Fst -> 173
| Fstcw -> 174
| Fstenv -> 175
| Fstp -> 176
| Fstsw -> 177
| Fsub -> 178
| Fsubp -> 179
| Fsubr -> 180
| Fsubrp -> 181
| Ftst -> 182
| Fucom -> 183
| Fucomi -> 184
| Fucomip -> 185
| Fucomp -> 186
| Fucompp -> 187
| Fxam -> 188
| Fxch -> 189
| Fxrstor -> 190
| Fxsave -> 191
| Fxtract -> 192
| Fyl2x -> 193
| Fyl2xp1 -> 194
| Getsec -> 195
| Haddpd -> 196
| Haddps -> 197
| Hlt -> 198
| Hsubpd -> 199
| Hsubps -> 200
| Icebp -> 201
| Idiv -> 202
| Imul -> 203
| In -> 204
| Inc -> 205
| Insb -> 206
| Insd -> 207
| Insertps -> 208
| Insw -> 209
| Int -> 210
| Int3 -> 211
| Into -> 212
| Invd -> 213
| Invlpg -> 214
| Iretd -> 215
| Iretw -> 216
| Ja -> 217
| Jae -> 218
| Jb -> 219
| Jbe -> 220
| Jcxz -> 221
| Jecxz -> 222
| Jg -> 223
| Jge -> 224
| Jl -> 225
| Jle -> 226
| Jmp -> 227
| JmpF -> 228
| Jno -> 229
| Jnp -> 230
| Jns -> 231
| Jnz -> 232
| Jo -> 233
| Jp -> 234
| Js -> 235
| Jz -> 236
| Lahf -> 237
| Lar -> 238
| Lddqu -> 239
| Ldmxcsr -> 240
| Lds -> 241
| Lea -> 242
| Leave -> 243
| Les -> 244
| Lfence -> 245
| Lfs -> 246
| Lgdt -> 247
| Lgs -> 248
| Lidt -> 249
| Lldt -> 250
| Lmsw -> 251
| Lodsb -> 252
| Lodsd -> 253
| Lodsw -> 254
| Loop -> 255
| Loopnz -> 256
| Loopz -> 257
| Lsl -> 258
| Lss -> 259
| Ltr -> 260
| Maskmovdqu -> 261
| Maskmovq -> 262
| Maxpd -> 263
| Maxps -> 264
| Maxsd -> 265
| Maxss -> 266
| Mfence -> 267
| Minpd -> 268
| Minps -> 269
| Minsd -> 270
| Minss -> 271
| Monitor -> 272
| Mov -> 273
| Movapd -> 274
| Movaps -> 275
| Movd -> 276
| Movddup -> 277
| Movdq2q -> 278
| Movdqa -> 279
| Movdqu -> 280
| Movhlps -> 281
| Movhpd -> 282
| Movhps -> 283
| Movlhps -> 284
| Movlpd -> 285
| Movlps -> 286
| Movmskpd -> 287
| Movmskps -> 288
| Movntdq -> 289
| Movntdqa -> 290
| Movnti -> 291
| Movntpd -> 292
| Movntps -> 293
| Movntq -> 294
| Movq -> 295
| Movq2dq -> 296
| Movsb -> 297
| Movsd -> 298
| Movshdup -> 299
| Movsldup -> 300
| Movss -> 301
| Movsw -> 302
| Movsx -> 303
| Movupd -> 304
| Movups -> 305
| Movzx -> 306
| Mpsadbw -> 307
| Mul -> 308
| Mulpd -> 309
| Mulps -> 310
| Mulsd -> 311
| Mulss -> 312
| Mwait -> 313
| Neg -> 314
| Nop -> 315
| Not -> 316
| Or -> 317
| Orpd -> 318
| Orps -> 319
| Out -> 320
| Outsb -> 321
| Outsd -> 322
| Outsw -> 323
| Pabsb -> 324
| Pabsd -> 325
| Pabsw -> 326
| Packssdw -> 327
| Packsswb -> 328
| Packusdw -> 329
| Packuswb -> 330
| Paddb -> 331
| Paddd -> 332
| Paddq -> 333
| Paddsb -> 334
| Paddsw -> 335
| Paddusb -> 336
| Paddusw -> 337
| Paddw -> 338
| Palignr -> 339
| Pand -> 340
| Pandn -> 341
| Pause -> 342
| Pavgb -> 343
| Pavgw -> 344
| Pblendvb -> 345
| Pblendw -> 346
| Pcmpeqb -> 347
| Pcmpeqd -> 348
| Pcmpeqq -> 349
| Pcmpeqw -> 350
| Pcmpestri -> 351
| Pcmpestrm -> 352
| Pcmpgtb -> 353
| Pcmpgtd -> 354
| Pcmpgtq -> 355
| Pcmpgtw -> 356
| Pcmpistri -> 357
| Pcmpistrm -> 358
| Pextrb -> 359
| Pextrd -> 360
| Pextrw -> 361
| Phaddd -> 362
| Phaddsw -> 363
| Phaddw -> 364
| Phminposuw -> 365
| Phsubd -> 366
| Phsubsw -> 367
| Phsubw -> 368
| Pinsrb -> 369
| Pinsrd -> 370
| Pinsrw -> 371
| Pmaddubsw -> 372
| Pmaddwd -> 373
| Pmaxsb -> 374
| Pmaxsd -> 375
| Pmaxsw -> 376
| Pmaxub -> 377
| Pmaxud -> 378
| Pmaxuw -> 379
| Pminsb -> 380
| Pminsd -> 381
| Pminsw -> 382
| Pminub -> 383
| Pminud -> 384
| Pminuw -> 385
| Pmovmskb -> 386
| Pmovsxbd -> 387
| Pmovsxbq -> 388
| Pmovsxbw -> 389
| Pmovsxdq -> 390
| Pmovsxwd -> 391
| Pmovsxwq -> 392
| Pmovzxbd -> 393
| Pmovzxbq -> 394
| Pmovzxbw -> 395
| Pmovzxdq -> 396
| Pmovzxwd -> 397
| Pmovzxwq -> 398
| Pmuldq -> 399
| Pmulhrsw -> 400
| Pmulhuw -> 401
| Pmulhw -> 402
| Pmulld -> 403
| Pmullw -> 404
| Pmuludq -> 405
| Pop -> 406
| Popad -> 407
| Popaw -> 408
| Popcnt -> 409
| Popfd -> 410
| Popfw -> 411
| Por -> 412
| Prefetchnta -> 413
| Prefetcht0 -> 414
| Prefetcht1 -> 415
| Prefetcht2 -> 416
| Psadbw -> 417
| Pshufb -> 418
| Pshufd -> 419
| Pshufhw -> 420
| Pshuflw -> 421
| Pshufw -> 422
| Psignb -> 423
| Psignd -> 424
| Psignw -> 425
| Pslld -> 426
| Pslldq -> 427
| Psllq -> 428
| Psllw -> 429
| Psrad -> 430
| Psraw -> 431
| Psrld -> 432
| Psrldq -> 433
| Psrlq -> 434
| Psrlw -> 435
| Psubb -> 436
| Psubd -> 437
| Psubq -> 438
| Psubsb -> 439
| Psubsw -> 440
| Psubusb -> 441
| Psubusw -> 442
| Psubw -> 443
| Ptest -> 444
| Punpckhbw -> 445
| Punpckhdq -> 446
| Punpckhqdq -> 447
| Punpckhwd -> 448
| Punpcklbw -> 449
| Punpckldq -> 450
| Punpcklqdq -> 451
| Punpcklwd -> 452
| Push -> 453
| Pushad -> 454
| Pushaw -> 455
| Pushfd -> 456
| Pushfw -> 457
| Pxor -> 458
| Rcl -> 459
| Rcpps -> 460
| Rcpss -> 461
| Rcr -> 462
| Rdmsr -> 463
| Rdpmc -> 464
| Rdtsc -> 465
| Ret -> 466
| Retf -> 467
| Rol -> 468
| Ror -> 469
| Roundpd -> 470
| Roundps -> 471
| Roundsd -> 472
| Roundss -> 473
| Rsm -> 474
| Rsqrtps -> 475
| Rsqrtss -> 476
| Sahf -> 477
| Sal -> 478
| Salc -> 479
| Sar -> 480
| Sbb -> 481
| Scasb -> 482
| Scasd -> 483
| Scasw -> 484
| Seta -> 485
| Setae -> 486
| Setb -> 487
| Setbe -> 488
| Setg -> 489
| Setge -> 490
| Setl -> 491
| Setle -> 492
| Setno -> 493
| Setnp -> 494
| Setns -> 495
| Setnz -> 496
| Seto -> 497
| Setp -> 498
| Sets -> 499
| Setz -> 500
| Sfence -> 501
| Sgdt -> 502
| Shl -> 503
| Shld -> 504
| Shr -> 505
| Shrd -> 506
| Shufpd -> 507
| Shufps -> 508
| Sidt -> 509
| Sldt -> 510
| Smsw -> 511
| Sqrtpd -> 512
| Sqrtps -> 513
| Sqrtsd -> 514
| Sqrtss -> 515
| Stc -> 516
| Std -> 517
| Sti -> 518
| Stmxcsr -> 519
| Stosb -> 520
| Stosd -> 521
| Stosw -> 522
| Str -> 523
| Sub -> 524
| Subpd -> 525
| Subps -> 526
| Subsd -> 527
| Subss -> 528
| Syscall -> 529
| Sysenter -> 530
| Sysexit -> 531
| Sysret -> 532
| Test -> 533
| Ucomisd -> 534
| Ucomiss -> 535
| Ud2 -> 536
| Unpckhpd -> 537
| Unpckhps -> 538
| Unpcklpd -> 539
| Unpcklps -> 540
| Verr -> 541
| Verw -> 542
| Vmcall -> 543
| Vmclear -> 544
| Vmlaunch -> 545
| Vmptrld -> 546
| Vmptrst -> 547
| Vmread -> 548
| Vmresume -> 549
| Vmwrite -> 550
| Vmxoff -> 551
| Vmxon -> 552
| Wait -> 553
| Wbinvd -> 554
| Wrmsr -> 555
| Xadd -> 556
| Xlat -> 557
| Xchg -> 558
| Xor -> 559
| Xorpd -> 560
| Xorps -> 561
let make_canonical_map () =
let map = Hashtbl.create (Array.length x86_mnem_arr * 2) in
let list =
Array.fold_left
(fun acc mnem -> List.fold_left (fun acc (aol,(_,_)) -> (mnem,aol)::acc)
acc
(let list = try X86Encode.mnem_to_encodings mnem with _ -> [] in list)) [] x86_mnem_arr in
(* Since the encoder doesn't handle these encodings (due to their relative addressing),
we include them manually. *)
let extra_encodings =
[(Jo ,[OJb]);
(Jno,[OJb]);
(Jb ,[OJb]);
(Jae,[OJb]);
(Jz ,[OJb]);
(Jnz,[OJb]);
(Jbe,[OJb]);
(Ja ,[OJb]);
(Js ,[OJb]);
(Jns,[OJb]);
(Jp ,[OJb]);
(Jnp,[OJb]);
(Jl ,[OJb]);
(Jge,[OJb]);
(Jle,[OJb]);
(Jo ,[OJz]);
(Jno,[OJz]);
(Jb ,[OJz]);
(Jae,[OJz]);
(Jz ,[OJz]);
(Jnz,[OJz]);
(Jbe,[OJz]);
(Ja ,[OJz]);
(Js ,[OJz]);
(Jns,[OJz]);
(Jp ,[OJz]);
(Jnp,[OJz]);
(Jl ,[OJz]);
(Jge,[OJz]);
(Jle,[OJz]);
(Jg ,[OJz]);
(Loopnz,[OJb]);
(Loopz,[OJb]);
(Loop,[OJb]);
(Jcxz,[OJb]);
(Jecxz,[OJb]);
(Call,[OJz]);
(Jmp,[OJz]);
(JmpF,[OAp]);
(Jmp,[OJb])]
in
let num_encodings = List.fold_left (fun i e -> Hashtbl.replace map e i; i+1) 1 (extra_encodings@list) in
(num_encodings-1,map)
| null | https://raw.githubusercontent.com/RolfRolles/PandemicML/9c31ecaf9c782dbbeb6cf502bc2a6730316d681e/Projects/DetectVM/X86FeatureUtil.ml | ocaml | Since the encoder doesn't handle these encodings (due to their relative addressing),
we include them manually. | open X86
open X86InternalOperand
(* This array exists in two places, which is obviously bad. Should refactor this out of
   X86Random and this file, and into a common file. *)
let x86_mnem_arr = [|
Aaa;
Aad;
Aam;
Aas;
Adc;
Add;
Addpd;
Addps;
Addsd;
Addss;
Addsubpd;
Addsubps;
And;
Andnpd;
Andnps;
Andpd;
Andps;
Arpl;
Blendpd;
Blendps;
Blendvpd;
Blendvps;
Bound;
Bsf;
Bsr;
Bswap;
Bt;
Btc;
Btr;
Bts;
Call;
CallF;
Cbw;
Cdq;
Clc;
Cld;
Clflush;
Cli;
Clts;
Cmc;
Cmova;
Cmovae;
Cmovb;
Cmovbe;
Cmovg;
Cmovge;
Cmovl;
Cmovle;
Cmovno;
Cmovnp;
Cmovns;
Cmovnz;
Cmovo;
Cmovp;
Cmovs;
Cmovz;
Cmp;
Cmppd;
Cmpps;
Cmpsb;
Cmpsd;
Cmpss;
Cmpsw;
Cmpxchg;
Cmpxchg8b;
Comisd;
Comiss;
Cpuid;
Crc32;
Cvtdq2pd;
Cvtdq2ps;
Cvtpd2dq;
Cvtpd2pi;
Cvtpd2ps;
Cvtpi2pd;
Cvtpi2ps;
Cvtps2dq;
Cvtps2pd;
Cvtps2pi;
Cvtsd2si;
Cvtsd2ss;
Cvtsi2sd;
Cvtsi2ss;
Cvtss2sd;
Cvtss2si;
Cvttpd2dq;
Cvttpd2pi;
Cvttps2dq;
Cvttps2pi;
Cvttsd2si;
Cvttss2si;
Cwd;
Cwde;
Daa;
Das;
Dec;
Div;
Divpd;
Divps;
Divsd;
Divss;
Dppd;
Dpps;
Emms;
Enter;
Extractps;
F2xm1;
Fabs;
Fadd;
Faddp;
Fbld;
Fbstp;
Fchs;
Fclex;
Fcmovb;
Fcmovbe;
Fcmove;
Fcmovnb;
Fcmovnbe;
Fcmovne;
Fcmovnu;
Fcmovu;
Fcom;
Fcomi;
Fcomip;
Fcomp;
Fcompp;
Fcos;
Fdecstp;
Fdiv;
Fdivp;
Fdivr;
Fdivrp;
Ffree;
Fiadd;
Ficom;
Ficomp;
Fidiv;
Fidivr;
Fild;
Fimul;
Fincstp;
Finit;
Fist;
Fistp;
Fisttp;
Fisub;
Fisubr;
Fld;
Fld1;
Fldcw;
Fldenv;
Fldl2e;
Fldl2t;
Fldlg2;
Fldln2;
Fldpi;
Fldz;
Fmul;
Fmulp;
Fnop;
Fpatan;
Fprem;
Fprem1;
Fptan;
Frndint;
Frstor;
Fsave;
Fscale;
Fsin;
Fsincos;
Fsqrt;
Fst;
Fstcw;
Fstenv;
Fstp;
Fstsw;
Fsub;
Fsubp;
Fsubr;
Fsubrp;
Ftst;
Fucom;
Fucomi;
Fucomip;
Fucomp;
Fucompp;
Fxam;
Fxch;
Fxrstor;
Fxsave;
Fxtract;
Fyl2x;
Fyl2xp1;
Getsec;
Haddpd;
Haddps;
Hlt;
Hsubpd;
Hsubps;
Icebp;
Idiv;
Imul;
In;
Inc;
Insb;
Insd;
Insertps;
Insw;
Int;
Int3;
Into;
Invd;
Invlpg;
Iretd;
Iretw;
Ja;
Jae;
Jb;
Jbe;
Jcxz;
Jecxz;
Jg;
Jge;
Jl;
Jle;
Jmp;
JmpF;
Jno;
Jnp;
Jns;
Jnz;
Jo;
Jp;
Js;
Jz;
Lahf;
Lar;
Lddqu;
Ldmxcsr;
Lds;
Lea;
Leave;
Les;
Lfence;
Lfs;
Lgdt;
Lgs;
Lidt;
Lldt;
Lmsw;
Lodsb;
Lodsd;
Lodsw;
Loop;
Loopnz;
Loopz;
Lsl;
Lss;
Ltr;
Maskmovdqu;
Maskmovq;
Maxpd;
Maxps;
Maxsd;
Maxss;
Mfence;
Minpd;
Minps;
Minsd;
Minss;
Monitor;
Mov;
Movapd;
Movaps;
Movd;
Movddup;
Movdq2q;
Movdqa;
Movdqu;
Movhlps;
Movhpd;
Movhps;
Movlhps;
Movlpd;
Movlps;
Movmskpd;
Movmskps;
Movntdq;
Movntdqa;
Movnti;
Movntpd;
Movntps;
Movntq;
Movq;
Movq2dq;
Movsb;
Movsd;
Movshdup;
Movsldup;
Movss;
Movsw;
Movsx;
Movupd;
Movups;
Movzx;
Mpsadbw;
Mul;
Mulpd;
Mulps;
Mulsd;
Mulss;
Mwait;
Neg;
Nop;
Not;
Or;
Orpd;
Orps;
Out;
Outsb;
Outsd;
Outsw;
Pabsb;
Pabsd;
Pabsw;
Packssdw;
Packsswb;
Packusdw;
Packuswb;
Paddb;
Paddd;
Paddq;
Paddsb;
Paddsw;
Paddusb;
Paddusw;
Paddw;
Palignr;
Pand;
Pandn;
Pause;
Pavgb;
Pavgw;
Pblendvb;
Pblendw;
Pcmpeqb;
Pcmpeqd;
Pcmpeqq;
Pcmpeqw;
Pcmpestri;
Pcmpestrm;
Pcmpgtb;
Pcmpgtd;
Pcmpgtq;
Pcmpgtw;
Pcmpistri;
Pcmpistrm;
Pextrb;
Pextrd;
Pextrw;
Phaddd;
Phaddsw;
Phaddw;
Phminposuw;
Phsubd;
Phsubsw;
Phsubw;
Pinsrb;
Pinsrd;
Pinsrw;
Pmaddubsw;
Pmaddwd;
Pmaxsb;
Pmaxsd;
Pmaxsw;
Pmaxub;
Pmaxud;
Pmaxuw;
Pminsb;
Pminsd;
Pminsw;
Pminub;
Pminud;
Pminuw;
Pmovmskb;
Pmovsxbd;
Pmovsxbq;
Pmovsxbw;
Pmovsxdq;
Pmovsxwd;
Pmovsxwq;
Pmovzxbd;
Pmovzxbq;
Pmovzxbw;
Pmovzxdq;
Pmovzxwd;
Pmovzxwq;
Pmuldq;
Pmulhrsw;
Pmulhuw;
Pmulhw;
Pmulld;
Pmullw;
Pmuludq;
Pop;
Popad;
Popaw;
Popcnt;
Popfd;
Popfw;
Por;
Prefetchnta;
Prefetcht0;
Prefetcht1;
Prefetcht2;
Psadbw;
Pshufb;
Pshufd;
Pshufhw;
Pshuflw;
Pshufw;
Psignb;
Psignd;
Psignw;
Pslld;
Pslldq;
Psllq;
Psllw;
Psrad;
Psraw;
Psrld;
Psrldq;
Psrlq;
Psrlw;
Psubb;
Psubd;
Psubq;
Psubsb;
Psubsw;
Psubusb;
Psubusw;
Psubw;
Ptest;
Punpckhbw;
Punpckhdq;
Punpckhqdq;
Punpckhwd;
Punpcklbw;
Punpckldq;
Punpcklqdq;
Punpcklwd;
Push;
Pushad;
Pushaw;
Pushfd;
Pushfw;
Pxor;
Rcl;
Rcpps;
Rcpss;
Rcr;
Rdmsr;
Rdpmc;
Rdtsc;
Ret;
Retf;
Rol;
Ror;
Roundpd;
Roundps;
Roundsd;
Roundss;
Rsm;
Rsqrtps;
Rsqrtss;
Sahf;
Sal;
Salc;
Sar;
Sbb;
Scasb;
Scasd;
Scasw;
Seta;
Setae;
Setb;
Setbe;
Setg;
Setge;
Setl;
Setle;
Setno;
Setnp;
Setns;
Setnz;
Seto;
Setp;
Sets;
Setz;
Sfence;
Sgdt;
Shl;
Shld;
Shr;
Shrd;
Shufpd;
Shufps;
Sidt;
Sldt;
Smsw;
Sqrtpd;
Sqrtps;
Sqrtsd;
Sqrtss;
Stc;
Std;
Sti;
Stmxcsr;
Stosb;
Stosd;
Stosw;
Str;
Sub;
Subpd;
Subps;
Subsd;
Subss;
Syscall;
Sysenter;
Sysexit;
Sysret;
Test;
Ucomisd;
Ucomiss;
Ud2;
Unpckhpd;
Unpckhps;
Unpcklpd;
Unpcklps;
Verr;
Verw;
Vmcall;
Vmclear;
Vmlaunch;
Vmptrld;
Vmptrst;
Vmread;
Vmresume;
Vmwrite;
Vmxoff;
Vmxon;
Wait;
Wbinvd;
Wrmsr;
Xadd;
Xlat;
Xchg;
Xor;
Xorpd;
Xorps;|]
let number_mnem = function
| Aaa -> 1
| Aad -> 2
| Aam -> 3
| Aas -> 4
| Adc -> 5
| Add -> 6
| Addpd -> 7
| Addps -> 8
| Addsd -> 9
| Addss -> 10
| Addsubpd -> 11
| Addsubps -> 12
| And -> 13
| Andnpd -> 14
| Andnps -> 15
| Andpd -> 16
| Andps -> 17
| Arpl -> 18
| Blendpd -> 19
| Blendps -> 20
| Blendvpd -> 21
| Blendvps -> 22
| Bound -> 23
| Bsf -> 24
| Bsr -> 25
| Bswap -> 26
| Bt -> 27
| Btc -> 28
| Btr -> 29
| Bts -> 30
| Call -> 31
| CallF -> 32
| Cbw -> 33
| Cdq -> 34
| Clc -> 35
| Cld -> 36
| Clflush -> 37
| Cli -> 38
| Clts -> 39
| Cmc -> 40
| Cmova -> 41
| Cmovae -> 42
| Cmovb -> 43
| Cmovbe -> 44
| Cmovg -> 45
| Cmovge -> 46
| Cmovl -> 47
| Cmovle -> 48
| Cmovno -> 49
| Cmovnp -> 50
| Cmovns -> 51
| Cmovnz -> 52
| Cmovo -> 53
| Cmovp -> 54
| Cmovs -> 55
| Cmovz -> 56
| Cmp -> 57
| Cmppd -> 58
| Cmpps -> 59
| Cmpsb -> 60
| Cmpsd -> 61
| Cmpss -> 62
| Cmpsw -> 63
| Cmpxchg -> 64
| Cmpxchg8b -> 65
| Comisd -> 66
| Comiss -> 67
| Cpuid -> 68
| Crc32 -> 69
| Cvtdq2pd -> 70
| Cvtdq2ps -> 71
| Cvtpd2dq -> 72
| Cvtpd2pi -> 73
| Cvtpd2ps -> 74
| Cvtpi2pd -> 75
| Cvtpi2ps -> 76
| Cvtps2dq -> 77
| Cvtps2pd -> 78
| Cvtps2pi -> 79
| Cvtsd2si -> 80
| Cvtsd2ss -> 81
| Cvtsi2sd -> 82
| Cvtsi2ss -> 83
| Cvtss2sd -> 84
| Cvtss2si -> 85
| Cvttpd2dq -> 86
| Cvttpd2pi -> 87
| Cvttps2dq -> 88
| Cvttps2pi -> 89
| Cvttsd2si -> 90
| Cvttss2si -> 91
| Cwd -> 92
| Cwde -> 93
| Daa -> 94
| Das -> 95
| Dec -> 96
| Div -> 97
| Divpd -> 98
| Divps -> 99
| Divsd -> 100
| Divss -> 101
| Dppd -> 102
| Dpps -> 103
| Emms -> 104
| Enter -> 105
| Extractps -> 106
| F2xm1 -> 107
| Fabs -> 108
| Fadd -> 109
| Faddp -> 110
| Fbld -> 111
| Fbstp -> 112
| Fchs -> 113
| Fclex -> 114
| Fcmovb -> 115
| Fcmovbe -> 116
| Fcmove -> 117
| Fcmovnb -> 118
| Fcmovnbe -> 119
| Fcmovne -> 120
| Fcmovnu -> 121
| Fcmovu -> 122
| Fcom -> 123
| Fcomi -> 124
| Fcomip -> 125
| Fcomp -> 126
| Fcompp -> 127
| Fcos -> 128
| Fdecstp -> 129
| Fdiv -> 130
| Fdivp -> 131
| Fdivr -> 132
| Fdivrp -> 133
| Ffree -> 134
| Fiadd -> 135
| Ficom -> 136
| Ficomp -> 137
| Fidiv -> 138
| Fidivr -> 139
| Fild -> 140
| Fimul -> 141
| Fincstp -> 142
| Finit -> 143
| Fist -> 144
| Fistp -> 145
| Fisttp -> 146
| Fisub -> 147
| Fisubr -> 148
| Fld -> 149
| Fld1 -> 150
| Fldcw -> 151
| Fldenv -> 152
| Fldl2e -> 153
| Fldl2t -> 154
| Fldlg2 -> 155
| Fldln2 -> 156
| Fldpi -> 157
| Fldz -> 158
| Fmul -> 159
| Fmulp -> 160
| Fnop -> 161
| Fpatan -> 162
| Fprem -> 163
| Fprem1 -> 164
| Fptan -> 165
| Frndint -> 166
| Frstor -> 167
| Fsave -> 168
| Fscale -> 169
| Fsin -> 170
| Fsincos -> 171
| Fsqrt -> 172
| Fst -> 173
| Fstcw -> 174
| Fstenv -> 175
| Fstp -> 176
| Fstsw -> 177
| Fsub -> 178
| Fsubp -> 179
| Fsubr -> 180
| Fsubrp -> 181
| Ftst -> 182
| Fucom -> 183
| Fucomi -> 184
| Fucomip -> 185
| Fucomp -> 186
| Fucompp -> 187
| Fxam -> 188
| Fxch -> 189
| Fxrstor -> 190
| Fxsave -> 191
| Fxtract -> 192
| Fyl2x -> 193
| Fyl2xp1 -> 194
| Getsec -> 195
| Haddpd -> 196
| Haddps -> 197
| Hlt -> 198
| Hsubpd -> 199
| Hsubps -> 200
| Icebp -> 201
| Idiv -> 202
| Imul -> 203
| In -> 204
| Inc -> 205
| Insb -> 206
| Insd -> 207
| Insertps -> 208
| Insw -> 209
| Int -> 210
| Int3 -> 211
| Into -> 212
| Invd -> 213
| Invlpg -> 214
| Iretd -> 215
| Iretw -> 216
| Ja -> 217
| Jae -> 218
| Jb -> 219
| Jbe -> 220
| Jcxz -> 221
| Jecxz -> 222
| Jg -> 223
| Jge -> 224
| Jl -> 225
| Jle -> 226
| Jmp -> 227
| JmpF -> 228
| Jno -> 229
| Jnp -> 230
| Jns -> 231
| Jnz -> 232
| Jo -> 233
| Jp -> 234
| Js -> 235
| Jz -> 236
| Lahf -> 237
| Lar -> 238
| Lddqu -> 239
| Ldmxcsr -> 240
| Lds -> 241
| Lea -> 242
| Leave -> 243
| Les -> 244
| Lfence -> 245
| Lfs -> 246
| Lgdt -> 247
| Lgs -> 248
| Lidt -> 249
| Lldt -> 250
| Lmsw -> 251
| Lodsb -> 252
| Lodsd -> 253
| Lodsw -> 254
| Loop -> 255
| Loopnz -> 256
| Loopz -> 257
| Lsl -> 258
| Lss -> 259
| Ltr -> 260
| Maskmovdqu -> 261
| Maskmovq -> 262
| Maxpd -> 263
| Maxps -> 264
| Maxsd -> 265
| Maxss -> 266
| Mfence -> 267
| Minpd -> 268
| Minps -> 269
| Minsd -> 270
| Minss -> 271
| Monitor -> 272
| Mov -> 273
| Movapd -> 274
| Movaps -> 275
| Movd -> 276
| Movddup -> 277
| Movdq2q -> 278
| Movdqa -> 279
| Movdqu -> 280
| Movhlps -> 281
| Movhpd -> 282
| Movhps -> 283
| Movlhps -> 284
| Movlpd -> 285
| Movlps -> 286
| Movmskpd -> 287
| Movmskps -> 288
| Movntdq -> 289
| Movntdqa -> 290
| Movnti -> 291
| Movntpd -> 292
| Movntps -> 293
| Movntq -> 294
| Movq -> 295
| Movq2dq -> 296
| Movsb -> 297
| Movsd -> 298
| Movshdup -> 299
| Movsldup -> 300
| Movss -> 301
| Movsw -> 302
| Movsx -> 303
| Movupd -> 304
| Movups -> 305
| Movzx -> 306
| Mpsadbw -> 307
| Mul -> 308
| Mulpd -> 309
| Mulps -> 310
| Mulsd -> 311
| Mulss -> 312
| Mwait -> 313
| Neg -> 314
| Nop -> 315
| Not -> 316
| Or -> 317
| Orpd -> 318
| Orps -> 319
| Out -> 320
| Outsb -> 321
| Outsd -> 322
| Outsw -> 323
| Pabsb -> 324
| Pabsd -> 325
| Pabsw -> 326
| Packssdw -> 327
| Packsswb -> 328
| Packusdw -> 329
| Packuswb -> 330
| Paddb -> 331
| Paddd -> 332
| Paddq -> 333
| Paddsb -> 334
| Paddsw -> 335
| Paddusb -> 336
| Paddusw -> 337
| Paddw -> 338
| Palignr -> 339
| Pand -> 340
| Pandn -> 341
| Pause -> 342
| Pavgb -> 343
| Pavgw -> 344
| Pblendvb -> 345
| Pblendw -> 346
| Pcmpeqb -> 347
| Pcmpeqd -> 348
| Pcmpeqq -> 349
| Pcmpeqw -> 350
| Pcmpestri -> 351
| Pcmpestrm -> 352
| Pcmpgtb -> 353
| Pcmpgtd -> 354
| Pcmpgtq -> 355
| Pcmpgtw -> 356
| Pcmpistri -> 357
| Pcmpistrm -> 358
| Pextrb -> 359
| Pextrd -> 360
| Pextrw -> 361
| Phaddd -> 362
| Phaddsw -> 363
| Phaddw -> 364
| Phminposuw -> 365
| Phsubd -> 366
| Phsubsw -> 367
| Phsubw -> 368
| Pinsrb -> 369
| Pinsrd -> 370
| Pinsrw -> 371
| Pmaddubsw -> 372
| Pmaddwd -> 373
| Pmaxsb -> 374
| Pmaxsd -> 375
| Pmaxsw -> 376
| Pmaxub -> 377
| Pmaxud -> 378
| Pmaxuw -> 379
| Pminsb -> 380
| Pminsd -> 381
| Pminsw -> 382
| Pminub -> 383
| Pminud -> 384
| Pminuw -> 385
| Pmovmskb -> 386
| Pmovsxbd -> 387
| Pmovsxbq -> 388
| Pmovsxbw -> 389
| Pmovsxdq -> 390
| Pmovsxwd -> 391
| Pmovsxwq -> 392
| Pmovzxbd -> 393
| Pmovzxbq -> 394
| Pmovzxbw -> 395
| Pmovzxdq -> 396
| Pmovzxwd -> 397
| Pmovzxwq -> 398
| Pmuldq -> 399
| Pmulhrsw -> 400
| Pmulhuw -> 401
| Pmulhw -> 402
| Pmulld -> 403
| Pmullw -> 404
| Pmuludq -> 405
| Pop -> 406
| Popad -> 407
| Popaw -> 408
| Popcnt -> 409
| Popfd -> 410
| Popfw -> 411
| Por -> 412
| Prefetchnta -> 413
| Prefetcht0 -> 414
| Prefetcht1 -> 415
| Prefetcht2 -> 416
| Psadbw -> 417
| Pshufb -> 418
| Pshufd -> 419
| Pshufhw -> 420
| Pshuflw -> 421
| Pshufw -> 422
| Psignb -> 423
| Psignd -> 424
| Psignw -> 425
| Pslld -> 426
| Pslldq -> 427
| Psllq -> 428
| Psllw -> 429
| Psrad -> 430
| Psraw -> 431
| Psrld -> 432
| Psrldq -> 433
| Psrlq -> 434
| Psrlw -> 435
| Psubb -> 436
| Psubd -> 437
| Psubq -> 438
| Psubsb -> 439
| Psubsw -> 440
| Psubusb -> 441
| Psubusw -> 442
| Psubw -> 443
| Ptest -> 444
| Punpckhbw -> 445
| Punpckhdq -> 446
| Punpckhqdq -> 447
| Punpckhwd -> 448
| Punpcklbw -> 449
| Punpckldq -> 450
| Punpcklqdq -> 451
| Punpcklwd -> 452
| Push -> 453
| Pushad -> 454
| Pushaw -> 455
| Pushfd -> 456
| Pushfw -> 457
| Pxor -> 458
| Rcl -> 459
| Rcpps -> 460
| Rcpss -> 461
| Rcr -> 462
| Rdmsr -> 463
| Rdpmc -> 464
| Rdtsc -> 465
| Ret -> 466
| Retf -> 467
| Rol -> 468
| Ror -> 469
| Roundpd -> 470
| Roundps -> 471
| Roundsd -> 472
| Roundss -> 473
| Rsm -> 474
| Rsqrtps -> 475
| Rsqrtss -> 476
| Sahf -> 477
| Sal -> 478
| Salc -> 479
| Sar -> 480
| Sbb -> 481
| Scasb -> 482
| Scasd -> 483
| Scasw -> 484
| Seta -> 485
| Setae -> 486
| Setb -> 487
| Setbe -> 488
| Setg -> 489
| Setge -> 490
| Setl -> 491
| Setle -> 492
| Setno -> 493
| Setnp -> 494
| Setns -> 495
| Setnz -> 496
| Seto -> 497
| Setp -> 498
| Sets -> 499
| Setz -> 500
| Sfence -> 501
| Sgdt -> 502
| Shl -> 503
| Shld -> 504
| Shr -> 505
| Shrd -> 506
| Shufpd -> 507
| Shufps -> 508
| Sidt -> 509
| Sldt -> 510
| Smsw -> 511
| Sqrtpd -> 512
| Sqrtps -> 513
| Sqrtsd -> 514
| Sqrtss -> 515
| Stc -> 516
| Std -> 517
| Sti -> 518
| Stmxcsr -> 519
| Stosb -> 520
| Stosd -> 521
| Stosw -> 522
| Str -> 523
| Sub -> 524
| Subpd -> 525
| Subps -> 526
| Subsd -> 527
| Subss -> 528
| Syscall -> 529
| Sysenter -> 530
| Sysexit -> 531
| Sysret -> 532
| Test -> 533
| Ucomisd -> 534
| Ucomiss -> 535
| Ud2 -> 536
| Unpckhpd -> 537
| Unpckhps -> 538
| Unpcklpd -> 539
| Unpcklps -> 540
| Verr -> 541
| Verw -> 542
| Vmcall -> 543
| Vmclear -> 544
| Vmlaunch -> 545
| Vmptrld -> 546
| Vmptrst -> 547
| Vmread -> 548
| Vmresume -> 549
| Vmwrite -> 550
| Vmxoff -> 551
| Vmxon -> 552
| Wait -> 553
| Wbinvd -> 554
| Wrmsr -> 555
| Xadd -> 556
| Xlat -> 557
| Xchg -> 558
| Xor -> 559
| Xorpd -> 560
| Xorps -> 561
let make_canonical_map () =
let map = Hashtbl.create (Array.length x86_mnem_arr * 2) in
let list =
Array.fold_left
(fun acc mnem -> List.fold_left (fun acc (aol,(_,_)) -> (mnem,aol)::acc)
acc
(let list = try X86Encode.mnem_to_encodings mnem with _ -> [] in list)) [] x86_mnem_arr in
let extra_encodings =
[(Jo ,[OJb]);
(Jno,[OJb]);
(Jb ,[OJb]);
(Jae,[OJb]);
(Jz ,[OJb]);
(Jnz,[OJb]);
(Jbe,[OJb]);
(Ja ,[OJb]);
(Js ,[OJb]);
(Jns,[OJb]);
(Jp ,[OJb]);
(Jnp,[OJb]);
(Jl ,[OJb]);
(Jge,[OJb]);
(Jle,[OJb]);
(Jo ,[OJz]);
(Jno,[OJz]);
(Jb ,[OJz]);
(Jae,[OJz]);
(Jz ,[OJz]);
(Jnz,[OJz]);
(Jbe,[OJz]);
(Ja ,[OJz]);
(Js ,[OJz]);
(Jns,[OJz]);
(Jp ,[OJz]);
(Jnp,[OJz]);
(Jl ,[OJz]);
(Jge,[OJz]);
(Jle,[OJz]);
(Jg ,[OJz]);
(Loopnz,[OJb]);
(Loopz,[OJb]);
(Loop,[OJb]);
(Jcxz,[OJb]);
(Jecxz,[OJb]);
(Call,[OJz]);
(Jmp,[OJz]);
(JmpF,[OAp]);
(Jmp,[OJb])]
in
let num_encodings = List.fold_left (fun i e -> Hashtbl.replace map e i; i+1) 1 (extra_encodings@list) in
(num_encodings-1,map)
|
09825bf93957c462853739bf301a08a65ffbc09262ff577962b44a8f71482ea8 | LambdaHack/LambdaHack | LoopM.hs | -- | The main loop of the server, processing human and computer player
-- moves turn by turn.
module Game.LambdaHack.Server.LoopM
( loopSer
#ifdef EXPOSE_INTERNAL
-- * Internal operations
, factionArena, arenasForLoop, handleFidUpd, loopUpd, endClip
, manageCalmAndDomination, applyPeriodicLevel
, handleTrajectories, hTrajectories, advanceTrajectory
, handleActors, hActors, handleUIunderAI, dieSer, restartGame
#endif
) where
import Prelude ()
import Game.LambdaHack.Core.Prelude
import qualified Data.EnumMap.Strict as EM
import qualified Data.EnumSet as ES
import Game.LambdaHack.Atomic
import Game.LambdaHack.Client (ReqUI (..), Response (..))
import Game.LambdaHack.Common.Actor
import Game.LambdaHack.Common.ActorState
import Game.LambdaHack.Common.Analytics
import Game.LambdaHack.Common.Faction
import Game.LambdaHack.Common.Item
import qualified Game.LambdaHack.Common.ItemAspect as IA
import Game.LambdaHack.Common.Kind
import Game.LambdaHack.Common.Level
import Game.LambdaHack.Common.Misc
import Game.LambdaHack.Common.MonadStateRead
import Game.LambdaHack.Common.Perception
import Game.LambdaHack.Common.State
import qualified Game.LambdaHack.Common.Tile as Tile
import Game.LambdaHack.Common.Time
import Game.LambdaHack.Common.Types
import Game.LambdaHack.Common.Vector
import Game.LambdaHack.Content.FactionKind
import qualified Game.LambdaHack.Content.ItemKind as IK
import Game.LambdaHack.Content.ModeKind
import Game.LambdaHack.Content.RuleKind
import qualified Game.LambdaHack.Definition.Ability as Ability
import Game.LambdaHack.Definition.Defs
import Game.LambdaHack.Server.CommonM
import Game.LambdaHack.Server.HandleEffectM
import Game.LambdaHack.Server.HandleRequestM
import Game.LambdaHack.Server.MonadServer
import Game.LambdaHack.Server.PeriodicM
import Game.LambdaHack.Server.ProtocolM
import Game.LambdaHack.Server.ServerOptions
import Game.LambdaHack.Server.StartM
import Game.LambdaHack.Server.State
-- | Start a game session, including the clients, and then loop,
-- communicating with the clients.
--
-- The loop is started in server state that is empty, see 'emptyStateServer'.
loopSer :: (MonadServerAtomic m, MonadServerComm m)
=> ServerOptions
-- ^ player-supplied server options
-> (Bool -> FactionId -> ChanServer -> IO ())
-- ^ function that initializes a client and runs its main loop
-> m ()
loopSer serverOptions executorClient = do
-- Recover states and launch clients.
modifyServer $ \ser -> ser { soptionsNxt = serverOptions
, soptions = serverOptions }
cops <- getsState scops
let updConn startsNewGame = updateConn $ executorClient startsNewGame
restored <- tryRestore
case restored of
Just (sRaw, ser) | not $ snewGameSer serverOptions -> do -- a restored game
execUpdAtomic $ UpdResumeServer
$ updateCOpsAndCachedData (const cops) sRaw
putServer ser {soptionsNxt = serverOptions}
applyDebug
factionD <- getsState sfactionD
let f fid = let cmd = UpdResumeServer
$ updateCOpsAndCachedData (const cops)
$ sclientStates ser EM.! fid
in execUpdAtomicFidCatch fid cmd
mapM_ (void <$> f) $ EM.keys factionD
updConn False
initPer
pers <- getsServer sperFid
let clear = const emptyPer
persFid fid | sknowEvents serverOptions = EM.map clear (pers EM.! fid)
| otherwise = pers EM.! fid
mapM_ (\fid -> sendUpdate fid $ UpdResume fid (persFid fid))
(EM.keys factionD)
arenasNew <- arenasForLoop
modifyServer $ \ser2 -> ser2 {sarenas = arenasNew, svalidArenas = True}
-- We dump RNG seeds here, based on @soptionsNxt@, in case the game
-- wasn't run with @--dumpInitRngs@ previously, but we need the seeds,
-- e.g., to diagnose a crash.
rngs <- getsServer srngs
when (sdumpInitRngs serverOptions) $ dumpRngs rngs
_ -> do -- starting new game for this savefile (--newGame or fresh save)
factionDold <- getsState sfactionD
s <- gameReset serverOptions Nothing Nothing
-- get RNG from item boost
-- Set up commandline options.
let optionsBarRngs =
serverOptions {sdungeonRng = Nothing, smainRng = Nothing}
modifyServer $ \ser -> ser { soptionsNxt = optionsBarRngs
, soptions = optionsBarRngs }
execUpdAtomic $ UpdRestartServer s
updConn True
initPer
reinitGame factionDold
writeSaveAll False False
loopUpd $ updConn True
factionArena :: MonadStateRead m => Faction -> m (Maybe LevelId)
factionArena fact = case gleader fact of
-- Even spawners need an active arena for their leader,
-- or they start clogging stairs.
Just leader -> do
b <- getsState $ getActorBody leader
return $ Just $ blid b
Nothing -> return Nothing
-- This means Allure heroes can kill all aliens on lvl 4, retreat,
-- hide and sleep on lvl 3 and they are guaranteed aliens don't spawn.
-- However, animals still spawn, if slowly, and aliens resume
-- spawning when heroes move on again.
arenasForLoop :: MonadStateRead m => m (ES.EnumSet LevelId)
{-# INLINE arenasForLoop #-}
arenasForLoop = do
factionD <- getsState sfactionD
marenas <- mapM factionArena $ EM.elems factionD
let arenas = ES.fromList $ catMaybes marenas
!_A = assert (not (ES.null arenas)
`blame` "game over not caught earlier"
`swith` factionD) ()
return $! arenas
handleFidUpd :: forall m. (MonadServerAtomic m, MonadServerComm m)
=> (FactionId -> m ()) -> FactionId -> Faction -> m ()
{-# INLINE handleFidUpd #-}
handleFidUpd updatePerFid fid fact = do
-- Update perception on all levels at once,
-- in case a leader is changed to actor on another
-- (possibly not even currently active) level.
-- This runs for all factions even if save is requested by UI.
-- Let players ponder new game state while the engine is busy saving.
-- Also, this ensures perception before game save is exactly the same
-- as at game resume, which is an invariant we check elsewhere.
-- However, if perception is not updated after the action, the actor
-- may not see his vicinity, so may not see enemy that displaces (or hits) him
-- resulting in breaking the displace action and temporary leader loss,
-- which is fine, though a bit alarming. So, we update it at the end.
updatePerFid fid
-- Move a single actor only. Note that the skipped actors are not marked
-- as waiting. Normally they will act in the next clip or the next few,
-- so that's natural. But if there are dozens of them, this is a problem.
-- E.g., they don't move, but still make nearby foes lose Calm.
-- However, for KISS, we leave it be.
--
-- Bail out if immediate loop break requested by UI. No check
-- for @sbreakLoop@ needed, for the same reasons as in @handleActors@.
let handle :: [LevelId] -> m Bool
handle [] = return False
handle (lid : rest) = do
breakASAP <- getsServer sbreakASAP
if breakASAP
then return False
else do
nonWaitMove <- handleActors lid fid
if nonWaitMove
then return True
else handle rest
killDying :: [LevelId] -> m ()
killDying = mapM_ killDyingLid
killDyingLid :: LevelId -> m ()
killDyingLid lid = do
localTime <- getsState $ getLocalTime lid
levelTime <- getsServer $ (EM.! lid) . (EM.! fid) . sactorTime
let l = filter (\(_, atime) -> atime <= localTime) $ EM.assocs levelTime
killAid (aid, _) = do
b1 <- getsState $ getActorBody aid
when (bhp b1 <= 0) $ dieSer aid b1
mapM_ killAid l
-- Start on arena with leader, if available. This is crucial to ensure
-- that no actor (even ours) moves before UI declares save(&exit).
fa <- factionArena fact
arenas <- getsServer sarenas
let myArenas = case fa of
Just myArena -> myArena : delete myArena (ES.elems arenas)
Nothing -> ES.elems arenas
nonWaitMove <- handle myArenas
breakASAP <- getsServer sbreakASAP
unless breakASAP $ killDying myArenas
-- We update perception at the end, see comment above. This is usually
-- cheap, and when not, if it's AI faction, it's a waste, but if it's UI,
-- that's exactly where it prevents lost attack messages, etc.
-- If the move was a wait, perception unchanged, so no need to update,
-- unless the actor starts sleeping, in which case his perception
-- is reduced a bit later, so no harm done.
when nonWaitMove $ updatePerFid fid
-- | Handle a clip (the smallest fraction of a game turn for which a frame may
-- potentially be generated). Run the leader and other actors moves.
-- Eventually advance the time and repeat.
loopUpd :: forall m. (MonadServerAtomic m, MonadServerComm m)
=> m () -> m ()
loopUpd updConn = do
let updatePerFid :: FactionId -> m ()
{-# NOINLINE updatePerFid #-}
updatePerFid fid = {-# SCC updatePerFid #-} do
perValid <- getsServer $ (EM.! fid) . sperValidFid
mapM_ (\(lid, valid) -> unless valid $ updatePer fid lid)
(EM.assocs perValid)
handleFid :: (FactionId, Faction) -> m ()
{-# NOINLINE handleFid #-}
handleFid (fid, fact) = do
breakASAP <- getsServer sbreakASAP
-- Don't process other factions, even their perceptions,
-- if UI saves and/or exits.
unless breakASAP $ handleFidUpd updatePerFid fid fact
loopConditionally = do
factionD <- getsState sfactionD
-- Update perception one last time to satisfy save/resume assertions,
-- because we may get here at arbitrary moment due to game over
-- and so have outdated perception.
mapM_ updatePerFid (EM.keys factionD)
modifyServer $ \ser -> ser { sbreakLoop = False
, sbreakASAP = False }
endOrLoop loopUpdConn (restartGame updConn loopUpdConn)
loopUpdConn = do
factionD <- getsState sfactionD
-- Start handling actors with the single UI faction,
-- to safely save/exit. Note that this hack fails if there are many UI
-- factions (when we reenable multiplayer). Then players will request
-- save&exit and others will vote on it and it will happen
-- after the clip has ended, not at the start.
-- Note that at most a single actor with a time-consuming action
-- is processed per faction, so it's fair, but many loops are needed.
let hasUI (_, fact) = fhasUI (gkind fact)
(factionUI, factionsRest) = case break hasUI $ EM.assocs factionD of
(noUI1, ui : noUI2) -> (ui, noUI1 ++ noUI2)
_ -> error "no UI faction in the game"
mapM_ handleFid $ factionUI : factionsRest
breakASAP <- getsServer sbreakASAP
breakLoop <- getsServer sbreakLoop
if breakASAP || breakLoop
then loopConditionally
else do
-- Projectiles are processed last and not at all if the UI leader
-- decides to save or exit or restart or if there is game over.
-- This and UI leader acting before any other ordinary actors
-- ensures state is not changed and so the clip doesn't need
-- to be carried through before save.
arenas <- getsServer sarenas
mapM_ (\fid -> mapM_ (`handleTrajectories` fid) $ ES.elems arenas)
(EM.keys factionD)
endClip updatePerFid -- must be last, in case performs a bkp save
-- The condition can be changed in @handleTrajectories@ by pushing
-- onto an escape and in @endClip@.
breakLoop2 <- getsServer sbreakLoop
if breakLoop2
then loopConditionally
else loopUpdConn -- process next iteration unconditionally
loopUpdConn
-- | Handle the end of every clip. Do whatever has to be done
-- every fixed number of clips, e.g., monster generation.
-- Advance time. Perform periodic saves, if applicable.
--
-- This is never run if UI requested save or exit or restart and it's correct,
-- because we know nobody moved and no time was or needs to be advanced
-- and arenas are not changed. After game was saved and exited,
-- on game resume the first clip is performed with empty arenas,
-- so arena time is not updated and nobody moves, nor anything happens,
-- but arenas are here correctly updated.
endClip :: forall m. MonadServerAtomic m => (FactionId -> m ()) -> m ()
{-# INLINE endClip #-}
endClip updatePerFid = do
COps{corule} <- getsState scops
time <- getsState stime
let clipN = time `timeFit` timeClip
-- No check if @sbreakASAP@ is set, because then the function is not called.
breakLoop <- getsServer sbreakLoop
-- We don't send a lot of useless info to the client if the game has already
-- ended. At best wasteful, at worst the player sees strange messages.
unless breakLoop $ do
-- I need to send time updates, because I can't add time to each command,
-- because I'd need to send also all arenas, which should be updated,
-- and this is too expensive data for each, e.g., projectile move.
-- I send even if nothing changes so that UI time display can progress.
-- Possibly @arenas@ are invalid here, but all moves were performed
-- according to this value, so time should be replenished according
-- to this value as well.
-- This is crucial, because tiny time discrepancies can accumulate
-- magnified by hunders of actors that share the clip slots due to the
-- restriction that at most one faction member acts each clip.
arenas <- getsServer sarenas
execUpdAtomic $ UpdAgeGame arenas
-- Perform periodic dungeon maintenance.
when (clipN `mod` rleadLevelClips corule == 0) leadLevelSwitch
case clipN `mod` clipsInTurn of
0 ->
-- Spawn monsters at most once per 3 turns.
when (clipN `mod` (3 * clipsInTurn) == 0)
spawnMonster
4 ->
-- Periodic activation only once per turn, for speed,
-- but on all active arenas. Calm updates and domination
-- happen there as well. Once per turn is too rare for accurate
-- expiration of short conditions, e.g., 1-turn haste. TODO.
applyPeriodicLevel
_ -> return ()
-- @applyPeriodicLevel@ might have, e.g., dominated actors, ending the game.
-- It could not have unended the game, though.
breakLoop2 <- getsServer sbreakLoop
unless breakLoop2 $ do
-- Possibly a leader change due to the above, so update arenas here
-- for 100% accuracy at least at the start of actor moves, before they
-- change leaders as part of their moves.
--
-- After game resume, this is the first non-vacuous computation.
-- Next call to @loopUpdConn@ really moves actors and updates arena times
-- so we start in exactly the same place that UI save ended in.
validArenas <- getsServer svalidArenas
unless validArenas $ do
arenasNew <- arenasForLoop
modifyServer $ \ser -> ser {sarenas = arenasNew, svalidArenas = True}
-- Update all perception for visual feedback and to make sure saving
-- and resuming game doesn't affect gameplay (by updating perception).
-- Perception updates in @handleFidUpd@ are not enough, because
-- periodic actions could have invalidated them.
factionD <- getsState sfactionD
mapM_ updatePerFid (EM.keys factionD)
-- Saving on the browser causes a huge lag, hence autosave disabled.
#ifndef USE_JSFILE
unless breakLoop2 $ -- if by chance requested and periodic saves coincide
-- Periodic save needs to be at the end, so that restore can start
-- at the beginning. Double save on first turn is avoided with @succ@.
when (succ clipN `mod` rwriteSaveClips corule == 0) $
writeSaveAll False False
#endif
-- | Check if the given actor is dominated and update his calm.
manageCalmAndDomination :: MonadServerAtomic m => ActorId -> Actor -> m ()
manageCalmAndDomination aid b = do
performedDomination <-
if bcalm b > 0 then return False else do -- triggered by zeroed Calm
hiImpression <- highestImpression b
case hiImpression of
Nothing -> return False
Just (hiImpressionFid, hiImpressionK) -> do
fact <- getsState $ (EM.! bfid b) . sfactionD
if fhasPointman (gkind fact)
-- animals/robots/human drones never Calm-dominated
|| hiImpressionK >= 10
-- unless very high impression, e.g., in a dominated hero
then dominateFidSfx aid aid (btrunk b) hiImpressionFid
else return False
unless performedDomination $ do
newCalmDelta <- getsState $ regenCalmDelta aid b
unless (newCalmDelta == 0) $
-- Update delta for the current player turn.
updateCalm aid newCalmDelta
-- | Trigger periodic items for all actors on the given level.
applyPeriodicLevel :: MonadServerAtomic m => m ()
applyPeriodicLevel = do
arenas <- getsServer sarenas
let applyPeriodicItem _ _ (_, (_, [])) = return ()
-- periodic items always have at least one timer
applyPeriodicItem aid cstore (iid, _) = do
itemFull <- getsState $ itemToFull iid
let arItem = aspectRecordFull itemFull
when (IA.checkFlag Ability.Periodic arItem) $ do
-- Check if the item is still in the bag (previous items act!).
b2 <- getsState $ getActorBody aid
bag <- getsState $ getBodyStoreBag b2 cstore
case iid `EM.lookup` bag of
Nothing -> return () -- item dropped
Just (k, _) -> do
-- Activate even if effects null or vacuous, to possibly
-- destroy the item.
let effApplyFlags = EffApplyFlags
{ effToUse = EffBare -- no periodic crafting
, effVoluntary = True
, effUseAllCopies = k <= 1
, effKineticPerformed = False
, effActivation = Ability.ActivationPeriodic
, effMayDestroy = True
}
void $ effectAndDestroyAndAddKill
effApplyFlags
aid aid aid iid (CActor aid cstore) itemFull
applyPeriodicActor (aid, b) =
-- While it's fun when projectiles flash or speed up mid-air,
-- it's very exotic and quite time-intensive whenever hundreds
-- of projectiles exist due to ongoing explosions.
-- Nothing activates when actor dying to prevent a regenerating
-- actor from resurrecting each turn, resulting in silly stats.
when (not (bproj b) && bhp b > 0 && blid b `ES.member` arenas) $ do
-- Equipment goes first, to refresh organs before they expire,
-- to avoid the message that organ expired.
mapM_ (applyPeriodicItem aid CEqp) $ EM.assocs $ beqp b
mapM_ (applyPeriodicItem aid COrgan) $ EM.assocs $ borgan b
-- While we are at it, also update his Calm.
manageCalmAndDomination aid b
allActors <- getsState sactorD
mapM_ applyPeriodicActor $ EM.assocs allActors
handleTrajectories :: MonadServerAtomic m => LevelId -> FactionId -> m ()
handleTrajectories lid fid = do
localTime <- getsState $ getLocalTime lid
levelTime <- getsServer $ (EM.! lid) . (EM.! fid) . strajTime
let l = sort $ map fst
$ filter (\(_, atime) -> atime <= localTime) $ EM.assocs levelTime
-- The @strajTime@ map may be outdated before @hTrajectories@
-- call (due to other actors following their trajectories),
-- so it's only used to decide which actors are processed in this
-- @handleTrajectories@ call. If an actor is added to the map,
-- the recursive call to @handleTrajectories@ will detect that
-- and process him later on.
-- If the actor is no longer on the level or no longer belongs
-- to the faction, it is nevertheless processed without a problem.
-- We are guaranteed the actor still exists.
mapM_ hTrajectories l
-- Avoid frames between fadeout and fadein.
breakLoop <- getsServer sbreakLoop
unless (null l || breakLoop) $
handleTrajectories lid fid -- for speeds > tile/clip
hTrajectories :: MonadServerAtomic m => ActorId -> m ()
{-# INLINE hTrajectories #-}
hTrajectories aid = do
b1 <- getsState $ getActorBody aid
let removePushed b =
-- No longer fulfills criteria and was not removed by dying; remove him.
modifyServer $ \ser ->
ser { strajTime =
EM.adjust (EM.adjust (EM.delete aid) (blid b)) (bfid b)
(strajTime ser)
, strajPushedBy = EM.delete aid (strajPushedBy ser) }
removeTrajectory b =
-- Non-projectile actor stops flying (a projectile with empty trajectory
-- would be intercepted earlier on as dead).
-- Will be removed from @strajTime@ in recursive call
-- to @handleTrajectories@.
assert (not $ bproj b)
$ execUpdAtomic $ UpdTrajectory aid (btrajectory b) Nothing
breakLoop <- getsServer sbreakLoop
if breakLoop then return () -- don't move if game over via pushing
else if actorDying b1 then dieSer aid b1
else case btrajectory b1 of
Nothing -> removePushed b1
Just ([], _) -> removeTrajectory b1 >> removePushed b1
Just{} -> do
advanceTrajectory aid b1
-- Here, @advanceTrajectory@ might have affected @actorDying@,
-- so we check again ASAP to make sure the body of the projectile
-- (or pushed actor) doesn't block movement of other actors,
-- but vanishes promptly.
-- Bodies of actors that die not flying remain on the battlefied until
-- their natural next turn, to give them a chance of rescue.
-- Note that domination of pushed actors is not checked
-- nor is their calm updated. They are helpless wrt movement,
-- but also invulnerable in this respect.
b2 <- getsState $ getActorBody aid
if actorDying b2
then dieSer aid b2
else case btrajectory b2 of
Nothing -> removePushed b2
Just ([], _) -> removeTrajectory b2 >> removePushed b2
Just{} -> -- delay next iteration only if still flying
advanceTimeTraj aid
-- if @actorDying@ due to @bhp b <= 0@:
-- If @b@ is a projectile, it means it hits an actor or is hit by an actor.
-- Then the carried item is destroyed and that's all.
-- If @b@ is not a projectile, it dies, his items drop to the ground
-- and possibly a new leader is elected.
--
-- if @actorDying@ due to @btrajectory@ null:
-- A projectile drops to the ground due to obstacles or range.
-- The carried item is not destroyed, unless it's fragile,
-- but drops to the ground.
-- | Manage trajectory of a projectile or a pushed other actor.
--
-- Colliding with a wall or actor doesn't take time, because
-- the projectile does not move (the move is blocked).
-- Not advancing time forces dead projectiles to be destroyed ASAP.
-- Otherwise, with some timings, it can stay on the game map dead,
-- blocking path of human-controlled actors and alarming the hapless human.
advanceTrajectory :: MonadServerAtomic m => ActorId -> Actor -> m ()
advanceTrajectory aid b1 = do
COps{coTileSpeedup} <- getsState scops
lvl <- getLevel $ blid b1
arTrunk <- getsState $ (EM.! btrunk b1) . sdiscoAspect
let registerKill killHow =
-- Kill counts for each blast particle is TMI.
when (bproj b1
&& not (IA.checkFlag Ability.Blast arTrunk)) $ do
killer <- getsServer $ EM.findWithDefault aid aid . strajPushedBy
addKillToAnalytics killer killHow (bfid b1) (btrunk b1)
case btrajectory b1 of
Just (d : lv, speed) -> do
let tpos = bpos b1 `shift` d -- target position
if Tile.isWalkable coTileSpeedup $ lvl `at` tpos then do
-- Hit will clear trajectories in @reqMelee@,
-- so no need to do that here.
execUpdAtomic $ UpdTrajectory aid (btrajectory b1) (Just (lv, speed))
when (null lv) $ registerKill KillDropLaunch
let occupied = occupiedBigLvl tpos lvl || occupiedProjLvl tpos lvl
reqMoveHit = reqMoveGeneric False True aid d
reqDisp = reqDisplaceGeneric False aid
if | bproj b1 -> reqMoveHit -- projectiles always hit
| occupied ->
-- Non-projectiles displace if they are ending their flight
-- or if only a projectile is in the way.
-- So, no chaos of displacing a whole line of enemies.
case (posToBigLvl tpos lvl, posToProjsLvl tpos lvl) of
(Nothing, []) -> error "advanceTrajectory: not occupied"
(Nothing, [target]) -> reqDisp target
(Nothing, _) -> reqMoveHit -- can't displace multiple
(Just target, []) ->
if null lv then reqDisp target else reqMoveHit
(Just _, _) -> reqMoveHit -- can't displace multiple
| otherwise -> reqMoveHit -- if not occupied, just move
else do
-- Will be removed from @strajTime@ in recursive call
-- to @handleTrajectories@.
unless (bproj b1) $
execSfxAtomic $ SfxCollideTile aid tpos
embedsPre <- getsState $ getEmbedBag (blid b1) tpos
-- No crafting by projectiles that bump tiles nor by pushed actors.
-- The only way is if they land in a tile (are engulfed by it)
-- and have enough skill. But projectiles transform when hitting,
-- if terrain permits, not just bump off the obstacle.
mfail <- reqAlterFail (not $ bproj b1) EffBare False aid tpos
embedsPost <- getsState $ getEmbedBag (blid b1) tpos
b2 <- getsState $ getActorBody aid
let tpos2 = bpos b2 `shift` d -- possibly another level and/or bpos
lvl2 <- getLevel $ blid b2
case mfail of
Nothing | Tile.isWalkable coTileSpeedup $ lvl2 `at` tpos2 ->
-- Too late to announce anything, but given that the way
-- is opened, continue flight. Don't even normally lose any HP,
-- because it's not a hard collision, but altering.
-- However, if embed was possibly triggered/removed, lose HP.
if embedsPre /= embedsPost && not (EM.null embedsPre) then
if bhp b2 > oneM then do
execUpdAtomic $ UpdRefillHP aid minusM
b3 <- getsState $ getActorBody aid
advanceTrajectory aid b3
else do
-- Projectile has too low HP to pierce; terminate its flight.
execUpdAtomic $ UpdTrajectory aid (btrajectory b2)
$ Just ([], speed)
registerKill KillTileLaunch
else
-- Try again with the cleared path and possibly actors
-- spawned in the way, etc.
advanceTrajectory aid b2
_ -> do
-- Altering failed to open the passage, probably just a wall,
-- so lose HP due to being pushed into an obstacle.
-- Never kill in this way.
-- Note that sometimes this may come already after one faction
-- wins the game and end game screens are shown. This is OK-ish.
-- @Nothing@ trajectory signals an obstacle hit.
-- If projectile, second call of @actorDying@ above
-- will take care of dropping dead.
execUpdAtomic $ UpdTrajectory aid (btrajectory b2) Nothing
-- If projectile, losing HP due to hitting an obstacle
-- not needed, because trajectory is halted, so projectile
-- will die soon anyway
if bproj b2
then registerKill KillTileLaunch
else when (bhp b2 > oneM) $ do
execUpdAtomic $ UpdRefillHP aid minusM
let effect = IK.RefillHP (-2) -- -2 is a lie to ensure display
execSfxAtomic $ SfxEffect (bfid b2) aid (btrunk b2) effect (-1)
_ -> error $ "Nothing or empty trajectory" `showFailure` (aid, b1)
handleActors :: (MonadServerAtomic m, MonadServerComm m)
=> LevelId -> FactionId -> m Bool
handleActors lid fid = do
localTime <- getsState $ getLocalTime lid
levelTime <- getsServer $ (EM.! lid) . (EM.! fid) . sactorTime
let l = sort $ map fst
$ filter (\(_, atime) -> atime <= localTime) $ EM.assocs levelTime
-- The @sactorTime@ map may be outdated before @hActors@
-- call (due to other actors on the list acting),
-- so it's only used to decide which actors are processed in this call.
-- If the actor is no longer on the level or no longer belongs
-- to the faction, it is nevertheless processed without a problem
-- (the client may act wrt slightly outdated Perception and that's all).
-- We are guaranteed the actor still exists.
mleader <- getsState $ gleader . (EM.! fid) . sfactionD
-- Leader acts first, so that UI leader can save&exit before state changes.
hActors $ case mleader of
Just aid | aid `elem` l -> aid : delete aid l
_ -> l
hActors :: forall m. (MonadServerAtomic m, MonadServerComm m)
=> [ActorId] -> m Bool
hActors [] = return False
hActors as@(aid : rest) = do
b1 <- getsState $ getActorBody aid
let !_A = assert (not $ bproj b1) ()
if bhp b1 <= 0 then
-- Will be killed in a later pass, making it possible to revive him now.
hActors rest
else do
let side = bfid b1
fact <- getsState $ (EM.! side) . sfactionD
breakLoop <- getsServer sbreakLoop
let mleader = gleader fact
aidIsLeader = mleader == Just aid
mainUIactor = fhasUI (gkind fact)
&& (aidIsLeader || not (fhasPointman (gkind fact)))
-- Checking @breakLoop@, to avoid doubly setting faction status to Camping
-- in case AI-controlled UI client asks to exit game at exactly
-- the same moment as natural game over was detected.
mainUIunderAI = mainUIactor && gunderAI fact && not breakLoop
when mainUIunderAI $
handleUIunderAI side aid
factNew <- getsState $ (EM.! side) . sfactionD
let doQueryAI = not mainUIactor || gunderAI factNew
breakASAP <- getsServer sbreakASAP
-- If breaking out of the game loop, pretend there was a non-wait move.
-- we don't need additionally to check @sbreakLoop@, because it occurs alone
-- only via action of an actor and at most one action is performed here.
if breakASAP then return True else do
let mswitchLeader :: Maybe ActorId -> m ActorId
{-# NOINLINE mswitchLeader #-}
mswitchLeader (Just aidNew) = switchLeader side aidNew >> return aidNew
mswitchLeader Nothing = return aid
(aidNew, mtimed) <-
if doQueryAI then do
(cmd, maid) <- sendQueryAI side aid
aidNew <- mswitchLeader maid
mtimed <- handleRequestAI cmd
return (aidNew, mtimed)
else do
(cmd, maid) <- sendQueryUI RespQueryUI side aid
aidNew <- mswitchLeader maid
mtimed <- handleRequestUI side aidNew cmd
return (aidNew, mtimed)
case mtimed of
Just timed -> do
nonWaitMove <- handleRequestTimed side aidNew timed
-- Even if the actor got a free turn of time via a scroll,
-- he will not act again this clip, only next clip.
-- Clip is small, so not a big deal and it's faster and avoids
-- complete game time freezes, e.g., due to an exploit.
if nonWaitMove then return True else hActors rest
Nothing -> do
breakASAP2 <- getsServer sbreakASAP
-- If breaking out of the game loop, pretend there was a non-wait move.
if breakASAP2 then return True else hActors as
handleUIunderAI :: (MonadServerAtomic m, MonadServerComm m)
=> FactionId -> ActorId -> m ()
handleUIunderAI side aid = do
cmdS <- sendQueryUI RespQueryUIunderAI side aid
case fst cmdS of
ReqUINop -> return ()
ReqUIAutomate -> execUpdAtomic $ UpdAutoFaction side False
ReqUIGameDropAndExit -> reqGameDropAndExit aid
ReqUIGameSaveAndExit -> reqGameSaveAndExit aid
_ -> error $ "" `showFailure` cmdS
dieSer :: MonadServerAtomic m => ActorId -> Actor -> m ()
dieSer aid b2 = do
if bproj b2 then
when (isJust $ btrajectory b2) $
execUpdAtomic $ UpdTrajectory aid (btrajectory b2) Nothing
-- needed only to ensure display of the last position of projectile
else do
kindId <- getsState $ getIidKindIdServer $ btrunk b2
execUpdAtomic $ UpdRecordKill aid kindId 1
-- At this point the actor's body exists and his items are not dropped.
deduceKilled aid
-- Most probably already done, but just in case (e.g., when actor
-- created with 0 HP):
electLeader (bfid b2) (blid b2) aid
-- If an explosion blast, before the particle is destroyed, it tries
-- to modify terrain with it as well as do some easy crafting,
-- e.g., cooking on fire.
arTrunk <- getsState $ (EM.! btrunk b2) . sdiscoAspect
let spentProj = bproj b2 && EM.null (beqp b2)
isBlast = IA.checkFlag Ability.Blast arTrunk
-- Let thrown food cook in fire (crafting) and other projectiles
-- transform terrain they fall onto. Big actors are inert at death.
(effScope, bumping) = if bproj b2
then (EffBareAndOnCombine, False)
else (EffBare, True)
when (not spentProj && isBlast) $
void $ reqAlterFail bumping effScope False aid (bpos b2)
b3 <- getsState $ getActorBody aid
-- Items need to be dropped now, so that they can be transformed by effects
-- of the embedded items, if they are activated.
-- If the actor was a projectile and no effect was triggered by hitting
-- an enemy, the item still exists and @OnSmash@ effects will be triggered.
dropAllEquippedItems aid b3
-- Also destroy, not just drop, all organs, to trigger any effects.
-- Note that some effects may be invoked on an actor that has
-- no trunk any more. Conditions are ignored to avoid spam about them ending.
bag <- getsState $ getBodyStoreBag b3 COrgan
discoAspect <- getsState sdiscoAspect
let f = void <$$> dropCStoreItem False True COrgan aid b3 maxBound
isCondition = IA.checkFlag Ability.Condition . (discoAspect EM.!)
mapM_ (uncurry f) $ filter (not . isCondition . fst) $ EM.assocs bag
-- As the last act of heroism, the actor (even if projectile)
-- changes the terrain with its embedded items, if possible.
-- Note that all the resulting effects are invoked on an actor that has
-- no trunk any more.
when (not spentProj && not isBlast) $
void $ reqAlterFail bumping effScope False aid (bpos b2)
-- old bpos; OK, safer
b4 <- getsState $ getActorBody aid
execUpdAtomic $ UpdDestroyActor aid b4 []
restartGame :: MonadServerAtomic m
=> m () -> m () -> Maybe (GroupName ModeKind) -> m ()
restartGame updConn loop mgameMode = do
-- This goes only to the old UI client.
execSfxAtomic SfxRestart
soptionsNxt <- getsServer soptionsNxt
srandom <- getsServer srandom
factionDold <- getsState sfactionD
-- Create new factions.
s <- gameReset soptionsNxt mgameMode (Just srandom)
-- Note how we also no longer assert exploration, because there may not be
-- enough time left in the debug run to explore again in a new game.
let optionsBarRngs = soptionsNxt { sdungeonRng = Nothing
, smainRng = Nothing
, sassertExplored = Nothing }
modifyServer $ \ser -> ser { soptionsNxt = optionsBarRngs
, soptions = optionsBarRngs }
-- This reaches only the intersection of old and new clients.
execUpdAtomic $ UpdRestartServer s
-- Spawn new clients, as needed, according to new factions.
updConn
initPer
reinitGame factionDold
-- Save a just started noConfirm game to preserve history of the just
-- ended normal game, in case the user exits brutally.
writeSaveAll False True
loop
| null | https://raw.githubusercontent.com/LambdaHack/LambdaHack/57007a41e02f8c13e872acc23af2ff6ec00f531f/engine-src/Game/LambdaHack/Server/LoopM.hs | haskell | | The main loop of the server, processing human and computer player
moves turn by turn.
* Internal operations
| Start a game session, including the clients, and then loop,
communicating with the clients.
The loop is started in server state that is empty, see 'emptyStateServer'.
^ player-supplied server options
^ function that initializes a client and runs its main loop
Recover states and launch clients.
a restored game
wasn't run with @--dumpInitRngs@ previously, but we need the seeds,
e.g., to diagnose a crash.
starting new game for this savefile (--newGame or fresh save)
Set up commandline options.
Even spawners need an active arena for their leader,
or they start clogging stairs.
hide and sleep on lvl 3 and they are guaranteed aliens don't spawn.
However, animals still spawn, if slowly, and aliens resume
spawning when heroes move on again.
Update perception on all levels at once,
in case a leader is changed to actor on another
(possibly not even currently active) level.
This runs for all factions even if save is requested by UI.
Let players ponder new game state while the engine is busy saving.
Also, this ensures perception before game save is exactly the same
as at game resume, which is an invariant we check elsewhere.
However, if perception is not updated after the action, the actor
may not see his vicinity, so may not see enemy that displaces (or hits) him
resulting in breaking the displace action and temporary leader loss,
which is fine, though a bit alarming. So, we update it at the end.
Move a single actor only. Note that the skipped actors are not marked
as waiting. Normally they will act in the next clip or the next few,
E.g., they don't move, but still make nearby foes lose Calm.
Bail out if immediate loop break- requested by UI. No check
Start on arena with leader, if available. This is crucial to ensure
We update perception at the end, see comment above. This is usually
that's exactly where it prevents lost attack messages, etc.
If the move was a wait, perception unchanged, so no need to update,
unless the actor starts sleeping, in which case his perception
is reduced a bit later, so no harm done.
| Handle a clip (the smallest fraction of a game turn for which a frame may
potentially be generated). Run the leader and other actors moves.
Eventually advance the time and repeat.
Don't process other factions, even their perceptions,
if UI saves and/or exits.
because we may get here at arbitrary moment due to game over
and so have outdated perception.
Start handling actors with the single UI faction,
to safely save/exit. Note that this hack fails if there are many UI
factions (when we reenable multiplayer). Then players will request
after the clip has ended, not at the start.
Note that at most a single actor with a time-consuming action
is processed per faction, so it's fair, but many loops are needed.
decides to save or exit or restart or if there is game over.
This and UI leader acting before any other ordinary actors
ensures state is not changed and so the clip doesn't need
to be carried through before save.
must be last, in case performs a bkp save
The condition can be changed in @handleTrajectories@ by pushing
process next iteration unconditionally
| Handle the end of every clip. Do whatever has to be done
every fixed number of clips, e.g., monster generation.
Advance time. Perform periodic saves, if applicable.
This is never run if UI requested save or exit or restart and it's correct,
because we know nobody moved and no time was or needs to be advanced
and arenas are not changed. After game was saved and exited,
so arena time is not updated and nobody moves, nor anything happens,
but arenas are here correctly updated.
No check if @sbreakASAP@ is set, because then the function is not called.
We don't send a lot of useless info to the client if the game has already
ended. At best wasteful, at worst the player sees strange messages.
I need to send time updates, because I can't add time to each command,
because I'd need to send also all arenas, which should be updated,
and this is too expensive data for each, e.g., projectile move.
I send even if nothing changes so that UI time display can progress.
Possibly @arenas@ are invalid here, but all moves were performed
according to this value, so time should be replenished according
to this value as well.
This is crucial, because tiny time discrepancies can accumulate
magnified by hunders of actors that share the clip slots due to the
Perform periodic dungeon maintenance.
Periodic activation only once per turn, for speed,
but on all active arenas. Calm updates and domination
happen there as well. Once per turn is too rare for accurate
@applyPeriodicLevel@ might have, e.g., dominated actors, ending the game.
It could not have unended the game, though.
change leaders as part of their moves.
Next call to @loopUpdConn@ really moves actors and updates arena times
so we start in exactly the same place that UI save ended in.
Update all perception for visual feedback and to make sure saving
and resuming game doesn't affect gameplay (by updating perception).
periodic actions could have invalidated them.
Saving on the browser causes a huge lag, hence autosave disabled.
if by chance requested and periodic saves coincide
Periodic save needs to be at the end, so that restore can start
| Check if the given actor is dominated and update his calm.
triggered by zeroed Calm
animals/robots/human drones never Calm-dominated
unless very high impression, e.g., in a dominated hero
Update delta for the current player turn.
| Trigger periodic items for all actors on the given level.
Check if the item is still in the bag (previous items act!).
item dropped
destroy the item.
no periodic crafting
While it's fun when projectiles flash or speed up mid-air,
of projectiles exist due to ongoing explosions.
Nothing activates when actor dying to prevent a regenerating
to avoid the message that organ expired.
While we are at it, also update his Calm.
call (due to other actors following their trajectories),
so it's only used to decide which actors are processed in this
@handleTrajectories@ call. If an actor is added to the map,
the recursive call to @handleTrajectories@ will detect that
and process him later on.
If the actor is no longer on the level or no longer belongs
to the faction, it is nevertheless processed without a problem.
We are guaranteed the actor still exists.
Avoid frames between fadeout and fadein.
for speeds > tile/clip
No longer fulfills criteria and was not removed by dying; remove him.
Non-projectile actor stops flying (a projectile with empty trajectory
would be intercepted earlier on as dead).
to @handleTrajectories@.
don't move if game over via pushing
Here, @advanceTrajectory@ might have affected @actorDying@,
so we check again ASAP to make sure the body of the projectile
(or pushed actor) doesn't block movement of other actors,
but vanishes promptly.
Bodies of actors that die not flying remain on the battlefied until
their natural next turn, to give them a chance of rescue.
Note that domination of pushed actors is not checked
but also invulnerable in this respect.
delay next iteration only if still flying
if @actorDying@ due to @bhp b <= 0@:
Then the carried item is destroyed and that's all.
and possibly a new leader is elected.
if @actorDying@ due to @btrajectory@ null:
A projectile drops to the ground due to obstacles or range.
The carried item is not destroyed, unless it's fragile,
but drops to the ground.
| Manage trajectory of a projectile or a pushed other actor.
the projectile does not move (the move is blocked).
Not advancing time forces dead projectiles to be destroyed ASAP.
Otherwise, with some timings, it can stay on the game map dead,
blocking path of human-controlled actors and alarming the hapless human.
Kill counts for each blast particle is TMI.
target position
Hit will clear trajectories in @reqMelee@,
so no need to do that here.
projectiles always hit
Non-projectiles displace if they are ending their flight
or if only a projectile is in the way.
So, no chaos of displacing a whole line of enemies.
can't displace multiple
can't displace multiple
if not occupied, just move
to @handleTrajectories@.
No crafting by projectiles that bump tiles nor by pushed actors.
The only way is if they land in a tile (are engulfed by it)
and have enough skill. But projectiles transform when hitting,
if terrain permits, not just bump off the obstacle.
Too late to announce anything, but given that the way
because it's not a hard collision, but altering.
Try again with the cleared path and possibly actors
spawned in the way, etc.
Altering failed to open the passage, probably just a wall,
Never kill in this way.
wins the game and end game screens are show. This is OK-ish.
@Nothing@ trajectory of signals an obstacle hit.
will take care of dropping dead.
not needed, because trajectory is halted, so projectile
will die soon anyway
-2 is a lie to ensure display
The @sactorTime@ map may be outdated before @hActors@
call (due to other actors on the list acting),
so it's only used to decide which actors are processed in this call.
If the actor is no longer on the level or no longer belongs
to the faction, it is nevertheless processed without a problem
(the client may act wrt slightly outdated Perception and that's all).
We are guaranteed the actor still exists.
Will be killed in a later pass, making it possible to revive him now.
Checking @breakLoop@, to avoid doubly setting faction status to Camping
in case AI-controlled UI client asks to exit game at exactly
the same moment as natural game over was detected.
If breaking out of the game loop, pretend there was a non-wait move.
we don't need additionally to check @sbreakLoop@, because it occurs alone
only via action of an actor and at most one action is performed here.
# NOINLINE mswitchLeader #
Even if the actor got a free turn of time via a scroll,
he will not act again this clip, only next clip.
Clip is small, so not a big deal and it's faster and avoids
complete game time freezes, e.g., due to an exploit.
needed only to ensure display of the last position of projectile
At this point the actor's body exists and his items are not dropped.
Most probabaly already done, but just in case (e.g., when actor
created with 0 HP):
If an explosion blast, before the particle is destroyed, it tries
to modify terrain with it as well as do some easy crafting,
e.g., cooking on fire.
Let thrown food cook in fire (crafting) and other projectiles
transform terrain they fall onto. Big actors are inert at death.
Items need to do dropped now, so that they can be transformed by effects
of the embedded items, if they are activated.
If the actor was a projectile and no effect was triggered by hitting
an enemy, the item still exists and @OnSmash@ effects will be triggered.
Also destroy, not just drop, all organs, to trigger any effects.
Note that some effects may be invoked on an actor that has
no trunk any more. Conditions are ignored to avoid spam about them ending.
As the last act of heroism, the actor (even if projectile)
changes the terrain with its embedded items, if possible.
Note that all the resulting effects are invoked on an actor that has
no trunk any more.
This goes only to the old UI client.
Create new factions.
Note how we also no longer assert exploration, because there may not be
enough time left in the debug run to explore again in a new game.
This reaches only the intersection of old and new clients.
Spawn new clients, as needed, according to new factions.
Save a just started noConfirm game to preserve history of the just
ended normal game, in case the user exits brutally. | module Game.LambdaHack.Server.LoopM
( loopSer
#ifdef EXPOSE_INTERNAL
, factionArena, arenasForLoop, handleFidUpd, loopUpd, endClip
, manageCalmAndDomination, applyPeriodicLevel
, handleTrajectories, hTrajectories, advanceTrajectory
, handleActors, hActors, handleUIunderAI, dieSer, restartGame
#endif
) where
import Prelude ()
import Game.LambdaHack.Core.Prelude
import qualified Data.EnumMap.Strict as EM
import qualified Data.EnumSet as ES
import Game.LambdaHack.Atomic
import Game.LambdaHack.Client (ReqUI (..), Response (..))
import Game.LambdaHack.Common.Actor
import Game.LambdaHack.Common.ActorState
import Game.LambdaHack.Common.Analytics
import Game.LambdaHack.Common.Faction
import Game.LambdaHack.Common.Item
import qualified Game.LambdaHack.Common.ItemAspect as IA
import Game.LambdaHack.Common.Kind
import Game.LambdaHack.Common.Level
import Game.LambdaHack.Common.Misc
import Game.LambdaHack.Common.MonadStateRead
import Game.LambdaHack.Common.Perception
import Game.LambdaHack.Common.State
import qualified Game.LambdaHack.Common.Tile as Tile
import Game.LambdaHack.Common.Time
import Game.LambdaHack.Common.Types
import Game.LambdaHack.Common.Vector
import Game.LambdaHack.Content.FactionKind
import qualified Game.LambdaHack.Content.ItemKind as IK
import Game.LambdaHack.Content.ModeKind
import Game.LambdaHack.Content.RuleKind
import qualified Game.LambdaHack.Definition.Ability as Ability
import Game.LambdaHack.Definition.Defs
import Game.LambdaHack.Server.CommonM
import Game.LambdaHack.Server.HandleEffectM
import Game.LambdaHack.Server.HandleRequestM
import Game.LambdaHack.Server.MonadServer
import Game.LambdaHack.Server.PeriodicM
import Game.LambdaHack.Server.ProtocolM
import Game.LambdaHack.Server.ServerOptions
import Game.LambdaHack.Server.StartM
import Game.LambdaHack.Server.State
loopSer :: (MonadServerAtomic m, MonadServerComm m)
=> ServerOptions
-> (Bool -> FactionId -> ChanServer -> IO ())
-> m ()
loopSer serverOptions executorClient = do
modifyServer $ \ser -> ser { soptionsNxt = serverOptions
, soptions = serverOptions }
cops <- getsState scops
let updConn startsNewGame = updateConn $ executorClient startsNewGame
restored <- tryRestore
case restored of
execUpdAtomic $ UpdResumeServer
$ updateCOpsAndCachedData (const cops) sRaw
putServer ser {soptionsNxt = serverOptions}
applyDebug
factionD <- getsState sfactionD
let f fid = let cmd = UpdResumeServer
$ updateCOpsAndCachedData (const cops)
$ sclientStates ser EM.! fid
in execUpdAtomicFidCatch fid cmd
mapM_ (void <$> f) $ EM.keys factionD
updConn False
initPer
pers <- getsServer sperFid
let clear = const emptyPer
persFid fid | sknowEvents serverOptions = EM.map clear (pers EM.! fid)
| otherwise = pers EM.! fid
mapM_ (\fid -> sendUpdate fid $ UpdResume fid (persFid fid))
(EM.keys factionD)
arenasNew <- arenasForLoop
modifyServer $ \ser2 -> ser2 {sarenas = arenasNew, svalidArenas = True}
  -- We dump RNG seeds here, based on @soptionsNxt@, in case the game
rngs <- getsServer srngs
when (sdumpInitRngs serverOptions) $ dumpRngs rngs
factionDold <- getsState sfactionD
s <- gameReset serverOptions Nothing Nothing
      -- get RNG from item boost
let optionsBarRngs =
serverOptions {sdungeonRng = Nothing, smainRng = Nothing}
modifyServer $ \ser -> ser { soptionsNxt = optionsBarRngs
, soptions = optionsBarRngs }
execUpdAtomic $ UpdRestartServer s
updConn True
initPer
reinitGame factionDold
writeSaveAll False False
loopUpd $ updConn True
factionArena :: MonadStateRead m => Faction -> m (Maybe LevelId)
factionArena fact = case gleader fact of
Just leader -> do
b <- getsState $ getActorBody leader
return $ Just $ blid b
Nothing -> return Nothing
-- This means Allure heroes can kill all aliens on lvl 4, retreat,
arenasForLoop :: MonadStateRead m => m (ES.EnumSet LevelId)
{-# INLINE arenasForLoop #-}
arenasForLoop = do
factionD <- getsState sfactionD
marenas <- mapM factionArena $ EM.elems factionD
let arenas = ES.fromList $ catMaybes marenas
!_A = assert (not (ES.null arenas)
`blame` "game over not caught earlier"
`swith` factionD) ()
return $! arenas
handleFidUpd :: forall m. (MonadServerAtomic m, MonadServerComm m)
=> (FactionId -> m ()) -> FactionId -> Faction -> m ()
{-# INLINE handleFidUpd #-}
handleFidUpd updatePerFid fid fact = do
updatePerFid fid
  -- so that's natural. But if there are dozens of them, this is .
  -- However, for KISS, we leave it be.
  -- for @sbreakLoop@ needed, for the same reasons as in @handleActors@.
let handle :: [LevelId] -> m Bool
handle [] = return False
handle (lid : rest) = do
breakASAP <- getsServer sbreakASAP
if breakASAP
then return False
else do
nonWaitMove <- handleActors lid fid
if nonWaitMove
then return True
else handle rest
killDying :: [LevelId] -> m ()
killDying = mapM_ killDyingLid
killDyingLid :: LevelId -> m ()
killDyingLid lid = do
localTime <- getsState $ getLocalTime lid
levelTime <- getsServer $ (EM.! lid) . (EM.! fid) . sactorTime
let l = filter (\(_, atime) -> atime <= localTime) $ EM.assocs levelTime
killAid (aid, _) = do
b1 <- getsState $ getActorBody aid
when (bhp b1 <= 0) $ dieSer aid b1
mapM_ killAid l
  -- that no actor (even ours) moves before UI declares save(&exit).
fa <- factionArena fact
arenas <- getsServer sarenas
let myArenas = case fa of
Just myArena -> myArena : delete myArena (ES.elems arenas)
Nothing -> ES.elems arenas
nonWaitMove <- handle myArenas
breakASAP <- getsServer sbreakASAP
unless breakASAP $ killDying myArenas
  -- cheap, and when not, if it's AI faction, it's a waste, but if it's UI,
when nonWaitMove $ updatePerFid fid
loopUpd :: forall m. (MonadServerAtomic m, MonadServerComm m)
=> m () -> m ()
loopUpd updConn = do
let updatePerFid :: FactionId -> m ()
      {-# NOINLINE updatePerFid #-}
      updatePerFid fid = {-# SCC updatePerFid #-} do
perValid <- getsServer $ (EM.! fid) . sperValidFid
mapM_ (\(lid, valid) -> unless valid $ updatePer fid lid)
(EM.assocs perValid)
handleFid :: (FactionId, Faction) -> m ()
      {-# NOINLINE handleFid #-}
handleFid (fid, fact) = do
breakASAP <- getsServer sbreakASAP
unless breakASAP $ handleFidUpd updatePerFid fid fact
loopConditionally = do
factionD <- getsState sfactionD
        -- Update perception one last time to satisfy save/resume assertions,
mapM_ updatePerFid (EM.keys factionD)
modifyServer $ \ser -> ser { sbreakLoop = False
, sbreakASAP = False }
endOrLoop loopUpdConn (restartGame updConn loopUpdConn)
loopUpdConn = do
factionD <- getsState sfactionD
        -- save&exit and others will vote on it and it will happen
let hasUI (_, fact) = fhasUI (gkind fact)
(factionUI, factionsRest) = case break hasUI $ EM.assocs factionD of
(noUI1, ui : noUI2) -> (ui, noUI1 ++ noUI2)
_ -> error "no UI faction in the game"
mapM_ handleFid $ factionUI : factionsRest
breakASAP <- getsServer sbreakASAP
breakLoop <- getsServer sbreakLoop
if breakASAP || breakLoop
then loopConditionally
else do
          -- Projectiles are processed last and not at all if the UI leader
arenas <- getsServer sarenas
mapM_ (\fid -> mapM_ (`handleTrajectories` fid) $ ES.elems arenas)
(EM.keys factionD)
          -- onto an escape and in @endClip@.
breakLoop2 <- getsServer sbreakLoop
if breakLoop2
then loopConditionally
          else do
            endClip updatePerFid
            loopUpdConn
-- on game resume the first clip is performed with empty arenas,
endClip :: forall m. MonadServerAtomic m => (FactionId -> m ()) -> m ()
{-# INLINE endClip #-}
endClip updatePerFid = do
COps{corule} <- getsState scops
time <- getsState stime
let clipN = time `timeFit` timeClip
breakLoop <- getsServer sbreakLoop
unless breakLoop $ do
    -- restriction that at most one faction member acts each clip.
arenas <- getsServer sarenas
execUpdAtomic $ UpdAgeGame arenas
when (clipN `mod` rleadLevelClips corule == 0) leadLevelSwitch
case clipN `mod` clipsInTurn of
0 ->
        -- Spawn monsters at most once per 3 turns.
when (clipN `mod` (3 * clipsInTurn) == 0)
spawnMonster
4 ->
        -- expiration of short conditions, e.g., 1-turn haste. TODO.
applyPeriodicLevel
_ -> return ()
breakLoop2 <- getsServer sbreakLoop
unless breakLoop2 $ do
    -- Possibly a leader change due to , so update arenas here
    -- for 100% accuracy at least at the start of actor moves, before they
    -- act. After game resume, this is the first non-vacuous computation.
validArenas <- getsServer svalidArenas
unless validArenas $ do
arenasNew <- arenasForLoop
modifyServer $ \ser -> ser {sarenas = arenasNew, svalidArenas = True}
    -- Perception updates in @handleFidUpd@ are not enough, because
factionD <- getsState sfactionD
mapM_ updatePerFid (EM.keys factionD)
#ifndef USE_JSFILE
    -- at the beginning. Double save on first turn is avoided with @succ@.
when (succ clipN `mod` rwriteSaveClips corule == 0) $
writeSaveAll False False
#endif
manageCalmAndDomination :: MonadServerAtomic m => ActorId -> Actor -> m ()
manageCalmAndDomination aid b = do
performedDomination <-
hiImpression <- highestImpression b
case hiImpression of
Nothing -> return False
Just (hiImpressionFid, hiImpressionK) -> do
fact <- getsState $ (EM.! bfid b) . sfactionD
if fhasPointman (gkind fact)
|| hiImpressionK >= 10
then dominateFidSfx aid aid (btrunk b) hiImpressionFid
else return False
unless performedDomination $ do
newCalmDelta <- getsState $ regenCalmDelta aid b
unless (newCalmDelta == 0) $
updateCalm aid newCalmDelta
applyPeriodicLevel :: MonadServerAtomic m => m ()
applyPeriodicLevel = do
arenas <- getsServer sarenas
let applyPeriodicItem _ _ (_, (_, [])) = return ()
        -- periodic items always have at least one timer
applyPeriodicItem aid cstore (iid, _) = do
itemFull <- getsState $ itemToFull iid
let arItem = aspectRecordFull itemFull
when (IA.checkFlag Ability.Periodic arItem) $ do
b2 <- getsState $ getActorBody aid
bag <- getsState $ getBodyStoreBag b2 cstore
case iid `EM.lookup` bag of
Just (k, _) -> do
            -- Activate even if effects null or vacuous, to possibly
let effApplyFlags = EffApplyFlags
, effVoluntary = True
, effUseAllCopies = k <= 1
, effKineticPerformed = False
, effActivation = Ability.ActivationPeriodic
, effMayDestroy = True
}
void $ effectAndDestroyAndAddKill
effApplyFlags
aid aid aid iid (CActor aid cstore) itemFull
applyPeriodicActor (aid, b) =
        -- it's very exotic and quite time-intensive whenever hundreds
        -- actor from resurrecting each turn, resulting in silly stats.
when (not (bproj b) && bhp b > 0 && blid b `ES.member` arenas) $ do
          -- Equipment goes first, to refresh organs before they expire,
mapM_ (applyPeriodicItem aid CEqp) $ EM.assocs $ beqp b
mapM_ (applyPeriodicItem aid COrgan) $ EM.assocs $ borgan b
manageCalmAndDomination aid b
allActors <- getsState sactorD
mapM_ applyPeriodicActor $ EM.assocs allActors
handleTrajectories :: MonadServerAtomic m => LevelId -> FactionId -> m ()
handleTrajectories lid fid = do
localTime <- getsState $ getLocalTime lid
levelTime <- getsServer $ (EM.! lid) . (EM.! fid) . strajTime
let l = sort $ map fst
$ filter (\(_, atime) -> atime <= localTime) $ EM.assocs levelTime
  -- The @strajTime@ map may be outdated before @hTrajectories@
mapM_ hTrajectories l
breakLoop <- getsServer sbreakLoop
  unless (null l || breakLoop) $
    handleTrajectories lid fid
hTrajectories :: MonadServerAtomic m => ActorId -> m ()
{-# INLINE hTrajectories #-}
hTrajectories aid = do
b1 <- getsState $ getActorBody aid
let removePushed b =
modifyServer $ \ser ->
ser { strajTime =
EM.adjust (EM.adjust (EM.delete aid) (blid b)) (bfid b)
(strajTime ser)
, strajPushedBy = EM.delete aid (strajPushedBy ser) }
removeTrajectory b =
        -- Will be removed from @strajTime@ in recursive call
assert (not $ bproj b)
$ execUpdAtomic $ UpdTrajectory aid (btrajectory b) Nothing
breakLoop <- getsServer sbreakLoop
  if breakLoop then return ()
  else if actorDying b1 then dieSer aid b1
else case btrajectory b1 of
Nothing -> removePushed b1
Just ([], _) -> removeTrajectory b1 >> removePushed b1
Just{} -> do
advanceTrajectory aid b1
      -- nor is their calm updated. They are helpless wrt movement,
b2 <- getsState $ getActorBody aid
if actorDying b2
then dieSer aid b2
else case btrajectory b2 of
Nothing -> removePushed b2
Just ([], _) -> removeTrajectory b2 >> removePushed b2
advanceTimeTraj aid
-- If @b@ is a projectile, it means it hits an actor or is hit by an actor.
-- If @b@ is not a projectile, it dies and his items drop to the ground.
-- Colliding with a wall or actor doesn't take time, because
advanceTrajectory :: MonadServerAtomic m => ActorId -> Actor -> m ()
advanceTrajectory aid b1 = do
COps{coTileSpeedup} <- getsState scops
lvl <- getLevel $ blid b1
arTrunk <- getsState $ (EM.! btrunk b1) . sdiscoAspect
let registerKill killHow =
when (bproj b1
&& not (IA.checkFlag Ability.Blast arTrunk)) $ do
killer <- getsServer $ EM.findWithDefault aid aid . strajPushedBy
addKillToAnalytics killer killHow (bfid b1) (btrunk b1)
case btrajectory b1 of
Just (d : lv, speed) -> do
      let tpos = bpos b1 `shift` d
      if Tile.isWalkable coTileSpeedup $ lvl `at` tpos then do
execUpdAtomic $ UpdTrajectory aid (btrajectory b1) (Just (lv, speed))
when (null lv) $ registerKill KillDropLaunch
let occupied = occupiedBigLvl tpos lvl || occupiedProjLvl tpos lvl
reqMoveHit = reqMoveGeneric False True aid d
reqDisp = reqDisplaceGeneric False aid
| occupied ->
case (posToBigLvl tpos lvl, posToProjsLvl tpos lvl) of
(Nothing, []) -> error "advanceTrajectory: not occupied"
(Nothing, [target]) -> reqDisp target
(Just target, []) ->
if null lv then reqDisp target else reqMoveHit
else do
        -- Will be removed from @strajTime@ in recursive call
unless (bproj b1) $
execSfxAtomic $ SfxCollideTile aid tpos
embedsPre <- getsState $ getEmbedBag (blid b1) tpos
mfail <- reqAlterFail (not $ bproj b1) EffBare False aid tpos
embedsPost <- getsState $ getEmbedBag (blid b1) tpos
b2 <- getsState $ getActorBody aid
        -- possibly another level and/or bpos
lvl2 <- getLevel $ blid b2
        let tpos2 = bpos b2 `shift` d
        case mfail of
Nothing | Tile.isWalkable coTileSpeedup $ lvl2 `at` tpos2 ->
            -- is opened, continue flight. Don't even normally lose any HP,
            -- However, if embed was possibly triggered/removed, lose HP.
if embedsPre /= embedsPost && not (EM.null embedsPre) then
if bhp b2 > oneM then do
execUpdAtomic $ UpdRefillHP aid minusM
b3 <- getsState $ getActorBody aid
advanceTrajectory aid b3
else do
              -- Projectile has too low HP to pierce; terminate its flight.
execUpdAtomic $ UpdTrajectory aid (btrajectory b2)
$ Just ([], speed)
registerKill KillTileLaunch
else
advanceTrajectory aid b2
_ -> do
            -- so lose HP due to being pushed into an obstacle.
            -- Note that sometimes this may come already after one faction
            -- If projectile, second call of @actorDying@ above
execUpdAtomic $ UpdTrajectory aid (btrajectory b2) Nothing
            -- If projectile, losing HP due to hitting an obstacle
if bproj b2
then registerKill KillTileLaunch
else when (bhp b2 > oneM) $ do
execUpdAtomic $ UpdRefillHP aid minusM
              let effect = IK.RefillHP (-2)
              execSfxAtomic $ SfxEffect (bfid b2) aid (btrunk b2) effect (-1)
_ -> error $ "Nothing or empty trajectory" `showFailure` (aid, b1)
handleActors :: (MonadServerAtomic m, MonadServerComm m)
=> LevelId -> FactionId -> m Bool
handleActors lid fid = do
localTime <- getsState $ getLocalTime lid
levelTime <- getsServer $ (EM.! lid) . (EM.! fid) . sactorTime
let l = sort $ map fst
$ filter (\(_, atime) -> atime <= localTime) $ EM.assocs levelTime
mleader <- getsState $ gleader . (EM.! fid) . sfactionD
  -- Leader acts first, so that UI leader can before state changes.
hActors $ case mleader of
Just aid | aid `elem` l -> aid : delete aid l
_ -> l
hActors :: forall m. (MonadServerAtomic m, MonadServerComm m)
=> [ActorId] -> m Bool
hActors [] = return False
hActors as@(aid : rest) = do
b1 <- getsState $ getActorBody aid
let !_A = assert (not $ bproj b1) ()
if bhp b1 <= 0 then
hActors rest
else do
let side = bfid b1
fact <- getsState $ (EM.! side) . sfactionD
breakLoop <- getsServer sbreakLoop
let mleader = gleader fact
aidIsLeader = mleader == Just aid
mainUIactor = fhasUI (gkind fact)
&& (aidIsLeader || not (fhasPointman (gkind fact)))
mainUIunderAI = mainUIactor && gunderAI fact && not breakLoop
when mainUIunderAI $
handleUIunderAI side aid
factNew <- getsState $ (EM.! side) . sfactionD
let doQueryAI = not mainUIactor || gunderAI factNew
breakASAP <- getsServer sbreakASAP
if breakASAP then return True else do
let mswitchLeader :: Maybe ActorId -> m ActorId
mswitchLeader (Just aidNew) = switchLeader side aidNew >> return aidNew
mswitchLeader Nothing = return aid
(aidNew, mtimed) <-
if doQueryAI then do
(cmd, maid) <- sendQueryAI side aid
aidNew <- mswitchLeader maid
mtimed <- handleRequestAI cmd
return (aidNew, mtimed)
else do
(cmd, maid) <- sendQueryUI RespQueryUI side aid
aidNew <- mswitchLeader maid
mtimed <- handleRequestUI side aidNew cmd
return (aidNew, mtimed)
case mtimed of
Just timed -> do
nonWaitMove <- handleRequestTimed side aidNew timed
if nonWaitMove then return True else hActors rest
Nothing -> do
breakASAP2 <- getsServer sbreakASAP
          -- If breaking out of the game loop, pretend there was a non-wait move.
if breakASAP2 then return True else hActors as
handleUIunderAI :: (MonadServerAtomic m, MonadServerComm m)
=> FactionId -> ActorId -> m ()
handleUIunderAI side aid = do
cmdS <- sendQueryUI RespQueryUIunderAI side aid
case fst cmdS of
ReqUINop -> return ()
ReqUIAutomate -> execUpdAtomic $ UpdAutoFaction side False
ReqUIGameDropAndExit -> reqGameDropAndExit aid
ReqUIGameSaveAndExit -> reqGameSaveAndExit aid
_ -> error $ "" `showFailure` cmdS
dieSer :: MonadServerAtomic m => ActorId -> Actor -> m ()
dieSer aid b2 = do
if bproj b2 then
when (isJust $ btrajectory b2) $
execUpdAtomic $ UpdTrajectory aid (btrajectory b2) Nothing
else do
kindId <- getsState $ getIidKindIdServer $ btrunk b2
execUpdAtomic $ UpdRecordKill aid kindId 1
deduceKilled aid
electLeader (bfid b2) (blid b2) aid
arTrunk <- getsState $ (EM.! btrunk b2) . sdiscoAspect
let spentProj = bproj b2 && EM.null (beqp b2)
isBlast = IA.checkFlag Ability.Blast arTrunk
(effScope, bumping) = if bproj b2
then (EffBareAndOnCombine, False)
else (EffBare, True)
when (not spentProj && isBlast) $
void $ reqAlterFail bumping effScope False aid (bpos b2)
b3 <- getsState $ getActorBody aid
dropAllEquippedItems aid b3
bag <- getsState $ getBodyStoreBag b3 COrgan
discoAspect <- getsState sdiscoAspect
let f = void <$$> dropCStoreItem False True COrgan aid b3 maxBound
isCondition = IA.checkFlag Ability.Condition . (discoAspect EM.!)
mapM_ (uncurry f) $ filter (not . isCondition . fst) $ EM.assocs bag
when (not spentProj && not isBlast) $
void $ reqAlterFail bumping effScope False aid (bpos b2)
    -- old bpos; OK, safer
b4 <- getsState $ getActorBody aid
execUpdAtomic $ UpdDestroyActor aid b4 []
restartGame :: MonadServerAtomic m
=> m () -> m () -> Maybe (GroupName ModeKind) -> m ()
restartGame updConn loop mgameMode = do
execSfxAtomic SfxRestart
soptionsNxt <- getsServer soptionsNxt
srandom <- getsServer srandom
factionDold <- getsState sfactionD
s <- gameReset soptionsNxt mgameMode (Just srandom)
let optionsBarRngs = soptionsNxt { sdungeonRng = Nothing
, smainRng = Nothing
, sassertExplored = Nothing }
modifyServer $ \ser -> ser { soptionsNxt = optionsBarRngs
, soptions = optionsBarRngs }
execUpdAtomic $ UpdRestartServer s
updConn
initPer
reinitGame factionDold
writeSaveAll False True
loop
|
963184dc85a0f7d04ea1aa21b90db4e369b465daad22864674512d3260470d65 | liquidz/antq | github_action_test.clj | (ns antq.upgrade.github-action-test
(:require
[antq.constant.github-action :as const.gh-action]
[antq.dep.github-action :as dep.gha]
[antq.record :as r]
[antq.test-helper :as h]
[antq.upgrade :as upgrade]
[antq.upgrade.github-action]
[clojure.java.io :as io]
[clojure.test :as t]))
(def ^:private dummy-dep
(r/map->Dependency {:project :github-action
:type :github-tag
:name "foo/bar"
:latest-version "v9.0.0"
:file (io/resource "dep/test_github_action.yml")
:extra {const.gh-action/type-key "uses"}}))
(def ^:private dummy-not-supported-dep
(r/map->Dependency {:project :github-action
:type :github-tag
:name "bar/baz"
:latest-version "v9.0.0"
:file (io/resource "dep/test_github_action.yml")}))
(def ^:private dummy-clojure-cli-dep
(r/map->Dependency {:project :github-action
:type :github-tag
:name "clojure/brew-install"
:latest-version "9.0.0"
:file (io/resource "dep/test_github_action_third_party.yml")
:extra {const.gh-action/type-key "DeLaGuardo/setup-clojure"}}))
(def ^:private dummy-leiningen-dep
(r/map->Dependency {:project :github-action
:type :github-tag
:name "technomancy/leiningen"
:latest-version "9.0.0"
:file (io/resource "dep/test_github_action_third_party.yml")
:extra {const.gh-action/type-key "DeLaGuardo/setup-clojure"}}))
(def ^:private dummy-boot-dep
(r/map->Dependency {:project :github-action
:type :github-tag
:name "boot-clj/boot"
:latest-version "9.0.0"
:file (io/resource "dep/test_github_action_third_party.yml")
:extra {const.gh-action/type-key "DeLaGuardo/setup-clojure"}}))
(def ^:private dummy-clj-kondo-dep
(r/map->Dependency {:project :github-action
:type :java
:name "clj-kondo/clj-kondo"
:latest-version "9.0.0"
:file (io/resource "dep/test_github_action_third_party.yml")
:extra {const.gh-action/type-key "DeLaGuardo/setup-clj-kondo"}}))
(def ^:private dummy-graalvm-dep
(r/map->Dependency {:project :github-action
:type :github-tag
:name "graalvm/graalvm-ce-builds"
:latest-version "9.0.0"
:file (io/resource "dep/test_github_action_third_party.yml")
:extra {const.gh-action/type-key "DeLaGuardo/setup-graalvm"}}))
(def ^:private dummy-cljstyle-dep
(r/map->Dependency {:project :github-action
:type :github-tag
:name "greglook/cljstyle"
:latest-version "9.0.0"
:file (io/resource "dep/test_github_action_third_party.yml")
:extra {const.gh-action/type-key "0918nobita/setup-cljstyle"}}))
(t/deftest upgrade-dep-test
(t/testing "supported"
(let [from-deps (->> (:file dummy-dep)
(slurp)
(dep.gha/extract-deps ""))
to-deps (->> dummy-dep
(upgrade/upgrader)
(dep.gha/extract-deps ""))]
(t/is (= #{{:name "foo/bar" :version {:- "v1.0.0" :+ "v9.0.0"}}}
(h/diff-deps from-deps to-deps)))))
(t/testing "not supported"
(t/is (nil? (upgrade/upgrader dummy-not-supported-dep)))))
(t/deftest upgrade-third-party-dep-test
(t/testing "clojure"
(t/testing "clojure cli"
(let [from-deps (->> (:file dummy-clojure-cli-dep)
(slurp)
(dep.gha/extract-deps ""))
to-deps (->> dummy-clojure-cli-dep
(upgrade/upgrader)
(dep.gha/extract-deps ""))]
(t/is (= #{{:name "clojure/brew-install" :version {:- 1 :+ "9.0.0"}}}
(h/diff-deps from-deps to-deps)))))
(t/testing "leiningen"
(let [from-deps (->> (:file dummy-leiningen-dep)
(slurp)
(dep.gha/extract-deps ""))
to-deps (->> dummy-leiningen-dep
(upgrade/upgrader)
(dep.gha/extract-deps ""))]
(t/is (= #{{:name "technomancy/leiningen" :version {:- 2 :+ "9.0.0"}}}
(h/diff-deps from-deps to-deps)))))
(t/testing "boot"
(let [from-deps (->> (:file dummy-boot-dep)
(slurp)
(dep.gha/extract-deps ""))
to-deps (->> dummy-boot-dep
(upgrade/upgrader)
(dep.gha/extract-deps ""))]
(t/is (= #{{:name "boot-clj/boot" :version {:- 3 :+ "9.0.0"}}}
(h/diff-deps from-deps to-deps))))))
(t/testing "clj-kondo"
(let [from-deps (->> (:file dummy-clj-kondo-dep)
(slurp)
(dep.gha/extract-deps ""))
to-deps (->> dummy-clj-kondo-dep
(upgrade/upgrader)
(dep.gha/extract-deps ""))]
(t/is (= #{{:name "clj-kondo/clj-kondo" :version {:- "5" :+ "9.0.0"}}}
(h/diff-deps from-deps to-deps)))))
(t/testing "graalvm"
(let [from-deps (->> (:file dummy-graalvm-dep)
(slurp)
(dep.gha/extract-deps ""))
to-deps (->> dummy-graalvm-dep
(upgrade/upgrader)
(dep.gha/extract-deps ""))]
(t/is (= #{{:name "graalvm/graalvm-ce-builds" :version {:- "6" :+ "9.0.0"}}}
(h/diff-deps from-deps to-deps)))))
(t/testing "cljstyle"
(let [from-deps (->> (:file dummy-cljstyle-dep)
(slurp)
(dep.gha/extract-deps ""))
to-deps (->> dummy-cljstyle-dep
(upgrade/upgrader)
(dep.gha/extract-deps ""))]
(t/is (= #{{:name "greglook/cljstyle" :version {:- "7" :+ "9.0.0"}}}
(h/diff-deps from-deps to-deps))))))
| null | https://raw.githubusercontent.com/liquidz/antq/638cde7681128e8e4a540d9160571241e2f6bc17/test/antq/upgrade/github_action_test.clj | clojure | (ns antq.upgrade.github-action-test
(:require
[antq.constant.github-action :as const.gh-action]
[antq.dep.github-action :as dep.gha]
[antq.record :as r]
[antq.test-helper :as h]
[antq.upgrade :as upgrade]
[antq.upgrade.github-action]
[clojure.java.io :as io]
[clojure.test :as t]))
(def ^:private dummy-dep
(r/map->Dependency {:project :github-action
:type :github-tag
:name "foo/bar"
:latest-version "v9.0.0"
:file (io/resource "dep/test_github_action.yml")
:extra {const.gh-action/type-key "uses"}}))
(def ^:private dummy-not-supported-dep
(r/map->Dependency {:project :github-action
:type :github-tag
:name "bar/baz"
:latest-version "v9.0.0"
:file (io/resource "dep/test_github_action.yml")}))
(def ^:private dummy-clojure-cli-dep
(r/map->Dependency {:project :github-action
:type :github-tag
:name "clojure/brew-install"
:latest-version "9.0.0"
:file (io/resource "dep/test_github_action_third_party.yml")
:extra {const.gh-action/type-key "DeLaGuardo/setup-clojure"}}))
(def ^:private dummy-leiningen-dep
(r/map->Dependency {:project :github-action
:type :github-tag
:name "technomancy/leiningen"
:latest-version "9.0.0"
:file (io/resource "dep/test_github_action_third_party.yml")
:extra {const.gh-action/type-key "DeLaGuardo/setup-clojure"}}))
(def ^:private dummy-boot-dep
(r/map->Dependency {:project :github-action
:type :github-tag
:name "boot-clj/boot"
:latest-version "9.0.0"
:file (io/resource "dep/test_github_action_third_party.yml")
:extra {const.gh-action/type-key "DeLaGuardo/setup-clojure"}}))
(def ^:private dummy-clj-kondo-dep
(r/map->Dependency {:project :github-action
:type :java
:name "clj-kondo/clj-kondo"
:latest-version "9.0.0"
:file (io/resource "dep/test_github_action_third_party.yml")
:extra {const.gh-action/type-key "DeLaGuardo/setup-clj-kondo"}}))
(def ^:private dummy-graalvm-dep
(r/map->Dependency {:project :github-action
:type :github-tag
:name "graalvm/graalvm-ce-builds"
:latest-version "9.0.0"
:file (io/resource "dep/test_github_action_third_party.yml")
:extra {const.gh-action/type-key "DeLaGuardo/setup-graalvm"}}))
(def ^:private dummy-cljstyle-dep
(r/map->Dependency {:project :github-action
:type :github-tag
:name "greglook/cljstyle"
:latest-version "9.0.0"
:file (io/resource "dep/test_github_action_third_party.yml")
:extra {const.gh-action/type-key "0918nobita/setup-cljstyle"}}))
(t/deftest upgrade-dep-test
(t/testing "supported"
(let [from-deps (->> (:file dummy-dep)
(slurp)
(dep.gha/extract-deps ""))
to-deps (->> dummy-dep
(upgrade/upgrader)
(dep.gha/extract-deps ""))]
(t/is (= #{{:name "foo/bar" :version {:- "v1.0.0" :+ "v9.0.0"}}}
(h/diff-deps from-deps to-deps)))))
(t/testing "not supported"
(t/is (nil? (upgrade/upgrader dummy-not-supported-dep)))))
(t/deftest upgrade-third-party-dep-test
(t/testing "clojure"
(t/testing "clojure cli"
(let [from-deps (->> (:file dummy-clojure-cli-dep)
(slurp)
(dep.gha/extract-deps ""))
to-deps (->> dummy-clojure-cli-dep
(upgrade/upgrader)
(dep.gha/extract-deps ""))]
(t/is (= #{{:name "clojure/brew-install" :version {:- 1 :+ "9.0.0"}}}
(h/diff-deps from-deps to-deps)))))
(t/testing "leiningen"
(let [from-deps (->> (:file dummy-leiningen-dep)
(slurp)
(dep.gha/extract-deps ""))
to-deps (->> dummy-leiningen-dep
(upgrade/upgrader)
(dep.gha/extract-deps ""))]
(t/is (= #{{:name "technomancy/leiningen" :version {:- 2 :+ "9.0.0"}}}
(h/diff-deps from-deps to-deps)))))
(t/testing "boot"
(let [from-deps (->> (:file dummy-boot-dep)
(slurp)
(dep.gha/extract-deps ""))
to-deps (->> dummy-boot-dep
(upgrade/upgrader)
(dep.gha/extract-deps ""))]
(t/is (= #{{:name "boot-clj/boot" :version {:- 3 :+ "9.0.0"}}}
(h/diff-deps from-deps to-deps))))))
(t/testing "clj-kondo"
(let [from-deps (->> (:file dummy-clj-kondo-dep)
(slurp)
(dep.gha/extract-deps ""))
to-deps (->> dummy-clj-kondo-dep
(upgrade/upgrader)
(dep.gha/extract-deps ""))]
(t/is (= #{{:name "clj-kondo/clj-kondo" :version {:- "5" :+ "9.0.0"}}}
(h/diff-deps from-deps to-deps)))))
(t/testing "graalvm"
(let [from-deps (->> (:file dummy-graalvm-dep)
(slurp)
(dep.gha/extract-deps ""))
to-deps (->> dummy-graalvm-dep
(upgrade/upgrader)
(dep.gha/extract-deps ""))]
(t/is (= #{{:name "graalvm/graalvm-ce-builds" :version {:- "6" :+ "9.0.0"}}}
(h/diff-deps from-deps to-deps)))))
(t/testing "cljstyle"
(let [from-deps (->> (:file dummy-cljstyle-dep)
(slurp)
(dep.gha/extract-deps ""))
to-deps (->> dummy-cljstyle-dep
(upgrade/upgrader)
(dep.gha/extract-deps ""))]
(t/is (= #{{:name "greglook/cljstyle" :version {:- "7" :+ "9.0.0"}}}
(h/diff-deps from-deps to-deps))))))
|
|
e2b47ef39ebced3da08b0523e73db1b13c2975f4f2c3f046b9b23316daf16988 | rkallos/wrek | wrek_vert_t.erl | -module(wrek_vert_t).
-export([
new/0,
cancel/1,
fail/2,
from_defn/1,
has_succeeded/1,
is_finished/1,
succeed/2,
to_list/1,
% getters
args/1,
deps/1,
dir/1,
id/1,
kv/1,
module/1,
name/1,
reason/1,
status/1,
timeout/1,
% setters
set_args/2,
set_deps/2,
set_dir/2,
set_id/2,
set_kv/2,
set_module/2,
set_name/2,
set_reason/2,
set_status/2,
set_timeout/2
]).
-type args_t() :: list().
-type deps_t() :: list().
-type dir_t() :: file:filename_all() | undefined.
-type id_t() :: wrek:vert_id() | undefined.
-type kv_t() :: map().
-type module_t() :: module() | undefined.
-type name_t() :: any().
-type reason_t() :: any().
-type status_t() :: failed | done | cancelled | undefined.
-type timeout_t() :: pos_integer() | undefined.
-define(T, ?MODULE).
-record(?T, {
args = [] :: args_t(),
deps = [] :: deps_t(),
dir = undefined :: dir_t(),
id = undefined :: id_t(),
kv = #{} :: kv_t(),
module = undefined :: module_t(),
name = undefined :: name_t(),
reason = undefined :: reason_t(),
status = undefined :: status_t(),
timeout = undefined :: timeout_t()
}).
-type t() :: #?T{}.
-export_type([
t/0
]).
-spec new() -> t().
new() ->
#?T{}.
-spec cancel(t()) -> t().
cancel(Vert = #?T{}) ->
set_status(Vert, cancelled).
-spec fail(t(), reason_t()) -> t().
fail(Vert = #?T{}, Reason) ->
Vert2 = set_reason(Vert, Reason),
set_status(Vert2, failed).
-spec from_defn(map() | t()) -> {ok, t()} | {error, any()}.
from_defn(Map0) when is_map(Map0) ->
Res0 = #?T{},
MandatoryFields = [
{module, fun set_module/2},
{args, fun set_args/2},
{deps, fun set_deps/2}
],
OptionalFields = [
{name, fun set_name/2},
{timeout, fun set_timeout/2}
],
case load_mandatory(Res0, Map0, MandatoryFields) of
{error, _} = Err ->
Err;
{ok, {Res1, Map1}} ->
{ok, {Res2, Map2}} = load_optional(Res1, Map1, OptionalFields),
Res3 = set_kv(Res2, Map2),
{ok, Res3}
end;
from_defn(T = #?T{}) ->
{ok, T};
from_defn(_) ->
{error, not_map_or_record}.
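%% Usage sketch (illustrative only; the atoms and the extra key below are made
%% up): mandatory keys are module/args/deps, name/timeout are optional, and any
%% remaining keys simply land in the vert's kv map.
%%   {ok, V} = wrek_vert_t:from_defn(#{module => my_vert, args => [], deps => [],
%%                                     name => build, retries => 3}),
%%   #{retries := 3} = wrek_vert_t:kv(V),
%%   {error, {missing_field, module}} =
%%       wrek_vert_t:from_defn(#{args => [], deps => []}).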
-spec has_succeeded(t()) -> boolean().
has_succeeded(Vert = #?T{}) ->
case status(Vert) of
done ->
true;
_ ->
false
end.
-spec is_finished(t()) -> boolean().
is_finished(Vert = #?T{}) ->
case status(Vert) of
done ->
true;
failed ->
true;
cancelled ->
true;
_ ->
false
end.
-spec succeed(t(), map()) -> t().
succeed(Vert = #?T{kv = Kv}, Result) ->
Vert2 = set_kv(Vert, maps:merge(Kv, Result)),
set_status(Vert2, done).
-spec to_list(t()) -> [{atom(), any()}].
to_list(T = #?T{}) ->
Fields = record_info(fields, ?T),
[_Tag | Values] = tuple_to_list(T),
lists:zip(Fields, Values).
-spec args(t()) -> args_t().
args(#?T{args = Args}) ->
Args.
-spec deps(t()) -> deps_t().
deps(#?T{deps = Deps}) ->
Deps.
-spec dir(t()) -> dir_t().
dir(#?T{dir = Dir}) ->
Dir.
-spec id(t()) -> id_t().
id(#?T{id = Id}) ->
Id.
-spec kv(t()) -> kv_t().
kv(#?T{kv = Kv}) ->
Kv.
-spec module(t()) -> module_t().
module(#?T{module = Module}) ->
Module.
-spec name(t()) -> name_t().
name(#?T{name = Name}) ->
Name.
-spec reason(t()) -> reason_t().
reason(#?T{reason = Reason}) ->
Reason.
-spec status(t()) -> status_t().
status(#?T{status = Status}) ->
Status.
-spec timeout(t()) -> timeout_t().
timeout(#?T{timeout = Timeout}) ->
Timeout.
-spec set_args(t(), args_t()) -> t().
set_args(T = #?T{}, Args) ->
T#?T{args = Args}.
-spec set_deps(t(), deps_t()) -> t().
set_deps(T = #?T{}, Deps) ->
T#?T{deps = Deps}.
-spec set_dir(t(), dir_t()) -> t().
set_dir(T = #?T{}, Dir) ->
T#?T{dir = Dir}.
-spec set_id(t(), id_t()) -> t().
set_id(T = #?T{}, Id) ->
T#?T{id = Id}.
-spec set_kv(t(), kv_t()) -> t().
set_kv(T = #?T{}, Kv) ->
T#?T{kv = Kv}.
-spec set_module(t(), module_t()) -> t().
set_module(T = #?T{}, Module) ->
T#?T{module = Module}.
-spec set_name(t(), name_t()) -> t().
set_name(T = #?T{}, Name) ->
T#?T{name = Name}.
-spec set_reason(t(), reason_t()) -> t().
set_reason(T = #?T{}, Reason) ->
T#?T{reason = Reason}.
-spec set_status(t(), status_t()) -> t().
set_status(T = #?T{}, Status) ->
T#?T{status = Status}.
-spec set_timeout(t(), timeout_t()) -> t().
set_timeout(T = #?T{}, Timeout) ->
T#?T{timeout = Timeout}.
% private
-type setter_t() :: fun((t(), any()) -> t()).
-spec load_mandatory(t(), map(), [{atom(), setter_t()}]) ->
{ok, {t(), map()}} | {error, any()}.
load_mandatory(Vert, Map, FieldSetterPairs) ->
load(Vert, Map, FieldSetterPairs, error).
-spec load_optional(t(), map(), [{atom(), setter_t()}]) ->
{ok, {t(), map()}}.
load_optional(Vert, Map, FieldSetterPairs) ->
load(Vert, Map, FieldSetterPairs, continue).
-spec load(t(), map(), [{atom(), setter_t()}], error | continue) ->
{ok, {t(), map()}} | {error, any()}.
load(Vert = #?T{}, Map, [], _FailMode) ->
{ok, {Vert, Map}};
load(Vert = #?T{}, Map, [{FieldName, Setter} | Rest], FailMode) ->
case {Map, FailMode} of
{#{FieldName := FieldVal}, _} ->
Vert2 = Setter(Vert, FieldVal),
Map2 = maps:remove(FieldName, Map),
load(Vert2, Map2, Rest, FailMode);
{_, error} ->
{error, {missing_field, FieldName}};
{_, continue} ->
load(Vert, Map, Rest, FailMode)
end.
| null | https://raw.githubusercontent.com/rkallos/wrek/3859e9efdf21227e6e8e0ea81095b229eceb6641/src/wrek_vert_t.erl | erlang | getters
setters
private | -module(wrek_vert_t).
-export([
new/0,
cancel/1,
fail/2,
from_defn/1,
has_succeeded/1,
is_finished/1,
succeed/2,
to_list/1,
args/1,
deps/1,
dir/1,
id/1,
kv/1,
module/1,
name/1,
reason/1,
status/1,
timeout/1,
set_args/2,
set_deps/2,
set_dir/2,
set_id/2,
set_kv/2,
set_module/2,
set_name/2,
set_reason/2,
set_status/2,
set_timeout/2
]).
-type args_t() :: list().
-type deps_t() :: list().
-type dir_t() :: file:filename_all() | undefined.
-type id_t() :: wrek:vert_id() | undefined.
-type kv_t() :: map().
-type module_t() :: module() | undefined.
-type name_t() :: any().
-type reason_t() :: any().
-type status_t() :: failed | done | cancelled | undefined.
-type timeout_t() :: pos_integer() | undefined.
-define(T, ?MODULE).
-record(?T, {
args = [] :: args_t(),
deps = [] :: deps_t(),
dir = undefined :: dir_t(),
id = undefined :: id_t(),
kv = #{} :: kv_t(),
module = undefined :: module_t(),
name = undefined :: name_t(),
reason = undefined :: reason_t(),
status = undefined :: status_t(),
timeout = undefined :: timeout_t()
}).
-type t() :: #?T{}.
-export_type([
t/0
]).
-spec new() -> t().
new() ->
#?T{}.
-spec cancel(t()) -> t().
cancel(Vert = #?T{}) ->
set_status(Vert, cancelled).
-spec fail(t(), reason_t()) -> t().
fail(Vert = #?T{}, Reason) ->
Vert2 = set_reason(Vert, Reason),
set_status(Vert2, failed).
-spec from_defn(map() | t()) -> {ok, t()} | {error, any()}.
from_defn(Map0) when is_map(Map0) ->
Res0 = #?T{},
MandatoryFields = [
{module, fun set_module/2},
{args, fun set_args/2},
{deps, fun set_deps/2}
],
OptionalFields = [
{name, fun set_name/2},
{timeout, fun set_timeout/2}
],
case load_mandatory(Res0, Map0, MandatoryFields) of
{error, _} = Err ->
Err;
{ok, {Res1, Map1}} ->
{ok, {Res2, Map2}} = load_optional(Res1, Map1, OptionalFields),
Res3 = set_kv(Res2, Map2),
{ok, Res3}
end;
from_defn(T = #?T{}) ->
{ok, T};
from_defn(_) ->
{error, not_map_or_record}.
-spec has_succeeded(t()) -> boolean().
has_succeeded(Vert = #?T{}) ->
case status(Vert) of
done ->
true;
_ ->
false
end.
-spec is_finished(t()) -> boolean().
is_finished(Vert = #?T{}) ->
case status(Vert) of
done ->
true;
failed ->
true;
cancelled ->
true;
_ ->
false
end.
-spec succeed(t(), map()) -> t().
succeed(Vert = #?T{kv = Kv}, Result) ->
Vert2 = set_kv(Vert, maps:merge(Kv, Result)),
set_status(Vert2, done).
-spec to_list(t()) -> [{atom(), any()}].
to_list(T = #?T{}) ->
Fields = record_info(fields, ?T),
[_Tag | Values] = tuple_to_list(T),
lists:zip(Fields, Values).
-spec args(t()) -> args_t().
args(#?T{args = Args}) ->
Args.
-spec deps(t()) -> deps_t().
deps(#?T{deps = Deps}) ->
Deps.
-spec dir(t()) -> dir_t().
dir(#?T{dir = Dir}) ->
Dir.
-spec id(t()) -> id_t().
id(#?T{id = Id}) ->
Id.
-spec kv(t()) -> kv_t().
kv(#?T{kv = Kv}) ->
Kv.
-spec module(t()) -> module_t().
module(#?T{module = Module}) ->
Module.
-spec name(t()) -> name_t().
name(#?T{name = Name}) ->
Name.
-spec reason(t()) -> reason_t().
reason(#?T{reason = Reason}) ->
Reason.
-spec status(t()) -> status_t().
status(#?T{status = Status}) ->
Status.
-spec timeout(t()) -> timeout_t().
timeout(#?T{timeout = Timeout}) ->
Timeout.
-spec set_args(t(), args_t()) -> t().
set_args(T = #?T{}, Args) ->
T#?T{args = Args}.
-spec set_deps(t(), deps_t()) -> t().
set_deps(T = #?T{}, Deps) ->
T#?T{deps = Deps}.
-spec set_dir(t(), dir_t()) -> t().
set_dir(T = #?T{}, Dir) ->
T#?T{dir = Dir}.
-spec set_id(t(), id_t()) -> t().
set_id(T = #?T{}, Id) ->
T#?T{id = Id}.
-spec set_kv(t(), kv_t()) -> t().
set_kv(T = #?T{}, Kv) ->
T#?T{kv = Kv}.
-spec set_module(t(), module_t()) -> t().
set_module(T = #?T{}, Module) ->
T#?T{module = Module}.
-spec set_name(t(), name_t()) -> t().
set_name(T = #?T{}, Name) ->
T#?T{name = Name}.
-spec set_reason(t(), reason_t()) -> t().
set_reason(T = #?T{}, Reason) ->
T#?T{reason = Reason}.
-spec set_status(t(), status_t()) -> t().
set_status(T = #?T{}, Status) ->
T#?T{status = Status}.
-spec set_timeout(t(), timeout_t()) -> t().
set_timeout(T = #?T{}, Timeout) ->
T#?T{timeout = Timeout}.
-type setter_t() :: fun((t(), any()) -> t()).
-spec load_mandatory(t(), map(), [{atom(), setter_t()}]) ->
{ok, {t(), map()}} | {error, any()}.
load_mandatory(Vert, Map, FieldSetterPairs) ->
load(Vert, Map, FieldSetterPairs, error).
-spec load_optional(t(), map(), [{atom(), setter_t()}]) ->
{ok, {t(), map()}}.
load_optional(Vert, Map, FieldSetterPairs) ->
load(Vert, Map, FieldSetterPairs, continue).
-spec load(t(), map(), [{atom(), setter_t()}], error | continue) ->
{ok, {t(), map()}} | {error, any()}.
load(Vert = #?T{}, Map, [], _FailMode) ->
{ok, {Vert, Map}};
load(Vert = #?T{}, Map, [{FieldName, Setter} | Rest], FailMode) ->
case {Map, FailMode} of
{#{FieldName := FieldVal}, _} ->
Vert2 = Setter(Vert, FieldVal),
Map2 = maps:remove(FieldName, Map),
load(Vert2, Map2, Rest, FailMode);
{_, error} ->
{error, {missing_field, FieldName}};
{_, continue} ->
load(Vert, Map, Rest, FailMode)
end.
|
0cba433b2d91888ab412fffc20f4cd160cb031b31b22c6de59cc747df6b5d24c | spawnfest/eep49ers | wxActivateEvent.erl | %%
%% %CopyrightBegin%
%%
Copyright Ericsson AB 2008 - 2020 . All Rights Reserved .
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%% This file is generated DO NOT EDIT
-module(wxActivateEvent).
-include("wxe.hrl").
-export([getActive/1]).
%% inherited exports
-export([getId/1,getSkipped/1,getTimestamp/1,isCommandEvent/1,parent_class/1,
resumePropagation/2,shouldPropagate/1,skip/1,skip/2,stopPropagation/1]).
-type wxActivateEvent() :: wx:wx_object().
-include("wx.hrl").
-type wxActivateEventType() :: 'activate' | 'activate_app' | 'hibernate'.
-export_type([wxActivateEvent/0, wxActivate/0, wxActivateEventType/0]).
%% @hidden
parent_class(wxEvent) -> true;
parent_class(_Class) -> erlang:error({badtype, ?MODULE}).
@doc See < a href=" / manuals/2.8.12 / wx_wxactivateevent.html#wxactivateeventgetactive">external documentation</a > .
-spec getActive(This) -> boolean() when
This::wxActivateEvent().
getActive(#wx_ref{type=ThisT}=This) ->
?CLASS(ThisT,wxActivateEvent),
wxe_util:queue_cmd(This,?get_env(),?wxActivateEvent_GetActive),
wxe_util:rec(?wxActivateEvent_GetActive).
%% From wxEvent
%% @hidden
stopPropagation(This) -> wxEvent:stopPropagation(This).
%% @hidden
skip(This, Options) -> wxEvent:skip(This, Options).
%% @hidden
skip(This) -> wxEvent:skip(This).
%% @hidden
shouldPropagate(This) -> wxEvent:shouldPropagate(This).
%% @hidden
resumePropagation(This,PropagationLevel) -> wxEvent:resumePropagation(This,PropagationLevel).
%% @hidden
isCommandEvent(This) -> wxEvent:isCommandEvent(This).
%% @hidden
getTimestamp(This) -> wxEvent:getTimestamp(This).
%% @hidden
getSkipped(This) -> wxEvent:getSkipped(This).
%% @hidden
getId(This) -> wxEvent:getId(This).
| null | https://raw.githubusercontent.com/spawnfest/eep49ers/d1020fd625a0bbda8ab01caf0e1738eb1cf74886/lib/wx/src/gen/wxActivateEvent.erl | erlang |
%CopyrightBegin%
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
%CopyrightEnd%
This file is generated DO NOT EDIT
inherited exports
@hidden
From wxEvent
@hidden
@hidden
@hidden
@hidden
@hidden
@hidden
@hidden
@hidden
@hidden | Copyright Ericsson AB 2008 - 2020 . All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(wxActivateEvent).
-include("wxe.hrl").
-export([getActive/1]).
-export([getId/1,getSkipped/1,getTimestamp/1,isCommandEvent/1,parent_class/1,
resumePropagation/2,shouldPropagate/1,skip/1,skip/2,stopPropagation/1]).
-type wxActivateEvent() :: wx:wx_object().
-include("wx.hrl").
-type wxActivateEventType() :: 'activate' | 'activate_app' | 'hibernate'.
-export_type([wxActivateEvent/0, wxActivate/0, wxActivateEventType/0]).
parent_class(wxEvent) -> true;
parent_class(_Class) -> erlang:error({badtype, ?MODULE}).
@doc See < a href=" / manuals/2.8.12 / wx_wxactivateevent.html#wxactivateeventgetactive">external documentation</a > .
-spec getActive(This) -> boolean() when
This::wxActivateEvent().
getActive(#wx_ref{type=ThisT}=This) ->
?CLASS(ThisT,wxActivateEvent),
wxe_util:queue_cmd(This,?get_env(),?wxActivateEvent_GetActive),
wxe_util:rec(?wxActivateEvent_GetActive).
stopPropagation(This) -> wxEvent:stopPropagation(This).
skip(This, Options) -> wxEvent:skip(This, Options).
skip(This) -> wxEvent:skip(This).
shouldPropagate(This) -> wxEvent:shouldPropagate(This).
resumePropagation(This,PropagationLevel) -> wxEvent:resumePropagation(This,PropagationLevel).
isCommandEvent(This) -> wxEvent:isCommandEvent(This).
getTimestamp(This) -> wxEvent:getTimestamp(This).
getSkipped(This) -> wxEvent:getSkipped(This).
getId(This) -> wxEvent:getId(This).
|
983fecaa8d00d3ffaac6da0ca9aaf84289d917fefb724773d15f0d3bde0f46d7 | hiroshi-unno/coar | problem.ml | open Core
open Graph
open Common.Ext
open Ast
open Ast.LogicOld
let nondet_prefix = "#nondet"
type lts = string option (*start*) * string option (*error*) * string option (*cutpoint*) * transition list
and transition = string * command * string
and command = Skip | Assume of LogicOld.Formula.t | Subst of (Ident.tvar * LogicOld.Sort.t) * LogicOld.Term.t | Seq of command * command | Choice of command * command
type mode = Safe | NonSafe | Term | NonTerm | CondTerm | MuCal | Rel
type t = lts * mode
let seq = function [] -> Skip | c::cs -> List.fold_left ~init:c cs ~f:(fun c1 c2 -> Seq (c1, c2))
let seq cs = seq @@ List.filter cs ~f:(Stdlib.(<>) Skip)
let choice = function [] -> assert false | c::cs -> List.fold_left ~init:c cs ~f:(fun c1 c2 -> Choice (c1, c2))
let rec term_sort_env_of_command = function
| Skip -> Set.Poly.empty
| Assume atm -> Formula.term_sort_env_of atm
| Subst ((x, s), t) -> Set.Poly.add (Term.term_sort_env_of t) (x, s)
| Seq (c1, c2) | Choice (c1, c2) -> Set.Poly.union (term_sort_env_of_command c1) (term_sort_env_of_command c2)
let term_sort_env_of_transition (_, c, _) = term_sort_env_of_command c
let term_sort_env_of (_, _, _, trs) =
Set.Poly.union_list @@ List.map ~f:term_sort_env_of_transition trs
let rec is_effect_free = function
| Skip | Assume _ -> true
| Subst ((_, _), t) ->
Set.Poly.for_all (Term.tvs_of t) ~f:(fun x ->
not @@ String.is_prefix (Ident.name_of_tvar x) ~prefix:nondet_prefix)
| Seq (c1, c2) | Choice (c1, c2) -> is_effect_free c1 && is_effect_free c2
let rec str_of_command = function
| Skip -> "skip;\n"
| Assume atom ->
Printf.sprintf "assume(%s);\n" (LogicOld.Formula.str_of ~priority:20 atom)
| Subst ((x, _sort), t) ->
Printf.sprintf "%s := %s;\n" (Ident.name_of_tvar x) (LogicOld.Term.str_of t)
| Seq (c1, c2) -> str_of_command c1 ^ str_of_command c2
| Choice (c1, c2) -> "(\n" ^ str_of_command c1 ^ ") || (\n" ^ str_of_command c2 ^ ");\n"
let str_of_transition (from, c, to_) =
Printf.sprintf "FROM: %s;\n%sTO: %s;\n\n" from (str_of_command c) to_
let str_of_lts (s, e, c, trans) =
(match s with None -> "" | Some s -> Printf.sprintf "START: %s;\n" s) ^
(match e with None -> "" | Some e -> Printf.sprintf "ERROR: %s;\n" e) ^
(match c with None -> "" | Some c -> Printf.sprintf "CUTPOINT: %s;\n" c) ^
String.concat_map_list ~f:str_of_transition trans
let rec wp c phi = match c with
| Skip -> phi
| Assume phi' -> Formula.mk_imply phi' phi
| Subst ((x, _s), t) -> Formula.subst (Map.Poly.singleton x t) phi
| Seq (c1, c2) -> wp c1 (wp c2 phi)
| Choice (c1, c2) -> Formula.and_of [wp c1 phi; wp c2 phi]
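(* Usage sketch (the names [x], [t] and [g] are illustrative): for a command
   [Seq (Subst ((x, s), t), Assume g)], [wp] unfolds right-to-left to
   [Formula.subst (Map.Poly.singleton x t) (Formula.mk_imply g phi)], i.e. the
   weakest precondition of a sequence is the wp of its head applied to the wp
   of its tail. *)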
let used_vars c =
let rec aux env = function
| Skip -> env
| Assume phi ->
let env' =
Set.Poly.filter (Formula.term_sort_env_of phi) ~f:(fun (x, _) ->
not @@ String.is_prefix ~prefix:nondet_prefix @@ Ident.name_of_tvar x) in
Set.Poly.union env' env
| Subst ((x, _), t) ->
let env' =
Set.Poly.filter (Term.term_sort_env_of t) ~f:(fun (x, _) ->
not @@ String.is_prefix ~prefix:nondet_prefix @@ Ident.name_of_tvar x) in
Set.Poly.union env' (Set.Poly.filter env ~f:(fun (y, _) -> Stdlib.(x <> y)))
| Seq (c1, c2) -> aux (aux env c2) c1
| Choice (c1, c2) -> Set.Poly.union (aux env c1) (aux env c2)
in aux Set.Poly.empty c
let defined_vars c =
let rec aux env = function
| Skip -> env
| Assume _ -> env
| Subst ((x, s), _) -> Set.Poly.union (Set.Poly.singleton (x, s)) env
| Seq (c1, c2) -> aux (aux env c2) c1
| Choice (c1, c2) -> Set.Poly.inter (aux env c1) (aux env c2)
in aux Set.Poly.empty c
module V (*: Sig.COMPARABLE*) = struct
type t = string
let compare = String.compare
let hash = String.hash
let equal = String.equal
end
module E (*: Sig.ORDERED_TYPE_DFT*) = struct
type t = command
let compare = Stdlib.compare
let default = Skip
end
module G = Imperative.Digraph.ConcreteBidirectionalLabeled(V)(E)
let graph_of (trans : transition list) =
let g = G.create () in
List.iter trans ~f:(fun (f, c, t) ->
let v1 = G.V.create f in
let v2 = G.V.create t in
G.add_vertex g v1;
G.add_vertex g v2;
G.add_edge_e g (G.E.create v1 c v2));
g
let of_graph cfa : transition list =
G.fold_edges_e (fun (f, c, t) trans -> (f, c, t) :: trans) cfa []
exception Found_Edges of G.edge list
let contract_edges cfa =
try
G.iter_edges (fun s d -> let edges = G.find_all_edges cfa s d in if List.length edges > 1 then raise (Found_Edges edges) else ()) cfa;
false
with Found_Edges [] -> assert false
| Found_Edges (((s, _ ,d) :: _) as es) ->
(*print_endline ("eliminating edge (" ^ s ^ ", " ^ d ^ ")");*)
let c = choice @@ List.map es ~f:(fun (_, c, _) -> c) in
G.remove_edge cfa s d;
G.add_edge_e cfa (s, c, d);
true
exception Found_Vertex_1_1 of G.edge * G.vertex * G.edge
let contract_vertex_1_1 s cfa =
try
G.iter_vertex (fun v ->
if String.(s = v) (* ignore start node *) ||
Fn.non List.is_empty @@ G.find_all_edges cfa v v (*ignore vertex with a self-loop*)
then () else
match G.pred_e cfa v, G.succ_e cfa v with
| [e1], [e2] -> raise (Found_Vertex_1_1 (e1, v, e2))
| _ -> ()) cfa;
false
with Found_Vertex_1_1 ((s, c1, _d), v, (_s, c2, d)) ->
(*print_endline ("eliminating vertex " ^ v);*)
G.remove_vertex cfa v;
G.add_edge_e cfa (s, seq [c1; c2], d);
true
exception Found_Vertex_1_n of G.edge * G.vertex * G.edge list
let contract_vertex_1_n s cfa =
try
G.iter_vertex (fun v ->
if String.(s = v) (* ignore start node *) ||
Fn.non List.is_empty @@ G.find_all_edges cfa v v (*ignore vertex with a self-loop*)
then () else
match G.pred_e cfa v, G.succ_e cfa v with
| [(_, c, _) as e], es when is_effect_free c -> raise (Found_Vertex_1_n (e, v, es))
| _ -> ()) cfa;
false
with Found_Vertex_1_n ((s, c1, _d), v, es) ->
(*print_endline ("eliminating vertex " ^ v);*)
G.remove_vertex cfa v;
List.iter es ~f:(fun (_s, c2, d) -> G.add_edge_e cfa (s, seq [c1; c2], d));
true
exception Found_Vertex_n_1 of G.edge list * G.vertex * G.edge
let contract_vertex_n_1 s cfa =
try
G.iter_vertex (fun v ->
if String.(s = v) (* ignore start node *) ||
Fn.non List.is_empty @@ G.find_all_edges cfa v v (*ignore vertex with a self-loop*)
then () else
match G.pred_e cfa v, G.succ_e cfa v with
| es, [(_, c, _) as e] when is_effect_free c -> raise (Found_Vertex_n_1 (es, v, e))
| _ -> ()) cfa;
false
with Found_Vertex_n_1 (es, v, (_s, c2, d)) ->
(*print_endline ("eliminating vertex " ^ v);*)
G.remove_vertex cfa v;
List.iter es ~f:(fun (s, c1, _d) -> G.add_edge_e cfa (s, seq [c1; c2], d));
true
let rec simplify s cfa =
if contract_edges cfa || contract_vertex_1_1 s cfa || contract_vertex_1_n s cfa || contract_vertex_n_1 s cfa then
simplify s cfa
else cfa
module LiveVariables = Graph.Fixpoint.Make(G)
(struct
type vertex = G.E.vertex
type edge = G.E.t
type g = G.t
type data = sort_env_set
let direction = Graph.Fixpoint.Backward
let equal = Set.Poly.equal
let join = Set.Poly.union
let analyze (_, c, _) env =
let def = Set.Poly.map ~f:fst @@ defined_vars c in
let use = used_vars c in
Set.Poly.union use (Set.Poly.filter env ~f:(fun (x, _) -> not @@ Set.Poly.mem def x))
end)
let analyze (s, e, c, trans) =
match s with
| None ->
(fun _ -> Set.Poly.empty), Set.Poly.empty, (s, e, c, trans)
| Some s ->
let cfa = simplify s (graph_of trans) in
let live_vars = LiveVariables.analyze (fun _ -> Set.Poly.empty) cfa in
let cut_points = G.fold_vertex (fun v s -> Set.Poly.add s v) cfa Set.Poly.empty in
let trans' = of_graph cfa in
live_vars, cut_points, (Some s, e, c, trans')
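(* Usage sketch (assuming an [lts] value obtained elsewhere): [analyze] simplifies
   the control-flow automaton and returns the live-variable map per location, the
   surviving locations (used as cut points), and the reduced LTS:
     let live_vars, cut_points, (start, err, cut, trans') = analyze lts in ...
   where [live_vars loc] is the set of variables live at location [loc]. *)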
| null | https://raw.githubusercontent.com/hiroshi-unno/coar/90a23a09332c68f380efd4115b3f6fdc825f413d/lib/LTS/problem.ml | ocaml | : Sig.ORDERED_TYPE_DFT
print_endline ("eliminating edge (" ^ s ^ ", " ^ d ^ ")");
ignore start node
ignore vertex with a self-loop
print_endline ("eliminating vertex " ^ v);
ignore start node
ignore vertex with a self-loop
print_endline ("eliminating vertex " ^ v);
ignore start node
ignore vertex with a self-loop
print_endline ("eliminating vertex " ^ v); | open Core
open Graph
open Common.Ext
open Ast
open Ast.LogicOld
let nondet_prefix = "#nondet"
cutpoint
and transition = string * command * string
and command = Skip | Assume of LogicOld.Formula.t | Subst of (Ident.tvar * LogicOld.Sort.t) * LogicOld.Term.t | Seq of command * command | Choice of command * command
type mode = Safe | NonSafe | Term | NonTerm | CondTerm | MuCal | Rel
type t = lts * mode
let seq = function [] -> Skip | c::cs -> List.fold_left ~init:c cs ~f:(fun c1 c2 -> Seq (c1, c2))
let seq cs = seq @@ List.filter cs ~f:(Stdlib.(<>) Skip)
let choice = function [] -> assert false | c::cs -> List.fold_left ~init:c cs ~f:(fun c1 c2 -> Choice (c1, c2))
let rec term_sort_env_of_command = function
| Skip -> Set.Poly.empty
| Assume atm -> Formula.term_sort_env_of atm
| Subst ((x, s), t) -> Set.Poly.add (Term.term_sort_env_of t) (x, s)
| Seq (c1, c2) | Choice (c1, c2) -> Set.Poly.union (term_sort_env_of_command c1) (term_sort_env_of_command c2)
let term_sort_env_of_transition (_, c, _) = term_sort_env_of_command c
let term_sort_env_of (_, _, _, trs) =
Set.Poly.union_list @@ List.map ~f:term_sort_env_of_transition trs
let rec is_effect_free = function
| Skip | Assume _ -> true
| Subst ((_, _), t) ->
Set.Poly.for_all (Term.tvs_of t) ~f:(fun x ->
not @@ String.is_prefix (Ident.name_of_tvar x) ~prefix:nondet_prefix)
| Seq (c1, c2) | Choice (c1, c2) -> is_effect_free c1 && is_effect_free c2
let rec str_of_command = function
| Skip -> "skip;\n"
| Assume atom ->
Printf.sprintf "assume(%s);\n" (LogicOld.Formula.str_of ~priority:20 atom)
| Subst ((x, _sort), t) ->
Printf.sprintf "%s := %s;\n" (Ident.name_of_tvar x) (LogicOld.Term.str_of t)
| Seq (c1, c2) -> str_of_command c1 ^ str_of_command c2
| Choice (c1, c2) -> "(\n" ^ str_of_command c1 ^ ") || (\n" ^ str_of_command c2 ^ ");\n"
let str_of_transition (from, c, to_) =
Printf.sprintf "FROM: %s;\n%sTO: %s;\n\n" from (str_of_command c) to_
let str_of_lts (s, e, c, trans) =
(match s with None -> "" | Some s -> Printf.sprintf "START: %s;\n" s) ^
(match e with None -> "" | Some e -> Printf.sprintf "ERROR: %s;\n" e) ^
(match c with None -> "" | Some c -> Printf.sprintf "CUTPOINT: %s;\n" c) ^
String.concat_map_list ~f:str_of_transition trans
let rec wp c phi = match c with
| Skip -> phi
| Assume phi' -> Formula.mk_imply phi' phi
| Subst ((x, _s), t) -> Formula.subst (Map.Poly.singleton x t) phi
| Seq (c1, c2) -> wp c1 (wp c2 phi)
| Choice (c1, c2) -> Formula.and_of [wp c1 phi; wp c2 phi]
let used_vars c =
let rec aux env = function
| Skip -> env
| Assume phi ->
let env' =
Set.Poly.filter (Formula.term_sort_env_of phi) ~f:(fun (x, _) ->
not @@ String.is_prefix ~prefix:nondet_prefix @@ Ident.name_of_tvar x) in
Set.Poly.union env' env
| Subst ((x, _), t) ->
let env' =
Set.Poly.filter (Term.term_sort_env_of t) ~f:(fun (x, _) ->
not @@ String.is_prefix ~prefix:nondet_prefix @@ Ident.name_of_tvar x) in
Set.Poly.union env' (Set.Poly.filter env ~f:(fun (y, _) -> Stdlib.(x <> y)))
| Seq (c1, c2) -> aux (aux env c2) c1
| Choice (c1, c2) -> Set.Poly.union (aux env c1) (aux env c2)
in aux Set.Poly.empty c
let defined_vars c =
let rec aux env = function
| Skip -> env
| Assume _ -> env
| Subst ((x, s), _) -> Set.Poly.union (Set.Poly.singleton (x, s)) env
| Seq (c1, c2) -> aux (aux env c2) c1
| Choice (c1, c2) -> Set.Poly.inter (aux env c1) (aux env c2)
in aux Set.Poly.empty c
: Sig . COMPARABLE
type t = string
let compare = String.compare
let hash = String.hash
let equal = String.equal
end
type t = command
let compare = Stdlib.compare
let default = Skip
end
module G = Imperative.Digraph.ConcreteBidirectionalLabeled(V)(E)
let graph_of (trans : transition list) =
let g = G.create () in
List.iter trans ~f:(fun (f, c, t) ->
let v1 = G.V.create f in
let v2 = G.V.create t in
G.add_vertex g v1;
G.add_vertex g v2;
G.add_edge_e g (G.E.create v1 c v2));
g
let of_graph cfa : transition list =
G.fold_edges_e (fun (f, c, t) trans -> (f, c, t) :: trans) cfa []
exception Found_Edges of G.edge list
let contract_edges cfa =
try
G.iter_edges (fun s d -> let edges = G.find_all_edges cfa s d in if List.length edges > 1 then raise (Found_Edges edges) else ()) cfa;
false
with Found_Edges [] -> assert false
| Found_Edges (((s, _ ,d) :: _) as es) ->
let c = choice @@ List.map es ~f:(fun (_, c, _) -> c) in
G.remove_edge cfa s d;
G.add_edge_e cfa (s, c, d);
true
exception Found_Vertex_1_1 of G.edge * G.vertex * G.edge
let contract_vertex_1_1 s cfa =
try
G.iter_vertex (fun v ->
then () else
match G.pred_e cfa v, G.succ_e cfa v with
| [e1], [e2] -> raise (Found_Vertex_1_1 (e1, v, e2))
| _ -> ()) cfa;
false
with Found_Vertex_1_1 ((s, c1, _d), v, (_s, c2, d)) ->
G.remove_vertex cfa v;
G.add_edge_e cfa (s, seq [c1; c2], d);
true
exception Found_Vertex_1_n of G.edge * G.vertex * G.edge list
let contract_vertex_1_n s cfa =
try
G.iter_vertex (fun v ->
then () else
match G.pred_e cfa v, G.succ_e cfa v with
| [(_, c, _) as e], es when is_effect_free c -> raise (Found_Vertex_1_n (e, v, es))
| _ -> ()) cfa;
false
with Found_Vertex_1_n ((s, c1, _d), v, es) ->
G.remove_vertex cfa v;
List.iter es ~f:(fun (_s, c2, d) -> G.add_edge_e cfa (s, seq [c1; c2], d));
true
exception Found_Vertex_n_1 of G.edge list * G.vertex * G.edge
let contract_vertex_n_1 s cfa =
try
G.iter_vertex (fun v ->
then () else
match G.pred_e cfa v, G.succ_e cfa v with
| es, [(_, c, _) as e] when is_effect_free c -> raise (Found_Vertex_n_1 (es, v, e))
| _ -> ()) cfa;
false
with Found_Vertex_n_1 (es, v, (_s, c2, d)) ->
G.remove_vertex cfa v;
List.iter es ~f:(fun (s, c1, _d) -> G.add_edge_e cfa (s, seq [c1; c2], d));
true
let rec simplify s cfa =
if contract_edges cfa || contract_vertex_1_1 s cfa || contract_vertex_1_n s cfa || contract_vertex_n_1 s cfa then
simplify s cfa
else cfa
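(* simplify iterates the three contraction rules until none of them changes
   the graph, shrinking the CFA while the distinguished vertex s (skipped by
   the guards above) is kept as-is. *)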
module LiveVariables = Graph.Fixpoint.Make(G)
(struct
type vertex = G.E.vertex
type edge = G.E.t
type g = G.t
type data = sort_env_set
let direction = Graph.Fixpoint.Backward
let equal = Set.Poly.equal
let join = Set.Poly.union
let analyze (_, c, _) env =
let def = Set.Poly.map ~f:fst @@ defined_vars c in
let use = used_vars c in
Set.Poly.union use (Set.Poly.filter env ~f:(fun (x, _) -> not @@ Set.Poly.mem def x))
end)
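(* Classic backward live-variable analysis via ocamlgraph's Graph.Fixpoint:
   for an edge labelled c the transfer function is
     live_before = used_vars c U (live_after \ defined_vars c)
   and join is set union, with direction Backward. *)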
let analyze (s, e, c, trans) =
match s with
| None ->
(fun _ -> Set.Poly.empty), Set.Poly.empty, (s, e, c, trans)
| Some s ->
let cfa = simplify s (graph_of trans) in
let live_vars = LiveVariables.analyze (fun _ -> Set.Poly.empty) cfa in
let cut_points = G.fold_vertex (fun v s -> Set.Poly.add s v) cfa Set.Poly.empty in
let trans' = of_graph cfa in
live_vars, cut_points, (Some s, e, c, trans')
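(* analyze returns (live_vars, cut_points, program'): a function from CFA
   vertex to the variables live there, the set of vertices remaining after
   simplification (used as cut points), and the program rebuilt from the
   simplified CFA; without a start location it falls back to empty results. *)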
|
c2976f53af4be1735ea171064f119af1693f899349b1e8eac6104e91a42c005a | jeffshrager/biobike | json-builder.lisp | (in-package :com.gigamonkeys.json.parser)
Parser for JSON syntax ( < / > )
(defvar *empty-object* (make-symbol "EMPTY-OBJECT"))
(defchartype string-char '(not (member #\\ #\")))
(defchartype digit1-9
'(member #\1 #\2 #\3 #\4 #\5 #\6 #\7 #\8 #\9))
(defchartype digit
'(or (eql #\0) digit1-9))
(defchartype hex
'(or digit (member #\a #\b #\c #\d #\e #\f #\A #\B #\C #\D #\E #\F)))
(defprod ws ()
(* (/ #\Space #\Tab #\Newline)))
(defun save-in-hash (key-value-pair hash)
(setf (gethash (car key-value-pair) hash) (cdr key-value-pair)))
(defun save-key-value-pair (key-value-pair vector)
(vector-push-extend (car key-value-pair) vector)
(vector-push-extend (cdr key-value-pair) vector))
;;; Main productions
(defprod object ()
((^ "{" (make-array 5 :adjustable t :fill-pointer 0))
ws
(? (@ key-value-pair (save-key-value-pair key-value-pair object)))
(* ws "," ws (@ key-value-pair (save-key-value-pair key-value-pair object)))
ws (^ "}" (or (coerce object 'list) *empty-object*))))
(defprod key-value-pair ()
(^ (string ws ":" ws value) (cons string value)))
(defprod array ()
((^ "[" (make-array 5 :adjustable t :fill-pointer 0))
ws
(? (@ value (vector-push-extend value array)))
(* ws "," ws (@ value (vector-push-extend value array)))
ws "]"))
(defprod value ()
(/ (^ string)
(^ number)
(^ object)
(^ array)
(^ "true" :true)
(^ "false" :false)
(^ "null" :null)))
(defprod xvalue ()
(^ array (coerce array 'list)))
(defprod string ()
In JSON syntax , unlike full Javascript , only double - quoted strings are allowed .
((^ "\"" (make-array 10 :adjustable t :fill-pointer 0 :element-type 'character))
(* (@ char-or-escape (vector-push-extend char-or-escape string)))
"\""))
(defprod char-or-escape ()
(^ (/ escape string-char)))
(defprod escape ()
("\\"
(/ (^ "\"" #\")
(^ "\\" #\\)
(^ "/" #\/)
(^ "b" #\Backspace)
(^ "f" #\Page)
(^ "n" #\Newline)
(^ "r" #\Return)
(^ "t" #\Tab)
("u" (^ hex4 (code-char (parse-integer hex4 :radix 16)))))))
(defprod hex4 () (hex hex hex hex))
(defprod number ()
(^ number-syntax (let ((*read-default-float-format* 'double-float)) (read-from-string number-syntax))))
(defprod number-syntax ()
(int (? (/ (frac (? exp)) exp))))
(defprod int ()
((? "-") (/ (digit1-9 (* digit)) "0")))
(defprod frac () ("." (* digit)))
(defprod exp () (e (* digit)))
(defprod e () ((/ "e" "E") (? (/ "-" "+"))))
(defparser json-parser (^ value))
(defun parse-json (text)
"Parse json text into Lisp objects. Hash tables are used to
represent Javascript objects and vectors to represent arrays."
(fix-empty-object (nth-value 1 (json-parser text))))
(defun fix-empty-object (json)
(cond
((eql json *empty-object*) ())
((consp json) (mapcar #'fix-empty-object json))
((stringp json) json)
((vectorp json) (map 'vector #'fix-empty-object json))
(t json)))
(defmacro tjp (production input)
`((lambda (x)
(parselet ((foo (^ ,production)))
(foo x))) ,input))
| null | https://raw.githubusercontent.com/jeffshrager/biobike/5313ec1fe8e82c21430d645e848ecc0386436f57/BioLisp/ThirdParty/monkeylib/json/json-builder.lisp | lisp | Main productions | (in-package :com.gigamonkeys.json.parser)
Parser for JSON syntax ( < / > )
(defvar *empty-object* (make-symbol "EMPTY-OBJECT"))
(defchartype string-char '(not (member #\\ #\")))
(defchartype digit1-9
'(member #\1 #\2 #\3 #\4 #\5 #\6 #\7 #\8 #\9))
(defchartype digit
'(or (eql #\0) digit1-9))
(defchartype hex
'(or digit (member #\a #\b #\c #\d #\e #\f #\A #\B #\C #\D #\E #\F)))
(defprod ws ()
(* (/ #\Space #\Tab #\Newline)))
(defun save-in-hash (key-value-pair hash)
(setf (gethash (car key-value-pair) hash) (cdr key-value-pair)))
(defun save-key-value-pair (key-value-pair vector)
(vector-push-extend (car key-value-pair) vector)
(vector-push-extend (cdr key-value-pair) vector))
(defprod object ()
((^ "{" (make-array 5 :adjustable t :fill-pointer 0))
ws
(? (@ key-value-pair (save-key-value-pair key-value-pair object)))
(* ws "," ws (@ key-value-pair (save-key-value-pair key-value-pair object)))
ws (^ "}" (or (coerce object 'list) *empty-object*))))
(defprod key-value-pair ()
(^ (string ws ":" ws value) (cons string value)))
(defprod array ()
((^ "[" (make-array 5 :adjustable t :fill-pointer 0))
ws
(? (@ value (vector-push-extend value array)))
(* ws "," ws (@ value (vector-push-extend value array)))
ws "]"))
(defprod value ()
(/ (^ string)
(^ number)
(^ object)
(^ array)
(^ "true" :true)
(^ "false" :false)
(^ "null" :null)))
(defprod xvalue ()
(^ array (coerce array 'list)))
(defprod string ()
In JSON syntax , unlike full Javascript , only double - quoted strings are allowed .
((^ "\"" (make-array 10 :adjustable t :fill-pointer 0 :element-type 'character))
(* (@ char-or-escape (vector-push-extend char-or-escape string)))
"\""))
(defprod char-or-escape ()
(^ (/ escape string-char)))
(defprod escape ()
("\\"
(/ (^ "\"" #\")
(^ "\\" #\\)
(^ "/" #\/)
(^ "b" #\Backspace)
(^ "f" #\Page)
(^ "n" #\Newline)
(^ "r" #\Return)
(^ "t" #\Tab)
("u" (^ hex4 (code-char (parse-integer hex4 :radix 16)))))))
(defprod hex4 () (hex hex hex hex))
(defprod number ()
(^ number-syntax (let ((*read-default-float-format* 'double-float)) (read-from-string number-syntax))))
(defprod number-syntax ()
(int (? (/ (frac (? exp)) exp))))
(defprod int ()
((? "-") (/ (digit1-9 (* digit)) "0")))
(defprod frac () ("." (* digit)))
(defprod exp () (e (* digit)))
(defprod e () ((/ "e" "E") (? (/ "-" "+"))))
(defparser json-parser (^ value))
(defun parse-json (text)
"Parse json text into Lisp objects. Hash tables are used to
represent Javascript objects and vectors to represent arrays."
(fix-empty-object (nth-value 1 (json-parser text))))
(defun fix-empty-object (json)
(cond
((eql json *empty-object*) ())
((consp json) (mapcar #'fix-empty-object json))
((stringp json) json)
((vectorp json) (map 'vector #'fix-empty-object json))
(t json)))
(defmacro tjp (production input)
`((lambda (x)
(parselet ((foo (^ ,production)))
(foo x))) ,input))
|
70ad4defc3b734981887eb4a13ebbe89a87e28d17b808fe00c93d6809561426e | codinuum/cca | label.ml |
Copyright 2012 - 2020 Codinuum Software Lab < >
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
you may not use this file except in compliance with the License .
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing , software
distributed under the License is distributed on an " AS IS " BASIS ,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied .
See the License for the specific language governing permissions and
limitations under the License .
Copyright 2012-2020 Codinuum Software Lab <>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
open Printf
open Common
open Label_common
open Labels
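(* Node labels for the SystemVerilog/Verilog front end of the cca
   AST-differencing framework (an assumption based on the repository name):
   one variant per syntactic construct, with the printers below
   (to_string, to_simple_string, to_tag) used for debugging and tag/XML
   output. *)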
type t =
| Dummy
| Error
| Empty
| SourceText
| LibraryText
| CompilerDirective of CompilerDirective.t
| ModuleDeclaration of ModuleSpec.t * identifier
| UdpDeclaration of identifier
| NetDeclaration of identifier list
| BindDirective of identifier
| Expr of Expression.t
| Stmt of Statement.t
| NetType of NetType.t
| LocalParameterDeclaration of identifier list
| ParameterDeclaration of identifier list
| ParameterPortDeclaration
| ModuleBody
| Instantiation of identifier
| GateInstantiation of Gate.t
| ContinuousAssign
| Assign
| ConcurrentAssertionItem
| DeferredImmediateAssertionItem
| PpIdentifier of identifier
| PackedDimension
| ParamAssignment of identifier
| DefparamAssignment
| IdSelect of identifier
| Select
| Range
| RangePlus
| RangeMinus
| RangeForeach
| Root
| This
| Super
| Cellpin of identifier
| CellpinStar
| CellpinAnon
| DelayValue of identifier
| PackageScope of identifier
| PackageScopeUnit
| PackageScopeLocal
| PackageImport of identifier
| PackageImportAny
| LifetimeStatic
| LifetimeAutomatic
| EndLabel of identifier
| EndLabelNew
| ClassType of identifier
| DataType of DataType.t
| ImplicitDataType
| VarDeclAssignments
| Signed
| Unsigned
| ArgsDotted of identifier
| Tagged
| StructUnionBody
| StructUnionMember
| ClassScopeId of identifier
| Void
| EnumNameDeclaration of identifier
| EnumBody
| IdClassSel of identifier
| Variable of identifier
| Extern
| PackageImportDeclaration
| PackageImportItem of identifier
| Packed
| ParameterValueAssignment
| Ports
| PortsStar
| BitSelect
| VariableDeclAssignment of identifier
| DynamicArrayNew
| VariableDimension
| VariableDimensionStar
| GenItemBegin
| GenBlockId of identifier
| GenerateRegion
| Scalared
| Vectored
| DelayControl
| NetSig of identifier
| ParameterOverride
| PortDeclaration
| PortDirection of PortDirection.t
| Strength of Strength.t
| StrengthSupply0
| StrengthSupply1
| StrengthSpec
| VarDataType
| Port of identifier
| InterfacePort of identifier
| InterfacePortInterface
| ModportIdentifier of identifier
| PortMulti
| ExprScope
| ExprScopeThis
| ExprScopeSuper
| ExprScopeDot
| ExprScopeDotSuper
| CondPredicate
| CondPattern
| Dist
| DistItem
| DistWeight
| DistWeightRange
| ArrayRange
| ArrayRangePlus
| ArrayRangeMinus
| CastingTypeSimple
| CastingTypeSigned
| CastingTypeUnsigned
| CastingTypeString
| CastingTypeConst
| ValueRange
| Pattern
| PatternId of identifier
| PatternStar
| PatternTagged of identifier
| EventControl
| EventControlStar
| EventControlParenStar
| EventControlRepeat
| EvExpr of EventExpression.t
| CaseItem
| CaseItemDefault
| CaseInsideItem
| CaseInsideItemDefault
| CaseItems
| CaseItemsMatches
| CaseItemsInside
| With
| Args
| ConstraintBlock
| ForInit
| ForInitItemDT of identifier
| ForInitItemLval
| StreamingConcat
| OrderRL
| OrderLR
| StreamConcat
| Solve
| SolveBefore
| ActionBlock
| CycleDelay of string
| CycleDelayId of identifier
| CycleDelayParen
| Priority
| Unique
| Unique0
| InstRange
| InstName of identifier
| PExpr of PropertyExpression.t
| ClockingEvent of identifier
| ClockingEventParen
| PropertyCase
| PropertyCaseDefault
| DisableIff
| CycleDelayRange of string
| CycleDelayRangeId of identifier
| CycleDelayRangeParen
| CycleDelayRangeBracket
| CycleDelayRangeBracketStar
| CycleDelayRangeBracketPlus
| SExpr of SequenceExpression.t
| ConsecutiveRepetition
| NonconsecutiveRepetition
| GotoRepetition
| NetAlias
| InitialConstruct
| FinalConstruct
| AlwaysConstruct of AlwaysSpec.t
| ConcurrentAssertionItemLabeled of identifier
| ConcurrentAssertionStmt of ConcurrentAssertion.t
| DeferredImmediateAssertionItemLabeled of identifier
| DeferredImmediateAssertionStmt of DeferredImmediateAssertion.t
| SimpleImmediateAssertionStmt of SimpleImmediateAssertion.t
| CheckerInstantiation of identifier
| LoopGenerateConstruct
| GenvarDeclaration of identifier list
| GenvarIterationAssign of AssignmentOperator.t * identifier
| GenvarIterationIncOrDec of IncOrDecOperator.t * identifier
| GenvarIdDecl of identifier
| GenvarInitId of identifier
| GenvarInit
| SpecifyBlock
| SpecparamDeclaration
| SpecparamAssignmentId of identifier (* specparam assignment*)
| SpecparamAssignmentPulseControl of identifier
| PulsestyleDeclarationOnevent
| PulsestyleDeclarationOndetect
| ShowcancelledDeclaration
| NoshowcancelledDeclaration
| SpecifyTerminalDescriptor
| InputOrOutputId of identifier
| InterfaceIdentifier of identifier
| ProgramDeclaration of identifier
| InterfaceDeclaration of identifier
| InterfaceDeclarationExtern of identifier
| TimeUnitsDeclaration
| TimeUnit of string
| Timeprecision of string
| PackageDeclaration of identifier
| AnonymousProgram
| AnonymousProgramItemEmpty
| FunctionDeclaration of identifier
| FunctionPrototype of identifier
| FuncId of identifier
| FuncIdVoid of identifier
| FuncIdNew
| TfIdScoped of identifier
| TaskDeclaration of identifier
| TaskPrototype of identifier
| ClassCtorPrototype
| TfPortListPart
| TfBody
| TfPortDeclaration
| TfPortItemAssignment of identifier
| TfPortItem
| TfVariableIdentifier of identifier
| CheckerDeclaration of identifier
| PropertyDeclaration of identifier
| PropertyDeclBody
| PropertyPortItem
| PropertyPortItemDir
| PropertyPortItemAssignment of identifier
| SequenceDeclaration of identifier
| SequenceDeclBody
| LetDeclaration of identifier
| PropertyStatementSpec
| AssertionVariableDeclaration
| SequenceFormalTypeSequence
| SequenceFormalTypeUntyped
| DataDeclarationVar
| Const
| DataDeclarationVarClass
| TypeDeclaration of identifier
| ScopedType of identifier
| TypeIdentifier of identifier
| TypeDeclEnum
| TypeDeclStruct
| TypeDeclUnion
| TypeDeclClass
| VirtualInterfaceDeclaration of identifier
| ModportDeclaration of identifier list
| ModportItem of identifier
| ModportSimplePortsDecl
| ModportClockingDecl of identifier
| ModportTfPortsDeclImport
| ModportTfPortsDeclExport
| ModportSimplePort of identifier
| ModportSimplePortDot of identifier
| ModportTfPort of identifier
| CovergroupDeclaration of identifier
| Paren
| CoverageOption of identifier * identifier
| CoverPoint
| CoverPointLabeled of identifier
| CoverCross
| CoverCrossLabeled of identifier
| CrossItem of identifier
| Iff
| BinsList
| BinsEmpty
| SelectBins
| SelectBinsEmpty
| Bins of BinsSpec.t * identifier
| BinsSelection of BinsSpec.t * identifier
| BinsExpressionVar of identifier
| BinsExpression of identifier * identifier
| NBins
| SelCondBinsof
| SelExprNot
| SelExprAnd
| SelExprOr
| SelExprParen
| Intersect
| Wildcard
| TransSet
| TransRangeList
| RepeatRange
| TransItem
| TransRepetitionConsecutive (* trans repetition *)
| TransRepetitionNonconsecutive (* trans repetition *)
| TransRepetitionGoto (* trans repetition *)
| Default
| DefaultSequence
| OpenRangeList
| CoverageEventWith of identifier
| CoverageEventBlockEvent
| BlockEventExpression
| BlockEventExpressionBegin
| BlockEventExpressionEnd
| HierarchicalBtfIdentifier of identifier
| PackageExportDeclarationStar
| PackageExportDeclaration
| DpiImport of string
| DpiExportFunc of string * identifier
| DpiExportTask of string * identifier
| DpiImportLabel of identifier
| DpiTfImportPropertyContext
| DpiTfImportPropertyPure
| ExternConstraintDeclaration
| Static
| Virtual
| ClassDeclaration of identifier
| ClassExtends
| ClassItemEmpty
| ClassMethod
| Qualifier of Qualifier.t
| ClassBody
| ClassConstraint of identifier
| Pure
| ClassProperty
| PackageOrGenerateItemEmpty
| Forkjoin
| ExternTfDeclaration of identifier
| TimingCheck of TimingCheck.t
| SystemTimingCheck
| Notifier of identifier
| Delayed of identifier
| TimingCheckEvent
| TimingCheckEventControlPosedge
| TimingCheckEventControlNegedge
| TimingCheckEventControl
| EdgeDescriptor of string
| OverloadDeclaration of OverloadOperator.t * identifier
| Params
| ClockingDeclaration of identifier
| Global
| ClockingBody
| ClockingItemDefault
| ClockingItem
| DefaultSkewInput
| DefaultSkewOutput
| DefaultSkewInputOutput
| ClockingDirectionInput
| ClockingDirectionInputOutput
| ClockingDirectionInout
| ClockingSkewPosedge
| ClockingSkewNegedge
| ClockingSkewEdge
| ClockingSkew
| ClockingDeclAssign of identifier
| Production of identifier
| ProductionItem of identifier
| RsCodeBlock
| RsRule
| RsProductionList
| RsProductionListRandJoin
| WeightSpecInt of string
| WeightSpecId
| WeightSpec
| RsProdIf
| RsProdRepeat
| RsProdCase
| RsCaseItem
| RsCaseItemDefault
| CheckerOrGenerateItemEmpty
| ConditionalGenerateConstructCase
| ConditionalGenerateConstructIf
| ElaborationSystemTask of SystemTask.t
| CaseGenerateItem
| CaseGenerateItemDefault
| AssignmentPattern
| AssignmentPatternExpr
| PatternKey
| PatternKeyDefault
| PatternMember
| SimplePathDeclaration
| ParallelPathDescription
| FullPathDescription
| PathInputs
| PathOutputs
| PathDelayValue
| PolarityPlus
| PolarityMinus
| EdgePosedge
| EdgeNegedge
| EdgeSensitivePathDeclaration
| ParallelEdgeSensitivePathDescription
| FullEdgeSensitivePathDescription
| ParallelEdgeSensitivePathDescriptionSub
| FullEdgeSensitivePathDescriptionSub
| StateDependentPathDeclarationIf
| StateDependentPathDeclarationIfnone
| VariableLvalue
| AttributeInstance
| AttrSpec of identifier
| UdpPort of identifier
| UdpPortDeclaration
| UdpOutputDeclaration of identifier
| UdpOutputDeclarationReg of identifier
| UdpInputDeclaration
| UdpRegDeclaration of identifier
| SequentialBody
| CombinationalBody
| UdpInitialStmt of identifier * string
| SequentialEntry
| EdgeIndicator
| EdgeSymbol of string
| LevelSymbol of string
| OutputSymbol of string
| CombinationalEntry
| NextStateMinus
| UdpPortsStar
| UdpPorts
| UdpPortDecls
| UdpDeclarationPorts
| AttributeInstances
| ConfigDeclaration of identifier
| DesignStatement
| CellId of identifier
| LibraryIdentifier of identifier
| LiblistClause
| CellClause of identifier
| UseClause
| ColonConfig
| InstanceName
| InstanceIdentifier of identifier
| TopModuleIdentifier of identifier
| InstClause
| ConfigRuleStatementDefault
| ConfigRuleStatement
| LibraryDeclaration of identifier
| Incdir
| FilePathSpec of string
| IncludeStatement of string
| PragmaExpression of identifier
| PragmaValueTuple
| PragmaValueNum of string
| PragmaValueStr of string
| PragmaValueId of identifier
| PackageImportDecls
| ParamPorts
| Ranges
| VariableDimensions
| CaseConds
| NetDeclAssignments of identifier list
| ParamAssignments of identifier list
| MacroExpr of string
| MacroStmt of string
| Var
let to_string = function
| Dummy -> "Dummy"
| Error -> "Error"
| Empty -> "Empty"
| SourceText -> "SourceText"
| LibraryText -> "LibraryText"
| CompilerDirective cd -> CompilerDirective.to_string cd
| ModuleDeclaration(mspec, id) -> "ModuleDeclaration:"^(ModuleSpec.to_string mspec)^":"^id
| UdpDeclaration id -> "UdpDeclaration:"^id
| NetDeclaration ids -> "NetDeclaration:"^(String.concat "," ids)
| BindDirective id -> "BindDirective:"^id
| Expr e -> Expression.to_string e
| Stmt stmt -> Statement.to_string stmt
| NetType nt -> NetType.to_string nt
| LocalParameterDeclaration ids -> "LocalParameterDeclaration:"^(String.concat "," ids)
| ParameterDeclaration ids -> "ParameterDeclaration:"^(String.concat "," ids)
| ParameterPortDeclaration -> "ParameterPortDeclaration"
| ModuleBody -> "ModuleBody"
| Instantiation id -> "Instantiation:"^id
| GateInstantiation g -> "GateInstantiation:"^(Gate.to_string g)
| ContinuousAssign -> "ContinuousAssign"
| Assign -> "Assign"
| ConcurrentAssertionItem -> "ConcurrentAssertionItem"
| DeferredImmediateAssertionItem -> "DeferredImmediateAssertionItem"
| PpIdentifier id -> "PpIdentifier:"^id
| PackedDimension -> "PackedDimension"
| ParamAssignment id -> "ParamAssignment:"^id
| DefparamAssignment -> "DefparamAssignment"
| IdSelect id -> "IdSelect:"^id
| Select -> "Select"
| Range -> "Range"
| RangePlus -> "RangePlus"
| RangeMinus -> "RangeMinus"
| RangeForeach -> "RangeForeach"
| Root -> "Root"
| This -> "This"
| Super -> "Super"
| Cellpin id -> "Cellpin:"^id
| CellpinStar -> "CellpinStar"
| CellpinAnon -> "CellpinAnon"
| DelayValue id -> "DelayValue:"^id
| PackageScope id -> "PackageScope:"^id
| PackageScopeUnit -> "PackageScopeUnit"
| PackageScopeLocal -> "PackageScopeLocal"
| PackageImport id -> "PackageImport:"^id
| PackageImportAny -> "PackageImportAny"
| LifetimeStatic -> "LifetimeStatic"
| LifetimeAutomatic -> "LifetimeAutomatic"
| EndLabel id -> "EndLabel:"^id
| EndLabelNew -> "EndLabelNew"
| ClassType id -> "ClassType:"^id
| DataType dt -> DataType.to_string dt
| ImplicitDataType -> "ImplicitDataType"
| VarDeclAssignments -> "VarDeclAssignments"
| Signed -> "Signed"
| Unsigned -> "Unsigned"
| ArgsDotted id -> "ArgsDotted:"^id
| Tagged -> "Tagged"
| StructUnionBody -> "StructUnionBody"
| StructUnionMember -> "StructUnionMember"
| ClassScopeId id -> "ClassScopeId:"^id
| Void -> "Void"
| EnumNameDeclaration id -> "EnumNameDeclaration:"^id
| EnumBody -> "EnumBody"
| IdClassSel id -> "IdClassSel:"^id
| Variable id -> "Variable:"^id
| Extern -> "Extern"
| PackageImportDeclaration -> "PackageImportDeclaration"
| PackageImportItem id -> "PackageImportItem:"^id
| Packed -> "Packed"
| ParameterValueAssignment -> "ParameterValueAssignment"
| Ports -> "Ports"
| PortsStar -> "PortsStar"
| BitSelect -> "BitSelect"
| VariableDeclAssignment id -> "VariableDeclAssignment:"^id
| DynamicArrayNew -> "DynamicArrayNew"
| VariableDimension -> "VariableDimension"
| VariableDimensionStar -> "VariableDimensionStar"
| GenItemBegin -> "GenItemBegin"
| GenBlockId id -> "GenBlockId:"^id
| GenerateRegion -> "GenerateRegion"
| Scalared -> "Scalared"
| Vectored -> "Vectored"
| DelayControl -> "DelayControl"
| NetSig id -> "NetSig:"^id
| ParameterOverride -> "ParameterOverride"
| PortDeclaration -> "PortDeclaration"
| PortDirection pd -> "PortDirection:"^(PortDirection.to_string pd)
| Strength strength -> "Strength:"^(Strength.to_string strength)
| StrengthSupply0 -> "StrengthSupply0"
| StrengthSupply1 -> "StrengthSupply1"
| StrengthSpec -> "StrengthSpec"
| VarDataType -> "VarDataType"
| Port id -> "Port:"^id
| InterfacePort id -> "InterfacePort:"^id
| InterfacePortInterface -> "InterfacePortInterface"
| ModportIdentifier id -> "ModportIdentifier:"^id
| PortMulti -> "PortMulti"
| ExprScope -> "ExprScope"
| ExprScopeThis -> "ExprScopeThis"
| ExprScopeSuper -> "ExprScopeSuper"
| ExprScopeDot -> "ExprScopeDot"
| ExprScopeDotSuper -> "ExprScopeDotSuper"
| CondPredicate -> "CondPredicate"
| CondPattern -> "CondPattern"
| Dist -> "Dist"
| DistItem -> "DistItem"
| DistWeight -> "DistWeight"
| DistWeightRange -> "DistWeightRange"
| ArrayRange -> "ArrayRange"
| ArrayRangePlus -> "ArrayRangePlus"
| ArrayRangeMinus -> "ArrayRangeMinus"
| CastingTypeSimple -> "CastingTypeSimple"
| CastingTypeSigned -> "CastingTypeSigned"
| CastingTypeUnsigned -> "CastingTypeUnsigned"
| CastingTypeString -> "CastingTypeString"
| CastingTypeConst -> "CastingTypeConst"
| ValueRange -> "ValueRange"
| Pattern -> "Pattern"
| PatternId id -> "PatternId:"^id
| PatternStar -> "PatternStar"
| PatternTagged id -> "PatternTagged:"^id
| EventControl -> "EventControl"
| EventControlStar -> "EventControlStar"
| EventControlParenStar -> "EventControlParenStar"
| EventControlRepeat -> "EventControlRepeat"
| EvExpr ee -> EventExpression.to_string ee
| CaseItem -> "CaseItem"
| CaseItemDefault -> "CaseItemDefault"
| CaseInsideItem -> "CaseInsideItem"
| CaseInsideItemDefault -> "CaseInsideItemDefault"
| CaseItems -> "CaseItems"
| CaseItemsMatches -> "CaseItemsMatches"
| CaseItemsInside -> "CaseItemsInside"
| With -> "With"
| Args -> "Args"
| ConstraintBlock -> "ConstraintBlock"
| ForInit -> "ForInit"
| ForInitItemDT id -> "ForInitItemDT:"^id
| ForInitItemLval -> "ForInitItemLval"
| StreamingConcat -> "StreamingConcat"
| OrderRL -> "OrderRL"
| OrderLR -> "OrderLR"
| StreamConcat -> "StreamConcat"
| Solve -> "Solve"
| SolveBefore -> "SolveBefore"
| ActionBlock -> "ActionBlock"
| CycleDelay s -> "CycleDelay:"^s
| CycleDelayId id -> "CycleDelayId:"^id
| CycleDelayParen -> "CycleDelayParen"
| Priority -> "Priority"
| Unique -> "Unique"
| Unique0 -> "Unique0"
| InstRange -> "InstRange"
| InstName id -> "InstName:"^id
| PExpr pe -> PropertyExpression.to_string pe
| ClockingEvent id -> "ClockingEvent:"^id
| ClockingEventParen -> "ClockingEventParen"
| PropertyCase -> "PropertyCase"
| PropertyCaseDefault -> "PropertyCaseDefault"
| DisableIff -> "DisableIff"
| CycleDelayRange s -> "CycleDelayRange:"^s
| CycleDelayRangeId id -> "CycleDelayRangeId:"^id
| CycleDelayRangeParen -> "CycleDelayRangeParen"
| CycleDelayRangeBracket -> "CycleDelayRangeBracket"
| CycleDelayRangeBracketStar -> "CycleDelayRangeBracketStar"
| CycleDelayRangeBracketPlus -> "CycleDelayRangeBracketPlus"
| SExpr se -> SequenceExpression.to_string se
| ConsecutiveRepetition -> "ConsecutiveRepetition"
| NonconsecutiveRepetition -> "NonconsecutiveRepetition"
| GotoRepetition -> "GotoRepetition"
| NetAlias -> "NetAlias"
| InitialConstruct -> "InitialConstruct"
| FinalConstruct -> "FinalConstruct"
| AlwaysConstruct aspec -> "AlwaysConstruct:"^(AlwaysSpec.to_string aspec)
| ConcurrentAssertionItemLabeled id -> "ConcurrentAssertionItemLabeled:"^id
| ConcurrentAssertionStmt ca -> ConcurrentAssertion.to_string ca
| DeferredImmediateAssertionItemLabeled id -> "DeferredImmediateAssertionItemLabeled:"^id
| DeferredImmediateAssertionStmt dia -> DeferredImmediateAssertion.to_string dia
| SimpleImmediateAssertionStmt sia -> SimpleImmediateAssertion.to_string sia
| CheckerInstantiation id -> "CheckerInstantiation:"^id
| LoopGenerateConstruct -> "LoopGenerateConstruct"
| GenvarDeclaration ids -> "GenvarDeclaration:"^(String.concat "," ids)
| GenvarIterationAssign(ao, id) -> "GenvarIteration:"^(AssignmentOperator.to_string ao)^":"^id
| GenvarIterationIncOrDec(iod, id) -> "GenvarIteration:"^(IncOrDecOperator.to_string iod)^":"^id
| GenvarIdDecl id -> "GenvarIdDecl:"^id
| GenvarInitId id -> "GenvarInitId:"^id
| GenvarInit -> "GenvarInit"
| SpecifyBlock -> "SpecifyBlock"
| SpecparamDeclaration -> "SpecparamDeclaration"
| SpecparamAssignmentId id -> "SpecparamAssignmentId:"^id
| SpecparamAssignmentPulseControl id -> "SpecparamAssignmentPulseControl:"^id
| PulsestyleDeclarationOnevent -> "PulsestyleDeclarationOnevent"
| PulsestyleDeclarationOndetect -> "PulsestyleDeclarationOndetect"
| ShowcancelledDeclaration -> "ShowcancelledDeclaration"
| NoshowcancelledDeclaration -> "NoshowcancelledDeclaration"
| SpecifyTerminalDescriptor -> "SpecifyTerminalDescriptor"
| InputOrOutputId id -> "InputOrOutputId:"^id
| InterfaceIdentifier id -> "InterfaceIdentifier:"^id
| ProgramDeclaration id -> "ProgramDeclaration:"^id
| InterfaceDeclaration id -> "InterfaceDeclaration:"^id
| InterfaceDeclarationExtern id -> "InterfaceDeclarationExtern:"^id
| TimeUnitsDeclaration -> "TimeUnitsDeclaration"
| TimeUnit s -> "TimeUnit:"^s
| Timeprecision s -> "Timeprecision:"^s
| PackageDeclaration id -> "PackageDeclaration:"^id
| AnonymousProgram -> "AnonymousProgram"
| AnonymousProgramItemEmpty -> "AnonymousProgramItemEmpty"
| FunctionDeclaration id -> "FunctionDeclaration:"^id
| FunctionPrototype id -> "FunctionPrototype:"^id
| FuncId id -> "FuncId:"^id
| FuncIdVoid id -> "FuncIdVoid:"^id
| FuncIdNew -> "FuncIdNew"
| TfIdScoped id -> "TfIdScoped:"^id
| TaskDeclaration id -> "TaskDeclaration:"^id
| TaskPrototype id -> "TaskPrototype:"^id
| ClassCtorPrototype -> "ClassCtorPrototype"
| TfPortListPart -> "TfPortListPart"
| TfBody -> "TfBody"
| TfPortDeclaration -> "TfPortDeclaration"
| TfPortItemAssignment id -> "TfPortItemAssignment:"^id
| TfPortItem -> "TfPortItem"
| TfVariableIdentifier id -> "TfVariableIdentifier:"^id
| CheckerDeclaration id -> "CheckerDeclaration:"^id
| PropertyDeclaration id -> "PropertyDeclaration:"^id
| PropertyDeclBody -> "PropertyDeclBody"
| PropertyPortItem -> "PropertyPortItem"
| PropertyPortItemDir -> "PropertyPortItemDir"
| PropertyPortItemAssignment id -> "PropertyPortItemAssignment:"^id
| SequenceDeclaration id -> "SequenceDeclaration:"^id
| SequenceDeclBody -> "SequenceDeclBody"
| LetDeclaration id -> "LetDeclaration:"^id
| PropertyStatementSpec -> "PropertyStatementSpec"
| AssertionVariableDeclaration -> "AssertionVariableDeclaration"
| SequenceFormalTypeSequence -> "SequenceFormalTypeSequence"
| SequenceFormalTypeUntyped -> "SequenceFormalTypeUntyped"
| DataDeclarationVar -> "DataDeclarationVar"
| Const -> "Const"
| DataDeclarationVarClass -> "DataDeclarationVarClass"
| TypeDeclaration id -> "TypeDeclaration:"^id
| ScopedType id -> "ScopedType:"^id
| TypeIdentifier id -> "TypeIdentifier:"^id
| TypeDeclEnum -> "TypeDeclEnum"
| TypeDeclStruct -> "TypeDeclStruct"
| TypeDeclUnion -> "TypeDeclUnion"
| TypeDeclClass -> "TypeDeclClass"
| VirtualInterfaceDeclaration id -> "VirtualInterfaceDeclaration:"^id
| ModportDeclaration ids -> "ModportDeclaration:"^(String.concat "," ids)
| ModportItem id -> "ModportItem:"^id
| ModportSimplePortsDecl -> "ModportSimplePortsDecl"
| ModportClockingDecl id -> "ModportClockingDecl:"^id
| ModportTfPortsDeclImport -> "ModportTfPortsDeclImport"
| ModportTfPortsDeclExport -> "ModportTfPortsDeclExport"
| ModportSimplePort id -> "ModportSimplePort:"^id
| ModportSimplePortDot id -> "ModportSimplePortDot:"^id
| ModportTfPort id -> "ModportTfPort:"^id
| CovergroupDeclaration id -> "CovergroupDeclaration:"^id
| Paren -> "Paren"
| CoverageOption(id1, id2) -> "CoverageOption:"^id1^":"^id2
| CoverPoint -> "CoverPoint"
| CoverPointLabeled id -> "CoverPointLabeled:"^id
| CoverCross -> "CoverCross"
| CoverCrossLabeled id -> "CoverCrossLabeled:"^id
| CrossItem id -> "CrossItem:"^id
| Iff -> "Iff"
| BinsList -> "BinsList"
| BinsEmpty -> "BinsEmpty"
| SelectBins -> "SelectBins"
| SelectBinsEmpty -> "SelectBinsEmpty"
| Bins(bspec, id) -> "Bins:"^(BinsSpec.to_string bspec)^":"^id
| BinsSelection(bspec, id) -> "BinsSelection:"^(BinsSpec.to_string bspec)^":"^id
| BinsExpressionVar id -> "BinsExpressionVar:"^id
| BinsExpression(id1, id2) -> "BinsExpression:"^id1^":"^id2
| NBins -> "NBins"
| SelCondBinsof -> "SelCondBinsof"
| SelExprNot -> "SelExprNot"
| SelExprAnd -> "SelExprAnd"
| SelExprOr -> "SelExprOr"
| SelExprParen -> "SelExprParen"
| Intersect -> "Intersect"
| Wildcard -> "Wildcard"
| TransSet -> "TransSet"
| TransRangeList -> "TransRangeList"
| RepeatRange -> "RepeatRange"
| TransItem -> "TransItem"
| TransRepetitionConsecutive -> "TransRepetitionConsecutive"
| TransRepetitionNonconsecutive -> "TransRepetitionNonconsecutive"
| TransRepetitionGoto -> "TransRepetitionGoto"
| Default -> "Default"
| DefaultSequence -> "DefaultSequence"
| OpenRangeList -> "OpenRangeList"
| CoverageEventWith id -> "CoverageEventWith:"^id
| CoverageEventBlockEvent -> "CoverageEventBlockEvent"
| BlockEventExpression -> "BlockEventExpression"
| BlockEventExpressionBegin -> "BlockEventExpressionBegin"
| BlockEventExpressionEnd -> "BlockEventExpressionEnd"
| HierarchicalBtfIdentifier id -> "HierarchicalBtfIdentifier:"^id
| PackageExportDeclarationStar -> "PackageExportDeclarationStar"
| PackageExportDeclaration -> "PackageExportDeclaration"
| DpiImport s -> "DpiImport:"^s
| DpiExportFunc(s, id) -> "DpiExportFunc:"^s^":"^id
| DpiExportTask(s, id) -> "DpiExportTask:"^s^":"^id
| DpiImportLabel id -> "DpiImportLabel:"^id
| DpiTfImportPropertyContext -> "DpiTfImportPropertyContext"
| DpiTfImportPropertyPure -> "DpiTfImportPropertyPure"
| ExternConstraintDeclaration -> "ExternConstraintDeclaration"
| Static -> "Static"
| Virtual -> "Virtual"
| ClassDeclaration id -> "ClassDeclaration:"^id
| ClassExtends -> "ClassExtends"
| ClassItemEmpty -> "ClassItemEmpty"
| ClassMethod -> "ClassMethod"
| Qualifier q -> "Qualifier:"^(Qualifier.to_string q)
| ClassBody -> "ClassBody"
| ClassConstraint id -> "ClassConstraint:"^id
| Pure -> "Pure"
| ClassProperty -> "ClassProperty"
| PackageOrGenerateItemEmpty -> "PackageOrGenerateItemEmpty"
| Forkjoin -> "Forkjoin"
| ExternTfDeclaration id -> "ExternTfDeclaration:"^id
| TimingCheck tc -> TimingCheck.to_string tc
| SystemTimingCheck -> "SystemTimingCheck"
| Notifier id -> "Notifier:"^id
| Delayed id -> "Delayed:"^id
| TimingCheckEvent -> "TimingCheckEvent"
| TimingCheckEventControlPosedge -> "TimingCheckEventControlPosedge"
| TimingCheckEventControlNegedge -> "TimingCheckEventControlNegedge"
| TimingCheckEventControl -> "TimingCheckEventControl"
| EdgeDescriptor s -> "EdgeDescriptor:"^s
| OverloadDeclaration(oo, id) -> "OverloadDeclaration:"^(OverloadOperator.to_string oo)^":"^id
| Params -> "Params"
| ClockingDeclaration id -> "ClockingDeclaration:"^id
| Global -> "Global"
| ClockingBody -> "ClockingBody"
| ClockingItemDefault -> "ClockingItemDefault"
| ClockingItem -> "ClockingItem"
| DefaultSkewInput -> "DefaultSkewInput"
| DefaultSkewOutput -> "DefaultSkewOutput"
| DefaultSkewInputOutput -> "DefaultSkewInputOutput"
| ClockingDirectionInput -> "ClockingDirectionInput"
| ClockingDirectionInputOutput -> "ClockingDirectionInputOutput"
| ClockingDirectionInout -> "ClockingDirectionInout"
| ClockingSkewPosedge -> "ClockingSkewPosedge"
| ClockingSkewNegedge -> "ClockingSkewNegedge"
| ClockingSkewEdge -> "ClockingSkewEdge"
| ClockingSkew -> "ClockingSkew"
| ClockingDeclAssign id -> "ClockingDeclAssign:"^id
| Production id -> "Production:"^id
| ProductionItem id -> "ProductionItem:"^id
| RsCodeBlock -> "RsCodeBlock"
| RsRule -> "RsRule"
| RsProductionList -> "RsProductionList"
| RsProductionListRandJoin -> "RsProductionListRandJoin"
| WeightSpecInt s -> "WeightSpecInt:"^s
| WeightSpecId -> "WeightSpecId"
| WeightSpec -> "WeightSpec"
| RsProdIf -> "RsProdIf"
| RsProdRepeat -> "RsProdRepeat"
| RsProdCase -> "RsProdCase"
| RsCaseItem -> "RsCaseItem"
| RsCaseItemDefault -> "RsCaseItemDefault"
| CheckerOrGenerateItemEmpty -> "CheckerOrGenerateItemEmpty"
| ConditionalGenerateConstructCase -> "ConditionalGenerateConstructCase"
| ConditionalGenerateConstructIf -> "ConditionalGenerateConstructIf"
| ElaborationSystemTask st -> "ElaborationSystemTask:"^(SystemTask.to_string st)
| CaseGenerateItem -> "CaseGenerateItem"
| CaseGenerateItemDefault -> "CaseGenerateItemDefault"
| AssignmentPattern -> "AssignmentPattern"
| AssignmentPatternExpr -> "AssignmentPatternExpr"
| PatternKey -> "PatternKey"
| PatternKeyDefault -> "PatternKeyDefault"
| PatternMember -> "PatternMember"
| SimplePathDeclaration -> "SimplePathDeclaration"
| ParallelPathDescription -> "ParallelPathDescription"
| FullPathDescription -> "FullPathDescription"
| PathInputs -> "PathInputs"
| PathOutputs -> "PathOutputs"
| PathDelayValue -> "PathDelayValue"
| PolarityPlus -> "PolarityPlus"
| PolarityMinus -> "PolarityMinus"
| EdgePosedge -> "EdgePosedge"
| EdgeNegedge -> "EdgeNegedge"
| EdgeSensitivePathDeclaration -> "EdgeSensitivePathDeclaration"
| ParallelEdgeSensitivePathDescription -> "ParallelEdgeSensitivePathDescription"
| FullEdgeSensitivePathDescription -> "FullEdgeSensitivePathDescription"
| ParallelEdgeSensitivePathDescriptionSub -> "ParallelEdgeSensitivePathDescriptionSub"
| FullEdgeSensitivePathDescriptionSub -> "FullEdgeSensitivePathDescriptionSub"
| StateDependentPathDeclarationIf -> "StateDependentPathDeclarationIf"
| StateDependentPathDeclarationIfnone -> "StateDependentPathDeclarationIfnone"
| VariableLvalue -> "VariableLvalue"
| AttributeInstance -> "AttributeInstance"
| AttrSpec id -> "AttrSpec:"^id
| UdpPort id -> "UdpPort:"^id
| UdpPortDeclaration -> "UdpPortDeclaration"
| UdpOutputDeclaration id -> "UdpOutputDeclaration:"^id
| UdpOutputDeclarationReg id -> "UdpOutputDeclarationReg:"^id
| UdpInputDeclaration -> "UdpInputDeclaration"
| UdpRegDeclaration id -> "UdpRegDeclaration:"^id
| SequentialBody -> "SequentialBody"
| CombinationalBody -> "CombinationalBody"
| UdpInitialStmt(id, s) -> "UdpInitialStmt:"^id^":"^s
| SequentialEntry -> "SequentialEntry"
| EdgeIndicator -> "EdgeIndicator"
| EdgeSymbol s -> "EdgeSymbol:"^s
| LevelSymbol s -> "LevelSymbol:"^s
| OutputSymbol s -> "OutputSymbol:"^s
| CombinationalEntry -> "CombinationalEntry"
| NextStateMinus -> "NextStateMinus"
| UdpPortsStar -> "UdpPortsStar"
| UdpPorts -> "UdpPorts"
| UdpPortDecls -> "UdpPortDecls"
| UdpDeclarationPorts -> "UdpDeclarationPorts"
| AttributeInstances -> "AttributeInstances"
| ConfigDeclaration id -> "ConfigDeclaration:"^id
| DesignStatement -> "DesignStatement"
| CellId id -> "CellId:"^id
| LibraryIdentifier id -> "LibraryIdentifier:"^id
| LiblistClause -> "LiblistClause"
| CellClause id -> "CellClause:"^id
| UseClause -> "UseClause"
| ColonConfig -> "ColonConfig"
| InstanceName -> "InstanceName"
| InstanceIdentifier id -> "InstanceIdentifier:"^id
| TopModuleIdentifier id -> "TopModuleIdentifier:"^id
| InstClause -> "InstClause"
| ConfigRuleStatementDefault -> "ConfigRuleStatementDefault"
| ConfigRuleStatement -> "ConfigRuleStatement"
| LibraryDeclaration id -> "LibraryDeclaration:"^id
| Incdir -> "Incdir"
| FilePathSpec s -> "FilePathSpec:"^s
| IncludeStatement s -> "IncludeStatement:"^s
| PragmaExpression id -> "PragmaExpression:"^id
| PragmaValueTuple -> "PragmaValueTuple"
| PragmaValueNum s -> "PragmaValueNum:"^s
| PragmaValueStr s -> "PragmaValueStr:"^s
| PragmaValueId id -> "PragmaValueId:"^id
| PackageImportDecls -> "PackageImportDecls"
| ParamPorts -> "ParamPorts"
| Ranges -> "Ranges"
| VariableDimensions -> "VariableDimensions"
| CaseConds -> "CaseConds"
| NetDeclAssignments ids -> "NetDeclAssignments:"^(String.concat "," ids)
| ParamAssignments ids -> "ParamAssignments:"^(String.concat "," ids)
| MacroExpr s -> "MacroExpr:"^s
| MacroStmt s -> "MacroStmt:"^s
| Var -> "Var"
let to_simple_string = function
| Dummy -> "<dummy>"
| Error -> "<error>"
| Empty -> ";"
| SourceText -> "<source_text>"
| LibraryText -> "<library_text>"
| CompilerDirective cd -> CompilerDirective.to_simple_string cd
| ModuleDeclaration(mspec, id) -> (ModuleSpec.to_rep mspec)^" "^id
| UdpDeclaration id -> "primitive "^id
| NetDeclaration ids -> "<net_decl:"^(String.concat "," ids)^">"
| BindDirective id -> "bind "^id
| Expr e -> Expression.to_simple_string e
| Stmt stmt -> Statement.to_simple_string stmt
| NetType nt -> NetType.to_simple_string nt
| LocalParameterDeclaration ids -> "<local_param_decl>"
| ParameterDeclaration ids -> "<param_decl>"
| ParameterPortDeclaration -> "<param_port_decl>"
| ModuleBody -> "<module_body>"
| Instantiation id -> "<inst:"^id^">"
| GateInstantiation g -> Gate.to_simple_string g
| ContinuousAssign -> "<continuous_assign>"
| Assign -> "assign"
| ConcurrentAssertionItem -> "<concur_assert_item>"
| DeferredImmediateAssertionItem -> "<deferred_immediate_assert_item>"
| PpIdentifier id -> id
| PackedDimension -> "<packed_dim>"
| ParamAssignment id -> "<param_assign:"^id^">"
| DefparamAssignment -> "<def_param_assign>"
| IdSelect id -> id
| Select -> "<sel>"
| Range -> ":"
| RangePlus -> ":+"
| RangeMinus -> ":-"
| RangeForeach -> "<range_foreach>"
| Root -> "$root"
| This -> "this"
| Super -> "super"
| Cellpin id -> "."^id
| CellpinStar -> ".*"
| CellpinAnon -> "<cellpin_anon>"
| DelayValue id -> id
| PackageScope id -> id^"::"
| PackageScopeUnit -> "$unit::"
| PackageScopeLocal -> "local::"
| PackageImport id -> id
| PackageImportAny -> "*"
| LifetimeStatic -> "static"
| LifetimeAutomatic -> "automatic"
| EndLabel id -> ":"^id
| EndLabelNew -> ":new"
| ClassType id -> id
| DataType dt -> DataType.to_simple_string dt
| ImplicitDataType -> "<implicit_data_type>"
| VarDeclAssignments -> "<var_decl_assignments>"
| Signed -> "signed"
| Unsigned -> "unsigned"
| ArgsDotted id -> "."^id
| Tagged -> "tagged"
| StructUnionBody -> "<struct_union_body>"
| StructUnionMember -> "<struct_union_mem>"
| ClassScopeId id -> id
| Void -> "void"
| EnumNameDeclaration id -> id
| EnumBody -> "<enum_body>"
| IdClassSel id -> id
| Variable id -> id
| Extern -> "extern"
| PackageImportDeclaration -> "import"
| PackageImportItem id -> id^"::"
| Packed -> "packed"
| ParameterValueAssignment -> "<param_val_assign>"
| Ports -> "<ports>"
| PortsStar -> "(*)"
| BitSelect -> "<bit_sel>"
| VariableDeclAssignment id -> "<var_decl_assign:"^id^">"
| DynamicArrayNew -> "new[]"
| VariableDimension -> "<var_dim>"
| VariableDimensionStar -> "[*]"
| GenItemBegin -> "<gen_item_begin>"
| GenBlockId id -> ":"^id
| GenerateRegion -> "generate"
| Scalared -> "scalared"
| Vectored -> "vectored"
| DelayControl -> "#"
| NetSig id -> id
| ParameterOverride -> "defparam"
| PortDeclaration -> "<port_decl>"
| PortDirection pd -> PortDirection.to_simple_string pd
| Strength strength -> Strength.to_rep strength
| StrengthSupply0 -> "supply0"
| StrengthSupply1 -> "supply1"
| StrengthSpec -> "spec"
| VarDataType -> "<var_datatype>"
| Port id -> id
| InterfacePort id -> id
| InterfacePortInterface -> "interface"
| ModportIdentifier id -> id
| PortMulti -> "<port_multi>"
| ExprScope -> "<expr_scope>"
| ExprScopeThis -> "this"
| ExprScopeSuper -> "super"
| ExprScopeDot -> "."
| ExprScopeDotSuper -> ".super"
| CondPredicate -> "&&&"
| CondPattern -> "matches"
| Dist -> "dist"
| DistItem -> "<dist_item>"
| DistWeight -> ":="
| DistWeightRange -> ":/"
| ArrayRange -> "<array_range>"
| ArrayRangePlus -> ":+"
| ArrayRangeMinus -> ":-"
| CastingTypeSimple -> "<casting_type_simple>"
| CastingTypeSigned -> "signed"
| CastingTypeUnsigned -> "unsigned"
| CastingTypeString -> "string"
| CastingTypeConst -> "const"
| ValueRange -> "<value_range>"
| Pattern -> "<pat>"
| PatternId id -> "."^id
| PatternStar -> ".*"
| PatternTagged id -> "tagged:"^id
| EventControl -> "@"
| EventControlStar -> "@*"
| EventControlParenStar -> "@(*)"
| EventControlRepeat -> "repeat"
| EvExpr ee -> EventExpression.to_simple_string ee
| CaseItem -> "<case_item>"
| CaseItemDefault -> "default"
| CaseInsideItem -> "<case_inside_item>"
| CaseInsideItemDefault -> "default"
| CaseItems -> "<case_items>"
| CaseItemsMatches -> "matches"
| CaseItemsInside -> "inside"
| With -> "with"
| Args -> "<args>"
| ConstraintBlock -> "<constraint_block>"
| ForInit -> "<for_ini>"
| ForInitItemDT id -> id
| ForInitItemLval -> "<for_ini_item_lval>"
| StreamingConcat -> "<streaming_concat>"
| OrderRL -> "<<"
| OrderLR -> ">>"
| StreamConcat -> "<stream_concat>"
| Solve -> "solve"
| SolveBefore -> "<solve_before>"
| ActionBlock -> "<act_block>"
| CycleDelay s -> "##"^s
| CycleDelayId id -> "##"^id
| CycleDelayParen -> "##"
| Priority -> "priority"
| Unique -> "unique"
| Unique0 -> "unique0"
| InstRange -> "<inst_range>"
| InstName id -> id
| PExpr pe -> PropertyExpression.to_simple_string pe
| ClockingEvent id -> "@"^id
| ClockingEventParen -> "@"
| PropertyCase -> "<prop_case>"
| PropertyCaseDefault -> "default"
| DisableIff -> "disable iff"
| CycleDelayRange s -> "##"^s
| CycleDelayRangeId id -> "##"^id
| CycleDelayRangeParen -> "##"
| CycleDelayRangeBracket -> "##[]"
| CycleDelayRangeBracketStar -> "##[*]"
| CycleDelayRangeBracketPlus -> "##[+]"
| SExpr se -> SequenceExpression.to_simple_string se
| ConsecutiveRepetition -> "[*]"
| NonconsecutiveRepetition -> "[=]"
| GotoRepetition -> "[->]"
| NetAlias -> "alias"
| InitialConstruct -> "initial"
| FinalConstruct -> "final"
| AlwaysConstruct aspec -> AlwaysSpec.to_rep aspec
| ConcurrentAssertionItemLabeled id -> id^":"
| ConcurrentAssertionStmt ca -> ConcurrentAssertion.to_simple_string ca
| DeferredImmediateAssertionItemLabeled id -> id^":"
| DeferredImmediateAssertionStmt dia -> DeferredImmediateAssertion.to_simple_string dia
| SimpleImmediateAssertionStmt sia -> SimpleImmediateAssertion.to_simple_string sia
| CheckerInstantiation id -> id
| LoopGenerateConstruct -> "for"
| GenvarDeclaration ids -> "genvar"
| GenvarIterationAssign(ao, id) -> "genvar_iter:"^(AssignmentOperator.to_string ao)^":"^id
| GenvarIterationIncOrDec(iod, id) -> "genvar_iter:"^(IncOrDecOperator.to_string iod)^":"^id
| GenvarIdDecl id -> "<genvar_id_decl:"^id^">"
| GenvarInitId id -> "<genvar_ini_id:"^id^">"
| GenvarInit -> "genvar"
| SpecifyBlock -> "specify"
| SpecparamDeclaration -> "specparam"
| SpecparamAssignmentId id -> id
| SpecparamAssignmentPulseControl id -> "PATHPULSE$"^id
| PulsestyleDeclarationOnevent -> "pulsestyle_onevent"
| PulsestyleDeclarationOndetect -> "pulsestyle_ondetect"
| ShowcancelledDeclaration -> "showcancelled"
| NoshowcancelledDeclaration -> "noshowcancelled"
| SpecifyTerminalDescriptor -> "<spec_term_desc>"
| InputOrOutputId id -> id
| InterfaceIdentifier id -> id
| ProgramDeclaration id -> "program "^id
| InterfaceDeclaration id -> "interface "^id
| InterfaceDeclarationExtern id -> "extern interface "^id
| TimeUnitsDeclaration -> "timeunits"
| TimeUnit s -> s
| Timeprecision s -> s
| PackageDeclaration id -> "package "^id
| AnonymousProgram -> "program"
| AnonymousProgramItemEmpty -> ";"
| FunctionDeclaration id -> "function "^id
| FunctionPrototype id -> "function "^id
| FuncId id -> "<func_id:"^id^">"
| FuncIdVoid id -> "<fun_id_void:"^id^">"
| FuncIdNew -> "new"
| TfIdScoped id -> id
| TaskDeclaration id -> "task "^id
| TaskPrototype id -> "task "^id
| ClassCtorPrototype -> "<class_ctor_proto>"
| TfPortListPart -> "<tf_port_list_part>"
| TfBody -> "<tf_body>"
| TfPortDeclaration -> "<tf_port_decl>"
| TfPortItemAssignment id -> "<tf_port_item_assign:"^id^">"
| TfPortItem -> "<tf_port_item>"
| TfVariableIdentifier id -> id
| CheckerDeclaration id -> "checker "^id
| PropertyDeclaration id -> "property "^id
| PropertyDeclBody -> "<prop_decl_body>"
| PropertyPortItem -> "<prop_port_item>"
| PropertyPortItemDir -> "<prop_port_item_dir>"
| PropertyPortItemAssignment id -> "<prop_port_item_assign:"^id^">"
| SequenceDeclaration id -> "sequence "^id
| SequenceDeclBody -> "<seq_decl_body>"
| LetDeclaration id -> "let "^id
| PropertyStatementSpec -> "<prop_stmt_spec>"
| AssertionVariableDeclaration -> "<asser_var_decl>"
| SequenceFormalTypeSequence -> "sequence"
| SequenceFormalTypeUntyped -> "untyped"
| DataDeclarationVar -> "<data_decl_var>"
| Const -> "const"
| DataDeclarationVarClass -> "<data_decl_var_class>"
| TypeDeclaration id -> "typedef "^id
| ScopedType id -> "<scoped_type:"^id^">"
| TypeIdentifier id -> id
| TypeDeclEnum -> "enum"
| TypeDeclStruct -> "struct"
| TypeDeclUnion -> "union"
| TypeDeclClass -> "class"
| VirtualInterfaceDeclaration id -> "virtual interface "^id
| ModportDeclaration ids -> "modport "^(String.concat "," ids)
| ModportItem id -> "<modport_item:"^id^">"
| ModportSimplePortsDecl -> "<modport_simple_ports_decl>"
| ModportClockingDecl id -> "clocking "^id
| ModportTfPortsDeclImport -> "import"
| ModportTfPortsDeclExport -> "export"
| ModportSimplePort id -> id
| ModportSimplePortDot id -> "."^id
| ModportTfPort id -> id
| CovergroupDeclaration id -> "covergroup "^id
| Paren -> "()"
| CoverageOption(id1, id2) -> "<coverage_opt:"^id1^":"^id2^">"
| CoverPoint -> "coverpoint"
| CoverPointLabeled id -> id^":coverpoint"
| CoverCross -> "covercross"
| CoverCrossLabeled id -> id^":covercross"
| CrossItem id -> "<cross_item:"^id^">"
| Iff -> "iff"
| BinsList -> "<bins_list>"
| BinsEmpty -> ";"
| SelectBins -> "<sel_bins>"
| SelectBinsEmpty -> ";"
| Bins(bspec, id) -> (BinsSpec.to_rep bspec)^" "^id
| BinsSelection(bspec, id) -> (BinsSpec.to_rep bspec)^" "^id
| BinsExpressionVar id -> id
| BinsExpression(id1, id2) -> id1^"."^id2
| NBins -> "[]"
| SelCondBinsof -> "binsof"
| SelExprNot -> "!"
| SelExprAnd -> "&&"
| SelExprOr -> "||"
| SelExprParen -> "()"
| Intersect -> "intersect"
| Wildcard -> "wildcard"
| TransSet -> "<trans_set>"
| TransRangeList -> "<trans_range_list>"
| RepeatRange -> "<repeat_range>"
| TransItem -> "<trans_item>"
| TransRepetitionConsecutive -> "[*]"
| TransRepetitionNonconsecutive -> "[=]"
| TransRepetitionGoto -> "[->]"
| Default -> "default"
| DefaultSequence -> "default sequence"
| OpenRangeList -> "<open_range_list>"
| CoverageEventWith id -> "with function "^id
| CoverageEventBlockEvent -> "@@"
| BlockEventExpression -> "<block_ev_expr>"
| BlockEventExpressionBegin -> "begin"
| BlockEventExpressionEnd -> "end"
| HierarchicalBtfIdentifier id -> "<hierarchical_Btf_id:"^id^">"
| PackageExportDeclarationStar -> "export *::*"
| PackageExportDeclaration -> "export"
| DpiImport s -> "import "^s
| DpiExportFunc(s, id) -> "export "^s^" function "^id
| DpiExportTask(s, id) -> "export "^s^" task "^id
| DpiImportLabel id -> id^"="
| DpiTfImportPropertyContext -> "context"
| DpiTfImportPropertyPure -> "pure"
| ExternConstraintDeclaration -> "constraint"
| Static -> "static"
| Virtual -> "virtual"
| ClassDeclaration id -> "class "^id
| ClassExtends -> "extends"
| ClassItemEmpty -> ";"
| ClassMethod -> "<class_meth>"
| Qualifier q -> Qualifier.to_simple_string q
| ClassBody -> "<class_body>"
| ClassConstraint id -> "constraint "^id
| Pure -> "pure"
| ClassProperty -> "<class_prop>"
| PackageOrGenerateItemEmpty -> ";"
| Forkjoin -> "forkjoin"
| ExternTfDeclaration ids -> "extern"
| TimingCheck tc -> TimingCheck.to_simple_string tc
| SystemTimingCheck -> "<sys_timing_check>"
| Notifier id -> "<notifier:"^id^""
| Delayed id -> "<delayed:"^id^">"
| TimingCheckEvent -> "<timing_check_ev>"
| TimingCheckEventControlPosedge -> "posedge"
| TimingCheckEventControlNegedge -> "negedge"
| TimingCheckEventControl -> "edge"
| EdgeDescriptor s -> s
| OverloadDeclaration(oo, id) -> (OverloadOperator.to_simple_string oo)^" "^id
| Params -> "<params>"
| ClockingDeclaration id -> "clocking "^id
| Global -> "global"
| ClockingBody -> "<clocking_body>"
| ClockingItemDefault -> "default"
| ClockingItem -> "<clocking_item>"
| DefaultSkewInput -> "input"
| DefaultSkewOutput -> "output"
| DefaultSkewInputOutput -> "input output"
| ClockingDirectionInput -> "input"
| ClockingDirectionInputOutput -> "input output"
| ClockingDirectionInout -> "inout"
| ClockingSkewPosedge -> "posedge"
| ClockingSkewNegedge -> "negedge"
| ClockingSkewEdge -> "edge"
| ClockingSkew -> "<clocking_skew>"
| ClockingDeclAssign id -> "<clocking_decl_assign:"^id^">"
| Production id -> "<prod:"^id^">"
| ProductionItem id -> "<prod_item:"^id^">"
| RsCodeBlock -> "<rs_code_block>"
| RsRule -> "<rs_rule>"
| RsProductionList -> "<rs_prod_list>"
| RsProductionListRandJoin -> "rand join"
| WeightSpecInt s -> s
| WeightSpecId -> "<weight_spec_id>"
| WeightSpec -> "<weight_spec>"
| RsProdIf -> "if"
| RsProdRepeat -> "repeat"
| RsProdCase -> "case"
| RsCaseItem -> "<rs_case_item>"
| RsCaseItemDefault -> "default"
| CheckerOrGenerateItemEmpty -> ";"
| ConditionalGenerateConstructCase -> "case"
| ConditionalGenerateConstructIf -> "if"
| ElaborationSystemTask st -> SystemTask.to_simple_string st
| CaseGenerateItem -> "<case_gen_item>"
| CaseGenerateItemDefault -> "default"
| AssignmentPattern -> "<assign_pat>"
| AssignmentPatternExpr -> "<assign_pat_expr>"
| PatternKey -> "<pat_key>"
| PatternKeyDefault -> "default"
| PatternMember -> "<pat_mem>"
| SimplePathDeclaration -> "<simple_path_decl>"
| ParallelPathDescription -> "<par_path_desc>"
| FullPathDescription -> "<full_path_desc>"
| PathInputs -> "<path_inputs>"
| PathOutputs -> "<path_outputs>"
| PathDelayValue -> "<path_delay_val>"
| PolarityPlus -> "+"
| PolarityMinus -> "-"
| EdgePosedge -> "posedge"
| EdgeNegedge -> "negedge"
| EdgeSensitivePathDeclaration -> "<edge_sensitive_path_decl>"
| ParallelEdgeSensitivePathDescription -> "<par_edge_sensitive_path_desc>"
| FullEdgeSensitivePathDescription -> "<full_edge_sensitive_path_desc>"
| ParallelEdgeSensitivePathDescriptionSub -> "<par_edge_sensitive_path_desc_sub>"
| FullEdgeSensitivePathDescriptionSub -> "<full_edge_sensitive_path_desc_sub>"
| StateDependentPathDeclarationIf -> "if"
| StateDependentPathDeclarationIfnone -> "ifnone"
| VariableLvalue -> "<var_lval>"
| AttributeInstance -> "(* *)"
| AttrSpec id -> "<attr_spec:"^id^">"
| UdpPort id -> id
| UdpPortDeclaration -> "<udp_port_decl>"
| UdpOutputDeclaration id -> "output "^id
| UdpOutputDeclarationReg id -> "output reg "^id
| UdpInputDeclaration -> "input"
| UdpRegDeclaration id -> "reg "^id
| SequentialBody -> "<seq_body>"
| CombinationalBody -> "<combi_body>"
| UdpInitialStmt(id, s) -> "initial "^id^"="^s
| SequentialEntry -> "<seq_entry>"
| EdgeIndicator -> "<edge_ind>"
| EdgeSymbol s -> s
| LevelSymbol s -> s
| OutputSymbol s -> s
| CombinationalEntry -> "<combi_entry>"
| NextStateMinus -> "-"
| UdpPortsStar -> "(.*)"
| UdpPorts -> "<udp_ports>"
| UdpPortDecls -> "<udp_port_decls>"
| UdpDeclarationPorts -> "<udp_decl_ports>"
| AttributeInstances -> "<attr_insts>"
| ConfigDeclaration id -> "config "^id
| DesignStatement -> "design"
| CellId id -> id
| LibraryIdentifier id -> id
| LiblistClause -> "liblist"
| CellClause id -> "cell "^id
| UseClause -> "use"
| ColonConfig -> ":config"
| InstanceName -> "<instance_name>"
| InstanceIdentifier id -> id
| TopModuleIdentifier id -> id
| InstClause -> "instance"
| ConfigRuleStatementDefault -> "default"
| ConfigRuleStatement -> "<conf_rule_stmt>"
| LibraryDeclaration id -> "library "^id
| Incdir -> "-incdir"
| FilePathSpec s -> s
| IncludeStatement s -> "include "^s^";"
| PragmaExpression id -> "<pragma_expr:"^id^">"
| PragmaValueTuple -> "<pragma_val_tuple>"
| PragmaValueNum s -> s
| PragmaValueStr s -> s
| PragmaValueId id -> id
| PackageImportDecls -> "<package_import_decls>"
| ParamPorts -> "<param_ports>"
| Ranges -> "<ranges>"
| VariableDimensions -> "<variable_dimensions>"
| CaseConds -> "<case_conds>"
| NetDeclAssignments ids -> "<net_decl_assigns:"^(String.concat "," ids)^">"
| ParamAssignments ids -> "<param_assigns:"^(String.concat "," ids)^">"
| MacroExpr s -> "<macro_expr:"^s^">"
| MacroStmt s -> "<macro_stmt:"^s^">"
| Var -> "var"
let to_tag ?(strip=false) lab =
let name, attrs =
match lab with
| Dummy -> "DUMMY", []
| Error -> "ERROR", []
| Empty -> "EMPTY", []
| SourceText -> "SourceText", []
| LibraryText -> "LibraryText", []
| CompilerDirective cd -> CompilerDirective.to_tag cd
| ModuleDeclaration(mspec, id) -> "ModuleDecl", [spec_attr_name,ModuleSpec.to_rep mspec;ident_attr_name,id]
| UdpDeclaration id -> "UdpDecl", [ident_attr_name,id]
| NetDeclaration ids -> "NetDecl", [ident_attr_name,String.concat ";" ids]
| BindDirective id -> "BindDirective", [ident_attr_name,id]
| Expr e -> Expression.to_tag e
| Stmt stmt -> Statement.to_tag stmt
| NetType nt -> NetType.to_tag nt
| LocalParameterDeclaration ids -> "LocalParamDecl", [ident_attr_name,String.concat ";" ids]
| ParameterDeclaration ids -> "ParamDecl", [ident_attr_name,String.concat ";" ids]
| ParameterPortDeclaration -> "ParamPortDecl", []
| ModuleBody -> "ModuleBody", []
| Instantiation id -> "Instantiation", [ident_attr_name,id]
| GateInstantiation g -> "GateInstantiataion", ["gate",Gate.to_simple_string g]
| ContinuousAssign -> "ContinuousAssign", []
| Assign -> "Assign", []
| ConcurrentAssertionItem -> "ConcurrentAssertionItem", []
| DeferredImmediateAssertionItem -> "DeferredImmediateAssertItem", []
| PpIdentifier id -> "PpIdentifier", [ident_attr_name,id]
| PackedDimension -> "PackedDimension", []
| ParamAssignment id -> "ParamAssignment", [ident_attr_name,id]
| DefparamAssignment -> "DefparamAssign", []
| IdSelect id -> "IdSelect", [ident_attr_name,id]
| Select -> "Select", []
| Range -> "Range", []
| RangePlus -> "RangePlus", []
| RangeMinus -> "RangeMinus", []
| RangeForeach -> "RangeForeach", []
| Root -> "Root", []
| This -> "This", []
| Super -> "Super", []
| Cellpin id -> "Cellpin", [ident_attr_name,id]
| CellpinStar -> "Cellpin_Star", []
| CellpinAnon -> "Cellpin_Anon", []
| DelayValue id -> "DelayValue", [ident_attr_name,id]
| PackageScope id -> "PackageScope", [ident_attr_name,id]
| PackageScopeUnit -> "PackageScopeUnit", []
| PackageScopeLocal -> "PackageScopeLocal", []
| PackageImport id -> "PackageImport", [ident_attr_name,id]
| PackageImportAny -> "PackageImportAny", []
| LifetimeStatic -> "LifetimeStatic", []
| LifetimeAutomatic -> "LifetimeAutomatic", []
| EndLabel id -> "EndLabel", [ident_attr_name,id]
| EndLabelNew -> "EndLabelNew", []
| ClassType id -> "ClassType", [ident_attr_name,id]
| DataType dt -> DataType.to_tag dt
| ImplicitDataType -> "ImplicitDataType", []
| VarDeclAssignments -> "VarDeclAssignments", []
| Signed -> "Signed", []
| Unsigned -> "Unsigned", []
| ArgsDotted id -> "ArgsDotted", [ident_attr_name,id]
| Tagged -> "Tagged", []
| StructUnionBody -> "StructUnionBody", []
| StructUnionMember -> "StructUnionMem", []
| ClassScopeId id -> "ClassScopeId", [ident_attr_name,id]
| Void -> "Void", []
| EnumNameDeclaration id -> "EnumNameDecl", [ident_attr_name,id]
| EnumBody -> "EnumBody", []
| IdClassSel id -> "IdClassSel", [ident_attr_name,id]
| Variable id -> "Var", [ident_attr_name,id]
| Extern -> "Extern", []
| PackageImportDeclaration -> "PackageImportDecl", []
| PackageImportItem id -> "PackageImportItem", [ident_attr_name,id]
| Packed -> "Packed", []
| ParameterValueAssignment -> "ParamValAssign", []
| Ports -> "Ports", []
| PortsStar -> "PortsStar", []
| BitSelect -> "BitSel", []
| VariableDeclAssignment id -> "VarDeclAssign", [ident_attr_name,id]
| DynamicArrayNew -> "DynArrayNew", []
| VariableDimension -> "VarDim", []
| VariableDimensionStar -> "VarDimStar", []
| GenItemBegin -> "GenItemBegin", []
| GenBlockId id -> "GenBlockId", [ident_attr_name,id]
| GenerateRegion -> "GenerateRegion", []
| Scalared -> "Scalared", []
| Vectored -> "Vectored", []
| DelayControl -> "DelayControl", []
| NetSig id -> "NetSig", [ident_attr_name,id]
| ParameterOverride -> "ParamOverride", []
| PortDeclaration -> "PortDecl", []
| PortDirection pd -> PortDirection.to_tag pd
| Strength strength -> Strength.to_tag strength
| StrengthSupply0 -> "StrengthSupply0", []
| StrengthSupply1 -> "StrengthSupply1", []
| StrengthSpec -> "StrengthSpec", []
| VarDataType -> "VarDataType", []
| Port id -> "Port", [ident_attr_name,id]
| InterfacePort id -> "InterfacePort", [ident_attr_name,id]
| InterfacePortInterface -> "Interface", []
| ModportIdentifier id -> "ModportId", [ident_attr_name,id]
| PortMulti -> "PortMulti", []
| ExprScope -> "ExprScope", []
| ExprScopeThis -> "ExprScopeThis", []
| ExprScopeSuper -> "ExprScopeSuper", []
| ExprScopeDot -> "ExprScopeDot", []
| ExprScopeDotSuper -> "ExprScopeDotSuper", []
| CondPredicate -> "CondPred", []
| CondPattern -> "CondPat", []
| Dist -> "Dist", []
| DistItem -> "DistItem", []
| DistWeight -> "DistWeight", []
| DistWeightRange -> "DistWeightRange", []
| ArrayRange -> "ArrayRange", []
| ArrayRangePlus -> "ArrayRangePlus", []
| ArrayRangeMinus -> "ArrayRangeMinus", []
| CastingTypeSimple -> "CastingTypeSimple", []
| CastingTypeSigned -> "CastingTypeSigned", []
| CastingTypeUnsigned -> "CastingTypeUnsigned", []
| CastingTypeString -> "CastingTypeString", []
| CastingTypeConst -> "CastingTypeConst", []
| ValueRange -> "ValueRange", []
| Pattern -> "Pattern", []
| PatternId id -> "PatternId", [ident_attr_name,id]
| PatternStar -> "PatternStar", []
| PatternTagged id -> "PatternTagged", [ident_attr_name,id]
| EventControl -> "EventControl", []
| EventControlStar -> "EventControlStar", []
| EventControlParenStar -> "EventControlParenStar", []
| EventControlRepeat -> "EventControlRepeat", []
| EvExpr ee -> EventExpression.to_tag ee
| CaseItem -> "CaseItem", []
| CaseItemDefault -> "CaseItemDefault", []
| CaseInsideItem -> "CaseInsideItem", []
| CaseInsideItemDefault -> "CaseInsideItemDefault", []
| CaseItems -> "CaseItems", []
| CaseItemsMatches -> "CaseItemMatches", []
| CaseItemsInside -> "CaseItemInside", []
| With -> "With", []
| Args -> "Args", []
| ConstraintBlock -> "ConstraintBlock", []
| ForInit -> "ForInit", []
| ForInitItemDT id -> "ForInitItemDt", [ident_attr_name,id]
| ForInitItemLval -> "ForInitItemLval", []
| StreamingConcat -> "StreamingConcat", []
| OrderRL -> "OrderRL", []
| OrderLR -> "OrderLR", []
| StreamConcat -> "StreamConcat", []
| Solve -> "Solve", []
| SolveBefore -> "SolveBefore", []
| ActionBlock -> "ActionBlock", []
| CycleDelay s -> "CycleDelay", ["delay",XML.encode_string s]
| CycleDelayId id -> "CycleDelayId", [ident_attr_name,id]
| CycleDelayParen -> "CycleDelayParen", []
| Priority -> "Priority", []
| Unique -> "Unique", []
| Unique0 -> "Unique0", []
| InstRange -> "InstRange", []
| InstName id -> "InstName", [ident_attr_name,id]
| PExpr pe -> PropertyExpression.to_tag pe
| ClockingEvent id -> "ClockingEvent", [ident_attr_name,id]
| ClockingEventParen -> "ClockingEventParen", []
| PropertyCase -> "PropertyCase", []
| PropertyCaseDefault -> "PropertyCaseDefault", []
| DisableIff -> "DisableIff", []
| CycleDelayRange s -> "CycleDelayRange", ["delay",XML.encode_string s]
| CycleDelayRangeId id -> "CycleDelayRangeId", [ident_attr_name,id]
| CycleDelayRangeParen -> "CycleDelayRangeParen", []
| CycleDelayRangeBracket -> "CycleDelayRangeBracket", []
| CycleDelayRangeBracketStar -> "CycleDelayRangeBracketStar", []
| CycleDelayRangeBracketPlus -> "CycleDelayRangeBracketPlus", []
| SExpr se -> SequenceExpression.to_tag se
| ConsecutiveRepetition -> "ConsecutiveRepetition", []
| NonconsecutiveRepetition -> "NonconsecutiveRepetition", []
| GotoRepetition -> "GotoRepetition", []
| NetAlias -> "NetAlias", []
| InitialConstruct -> "InitialConstruct", []
| FinalConstruct -> "FinalConstruct", []
| AlwaysConstruct aspec -> "AlwaysConstruct", [spec_attr_name,AlwaysSpec.to_rep aspec]
| ConcurrentAssertionItemLabeled id -> "ConcurrentAssertionItem", [label_attr_name,id]
| ConcurrentAssertionStmt ca -> ConcurrentAssertion.to_tag ca
| DeferredImmediateAssertionItemLabeled id -> "DeferredImmediateAssertionItem", [label_attr_name,id]
| DeferredImmediateAssertionStmt dia -> DeferredImmediateAssertion.to_tag dia
| SimpleImmediateAssertionStmt sia -> SimpleImmediateAssertion.to_tag sia
| CheckerInstantiation id -> "CheckerInst", [ident_attr_name,id]
| LoopGenerateConstruct -> "LoopGenerateConstruct", []
| GenvarDeclaration ids -> "GenvarDecl", [ident_attr_name,String.concat ";" ids]
| GenvarIterationAssign(ao, id) -> "GenvarIter", ["op",AssignmentOperator.to_tag_name ao;ident_attr_name,id]
| GenvarIterationIncOrDec(iod, id) -> "GenvarIter", ["op",IncOrDecOperator.to_tag_name iod;ident_attr_name,id]
| GenvarIdDecl id -> "GenvarIdDecl", [ident_attr_name,id]
| GenvarInitId id -> "GenvarInitId", [ident_attr_name,id]
| GenvarInit -> "GenvarInit", []
| SpecifyBlock -> "SpecifyBlock", []
| SpecparamDeclaration -> "SpecparamDecl", []
| SpecparamAssignmentId id -> "SpecparamAssignmentId", [ident_attr_name,id]
| SpecparamAssignmentPulseControl id -> "SpecparamAssignmentPulseControl", [ident_attr_name,id]
| PulsestyleDeclarationOnevent -> "PulsestyleDeclOnevent", []
| PulsestyleDeclarationOndetect -> "PulsestyleDeclOndetect", []
| ShowcancelledDeclaration -> "Showcancelled", []
| NoshowcancelledDeclaration -> "Noshowcancelled", []
| SpecifyTerminalDescriptor -> "SpecifyTerminalDtor", []
| InputOrOutputId id -> "InputOrOutputId", [ident_attr_name,id]
| InterfaceIdentifier id -> "InterfaceId", [ident_attr_name,id]
| ProgramDeclaration id -> "ProgramDecl", [ident_attr_name,id]
| InterfaceDeclaration id -> "InterfaceDecl", [ident_attr_name,id]
| InterfaceDeclarationExtern id -> "InterfaceDeclExtern", [ident_attr_name,id]
| TimeUnitsDeclaration -> "TimeunitsDecl", []
| TimeUnit s -> "Timeunit", ["unit",XML.encode_string s]
| Timeprecision s -> "Timeprecision", ["precision",XML.encode_string s]
| PackageDeclaration id -> "PackageDecl", [ident_attr_name,id]
| AnonymousProgram -> "AnonymousProg", []
| AnonymousProgramItemEmpty -> "AnonymousProgItemEmpty", []
| FunctionDeclaration id -> "FunctionDecl", [ident_attr_name,id]
| FunctionPrototype id -> "FunctionPrototype", [ident_attr_name,id]
| FuncId id -> "FunctionId", [ident_attr_name,id]
| FuncIdVoid id -> "FunctionIdVoid", [ident_attr_name,id]
| FuncIdNew -> "FunctionIdNew", []
| TfIdScoped id -> "TfIdScoped", [ident_attr_name,id]
| TaskDeclaration id -> "TaskDecl", [ident_attr_name,id]
| TaskPrototype id -> "TaskPrototype", [ident_attr_name,id]
| ClassCtorPrototype -> "ClassCtorProto", []
| TfPortListPart -> "TfPortListPart", []
| TfBody -> "TfBody", []
| TfPortDeclaration -> "TfPortDecl", []
| TfPortItemAssignment id -> "TfPortItemAssign", [ident_attr_name,id]
| TfPortItem -> "TfPortItem", []
| TfVariableIdentifier id -> "TfVarId", [ident_attr_name,id]
| CheckerDeclaration id -> "CheckerDecl", [ident_attr_name,id]
| PropertyDeclaration id -> "PropertyDecl", [ident_attr_name,id]
| PropertyDeclBody -> "PropertyDeclBody", []
| PropertyPortItem -> "PropertyPortItem", []
| PropertyPortItemDir -> "PropertyPortItemDir", []
| PropertyPortItemAssignment id -> "PropertyPortItemAssign", [ident_attr_name,id]
| SequenceDeclaration id -> "SequenceDecl", [ident_attr_name,id]
| SequenceDeclBody -> "SequenceDeclBody", []
| LetDeclaration id -> "LetDecl", [ident_attr_name,id]
| PropertyStatementSpec -> "PropertyStmtSpec", []
| AssertionVariableDeclaration -> "AssertionVarDecl", []
| SequenceFormalTypeSequence -> "SequenceFormalTypeSequence", []
| SequenceFormalTypeUntyped -> "SequenceFormalTypeUntyped", []
| DataDeclarationVar -> "DataDeclVar", []
| Const -> "Const", []
| DataDeclarationVarClass -> "DataDeclVarClass", []
| TypeDeclaration id -> "TypeDecl", [ident_attr_name,id]
| ScopedType id -> "ScopedType", [ident_attr_name,id]
| TypeIdentifier id -> "TypeId", [ident_attr_name,id]
| TypeDeclEnum -> "TypeDeclEnum", []
| TypeDeclStruct -> "TypeDeclStruct", []
| TypeDeclUnion -> "TypeDeclUnion", []
| TypeDeclClass -> "TypeDeclClass", []
| VirtualInterfaceDeclaration id -> "VirtualInterfaceDecl", [ident_attr_name,id]
| ModportDeclaration ids -> "ModportDecl", [ident_attr_name,String.concat ";" ids]
| ModportItem id -> "ModportItem", [ident_attr_name,id]
| ModportSimplePortsDecl -> "ModportSimplePortsDecl", []
| ModportClockingDecl id -> "ModportClockingDecl", [ident_attr_name,id]
| ModportTfPortsDeclImport -> "ModportTfPortsDeclImport", []
| ModportTfPortsDeclExport -> "ModportTfPortsDeclExport", []
| ModportSimplePort id -> "ModportSimplePort", [ident_attr_name,id]
| ModportSimplePortDot id -> "ModportSimplePortDot", [ident_attr_name,id]
| ModportTfPort id -> "ModportTfPort", [ident_attr_name,id]
| CovergroupDeclaration id -> "CovergroupDecl", [ident_attr_name,id]
| Paren -> "Paren", []
| CoverageOption(id1, id2) -> "CoverageOption", ["inst",id1;"opt",id2]
| CoverPoint -> "Coverpoint", []
| CoverPointLabeled id -> "Coverpoint", [label_attr_name,id]
| CoverCross -> "Covercross", []
| CoverCrossLabeled id -> "Covercross", [label_attr_name,id]
| CrossItem id -> "CrossItem", [ident_attr_name,id]
| Iff -> "Iff", []
| BinsList -> "BinsList", []
| BinsEmpty -> "BinsEmpty", []
| SelectBins -> "SelectBins", []
| SelectBinsEmpty -> "SelectBinsEmpty", []
| Bins(bspec, id) -> "Bins", [spec_attr_name,BinsSpec.to_rep bspec;ident_attr_name,id]
| BinsSelection(bspec, id) -> "BinsSelection", [spec_attr_name,BinsSpec.to_rep bspec;ident_attr_name,id]
| BinsExpressionVar id -> "BinsExprVar", [ident_attr_name,id]
| BinsExpression(id1, id2) -> "BinsExpr", ["cover_point",id1;"bins",id2]
| NBins -> "NBins", []
| SelCondBinsof -> "SelCondBinsof", []
| SelExprNot -> "SelExprNot", []
| SelExprAnd -> "SelExprAnd", []
| SelExprOr -> "SelExprOr", []
| SelExprParen -> "SelExprParen", []
| Intersect -> "Intersect", []
| Wildcard -> "Wildcard", []
| TransSet -> "TransSet", []
| TransRangeList -> "TransRangeList", []
| RepeatRange -> "RepeatRange", []
| TransItem -> "TransItem", []
| TransRepetitionConsecutive -> "TransRepetitionConsecutive", []
| TransRepetitionNonconsecutive -> "TransRepetitionNonconsecutive", []
| TransRepetitionGoto -> "TransRepetitionGoto", []
| Default -> "Default", []
| DefaultSequence -> "DefaultSequence", []
| OpenRangeList -> "OpenRangeList", []
| CoverageEventWith id -> "CoverageEventWith", [ident_attr_name,id]
| CoverageEventBlockEvent -> "CoverageEventBlockEvent", []
| BlockEventExpression -> "BlockEventExpr", []
| BlockEventExpressionBegin -> "BlockEventExprBegin", []
| BlockEventExpressionEnd -> "BlockEventExprEnd", []
| HierarchicalBtfIdentifier id -> "HierarchicalBtfId", [ident_attr_name,id]
| PackageExportDeclarationStar -> "PackageExportDeclStar", []
| PackageExportDeclaration -> "PackageExportDecl", []
| DpiImport s -> "DpiImport", ["dpi_spec",XML.encode_string s]
| DpiExportFunc(s, id) -> "DpiExportFunc", ["dpi_spec",XML.encode_string s;ident_attr_name,id]
| DpiExportTask(s, id) -> "DpiExportTask", ["dpi_spec",XML.encode_string s;ident_attr_name,id]
| DpiImportLabel id -> "DpiImportLabel", [label_attr_name,id]
| DpiTfImportPropertyContext -> "DpiTfImportPropertyContext", []
| DpiTfImportPropertyPure -> "DpiTfImportPropertyPure", []
| ExternConstraintDeclaration -> "ExternConstraintDecl", []
| Static -> "Static", []
| Virtual -> "Virtual", []
| ClassDeclaration id -> "ClassDecl", [ident_attr_name,id]
| ClassExtends -> "ClassExtends", []
| ClassItemEmpty -> "ClassItemEmpty", []
| ClassMethod -> "ClassMethod", []
| Qualifier q -> Qualifier.to_tag q
| ClassBody -> "ClassBody", []
| ClassConstraint id -> "ClassConstraint", [ident_attr_name,id]
| Pure -> "Pure", []
| ClassProperty -> "ClassProperty", []
| PackageOrGenerateItemEmpty -> "PackageOrGenerateItemEmpty", []
| Forkjoin -> "Forkjoin", []
| ExternTfDeclaration id -> "ExternTfDecl", [ident_attr_name,id]
| TimingCheck tc -> TimingCheck.to_tag tc
| SystemTimingCheck -> "SystemTimingCheck", []
| Notifier id -> "Notifier", [ident_attr_name,id]
| Delayed id -> "Delayed", [ident_attr_name,id]
| TimingCheckEvent -> "TimingCheckEvent", []
| TimingCheckEventControlPosedge -> "TimingCheckEventControlPosedge", []
| TimingCheckEventControlNegedge -> "TimingCheckEventControlNegedge", []
| TimingCheckEventControl -> "TimingCheckEventControl", []
| EdgeDescriptor s -> "EdgeDesc", ["desc",s]
| OverloadDeclaration(oo, id) -> "OverloadDecl", ["op",OverloadOperator.to_tag_name oo;ident_attr_name,id]
| Params -> "Params", []
| ClockingDeclaration id -> "ClockingDecl", [ident_attr_name,id]
| Global -> "Global", []
| ClockingBody -> "ClockingBody", []
| ClockingItemDefault -> "ClockingItemDefault", []
| ClockingItem -> "ClockingItem", []
| DefaultSkewInput -> "defaultSkewInput", []
| DefaultSkewOutput -> "defaultSkewOutput", []
| DefaultSkewInputOutput -> "defaultSkewInputOutput", []
| ClockingDirectionInput -> "ClockingDirInput", []
| ClockingDirectionInputOutput -> "ClockingDirInputOutput", []
| ClockingDirectionInout -> "ClockingDirInout", []
| ClockingSkewPosedge -> "ClockingSkewPosedge", []
| ClockingSkewNegedge -> "ClockingSkewNegedge", []
| ClockingSkewEdge -> "ClockingSkewEdge", []
| ClockingSkew -> "ClockingSkew", []
| ClockingDeclAssign id -> "ClockingDeclAssign", [ident_attr_name,id]
| Production id -> "Production", [ident_attr_name,id]
| ProductionItem id -> "ProductionItem", [ident_attr_name,id]
| RsCodeBlock -> "RsCode_block", []
| RsRule -> "RsRule", []
| RsProductionList -> "RsProductionList", []
| RsProductionListRandJoin -> "RsProductionListRandJoin", []
| WeightSpecInt s -> "WeightSpecInt", [value_attr_name,XML.encode_string s]
| WeightSpecId -> "WeightSpecId", []
| WeightSpec -> "WeightSpec", []
| RsProdIf -> "RsProdIf", []
| RsProdRepeat -> "RsProdRepeat", []
| RsProdCase -> "RsProdCase", []
| RsCaseItem -> "RsCaseItem", []
| RsCaseItemDefault -> "RsCaseItemDefault", []
| CheckerOrGenerateItemEmpty -> "CheckerOrGenerateItemEmpty", []
| ConditionalGenerateConstructCase -> "ConditionalGenerateConstructCase", []
| ConditionalGenerateConstructIf -> "ConditionalGenerateConstructIf", []
| ElaborationSystemTask st -> "ElaborationSystemTask", ["task",SystemTask.to_simple_string st]
| CaseGenerateItem -> "CaseGenerateItem", []
| CaseGenerateItemDefault -> "CaseGenerateItemDefault", []
| AssignmentPattern -> "AssignmentPattern", []
| AssignmentPatternExpr -> "AssignmentPatternExpr", []
| PatternKey -> "PatternKey", []
| PatternKeyDefault -> "PatternKeyDefault", []
| PatternMember -> "PatternMember", []
| SimplePathDeclaration -> "SimplePathDecl", []
| ParallelPathDescription -> "ParallelPathDesc", []
| FullPathDescription -> "FullPathDesc", []
| PathInputs -> "PathInputs", []
| PathOutputs -> "PathOutputs", []
| PathDelayValue -> "PathDelayValue", []
| PolarityPlus -> "PolarityPlus", []
| PolarityMinus -> "PolarityMinus", []
| EdgePosedge -> "Posedge", []
| EdgeNegedge -> "Negedge", []
| EdgeSensitivePathDeclaration -> "EdgeSensitivePathDecl", []
| ParallelEdgeSensitivePathDescription -> "ParallelEdgeSensitivePathDesc", []
| FullEdgeSensitivePathDescription -> "FullEdgeSensitivePathDesc", []
| ParallelEdgeSensitivePathDescriptionSub -> "ParallelEdgeSensitivePathDesc_sub", []
| FullEdgeSensitivePathDescriptionSub -> "FullEdgeSensitivePathDesc_sub", []
| StateDependentPathDeclarationIf -> "StateDependentPathDeclIf", []
| StateDependentPathDeclarationIfnone -> "StateDependentPathDeclIfnone", []
| VariableLvalue -> "VariableLvalue", []
| AttributeInstance -> "AttributeInstance", []
| AttrSpec id -> "AttrSpec", [ident_attr_name,id]
| UdpPort id -> "UdpPort", [ident_attr_name,id]
| UdpPortDeclaration -> "UdpPortDecl", []
| UdpOutputDeclaration id -> "UdpOutputDecl", [ident_attr_name,id]
| UdpOutputDeclarationReg id -> "UdpOutputDeclReg", [ident_attr_name,id]
| UdpInputDeclaration -> "UdpInputDecl", []
| UdpRegDeclaration id -> "UdpRegDecl", [ident_attr_name,id]
| SequentialBody -> "SequentialBody", []
| CombinationalBody -> "CombinationalBody", []
| UdpInitialStmt(id, s) -> "UdpInitialStmt", ["output_port",id;value_attr_name,XML.encode_string s]
| SequentialEntry -> "SequentialEntry", []
| EdgeIndicator -> "EdgeIndicator", []
| EdgeSymbol s -> "EdgeSymbol", ["symbol",XML.encode_string s]
| LevelSymbol s -> "LevelSymbol", ["symbol",XML.encode_string s]
| OutputSymbol s -> "OutputSymbol", ["symbol",XML.encode_string s]
| CombinationalEntry -> "CombinationalEntry", []
| NextStateMinus -> "NextStateMinus", []
| UdpPortsStar -> "UdpPortsStar", []
| UdpPorts -> "UdpPorts", []
| UdpPortDecls -> "UdpPortDecls", []
| UdpDeclarationPorts -> "UdpDeclPorts", []
| AttributeInstances -> "AttributeInstances", []
| ConfigDeclaration id -> "ConfigDecl", [ident_attr_name,id]
| DesignStatement -> "DesignStmt", []
| CellId id -> "CellId", [ident_attr_name,id]
| LibraryIdentifier id -> "LibraryId", [ident_attr_name,id]
| LiblistClause -> "LiblistClause", []
| CellClause id -> "CellClause", [ident_attr_name,id]
| UseClause -> "UseClause", []
| ColonConfig -> "ColonConfig", []
| InstanceName -> "InstanceName", []
| InstanceIdentifier id -> "InstanceId", [ident_attr_name,id]
| TopModuleIdentifier id -> "TopModuleId", [ident_attr_name,id]
| InstClause -> "InstClause", []
| ConfigRuleStatementDefault -> "ConfigRuleStmtDefault", []
| ConfigRuleStatement -> "ConfigRuleStmt", []
| LibraryDeclaration id -> "LibraryDecl", [ident_attr_name,id]
| Incdir -> "Incdir", []
| FilePathSpec s -> "FilePathSpec", [path_attr_name,strlit_to_encoded_path s]
| IncludeStatement s -> "IncludeStmt", [path_attr_name,strlit_to_encoded_path s]
| PragmaExpression id -> "PragmaExpr", [ident_attr_name,id]
| PragmaValueTuple -> "PragmaValueTuple", []
| PragmaValueNum s -> "PragmaValueNumber", [value_attr_name,XML.encode_string s]
| PragmaValueStr s -> "PragmaValueString", [value_attr_name,XML.encode_string s]
| PragmaValueId id -> "PragmaValueId", [ident_attr_name,id]
| PackageImportDecls -> "PackageImportDecls", []
| ParamPorts -> "ParamPorts", []
| Ranges -> "Ranges", []
| VariableDimensions -> "VariableDimensions", []
| CaseConds -> "CaseConds", []
| NetDeclAssignments ids -> "NetDeclAssignments", [ident_attr_name,String.concat ";" ids]
| ParamAssignments ids -> "ParamAssignments", [ident_attr_name,String.concat ";" ids]
| MacroExpr s -> "MacroExpr", [ident_attr_name,s]
| MacroStmt s -> "MacroStmt", [ident_attr_name,s]
| Var -> "Var", []
in
name, attrs
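(* [get_identifiers lab] returns the identifier list carried by the
   list-valued declaration labels (net, modport, genvar, net/param
   assignments, local/parameter declarations); it raises [Not_found]
   for every other label. *)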
let get_identifiers = function
| NetDeclaration ids
| ModportDeclaration ids
| GenvarDeclaration ids
| NetDeclAssignments ids
| ParamAssignments ids
| LocalParameterDeclaration ids
| ParameterDeclaration ids
-> ids
| _ -> raise Not_found
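(* [get_identifier lab] extracts the single identifier attached to [lab],
   delegating to the sub-label modules for expressions, statements, data
   types and event/property/sequence expressions, and raises [Not_found]
   when the label carries no identifier.
   For example, [get_identifier (Port "clk")] evaluates to ["clk"]. *)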
let get_identifier lab =
DEBUG_MSG "\"%s\"" (to_string lab);
match lab with
| Expr e -> Expression.get_identifier e
| Stmt s -> Statement.get_identifier s
| DataType dt -> DataType.get_identifier dt
| EvExpr ee -> EventExpression.get_identifier ee
| PExpr pe -> PropertyExpression.get_identifier pe
| SExpr se -> SequenceExpression.get_identifier se
| ModuleDeclaration(_, id)
| UdpDeclaration id
| BindDirective id
| Instantiation id
| PpIdentifier id
| ParamAssignment id
| IdSelect id
| Cellpin id
| DelayValue id
| PackageScope id
| PackageImport id
| EndLabel id
| ClassType id
| ArgsDotted id
| ClassScopeId id
| EnumNameDeclaration id
| Variable id
| PackageImportItem id
| VariableDeclAssignment id
| GenBlockId id
| NetSig id
| Port id
| InterfacePort id
| ModportIdentifier id
| PatternId id
| PatternTagged id
| ForInitItemDT id
| CycleDelayId id
| InstName id
| ClockingEvent id
| CycleDelayRangeId id
| ConcurrentAssertionItemLabeled id
| DeferredImmediateAssertionItemLabeled id
| CheckerInstantiation id
| GenvarIterationAssign(_, id)
| GenvarIterationIncOrDec(_, id)
| GenvarIdDecl id
| GenvarInitId id
| SpecparamAssignmentId id
| SpecparamAssignmentPulseControl id
| InputOrOutputId id
| InterfaceIdentifier id
| ProgramDeclaration id
| InterfaceDeclaration id
| InterfaceDeclarationExtern id
| PackageDeclaration id
| FunctionDeclaration id
| FunctionPrototype id
| FuncId id
| FuncIdVoid id
| TfIdScoped id
| TaskDeclaration id
| TaskPrototype id
| TfPortItemAssignment id
| TfVariableIdentifier id
| CheckerDeclaration id
| PropertyDeclaration id
| PropertyPortItemAssignment id
| SequenceDeclaration id
| LetDeclaration id
| TypeDeclaration id
| ScopedType id
| TypeIdentifier id
| VirtualInterfaceDeclaration id
| ModportItem id
| ModportClockingDecl id
| ModportSimplePort id
| ModportSimplePortDot id
| ModportTfPort id
| CovergroupDeclaration id
| CoverageOption(_, id)
| CoverPointLabeled id
| CoverCrossLabeled id
| CrossItem id
| Bins(_, id)
| BinsSelection(_, id)
| BinsExpressionVar id
| BinsExpression(_, id)
| CoverageEventWith id
| HierarchicalBtfIdentifier id
| DpiExportFunc(_, id)
| DpiExportTask(_, id)
| ClassDeclaration id
| ClassConstraint id
| ExternTfDeclaration id
| Notifier id
| Delayed id
| OverloadDeclaration(_, id)
| ClockingDeclaration id
| ClockingDeclAssign id
| Production id
| ProductionItem id
| AttrSpec id
| UdpPort id
| UdpOutputDeclaration id
| UdpOutputDeclarationReg id
| UdpRegDeclaration id
| UdpInitialStmt(id, _)
| ConfigDeclaration id
| CellId id
| LibraryIdentifier id
| CellClause id
| InstanceIdentifier id
| TopModuleIdentifier id
| LibraryDeclaration id
-> id
| FuncIdNew -> "new"
| _ -> raise Not_found
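(* Conversions from expression-like labels to statement labels; both raise
   [Invalid_argument] when given a label of the wrong shape. *)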
let pexpr_to_stmt = function
| PExpr PropertyExpression.Case -> Stmt Statement.Case
| PExpr PropertyExpression.If -> Stmt Statement.Conditional
| PExpr x -> Stmt (Statement.PExpr x)
| x -> raise (Invalid_argument ("Ast.Label.pexpr_to_stmt: "^(to_string x)))
let expr_to_stmt = function
| Expr x -> Stmt (Statement.Expr x)
| _ -> raise (Invalid_argument "Ast.Label.expr_to_stmt")
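(* Shorthand constructors that wrap sub-label values into [t]. *)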
let expr_of_integral_number i = Expr (Expression.IntegralNumber i)
let expr e = Expr e
let expr_uo uo = Expr (Expression.UOp uo)
let expr_bo bo = Expr (Expression.BOp bo)
let expr_ao ao = Expr (Expression.OperatorAssignment ao)
let pexpr pe = PExpr pe
let sexpr se = SExpr se
let ev_expr ee = EvExpr ee
let stmt s = Stmt s
let stmt_ao ao = Stmt (Statement.OperatorAssignment ao)
let sia_stmt ss = SimpleImmediateAssertionStmt ss
let dia_stmt ds = DeferredImmediateAssertionStmt ds
let ca_stmt cs = ConcurrentAssertionStmt cs
let qualifier q = Qualifier q
let timing_check tc = TimingCheck tc
let data_type dt = DataType dt
let net_type nt = NetType nt
let compiler_directive cd = CompilerDirective cd
let is_error = function
| Error -> true
| _ -> false
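(* Illustrative usage (not part of the original source):
   [is_error Error] is [true], [is_error Empty] is [false], and
   [to_string (FuncIdVoid "f")] yields ["FuncIdVoid:f"]. *)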
| null | https://raw.githubusercontent.com/codinuum/cca/c22bbe5b4d8824695a5844b23309a8652d371e70/src/ast/analyzing/langs/verilog/parsing/src/label.ml | ocaml | specparam assignment
(*
   Copyright 2012-2020 Codinuum Software Lab <>

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
*)
open Printf
open Common
open Label_common
open Labels
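(* [t] labels every AST node produced by the Verilog/SystemVerilog front
   end; identifier-carrying constructors keep the relevant source names
   inline. *)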
type t =
| Dummy
| Error
| Empty
| SourceText
| LibraryText
| CompilerDirective of CompilerDirective.t
| ModuleDeclaration of ModuleSpec.t * identifier
| UdpDeclaration of identifier
| NetDeclaration of identifier list
| BindDirective of identifier
| Expr of Expression.t
| Stmt of Statement.t
| NetType of NetType.t
| LocalParameterDeclaration of identifier list
| ParameterDeclaration of identifier list
| ParameterPortDeclaration
| ModuleBody
| Instantiation of identifier
| GateInstantiation of Gate.t
| ContinuousAssign
| Assign
| ConcurrentAssertionItem
| DeferredImmediateAssertionItem
| PpIdentifier of identifier
| PackedDimension
| ParamAssignment of identifier
| DefparamAssignment
| IdSelect of identifier
| Select
| Range
| RangePlus
| RangeMinus
| RangeForeach
| Root
| This
| Super
| Cellpin of identifier
| CellpinStar
| CellpinAnon
| DelayValue of identifier
| PackageScope of identifier
| PackageScopeUnit
| PackageScopeLocal
| PackageImport of identifier
| PackageImportAny
| LifetimeStatic
| LifetimeAutomatic
| EndLabel of identifier
| EndLabelNew
| ClassType of identifier
| DataType of DataType.t
| ImplicitDataType
| VarDeclAssignments
| Signed
| Unsigned
| ArgsDotted of identifier
| Tagged
| StructUnionBody
| StructUnionMember
| ClassScopeId of identifier
| Void
| EnumNameDeclaration of identifier
| EnumBody
| IdClassSel of identifier
| Variable of identifier
| Extern
| PackageImportDeclaration
| PackageImportItem of identifier
| Packed
| ParameterValueAssignment
| Ports
| PortsStar
| BitSelect
| VariableDeclAssignment of identifier
| DynamicArrayNew
| VariableDimension
| VariableDimensionStar
| GenItemBegin
| GenBlockId of identifier
| GenerateRegion
| Scalared
| Vectored
| DelayControl
| NetSig of identifier
| ParameterOverride
| PortDeclaration
| PortDirection of PortDirection.t
| Strength of Strength.t
| StrengthSupply0
| StrengthSupply1
| StrengthSpec
| VarDataType
| Port of identifier
| InterfacePort of identifier
| InterfacePortInterface
| ModportIdentifier of identifier
| PortMulti
| ExprScope
| ExprScopeThis
| ExprScopeSuper
| ExprScopeDot
| ExprScopeDotSuper
| CondPredicate
| CondPattern
| Dist
| DistItem
| DistWeight
| DistWeightRange
| ArrayRange
| ArrayRangePlus
| ArrayRangeMinus
| CastingTypeSimple
| CastingTypeSigned
| CastingTypeUnsigned
| CastingTypeString
| CastingTypeConst
| ValueRange
| Pattern
| PatternId of identifier
| PatternStar
| PatternTagged of identifier
| EventControl
| EventControlStar
| EventControlParenStar
| EventControlRepeat
| EvExpr of EventExpression.t
| CaseItem
| CaseItemDefault
| CaseInsideItem
| CaseInsideItemDefault
| CaseItems
| CaseItemsMatches
| CaseItemsInside
| With
| Args
| ConstraintBlock
| ForInit
| ForInitItemDT of identifier
| ForInitItemLval
| StreamingConcat
| OrderRL
| OrderLR
| StreamConcat
| Solve
| SolveBefore
| ActionBlock
| CycleDelay of string
| CycleDelayId of identifier
| CycleDelayParen
| Priority
| Unique
| Unique0
| InstRange
| InstName of identifier
| PExpr of PropertyExpression.t
| ClockingEvent of identifier
| ClockingEventParen
| PropertyCase
| PropertyCaseDefault
| DisableIff
| CycleDelayRange of string
| CycleDelayRangeId of identifier
| CycleDelayRangeParen
| CycleDelayRangeBracket
| CycleDelayRangeBracketStar
| CycleDelayRangeBracketPlus
| SExpr of SequenceExpression.t
| ConsecutiveRepetition
| NonconsecutiveRepetition
| GotoRepetition
| NetAlias
| InitialConstruct
| FinalConstruct
| AlwaysConstruct of AlwaysSpec.t
| ConcurrentAssertionItemLabeled of identifier
| ConcurrentAssertionStmt of ConcurrentAssertion.t
| DeferredImmediateAssertionItemLabeled of identifier
| DeferredImmediateAssertionStmt of DeferredImmediateAssertion.t
| SimpleImmediateAssertionStmt of SimpleImmediateAssertion.t
| CheckerInstantiation of identifier
| LoopGenerateConstruct
| GenvarDeclaration of identifier list
| GenvarIterationAssign of AssignmentOperator.t * identifier
| GenvarIterationIncOrDec of IncOrDecOperator.t * identifier
| GenvarIdDecl of identifier
| GenvarInitId of identifier
| GenvarInit
| SpecifyBlock
| SpecparamDeclaration
| SpecparamAssignmentId of identifier
| SpecparamAssignmentPulseControl of identifier
| PulsestyleDeclarationOnevent
| PulsestyleDeclarationOndetect
| ShowcancelledDeclaration
| NoshowcancelledDeclaration
| SpecifyTerminalDescriptor
| InputOrOutputId of identifier
| InterfaceIdentifier of identifier
| ProgramDeclaration of identifier
| InterfaceDeclaration of identifier
| InterfaceDeclarationExtern of identifier
| TimeUnitsDeclaration
| TimeUnit of string
| Timeprecision of string
| PackageDeclaration of identifier
| AnonymousProgram
| AnonymousProgramItemEmpty
| FunctionDeclaration of identifier
| FunctionPrototype of identifier
| FuncId of identifier
| FuncIdVoid of identifier
| FuncIdNew
| TfIdScoped of identifier
| TaskDeclaration of identifier
| TaskPrototype of identifier
| ClassCtorPrototype
| TfPortListPart
| TfBody
| TfPortDeclaration
| TfPortItemAssignment of identifier
| TfPortItem
| TfVariableIdentifier of identifier
| CheckerDeclaration of identifier
| PropertyDeclaration of identifier
| PropertyDeclBody
| PropertyPortItem
| PropertyPortItemDir
| PropertyPortItemAssignment of identifier
| SequenceDeclaration of identifier
| SequenceDeclBody
| LetDeclaration of identifier
| PropertyStatementSpec
| AssertionVariableDeclaration
| SequenceFormalTypeSequence
| SequenceFormalTypeUntyped
| DataDeclarationVar
| Const
| DataDeclarationVarClass
| TypeDeclaration of identifier
| ScopedType of identifier
| TypeIdentifier of identifier
| TypeDeclEnum
| TypeDeclStruct
| TypeDeclUnion
| TypeDeclClass
| VirtualInterfaceDeclaration of identifier
| ModportDeclaration of identifier list
| ModportItem of identifier
| ModportSimplePortsDecl
| ModportClockingDecl of identifier
| ModportTfPortsDeclImport
| ModportTfPortsDeclExport
| ModportSimplePort of identifier
| ModportSimplePortDot of identifier
| ModportTfPort of identifier
| CovergroupDeclaration of identifier
| Paren
| CoverageOption of identifier * identifier
| CoverPoint
| CoverPointLabeled of identifier
| CoverCross
| CoverCrossLabeled of identifier
| CrossItem of identifier
| Iff
| BinsList
| BinsEmpty
| SelectBins
| SelectBinsEmpty
| Bins of BinsSpec.t * identifier
| BinsSelection of BinsSpec.t * identifier
| BinsExpressionVar of identifier
| BinsExpression of identifier * identifier
| NBins
| SelCondBinsof
| SelExprNot
| SelExprAnd
| SelExprOr
| SelExprParen
| Intersect
| Wildcard
| TransSet
| TransRangeList
| RepeatRange
| TransItem
| TransRepetitionConsecutive
| TransRepetitionNonconsecutive
| TransRepetitionGoto
| Default
| DefaultSequence
| OpenRangeList
| CoverageEventWith of identifier
| CoverageEventBlockEvent
| BlockEventExpression
| BlockEventExpressionBegin
| BlockEventExpressionEnd
| HierarchicalBtfIdentifier of identifier
| PackageExportDeclarationStar
| PackageExportDeclaration
| DpiImport of string
| DpiExportFunc of string * identifier
| DpiExportTask of string * identifier
| DpiImportLabel of identifier
| DpiTfImportPropertyContext
| DpiTfImportPropertyPure
| ExternConstraintDeclaration
| Static
| Virtual
| ClassDeclaration of identifier
| ClassExtends
| ClassItemEmpty
| ClassMethod
| Qualifier of Qualifier.t
| ClassBody
| ClassConstraint of identifier
| Pure
| ClassProperty
| PackageOrGenerateItemEmpty
| Forkjoin
| ExternTfDeclaration of identifier
| TimingCheck of TimingCheck.t
| SystemTimingCheck
| Notifier of identifier
| Delayed of identifier
| TimingCheckEvent
| TimingCheckEventControlPosedge
| TimingCheckEventControlNegedge
| TimingCheckEventControl
| EdgeDescriptor of string
| OverloadDeclaration of OverloadOperator.t * identifier
| Params
| ClockingDeclaration of identifier
| Global
| ClockingBody
| ClockingItemDefault
| ClockingItem
| DefaultSkewInput
| DefaultSkewOutput
| DefaultSkewInputOutput
| ClockingDirectionInput
| ClockingDirectionInputOutput
| ClockingDirectionInout
| ClockingSkewPosedge
| ClockingSkewNegedge
| ClockingSkewEdge
| ClockingSkew
| ClockingDeclAssign of identifier
| Production of identifier
| ProductionItem of identifier
| RsCodeBlock
| RsRule
| RsProductionList
| RsProductionListRandJoin
| WeightSpecInt of string
| WeightSpecId
| WeightSpec
| RsProdIf
| RsProdRepeat
| RsProdCase
| RsCaseItem
| RsCaseItemDefault
| CheckerOrGenerateItemEmpty
| ConditionalGenerateConstructCase
| ConditionalGenerateConstructIf
| ElaborationSystemTask of SystemTask.t
| CaseGenerateItem
| CaseGenerateItemDefault
| AssignmentPattern
| AssignmentPatternExpr
| PatternKey
| PatternKeyDefault
| PatternMember
| SimplePathDeclaration
| ParallelPathDescription
| FullPathDescription
| PathInputs
| PathOutputs
| PathDelayValue
| PolarityPlus
| PolarityMinus
| EdgePosedge
| EdgeNegedge
| EdgeSensitivePathDeclaration
| ParallelEdgeSensitivePathDescription
| FullEdgeSensitivePathDescription
| ParallelEdgeSensitivePathDescriptionSub
| FullEdgeSensitivePathDescriptionSub
| StateDependentPathDeclarationIf
| StateDependentPathDeclarationIfnone
| VariableLvalue
| AttributeInstance
| AttrSpec of identifier
| UdpPort of identifier
| UdpPortDeclaration
| UdpOutputDeclaration of identifier
| UdpOutputDeclarationReg of identifier
| UdpInputDeclaration
| UdpRegDeclaration of identifier
| SequentialBody
| CombinationalBody
| UdpInitialStmt of identifier * string
| SequentialEntry
| EdgeIndicator
| EdgeSymbol of string
| LevelSymbol of string
| OutputSymbol of string
| CombinationalEntry
| NextStateMinus
| UdpPortsStar
| UdpPorts
| UdpPortDecls
| UdpDeclarationPorts
| AttributeInstances
| ConfigDeclaration of identifier
| DesignStatement
| CellId of identifier
| LibraryIdentifier of identifier
| LiblistClause
| CellClause of identifier
| UseClause
| ColonConfig
| InstanceName
| InstanceIdentifier of identifier
| TopModuleIdentifier of identifier
| InstClause
| ConfigRuleStatementDefault
| ConfigRuleStatement
| LibraryDeclaration of identifier
| Incdir
| FilePathSpec of string
| IncludeStatement of string
| PragmaExpression of identifier
| PragmaValueTuple
| PragmaValueNum of string
| PragmaValueStr of string
| PragmaValueId of identifier
| PackageImportDecls
| ParamPorts
| Ranges
| VariableDimensions
| CaseConds
| NetDeclAssignments of identifier list
| ParamAssignments of identifier list
| MacroExpr of string
| MacroStmt of string
| Var
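(* [to_string lab] renders a label for debug output: the constructor name
   followed by its arguments, colon-separated. *)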
let to_string = function
| Dummy -> "Dummy"
| Error -> "Error"
| Empty -> "Empty"
| SourceText -> "SourceText"
| LibraryText -> "LibraryText"
| CompilerDirective cd -> CompilerDirective.to_string cd
| ModuleDeclaration(mspec, id) -> "ModuleDeclaration:"^(ModuleSpec.to_string mspec)^":"^id
| UdpDeclaration id -> "UdpDeclaration:"^id
| NetDeclaration ids -> "NetDeclaration:"^(String.concat "," ids)
| BindDirective id -> "BindDirective:"^id
| Expr e -> Expression.to_string e
| Stmt stmt -> Statement.to_string stmt
| NetType nt -> NetType.to_string nt
| LocalParameterDeclaration ids -> "LocalParameterDeclaration:"^(String.concat "," ids)
| ParameterDeclaration ids -> "ParameterDeclaration:"^(String.concat "," ids)
| ParameterPortDeclaration -> "ParameterPortDeclaration"
| ModuleBody -> "ModuleBody"
| Instantiation id -> "Instantiation:"^id
| GateInstantiation g -> "GateInstantiation:"^(Gate.to_string g)
| ContinuousAssign -> "ContinuousAssign"
| Assign -> "Assign"
| ConcurrentAssertionItem -> "ConcurrentAssertionItem"
| DeferredImmediateAssertionItem -> "DeferredImmediateAssertionItem"
| PpIdentifier id -> "PpIdentifier:"^id
| PackedDimension -> "PackedDimension"
| ParamAssignment id -> "ParamAssignment:"^id
| DefparamAssignment -> "DefparamAssignment"
| IdSelect id -> "IdSelect:"^id
| Select -> "Select"
| Range -> "Range"
| RangePlus -> "RangePlus"
| RangeMinus -> "RangeMinus"
| RangeForeach -> "RangeForeach"
| Root -> "Root"
| This -> "This"
| Super -> "Super"
| Cellpin id -> "Cellpin:"^id
| CellpinStar -> "CellpinStar"
| CellpinAnon -> "CellpinAnon"
| DelayValue id -> "DelayValue:"^id
| PackageScope id -> "PackageScope:"^id
| PackageScopeUnit -> "PackageScopeUnit"
| PackageScopeLocal -> "PackageScopeLocal"
| PackageImport id -> "PackageImport:"^id
| PackageImportAny -> "PackageImportAny"
| LifetimeStatic -> "LifetimeStatic"
| LifetimeAutomatic -> "LifetimeAutomatic"
| EndLabel id -> "EndLabel:"^id
| EndLabelNew -> "EndLabelNew"
| ClassType id -> "ClassType:"^id
| DataType dt -> DataType.to_string dt
| ImplicitDataType -> "ImplicitDataType"
| VarDeclAssignments -> "VarDeclAssignments"
| Signed -> "Signed"
| Unsigned -> "Unsigned"
| ArgsDotted id -> "ArgsDotted:"^id
| Tagged -> "Tagged"
| StructUnionBody -> "StructUnionBody"
| StructUnionMember -> "StructUnionMember"
| ClassScopeId id -> "ClassScopeId:"^id
| Void -> "Void"
| EnumNameDeclaration id -> "EnumNameDeclaration:"^id
| EnumBody -> "EnumBody"
| IdClassSel id -> "IdClassSel:"^id
| Variable id -> "Variable:"^id
| Extern -> "Extern"
| PackageImportDeclaration -> "PackageImportDeclaration"
| PackageImportItem id -> "PackageImportItem:"^id
| Packed -> "Packed"
| ParameterValueAssignment -> "ParameterValueAssignment"
| Ports -> "Ports"
| PortsStar -> "PortsStar"
| BitSelect -> "BitSelect"
| VariableDeclAssignment id -> "VariableDeclAssignment:"^id
| DynamicArrayNew -> "DynamicArrayNew"
| VariableDimension -> "VariableDimension"
| VariableDimensionStar -> "VariableDimensionStar"
| GenItemBegin -> "GenItemBegin"
| GenBlockId id -> "GenBlockId:"^id
| GenerateRegion -> "GenerateRegion"
| Scalared -> "Scalared"
| Vectored -> "Vectored"
| DelayControl -> "DelayControl"
| NetSig id -> "NetSig:"^id
| ParameterOverride -> "ParameterOverride"
| PortDeclaration -> "PortDeclaration"
| PortDirection pd -> "PortDirection:"^(PortDirection.to_string pd)
| Strength strength -> "Strength:"^(Strength.to_string strength)
| StrengthSupply0 -> "StrengthSupply0"
| StrengthSupply1 -> "StrengthSupply1"
| StrengthSpec -> "StrengthSpec"
| VarDataType -> "VarDataType"
| Port id -> "Port:"^id
| InterfacePort id -> "InterfacePort:"^id
| InterfacePortInterface -> "InterfacePortInterface"
| ModportIdentifier id -> "ModportIdentifier:"^id
| PortMulti -> "PortMulti"
| ExprScope -> "ExprScope"
| ExprScopeThis -> "ExprScopeThis"
| ExprScopeSuper -> "ExprScopeSuper"
| ExprScopeDot -> "ExprScopeDot"
| ExprScopeDotSuper -> "ExprScopeDotSuper"
| CondPredicate -> "CondPredicate"
| CondPattern -> "CondPattern"
| Dist -> "Dist"
| DistItem -> "DistItem"
| DistWeight -> "DistWeight"
| DistWeightRange -> "DistWeightRange"
| ArrayRange -> "ArrayRange"
| ArrayRangePlus -> "ArrayRangePlus"
| ArrayRangeMinus -> "ArrayRangeMinus"
| CastingTypeSimple -> "CastingTypeSimple"
| CastingTypeSigned -> "CastingTypeSigned"
| CastingTypeUnsigned -> "CastingTypeUnsigned"
| CastingTypeString -> "CastingTypeString"
| CastingTypeConst -> "CastingTypeConst"
| ValueRange -> "ValueRange"
| Pattern -> "Pattern"
| PatternId id -> "PatternId:"^id
| PatternStar -> "PatternStar"
| PatternTagged id -> "PatternTagged:"^id
| EventControl -> "EventControl"
| EventControlStar -> "EventControlStar"
| EventControlParenStar -> "EventControlParenStar"
| EventControlRepeat -> "EventControlRepeat"
| EvExpr ee -> EventExpression.to_string ee
| CaseItem -> "CaseItem"
| CaseItemDefault -> "CaseItemDefault"
| CaseInsideItem -> "CaseInsideItem"
| CaseInsideItemDefault -> "CaseInsideItemDefault"
| CaseItems -> "CaseItems"
| CaseItemsMatches -> "CaseItemsMatches"
| CaseItemsInside -> "CaseItemsInside"
| With -> "With"
| Args -> "Args"
| ConstraintBlock -> "ConstraintBlock"
| ForInit -> "ForInit"
| ForInitItemDT id -> "ForInitItemDT:"^id
| ForInitItemLval -> "ForInitItemLval"
| StreamingConcat -> "StreamingConcat"
| OrderRL -> "OrderRL"
| OrderLR -> "OrderLR"
| StreamConcat -> "StreamConcat"
| Solve -> "Solve"
| SolveBefore -> "SolveBefore"
| ActionBlock -> "ActionBlock"
| CycleDelay s -> "CycleDelay:"^s
| CycleDelayId id -> "CycleDelayId:"^id
| CycleDelayParen -> "CycleDelayParen"
| Priority -> "Priority"
| Unique -> "Unique"
| Unique0 -> "Unique0"
| InstRange -> "InstRange"
| InstName id -> "InstName:"^id
| PExpr pe -> PropertyExpression.to_string pe
| ClockingEvent id -> "ClockingEvent:"^id
| ClockingEventParen -> "ClockingEventParen"
| PropertyCase -> "PropertyCase"
| PropertyCaseDefault -> "PropertyCaseDefault"
| DisableIff -> "DisableIff"
| CycleDelayRange s -> "CycleDelayRange:"^s
| CycleDelayRangeId id -> "CycleDelayRangeId:"^id
| CycleDelayRangeParen -> "CycleDelayRangeParen"
| CycleDelayRangeBracket -> "CycleDelayRangeBracket"
| CycleDelayRangeBracketStar -> "CycleDelayRangeBracketStar"
| CycleDelayRangeBracketPlus -> "CycleDelayRangeBracketPlus"
| SExpr se -> SequenceExpression.to_string se
| ConsecutiveRepetition -> "ConsecutiveRepetition"
| NonconsecutiveRepetition -> "NonconsecutiveRepetition"
| GotoRepetition -> "GotoRepetition"
| NetAlias -> "NetAlias"
| InitialConstruct -> "InitialConstruct"
| FinalConstruct -> "FinalConstruct"
| AlwaysConstruct aspec -> "AlwaysConstruct:"^(AlwaysSpec.to_string aspec)
| ConcurrentAssertionItemLabeled id -> "ConcurrentAssertionItemLabeled:"^id
| ConcurrentAssertionStmt ca -> ConcurrentAssertion.to_string ca
| DeferredImmediateAssertionItemLabeled id -> "DeferredImmediateAssertionItemLabeled:"^id
| DeferredImmediateAssertionStmt dia -> DeferredImmediateAssertion.to_string dia
| SimpleImmediateAssertionStmt sia -> SimpleImmediateAssertion.to_string sia
| CheckerInstantiation id -> "CheckerInstantiation:"^id
| LoopGenerateConstruct -> "LoopGenerateConstruct"
| GenvarDeclaration ids -> "GenvarDeclaration:"^(String.concat "," ids)
| GenvarIterationAssign(ao, id) -> "GenvarIteration:"^(AssignmentOperator.to_string ao)^":"^id
| GenvarIterationIncOrDec(iod, id) -> "GenvarIteration:"^(IncOrDecOperator.to_string iod)^":"^id
| GenvarIdDecl id -> "GenvarIdDecl:"^id
| GenvarInitId id -> "GenvarInitId:"^id
| GenvarInit -> "GenvarInit"
| SpecifyBlock -> "SpecifyBlock"
| SpecparamDeclaration -> "SpecparamDeclaration"
| SpecparamAssignmentId id -> "SpecparamAssignmentId:"^id
| SpecparamAssignmentPulseControl id -> "SpecparamAssignmentPulseControl:"^id
| PulsestyleDeclarationOnevent -> "PulsestyleDeclarationOnevent"
| PulsestyleDeclarationOndetect -> "PulsestyleDeclarationOndetect"
| ShowcancelledDeclaration -> "ShowcancelledDeclaration"
| NoshowcancelledDeclaration -> "NoshowcancelledDeclaration"
| SpecifyTerminalDescriptor -> "SpecifyTerminalDescriptor"
| InputOrOutputId id -> "InputOrOutputId:"^id
| InterfaceIdentifier id -> "InterfaceIdentifier:"^id
| ProgramDeclaration id -> "ProgramDeclaration:"^id
| InterfaceDeclaration id -> "InterfaceDeclaration:"^id
| InterfaceDeclarationExtern id -> "InterfaceDeclarationExtern:"^id
| TimeUnitsDeclaration -> "TimeUnitsDeclaration"
| TimeUnit s -> "TimeUnit:"^s
| Timeprecision s -> "Timeprecision:"^s
| PackageDeclaration id -> "PackageDeclaration:"^id
| AnonymousProgram -> "AnonymousProgram"
| AnonymousProgramItemEmpty -> "AnonymousProgramItemEmpty"
| FunctionDeclaration id -> "FunctionDeclaration:"^id
| FunctionPrototype id -> "FunctionPrototype:"^id
| FuncId id -> "FuncId:"^id
| FuncIdVoid id -> "FuncIdVoid:"^id
| FuncIdNew -> "FuncIdNew"
| TfIdScoped id -> "TfIdScoped:"^id
| TaskDeclaration id -> "TaskDeclaration:"^id
| TaskPrototype id -> "TaskPrototype:"^id
| ClassCtorPrototype -> "ClassCtorPrototype"
| TfPortListPart -> "TfPortListPart"
| TfBody -> "TfBody"
| TfPortDeclaration -> "TfPortDeclaration"
| TfPortItemAssignment id -> "TfPortItemAssignment:"^id
| TfPortItem -> "TfPortItem"
| TfVariableIdentifier id -> "TfVariableIdentifier:"^id
| CheckerDeclaration id -> "CheckerDeclaration:"^id
| PropertyDeclaration id -> "PropertyDeclaration:"^id
| PropertyDeclBody -> "PropertyDeclBody"
| PropertyPortItem -> "PropertyPortItem"
| PropertyPortItemDir -> "PropertyPortItemDir"
| PropertyPortItemAssignment id -> "PropertyPortItemAssignment:"^id
| SequenceDeclaration id -> "SequenceDeclaration:"^id
| SequenceDeclBody -> "SequenceDeclBody"
| LetDeclaration id -> "LetDeclaration:"^id
| PropertyStatementSpec -> "PropertyStatementSpec"
| AssertionVariableDeclaration -> "AssertionVariableDeclaration"
| SequenceFormalTypeSequence -> "SequenceFormalTypeSequence"
| SequenceFormalTypeUntyped -> "SequenceFormalTypeUntyped"
| DataDeclarationVar -> "DataDeclarationVar"
| Const -> "Const"
| DataDeclarationVarClass -> "DataDeclarationVarClass"
| TypeDeclaration id -> "TypeDeclaration:"^id
| ScopedType id -> "ScopedType:"^id
| TypeIdentifier id -> "TypeIdentifier:"^id
| TypeDeclEnum -> "TypeDeclEnum"
| TypeDeclStruct -> "TypeDeclStruct"
| TypeDeclUnion -> "TypeDeclUnion"
| TypeDeclClass -> "TypeDeclClass"
| VirtualInterfaceDeclaration id -> "VirtualInterfaceDeclaration:"^id
| ModportDeclaration ids -> "ModportDeclaration:"^(String.concat "," ids)
| ModportItem id -> "ModportItem:"^id
| ModportSimplePortsDecl -> "ModportSimplePortsDecl"
| ModportClockingDecl id -> "ModportClockingDecl:"^id
| ModportTfPortsDeclImport -> "ModportTfPortsDeclImport"
| ModportTfPortsDeclExport -> "ModportTfPortsDeclExport"
| ModportSimplePort id -> "ModportSimplePort:"^id
| ModportSimplePortDot id -> "ModportSimplePortDot:"^id
| ModportTfPort id -> "ModportTfPort:"^id
| CovergroupDeclaration id -> "CovergroupDeclaration:"^id
| Paren -> "Paren"
| CoverageOption(id1, id2) -> "CoverageOption:"^id1^":"^id2
| CoverPoint -> "CoverPoint"
| CoverPointLabeled id -> "CoverPointLabeled:"^id
| CoverCross -> "CoverCross"
| CoverCrossLabeled id -> "CoverCrossLabeled:"^id
| CrossItem id -> "CrossItem:"^id
| Iff -> "Iff"
| BinsList -> "BinsList"
| BinsEmpty -> "BinsEmpty"
| SelectBins -> "SelectBins"
| SelectBinsEmpty -> "SelectBinsEmpty"
| Bins(bspec, id) -> "Bins:"^(BinsSpec.to_string bspec)^":"^id
| BinsSelection(bspec, id) -> "BinsSelection:"^(BinsSpec.to_string bspec)^":"^id
| BinsExpressionVar id -> "BinsExpressionVar:"^id
| BinsExpression(id1, id2) -> "BinsExpression:"^id1^":"^id2
| NBins -> "NBins"
| SelCondBinsof -> "SelCondBinsof"
| SelExprNot -> "SelExprNot"
| SelExprAnd -> "SelExprAnd"
| SelExprOr -> "SelExprOr"
| SelExprParen -> "SelExprParen"
| Intersect -> "Intersect"
| Wildcard -> "Wildcard"
| TransSet -> "TransSet"
| TransRangeList -> "TransRangeList"
| RepeatRange -> "RepeatRange"
| TransItem -> "TransItem"
| TransRepetitionConsecutive -> "TransRepetitionConsecutive"
| TransRepetitionNonconsecutive -> "TransRepetitionNonconsecutive"
| TransRepetitionGoto -> "TransRepetitionGoto"
| Default -> "Default"
| DefaultSequence -> "DefaultSequence"
| OpenRangeList -> "OpenRangeList"
| CoverageEventWith id -> "CoverageEventWith:"^id
| CoverageEventBlockEvent -> "CoverageEventBlockEvent"
| BlockEventExpression -> "BlockEventExpression"
| BlockEventExpressionBegin -> "BlockEventExpressionBegin"
| BlockEventExpressionEnd -> "BlockEventExpressionEnd"
| HierarchicalBtfIdentifier id -> "HierarchicalBtfIdentifier:"^id
| PackageExportDeclarationStar -> "PackageExportDeclarationStar"
| PackageExportDeclaration -> "PackageExportDeclaration"
| DpiImport s -> "DpiImport:"^s
| DpiExportFunc(s, id) -> "DpiExportFunc:"^s^":"^id
| DpiExportTask(s, id) -> "DpiExportTask:"^s^":"^id
| DpiImportLabel id -> "DpiImportLabel:"^id
| DpiTfImportPropertyContext -> "DpiTfImportPropertyContext"
| DpiTfImportPropertyPure -> "DpiTfImportPropertyPure"
| ExternConstraintDeclaration -> "ExternConstraintDeclaration"
| Static -> "Static"
| Virtual -> "Virtual"
| ClassDeclaration id -> "ClassDeclaration:"^id
| ClassExtends -> "ClassExtends"
| ClassItemEmpty -> "ClassItemEmpty"
| ClassMethod -> "ClassMethod"
| Qualifier q -> "Qualifier:"^(Qualifier.to_string q)
| ClassBody -> "ClassBody"
| ClassConstraint id -> "ClassConstraint:"^id
| Pure -> "Pure"
| ClassProperty -> "ClassProperty"
| PackageOrGenerateItemEmpty -> "PackageOrGenerateItemEmpty"
| Forkjoin -> "Forkjoin"
| ExternTfDeclaration id -> "ExternTfDeclaration:"^id
| TimingCheck tc -> TimingCheck.to_string tc
| SystemTimingCheck -> "SystemTimingCheck"
| Notifier id -> "Notifier:"^id
| Delayed id -> "Delayed:"^id
| TimingCheckEvent -> "TimingCheckEvent"
| TimingCheckEventControlPosedge -> "TimingCheckEventControlPosedge"
| TimingCheckEventControlNegedge -> "TimingCheckEventControlNegedge"
| TimingCheckEventControl -> "TimingCheckEventControl"
| EdgeDescriptor s -> "EdgeDescriptor:"^s
| OverloadDeclaration(oo, id) -> "OverloadDeclaration:"^(OverloadOperator.to_string oo)^":"^id
| Params -> "Params"
| ClockingDeclaration id -> "ClockingDeclaration:"^id
| Global -> "Global"
| ClockingBody -> "ClockingBody"
| ClockingItemDefault -> "ClockingItemDefault"
| ClockingItem -> "ClockingItem"
| DefaultSkewInput -> "DefaultSkewInput"
| DefaultSkewOutput -> "DefaultSkewOutput"
| DefaultSkewInputOutput -> "DefaultSkewInputOutput"
| ClockingDirectionInput -> "ClockingDirectionInput"
| ClockingDirectionInputOutput -> "ClockingDirectionInputOutput"
| ClockingDirectionInout -> "ClockingDirectionInout"
| ClockingSkewPosedge -> "ClockingSkewPosedge"
| ClockingSkewNegedge -> "ClockingSkewNegedge"
| ClockingSkewEdge -> "ClockingSkewEdge"
| ClockingSkew -> "ClockingSkew"
| ClockingDeclAssign id -> "ClockingDeclAssign:"^id
| Production id -> "Production:"^id
| ProductionItem id -> "ProductionItem:"^id
| RsCodeBlock -> "RsCodeBlock"
| RsRule -> "RsRule"
| RsProductionList -> "RsProductionList"
| RsProductionListRandJoin -> "RsProductionListRandJoin"
| WeightSpecInt s -> "WeightSpecInt:"^s
| WeightSpecId -> "WeightSpecId"
| WeightSpec -> "WeightSpec"
| RsProdIf -> "RsProdIf"
| RsProdRepeat -> "RsProdRepeat"
| RsProdCase -> "RsProdCase"
| RsCaseItem -> "RsCaseItem"
| RsCaseItemDefault -> "RsCaseItemDefault"
| CheckerOrGenerateItemEmpty -> "CheckerOrGenerateItemEmpty"
| ConditionalGenerateConstructCase -> "ConditionalGenerateConstructCase"
| ConditionalGenerateConstructIf -> "ConditionalGenerateConstructIf"
| ElaborationSystemTask st -> "ElaborationSystemTask:"^(SystemTask.to_string st)
| CaseGenerateItem -> "CaseGenerateItem"
| CaseGenerateItemDefault -> "CaseGenerateItemDefault"
| AssignmentPattern -> "AssignmentPattern"
| AssignmentPatternExpr -> "AssignmentPatternExpr"
| PatternKey -> "PatternKey"
| PatternKeyDefault -> "PatternKeyDefault"
| PatternMember -> "PatternMember"
| SimplePathDeclaration -> "SimplePathDeclaration"
| ParallelPathDescription -> "ParallelPathDescription"
| FullPathDescription -> "FullPathDescription"
| PathInputs -> "PathInputs"
| PathOutputs -> "PathOutputs"
| PathDelayValue -> "PathDelayValue"
| PolarityPlus -> "PolarityPlus"
| PolarityMinus -> "PolarityMinus"
| EdgePosedge -> "EdgePosedge"
| EdgeNegedge -> "EdgeNegedge"
| EdgeSensitivePathDeclaration -> "EdgeSensitivePathDeclaration"
| ParallelEdgeSensitivePathDescription -> "ParallelEdgeSensitivePathDescription"
| FullEdgeSensitivePathDescription -> "FullEdgeSensitivePathDescription"
| ParallelEdgeSensitivePathDescriptionSub -> "ParallelEdgeSensitivePathDescriptionSub"
| FullEdgeSensitivePathDescriptionSub -> "FullEdgeSensitivePathDescriptionSub"
| StateDependentPathDeclarationIf -> "StateDependentPathDeclarationIf"
| StateDependentPathDeclarationIfnone -> "StateDependentPathDeclarationIfnone"
| VariableLvalue -> "VariableLvalue"
| AttributeInstance -> "AttributeInstance"
| AttrSpec id -> "AttrSpec:"^id
| UdpPort id -> "UdpPort:"^id
| UdpPortDeclaration -> "UdpPortDeclaration"
| UdpOutputDeclaration id -> "UdpOutputDeclaration:"^id
| UdpOutputDeclarationReg id -> "UdpOutputDeclarationReg:"^id
| UdpInputDeclaration -> "UdpInputDeclaration"
| UdpRegDeclaration id -> "UdpRegDeclaration:"^id
| SequentialBody -> "SequentialBody"
| CombinationalBody -> "CombinationalBody"
| UdpInitialStmt(id, s) -> "UdpInitialStmt:"^id^":"^s
| SequentialEntry -> "SequentialEntry"
| EdgeIndicator -> "EdgeIndicator"
| EdgeSymbol s -> "EdgeSymbol:"^s
| LevelSymbol s -> "LevelSymbol:"^s
| OutputSymbol s -> "OutputSymbol:"^s
| CombinationalEntry -> "CombinationalEntry"
| NextStateMinus -> "NextStateMinus"
| UdpPortsStar -> "UdpPortsStar"
| UdpPorts -> "UdpPorts"
| UdpPortDecls -> "UdpPortDecls"
| UdpDeclarationPorts -> "UdpDeclarationPorts"
| AttributeInstances -> "AttributeInstances"
| ConfigDeclaration id -> "ConfigDeclaration:"^id
| DesignStatement -> "DesignStatement"
| CellId id -> "CellId:"^id
| LibraryIdentifier id -> "LibraryIdentifier:"^id
| LiblistClause -> "LiblistClause"
| CellClause id -> "CellClause:"^id
| UseClause -> "UseClause"
| ColonConfig -> "ColonConfig"
| InstanceName -> "InstanceName"
| InstanceIdentifier id -> "InstanceIdentifier:"^id
| TopModuleIdentifier id -> "TopModuleIdentifier:"^id
| InstClause -> "InstClause"
| ConfigRuleStatementDefault -> "ConfigRuleStatementDefault"
| ConfigRuleStatement -> "ConfigRuleStatement"
| LibraryDeclaration id -> "LibraryDeclaration:"^id
| Incdir -> "Incdir"
| FilePathSpec s -> "FilePathSpec:"^s
| IncludeStatement s -> "IncludeStatement:"^s
| PragmaExpression id -> "PragmaExpression:"^id
| PragmaValueTuple -> "PragmaValueTuple"
| PragmaValueNum s -> "PragmaValueNum:"^s
| PragmaValueStr s -> "PragmaValueStr:"^s
| PragmaValueId id -> "PragmaValueId:"^id
| PackageImportDecls -> "PackageImportDecls"
| ParamPorts -> "ParamPorts"
| Ranges -> "Ranges"
| VariableDimensions -> "VariableDimensions"
| CaseConds -> "CaseConds"
| NetDeclAssignments ids -> "NetDeclAssignments:"^(String.concat "," ids)
| ParamAssignments ids -> "ParamAssignments:"^(String.concat "," ids)
| MacroExpr s -> "MacroExpr:"^s
| MacroStmt s -> "MacroStmt:"^s
| Var -> "Var"
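(* [to_simple_string lab] renders a label close to its concrete syntax:
   keywords and operators where they exist, and "<...>" placeholders for
   purely structural nodes. *)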
let to_simple_string = function
| Dummy -> "<dummy>"
| Error -> "<error>"
| Empty -> ";"
| SourceText -> "<source_text>"
| LibraryText -> "<library_text>"
| CompilerDirective cd -> CompilerDirective.to_simple_string cd
| ModuleDeclaration(mspec, id) -> (ModuleSpec.to_rep mspec)^" "^id
| UdpDeclaration id -> "primitive "^id
| NetDeclaration ids -> "<net_decl:"^(String.concat "," ids)^">"
| BindDirective id -> "bind "^id
| Expr e -> Expression.to_simple_string e
| Stmt stmt -> Statement.to_simple_string stmt
| NetType nt -> NetType.to_simple_string nt
| LocalParameterDeclaration ids -> "<local_param_decl>"
| ParameterDeclaration ids -> "<param_decl>"
| ParameterPortDeclaration -> "<param_port_decl>"
| ModuleBody -> "<module_body>"
| Instantiation id -> "<inst:"^id^">"
| GateInstantiation g -> Gate.to_simple_string g
| ContinuousAssign -> "<continuous_assign>"
| Assign -> "assign"
| ConcurrentAssertionItem -> "<concur_assert_item>"
| DeferredImmediateAssertionItem -> "<deferred_immediate_assert_item>"
| PpIdentifier id -> id
| PackedDimension -> "<packed_dim>"
| ParamAssignment id -> "<param_assign:"^id^">"
| DefparamAssignment -> "<def_param_assign>"
| IdSelect id -> id
| Select -> "<sel>"
| Range -> ":"
| RangePlus -> ":+"
| RangeMinus -> ":-"
| RangeForeach -> "<range_foreach>"
| Root -> "$root"
| This -> "this"
| Super -> "super"
| Cellpin id -> "."^id
| CellpinStar -> ".*"
| CellpinAnon -> "<cellpin_anon>"
| DelayValue id -> id
| PackageScope id -> id^"::"
| PackageScopeUnit -> "$unit::"
| PackageScopeLocal -> "local::"
| PackageImport id -> id
| PackageImportAny -> "*"
| LifetimeStatic -> "static"
| LifetimeAutomatic -> "automatic"
| EndLabel id -> ":"^id
| EndLabelNew -> ":new"
| ClassType id -> id
| DataType dt -> DataType.to_simple_string dt
| ImplicitDataType -> "<implicit_data_type>"
| VarDeclAssignments -> "<var_decl_assignments>"
| Signed -> "signed"
| Unsigned -> "unsigned"
| ArgsDotted id -> "."^id
| Tagged -> "tagged"
| StructUnionBody -> "<struct_union_body>"
| StructUnionMember -> "<struct_union_mem>"
| ClassScopeId id -> id
| Void -> "void"
| EnumNameDeclaration id -> id
| EnumBody -> "<enum_body>"
| IdClassSel id -> id
| Variable id -> id
| Extern -> "extern"
| PackageImportDeclaration -> "import"
| PackageImportItem id -> id^"::"
| Packed -> "packed"
| ParameterValueAssignment -> "<param_val_assign>"
| Ports -> "<ports>"
| PortsStar -> "(*)"
| BitSelect -> "<bit_sel>"
| VariableDeclAssignment id -> "<var_decl_assign:"^id^">"
| DynamicArrayNew -> "new[]"
| VariableDimension -> "<var_dim>"
| VariableDimensionStar -> "[*]"
| GenItemBegin -> "<gen_item_begin>"
| GenBlockId id -> ":"^id
| GenerateRegion -> "generate"
| Scalared -> "scalared"
| Vectored -> "vectored"
| DelayControl -> "#"
| NetSig id -> id
| ParameterOverride -> "defparam"
| PortDeclaration -> "<port_decl>"
| PortDirection pd -> PortDirection.to_simple_string pd
| Strength strength -> Strength.to_rep strength
| StrengthSupply0 -> "supply0"
| StrengthSupply1 -> "supply1"
| StrengthSpec -> "spec"
| VarDataType -> "<var_datatype>"
| Port id -> id
| InterfacePort id -> id
| InterfacePortInterface -> "interface"
| ModportIdentifier id -> id
| PortMulti -> "<port_multi>"
| ExprScope -> "<expr_scope>"
| ExprScopeThis -> "this"
| ExprScopeSuper -> "super"
| ExprScopeDot -> "."
| ExprScopeDotSuper -> ".super"
| CondPredicate -> "&&&"
| CondPattern -> "matches"
| Dist -> "dist"
| DistItem -> "<dist_item>"
| DistWeight -> ":="
| DistWeightRange -> ":/"
| ArrayRange -> "<array_range>"
| ArrayRangePlus -> ":+"
| ArrayRangeMinus -> ":-"
| CastingTypeSimple -> "<casting_type_simple>"
| CastingTypeSigned -> "signed"
| CastingTypeUnsigned -> "unsigned"
| CastingTypeString -> "string"
| CastingTypeConst -> "const"
| ValueRange -> "<value_range>"
| Pattern -> "<pat>"
| PatternId id -> "."^id
| PatternStar -> ".*"
| PatternTagged id -> "tagged:"^id
| EventControl -> "@"
| EventControlStar -> "@*"
| EventControlParenStar -> "@(*)"
| EventControlRepeat -> "repeat"
| EvExpr ee -> EventExpression.to_simple_string ee
| CaseItem -> "<case_item>"
| CaseItemDefault -> "default"
| CaseInsideItem -> "<case_inside_item>"
| CaseInsideItemDefault -> "default"
| CaseItems -> "<case_items>"
| CaseItemsMatches -> "matches"
| CaseItemsInside -> "inside"
| With -> "with"
| Args -> "<args>"
| ConstraintBlock -> "<constraint_block>"
| ForInit -> "<for_ini>"
| ForInitItemDT id -> id
| ForInitItemLval -> "<for_ini_item_lval>"
| StreamingConcat -> "<streaming_concat>"
| OrderRL -> "<<"
| OrderLR -> ">>"
| StreamConcat -> "<stream_concat>"
| Solve -> "solve"
| SolveBefore -> "<solve_before>"
| ActionBlock -> "<act_block>"
| CycleDelay s -> "##"^s
| CycleDelayId id -> "##"^id
| CycleDelayParen -> "##"
| Priority -> "priority"
| Unique -> "unique"
| Unique0 -> "unique0"
| InstRange -> "<inst_range>"
| InstName id -> id
| PExpr pe -> PropertyExpression.to_simple_string pe
| ClockingEvent id -> "@"^id
| ClockingEventParen -> "@"
| PropertyCase -> "<prop_case>"
| PropertyCaseDefault -> "default"
| DisableIff -> "disable iff"
| CycleDelayRange s -> "##"^s
| CycleDelayRangeId id -> "##"^id
| CycleDelayRangeParen -> "##"
| CycleDelayRangeBracket -> "##[]"
| CycleDelayRangeBracketStar -> "##[*]"
| CycleDelayRangeBracketPlus -> "##[+]"
| SExpr se -> SequenceExpression.to_simple_string se
| ConsecutiveRepetition -> "[*]"
| NonconsecutiveRepetition -> "[=]"
| GotoRepetition -> "[->]"
| NetAlias -> "alias"
| InitialConstruct -> "initial"
| FinalConstruct -> "final"
| AlwaysConstruct aspec -> AlwaysSpec.to_rep aspec
| ConcurrentAssertionItemLabeled id -> id^":"
| ConcurrentAssertionStmt ca -> ConcurrentAssertion.to_simple_string ca
| DeferredImmediateAssertionItemLabeled id -> id^":"
| DeferredImmediateAssertionStmt dia -> DeferredImmediateAssertion.to_simple_string dia
| SimpleImmediateAssertionStmt sia -> SimpleImmediateAssertion.to_simple_string sia
| CheckerInstantiation id -> id
| LoopGenerateConstruct -> "for"
| GenvarDeclaration ids -> "genvar"
| GenvarIterationAssign(ao, id) -> "genvar_iter:"^(AssignmentOperator.to_string ao)^":"^id
| GenvarIterationIncOrDec(iod, id) -> "genvar_iter:"^(IncOrDecOperator.to_string iod)^":"^id
| GenvarIdDecl id -> "<genvar_id_decl:"^id^">"
| GenvarInitId id -> "<genvar_ini_id:"^id^">"
| GenvarInit -> "genvar"
| SpecifyBlock -> "specify"
| SpecparamDeclaration -> "specparam"
| SpecparamAssignmentId id -> id
| SpecparamAssignmentPulseControl id -> "PATHPULSE$"^id
| PulsestyleDeclarationOnevent -> "pulsestyle_onevent"
| PulsestyleDeclarationOndetect -> "pulsestyle_ondetect"
| ShowcancelledDeclaration -> "showcancelled"
| NoshowcancelledDeclaration -> "noshowcancelled"
| SpecifyTerminalDescriptor -> "<spec_term_desc>"
| InputOrOutputId id -> id
| InterfaceIdentifier id -> id
| ProgramDeclaration id -> "program "^id
| InterfaceDeclaration id -> "interface "^id
| InterfaceDeclarationExtern id -> "extern interface "^id
| TimeUnitsDeclaration -> "timeunits"
| TimeUnit s -> s
| Timeprecision s -> s
| PackageDeclaration id -> "package "^id
| AnonymousProgram -> "program"
| AnonymousProgramItemEmpty -> ";"
| FunctionDeclaration id -> "function "^id
| FunctionPrototype id -> "function "^id
| FuncId id -> "<func_id:"^id^">"
| FuncIdVoid id -> "<fun_id_void:"^id^">"
| FuncIdNew -> "new"
| TfIdScoped id -> id
| TaskDeclaration id -> "task "^id
| TaskPrototype id -> "task "^id
| ClassCtorPrototype -> "<class_ctor_proto>"
| TfPortListPart -> "<tf_port_list_part>"
| TfBody -> "<tf_body>"
| TfPortDeclaration -> "<tf_port_decl>"
| TfPortItemAssignment id -> "<tf_port_item_assign:"^id^">"
| TfPortItem -> "<tf_port_item>"
| TfVariableIdentifier id -> id
| CheckerDeclaration id -> "checker "^id
| PropertyDeclaration id -> "property "^id
| PropertyDeclBody -> "<prop_decl_body>"
| PropertyPortItem -> "<prop_port_item>"
| PropertyPortItemDir -> "<prop_port_item_dir>"
| PropertyPortItemAssignment id -> "<prop_port_item_assign:"^id^">"
| SequenceDeclaration id -> "sequence "^id
| SequenceDeclBody -> "<seq_decl_body>"
| LetDeclaration id -> "let "^id
| PropertyStatementSpec -> "<prop_stmt_spec>"
| AssertionVariableDeclaration -> "<asser_var_decl>"
| SequenceFormalTypeSequence -> "sequence"
| SequenceFormalTypeUntyped -> "untyped"
| DataDeclarationVar -> "<data_decl_var>"
| Const -> "const"
| DataDeclarationVarClass -> "<data_decl_var_class>"
| TypeDeclaration id -> "typedef "^id
| ScopedType id -> "<scoped_type:"^id^">"
| TypeIdentifier id -> id
| TypeDeclEnum -> "enum"
| TypeDeclStruct -> "struct"
| TypeDeclUnion -> "union"
| TypeDeclClass -> "class"
| VirtualInterfaceDeclaration id -> "virtual interface "^id
| ModportDeclaration ids -> "modport "^(String.concat "," ids)
| ModportItem id -> "<modport_item:"^id^">"
| ModportSimplePortsDecl -> "<modport_simple_ports_decl>"
| ModportClockingDecl id -> "clocking "^id
| ModportTfPortsDeclImport -> "import"
| ModportTfPortsDeclExport -> "export"
| ModportSimplePort id -> id
| ModportSimplePortDot id -> "."^id
| ModportTfPort id -> id
| CovergroupDeclaration id -> "covergroup "^id
| Paren -> "()"
| CoverageOption(id1, id2) -> "<coverage_opt:"^id1^":"^id2^">"
| CoverPoint -> "coverpoint"
| CoverPointLabeled id -> id^":coverpoint"
| CoverCross -> "covercross"
| CoverCrossLabeled id -> id^":covercross"
| CrossItem id -> "<cross_item:"^id^">"
| Iff -> "iff"
| BinsList -> "<bins_list>"
| BinsEmpty -> ";"
| SelectBins -> "<sel_bins>"
| SelectBinsEmpty -> ";"
| Bins(bspec, id) -> (BinsSpec.to_rep bspec)^" "^id
| BinsSelection(bspec, id) -> (BinsSpec.to_rep bspec)^" "^id
| BinsExpressionVar id -> id
| BinsExpression(id1, id2) -> id1^"."^id2
| NBins -> "[]"
| SelCondBinsof -> "binsof"
| SelExprNot -> "!"
| SelExprAnd -> "&&"
| SelExprOr -> "||"
| SelExprParen -> "()"
| Intersect -> "intersect"
| Wildcard -> "wildcard"
| TransSet -> "<trans_set>"
| TransRangeList -> "<trans_range_list>"
| RepeatRange -> "<repeat_range>"
| TransItem -> "<trans_item>"
| TransRepetitionConsecutive -> "[*]"
| TransRepetitionNonconsecutive -> "[=]"
| TransRepetitionGoto -> "[->]"
| Default -> "default"
| DefaultSequence -> "default sequence"
| OpenRangeList -> "<open_range_list>"
| CoverageEventWith id -> "with function "^id
| CoverageEventBlockEvent -> "@@"
| BlockEventExpression -> "<block_ev_expr>"
| BlockEventExpressionBegin -> "begin"
| BlockEventExpressionEnd -> "end"
| HierarchicalBtfIdentifier id -> "<hierarchical_Btf_id:"^id^">"
| PackageExportDeclarationStar -> "export *::*"
| PackageExportDeclaration -> "export"
| DpiImport s -> "import "^s
| DpiExportFunc(s, id) -> "export "^s^" function "^id
| DpiExportTask(s, id) -> "export "^s^" task "^id
| DpiImportLabel id -> id^"="
| DpiTfImportPropertyContext -> "context"
| DpiTfImportPropertyPure -> "pure"
| ExternConstraintDeclaration -> "constraint"
| Static -> "static"
| Virtual -> "virtual"
| ClassDeclaration id -> "class "^id
| ClassExtends -> "extends"
| ClassItemEmpty -> ";"
| ClassMethod -> "<class_meth>"
| Qualifier q -> Qualifier.to_simple_string q
| ClassBody -> "<class_body>"
| ClassConstraint id -> "constraint "^id
| Pure -> "pure"
| ClassProperty -> "<class_prop>"
| PackageOrGenerateItemEmpty -> ";"
| Forkjoin -> "forkjoin"
| ExternTfDeclaration ids -> "extern"
| TimingCheck tc -> TimingCheck.to_simple_string tc
| SystemTimingCheck -> "<sys_timing_check>"
| Notifier id -> "<notifier:"^id^">"
| Delayed id -> "<delayed:"^id^">"
| TimingCheckEvent -> "<timing_check_ev>"
| TimingCheckEventControlPosedge -> "posedge"
| TimingCheckEventControlNegedge -> "negedge"
| TimingCheckEventControl -> "edge"
| EdgeDescriptor s -> s
| OverloadDeclaration(oo, id) -> (OverloadOperator.to_simple_string oo)^" "^id
| Params -> "<params>"
| ClockingDeclaration id -> "clocking "^id
| Global -> "global"
| ClockingBody -> "<clocking_body>"
| ClockingItemDefault -> "default"
| ClockingItem -> "<clocking_item>"
| DefaultSkewInput -> "input"
| DefaultSkewOutput -> "output"
| DefaultSkewInputOutput -> "input output"
| ClockingDirectionInput -> "input"
| ClockingDirectionInputOutput -> "input output"
| ClockingDirectionInout -> "inout"
| ClockingSkewPosedge -> "posedge"
| ClockingSkewNegedge -> "negedge"
| ClockingSkewEdge -> "edge"
| ClockingSkew -> "<clocking_skew>"
| ClockingDeclAssign id -> "<clocking_decl_assign:"^id^">"
| Production id -> "<prod:"^id^">"
| ProductionItem id -> "<prod_item:"^id^">"
| RsCodeBlock -> "<rs_code_block>"
| RsRule -> "<rs_rule>"
| RsProductionList -> "<rs_prod_list>"
| RsProductionListRandJoin -> "rand join"
| WeightSpecInt s -> s
| WeightSpecId -> "<weight_spec_id>"
| WeightSpec -> "<weight_spec>"
| RsProdIf -> "if"
| RsProdRepeat -> "repeat"
| RsProdCase -> "case"
| RsCaseItem -> "<rs_case_item>"
| RsCaseItemDefault -> "default"
| CheckerOrGenerateItemEmpty -> ";"
| ConditionalGenerateConstructCase -> "case"
| ConditionalGenerateConstructIf -> "if"
| ElaborationSystemTask st -> SystemTask.to_simple_string st
| CaseGenerateItem -> "<case_gen_item>"
| CaseGenerateItemDefault -> "default"
| AssignmentPattern -> "<assign_pat>"
| AssignmentPatternExpr -> "<assign_pat_expr>"
| PatternKey -> "<pat_key>"
| PatternKeyDefault -> "default"
| PatternMember -> "<pat_mem>"
| SimplePathDeclaration -> "<simple_path_decl>"
| ParallelPathDescription -> "<par_path_desc>"
| FullPathDescription -> "<full_path_desc>"
| PathInputs -> "<path_inputs>"
| PathOutputs -> "<path_outputs>"
| PathDelayValue -> "<path_delay_val>"
| PolarityPlus -> "+"
| PolarityMinus -> "-"
| EdgePosedge -> "posedge"
| EdgeNegedge -> "negedge"
| EdgeSensitivePathDeclaration -> "<edge_sensitive_path_decl>"
| ParallelEdgeSensitivePathDescription -> "<par_edge_sensitive_path_desc>"
| FullEdgeSensitivePathDescription -> "<full_edge_sensitive_path_desc>"
| ParallelEdgeSensitivePathDescriptionSub -> "<par_edge_sensitive_path_desc_sub>"
| FullEdgeSensitivePathDescriptionSub -> "<full_edge_sensitive_path_desc_sub>"
| StateDependentPathDeclarationIf -> "if"
| StateDependentPathDeclarationIfnone -> "ifnone"
| VariableLvalue -> "<var_lval>"
| AttributeInstance -> "(* *)"
| AttrSpec id -> "<attr_spec:"^id^">"
| UdpPort id -> id
| UdpPortDeclaration -> "<udp_port_decl>"
| UdpOutputDeclaration id -> "output "^id
| UdpOutputDeclarationReg id -> "output reg "^id
| UdpInputDeclaration -> "input"
| UdpRegDeclaration id -> "reg "^id
| SequentialBody -> "<seq_body>"
| CombinationalBody -> "<combi_body>"
| UdpInitialStmt(id, s) -> "initial "^id^"="^s
| SequentialEntry -> "<seq_entry>"
| EdgeIndicator -> "<edge_ind>"
| EdgeSymbol s -> s
| LevelSymbol s -> s
| OutputSymbol s -> s
| CombinationalEntry -> "<combi_entry>"
| NextStateMinus -> "-"
| UdpPortsStar -> "(.*)"
| UdpPorts -> "<udp_ports>"
| UdpPortDecls -> "<udp_port_decls>"
| UdpDeclarationPorts -> "<udp_decl_ports>"
| AttributeInstances -> "<attr_insts>"
| ConfigDeclaration id -> "config "^id
| DesignStatement -> "design"
| CellId id -> id
| LibraryIdentifier id -> id
| LiblistClause -> "liblist"
| CellClause id -> "cell "^id
| UseClause -> "use"
| ColonConfig -> ":config"
| InstanceName -> "<instance_name>"
| InstanceIdentifier id -> id
| TopModuleIdentifier id -> id
| InstClause -> "instance"
| ConfigRuleStatementDefault -> "default"
| ConfigRuleStatement -> "<conf_rule_stmt>"
| LibraryDeclaration id -> "library "^id
| Incdir -> "-incdir"
| FilePathSpec s -> s
| IncludeStatement s -> "include "^s^";"
| PragmaExpression id -> "<pragma_expr:"^id^">"
| PragmaValueTuple -> "<pragma_val_tuple>"
| PragmaValueNum s -> s
| PragmaValueStr s -> s
| PragmaValueId id -> id
| PackageImportDecls -> "<package_import_decls>"
| ParamPorts -> "<param_ports>"
| Ranges -> "<ranges>"
| VariableDimensions -> "<variable_dimensions>"
| CaseConds -> "<case_conds>"
| NetDeclAssignments ids -> "<net_decl_assigns:"^(String.concat "," ids)^">"
| ParamAssignments ids -> "<param_assigns:"^(String.concat "," ids)^">"
| MacroExpr s -> "<macro_expr:"^s^">"
| MacroStmt s -> "<macro_stmt:"^s^">"
| Var -> "var"
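(* [to_tag lab] maps a label to an XML element name together with its
   attribute list (identifier, spec, label, value or path attributes,
   depending on the constructor).  The [strip] flag is accepted but not
   consulted in the body below. *)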
let to_tag ?(strip=false) lab =
let name, attrs =
match lab with
| Dummy -> "DUMMY", []
| Error -> "ERROR", []
| Empty -> "EMPTY", []
| SourceText -> "SourceText", []
| LibraryText -> "LibraryText", []
| CompilerDirective cd -> CompilerDirective.to_tag cd
| ModuleDeclaration(mspec, id) -> "ModuleDecl", [spec_attr_name,ModuleSpec.to_rep mspec;ident_attr_name,id]
| UdpDeclaration id -> "UdpDecl", [ident_attr_name,id]
| NetDeclaration ids -> "NetDecl", [ident_attr_name,String.concat ";" ids]
| BindDirective id -> "BindDirective", [ident_attr_name,id]
| Expr e -> Expression.to_tag e
| Stmt stmt -> Statement.to_tag stmt
| NetType nt -> NetType.to_tag nt
| LocalParameterDeclaration ids -> "LocalParamDecl", [ident_attr_name,String.concat ";" ids]
| ParameterDeclaration ids -> "ParamDecl", [ident_attr_name,String.concat ";" ids]
| ParameterPortDeclaration -> "ParamPortDecl", []
| ModuleBody -> "ModuleBody", []
| Instantiation id -> "Instantiation", [ident_attr_name,id]
| GateInstantiation g -> "GateInstantiation", ["gate",Gate.to_simple_string g]
| ContinuousAssign -> "ContinuousAssign", []
| Assign -> "Assign", []
| ConcurrentAssertionItem -> "ConcurrentAssertionItem", []
| DeferredImmediateAssertionItem -> "DeferredImmediateAssertItem", []
| PpIdentifier id -> "PpIdentifier", [ident_attr_name,id]
| PackedDimension -> "PackedDimension", []
| ParamAssignment id -> "ParamAssignment", [ident_attr_name,id]
| DefparamAssignment -> "DefparamAssign", []
| IdSelect id -> "IdSelect", [ident_attr_name,id]
| Select -> "Select", []
| Range -> "Range", []
| RangePlus -> "RangePlus", []
| RangeMinus -> "RangeMinus", []
| RangeForeach -> "RangeForeach", []
| Root -> "Root", []
| This -> "This", []
| Super -> "Super", []
| Cellpin id -> "Cellpin", [ident_attr_name,id]
| CellpinStar -> "Cellpin_Star", []
| CellpinAnon -> "Cellpin_Anon", []
| DelayValue id -> "DelayValue", [ident_attr_name,id]
| PackageScope id -> "PackageScope", [ident_attr_name,id]
| PackageScopeUnit -> "PackageScopeUnit", []
| PackageScopeLocal -> "PackageScopeLocal", []
| PackageImport id -> "PackageImport", [ident_attr_name,id]
| PackageImportAny -> "PackageImportAny", []
| LifetimeStatic -> "LifetimeStatic", []
| LifetimeAutomatic -> "LifetimeAutomatic", []
| EndLabel id -> "EndLabel", [ident_attr_name,id]
| EndLabelNew -> "EndLabelNew", []
| ClassType id -> "ClassType", [ident_attr_name,id]
| DataType dt -> DataType.to_tag dt
| ImplicitDataType -> "ImplicitDataType", []
| VarDeclAssignments -> "VarDeclAssignments", []
| Signed -> "Signed", []
| Unsigned -> "Unsigned", []
| ArgsDotted id -> "ArgsDotted", [ident_attr_name,id]
| Tagged -> "Tagged", []
| StructUnionBody -> "StructUnionBody", []
| StructUnionMember -> "StructUnionMem", []
| ClassScopeId id -> "ClassScopeId", [ident_attr_name,id]
| Void -> "Void", []
| EnumNameDeclaration id -> "EnumNameDecl", [ident_attr_name,id]
| EnumBody -> "EnumBody", []
| IdClassSel id -> "IdClassSel", [ident_attr_name,id]
| Variable id -> "Var", [ident_attr_name,id]
| Extern -> "Extern", []
| PackageImportDeclaration -> "PackageImportDecl", []
| PackageImportItem id -> "PackageImportItem", [ident_attr_name,id]
| Packed -> "Packed", []
| ParameterValueAssignment -> "ParamValAssign", []
| Ports -> "Ports", []
| PortsStar -> "PortsStar", []
| BitSelect -> "BitSel", []
| VariableDeclAssignment id -> "VarDeclAssign", [ident_attr_name,id]
| DynamicArrayNew -> "DynArrayNew", []
| VariableDimension -> "VarDim", []
| VariableDimensionStar -> "VarDimStar", []
| GenItemBegin -> "GenItemBegin", []
| GenBlockId id -> "GenBlockId", [ident_attr_name,id]
| GenerateRegion -> "GenerateRegion", []
| Scalared -> "Scalared", []
| Vectored -> "Vectored", []
| DelayControl -> "DelayControl", []
| NetSig id -> "NetSig", [ident_attr_name,id]
| ParameterOverride -> "ParamOverride", []
| PortDeclaration -> "PortDecl", []
| PortDirection pd -> PortDirection.to_tag pd
| Strength strength -> Strength.to_tag strength
| StrengthSupply0 -> "StrengthSupply0", []
| StrengthSupply1 -> "StrengthSupply1", []
| StrengthSpec -> "StrengthSpec", []
| VarDataType -> "VarDataType", []
| Port id -> "Port", [ident_attr_name,id]
| InterfacePort id -> "InterfacePort", [ident_attr_name,id]
| InterfacePortInterface -> "Interface", []
| ModportIdentifier id -> "ModportId", [ident_attr_name,id]
| PortMulti -> "PortMulti", []
| ExprScope -> "ExprScope", []
| ExprScopeThis -> "ExprScopeThis", []
| ExprScopeSuper -> "ExprScopeSuper", []
| ExprScopeDot -> "ExprScopeDot", []
| ExprScopeDotSuper -> "ExprScopeDotSuper", []
| CondPredicate -> "CondPred", []
| CondPattern -> "CondPat", []
| Dist -> "Dist", []
| DistItem -> "DistItem", []
| DistWeight -> "DistWeight", []
| DistWeightRange -> "DistWeightRange", []
| ArrayRange -> "ArrayRange", []
| ArrayRangePlus -> "ArrayRangePlus", []
| ArrayRangeMinus -> "ArrayRangeMinus", []
| CastingTypeSimple -> "CastingTypeSimple", []
| CastingTypeSigned -> "CastingTypeSigned", []
| CastingTypeUnsigned -> "CastingTypeUnsigned", []
| CastingTypeString -> "CastingTypeString", []
| CastingTypeConst -> "CastingTypeConst", []
| ValueRange -> "ValueRange", []
| Pattern -> "Pattern", []
| PatternId id -> "PatternId", [ident_attr_name,id]
| PatternStar -> "PatternStar", []
| PatternTagged id -> "PatternTagged", [ident_attr_name,id]
| EventControl -> "EventControl", []
| EventControlStar -> "EventControlStar", []
| EventControlParenStar -> "EventControlParenStar", []
| EventControlRepeat -> "EventControlRepeat", []
| EvExpr ee -> EventExpression.to_tag ee
| CaseItem -> "CaseItem", []
| CaseItemDefault -> "CaseItemDefault", []
| CaseInsideItem -> "CaseInsideItem", []
| CaseInsideItemDefault -> "CaseInsideItemDefault", []
| CaseItems -> "CaseItems", []
| CaseItemsMatches -> "CaseItemMatches", []
| CaseItemsInside -> "CaseItemInside", []
| With -> "With", []
| Args -> "Args", []
| ConstraintBlock -> "ConstraintBlock", []
| ForInit -> "ForInit", []
| ForInitItemDT id -> "ForInitItemDt", [ident_attr_name,id]
| ForInitItemLval -> "ForInitItemLval", []
| StreamingConcat -> "StreamingConcat", []
| OrderRL -> "OrderRL", []
| OrderLR -> "OrderLR", []
| StreamConcat -> "StreamConcat", []
| Solve -> "Solve", []
| SolveBefore -> "SolveBefore", []
| ActionBlock -> "ActionBlock", []
| CycleDelay s -> "CycleDelay", ["delay",XML.encode_string s]
| CycleDelayId id -> "CycleDelayId", [ident_attr_name,id]
| CycleDelayParen -> "CycleDelayParen", []
| Priority -> "Priority", []
| Unique -> "Unique", []
| Unique0 -> "Unique0", []
| InstRange -> "InstRange", []
| InstName id -> "InstName", [ident_attr_name,id]
| PExpr pe -> PropertyExpression.to_tag pe
| ClockingEvent id -> "ClockingEvent", [ident_attr_name,id]
| ClockingEventParen -> "ClockingEventParen", []
| PropertyCase -> "PropertyCase", []
| PropertyCaseDefault -> "PropertyCaseDefault", []
| DisableIff -> "DisableIff", []
| CycleDelayRange s -> "CycleDelayRange", ["delay",XML.encode_string s]
| CycleDelayRangeId id -> "CycleDelayRangeId", [ident_attr_name,id]
| CycleDelayRangeParen -> "CycleDelayRangeParen", []
| CycleDelayRangeBracket -> "CycleDelayRangeBracket", []
| CycleDelayRangeBracketStar -> "CycleDelayRangeBracketStar", []
| CycleDelayRangeBracketPlus -> "CycleDelayRangeBracketPlus", []
| SExpr se -> SequenceExpression.to_tag se
| ConsecutiveRepetition -> "ConsecutiveRepetition", []
| NonconsecutiveRepetition -> "NonconsecutiveRepetition", []
| GotoRepetition -> "GotoRepetition", []
| NetAlias -> "NetAlias", []
| InitialConstruct -> "InitialConstruct", []
| FinalConstruct -> "FinalConstruct", []
| AlwaysConstruct aspec -> "AlwaysConstruct", [spec_attr_name,AlwaysSpec.to_rep aspec]
| ConcurrentAssertionItemLabeled id -> "ConcurrentAssertionItem", [label_attr_name,id]
| ConcurrentAssertionStmt ca -> ConcurrentAssertion.to_tag ca
| DeferredImmediateAssertionItemLabeled id -> "DeferredImmediateAssertionItem", [label_attr_name,id]
| DeferredImmediateAssertionStmt dia -> DeferredImmediateAssertion.to_tag dia
| SimpleImmediateAssertionStmt sia -> SimpleImmediateAssertion.to_tag sia
| CheckerInstantiation id -> "CheckerInst", [ident_attr_name,id]
| LoopGenerateConstruct -> "LoopGenerateConstruct", []
| GenvarDeclaration ids -> "GenvarDecl", [ident_attr_name,String.concat ";" ids]
| GenvarIterationAssign(ao, id) -> "GenvarIter", ["op",AssignmentOperator.to_tag_name ao;ident_attr_name,id]
| GenvarIterationIncOrDec(iod, id) -> "GenvarIter", ["op",IncOrDecOperator.to_tag_name iod;ident_attr_name,id]
| GenvarIdDecl id -> "GenvarIdDecl", [ident_attr_name,id]
| GenvarInitId id -> "GenvarInitId", [ident_attr_name,id]
| GenvarInit -> "GenvarInit", []
| SpecifyBlock -> "SpecifyBlock", []
| SpecparamDeclaration -> "SpecparamDecl", []
| SpecparamAssignmentId id -> "SpecparamAssignmentId", [ident_attr_name,id]
| SpecparamAssignmentPulseControl id -> "SpecparamAssignmentPulseControl", [ident_attr_name,id]
| PulsestyleDeclarationOnevent -> "PulsestyleDeclOnevent", []
| PulsestyleDeclarationOndetect -> "PulsestyleDeclOndetect", []
| ShowcancelledDeclaration -> "Showcancelled", []
| NoshowcancelledDeclaration -> "Noshowcancelled", []
| SpecifyTerminalDescriptor -> "SpecifyTerminalDtor", []
| InputOrOutputId id -> "InputOrOutputId", [ident_attr_name,id]
| InterfaceIdentifier id -> "InterfaceId", [ident_attr_name,id]
| ProgramDeclaration id -> "ProgramDecl", [ident_attr_name,id]
| InterfaceDeclaration id -> "InterfaceDecl", [ident_attr_name,id]
| InterfaceDeclarationExtern id -> "InterfaceDeclExtern", [ident_attr_name,id]
| TimeUnitsDeclaration -> "TimeunitsDecl", []
| TimeUnit s -> "Timeunit", ["unit",XML.encode_string s]
| Timeprecision s -> "Timeprecision", ["precision",XML.encode_string s]
| PackageDeclaration id -> "PackageDecl", [ident_attr_name,id]
| AnonymousProgram -> "AnonymousProg", []
| AnonymousProgramItemEmpty -> "AnonymousProgItemEmpty", []
| FunctionDeclaration id -> "FunctionDecl", [ident_attr_name,id]
| FunctionPrototype id -> "FunctionPrototype", [ident_attr_name,id]
| FuncId id -> "FunctionId", [ident_attr_name,id]
| FuncIdVoid id -> "FunctionIdVoid", [ident_attr_name,id]
| FuncIdNew -> "FunctionIdNew", []
| TfIdScoped id -> "TfIdScoped", [ident_attr_name,id]
| TaskDeclaration id -> "TaskDecl", [ident_attr_name,id]
| TaskPrototype id -> "TaskPrototype", [ident_attr_name,id]
| ClassCtorPrototype -> "ClassCtorProto", []
| TfPortListPart -> "TfPortListPart", []
| TfBody -> "TfBody", []
| TfPortDeclaration -> "TfPortDecl", []
| TfPortItemAssignment id -> "TfPortItemAssign", [ident_attr_name,id]
| TfPortItem -> "TfPortItem", []
| TfVariableIdentifier id -> "TfVarId", [ident_attr_name,id]
| CheckerDeclaration id -> "CheckerDecl", [ident_attr_name,id]
| PropertyDeclaration id -> "PropertyDecl", [ident_attr_name,id]
| PropertyDeclBody -> "PropertyDeclBody", []
| PropertyPortItem -> "PropertyPortItem", []
| PropertyPortItemDir -> "PropertyPortItemDir", []
| PropertyPortItemAssignment id -> "PropertyPortItemAssign", [ident_attr_name,id]
| SequenceDeclaration id -> "SequenceDecl", [ident_attr_name,id]
| SequenceDeclBody -> "SequenceDeclBody", []
| LetDeclaration id -> "LetDecl", [ident_attr_name,id]
| PropertyStatementSpec -> "PropertyStmtSpec", []
| AssertionVariableDeclaration -> "AssertionVarDecl", []
| SequenceFormalTypeSequence -> "SequenceFormalTypeSequence", []
| SequenceFormalTypeUntyped -> "SequenceFormalTypeUntyped", []
| DataDeclarationVar -> "DataDeclVar", []
| Const -> "Const", []
| DataDeclarationVarClass -> "DataDeclVarClass", []
| TypeDeclaration id -> "TypeDecl", [ident_attr_name,id]
| ScopedType id -> "ScopedType", [ident_attr_name,id]
| TypeIdentifier id -> "TypeId", [ident_attr_name,id]
| TypeDeclEnum -> "TypeDeclEnum", []
| TypeDeclStruct -> "TypeDeclStruct", []
| TypeDeclUnion -> "TypeDeclUnion", []
| TypeDeclClass -> "TypeDeclClass", []
| VirtualInterfaceDeclaration id -> "VirtualInterfaceDecl", [ident_attr_name,id]
| ModportDeclaration ids -> "ModportDecl", [ident_attr_name,String.concat ";" ids]
| ModportItem id -> "ModportItem", [ident_attr_name,id]
| ModportSimplePortsDecl -> "ModportSimplePortsDecl", []
| ModportClockingDecl id -> "ModportClockingDecl", [ident_attr_name,id]
| ModportTfPortsDeclImport -> "ModportTfPortsDeclImport", []
| ModportTfPortsDeclExport -> "ModportTfPortsDeclExport", []
| ModportSimplePort id -> "ModportSimplePort", [ident_attr_name,id]
| ModportSimplePortDot id -> "ModportSimplePortDot.", [ident_attr_name,id]
| ModportTfPort id -> "ModportTfPort", [ident_attr_name,id]
| CovergroupDeclaration id -> "CovergroupDecl", [ident_attr_name,id]
| Paren -> "Paren", []
| CoverageOption(id1, id2) -> "CoverageOption", ["inst",id1;"opt",id2]
| CoverPoint -> "Coverpoint", []
| CoverPointLabeled id -> "Coverpoint", [label_attr_name,id]
| CoverCross -> "Covercross", []
| CoverCrossLabeled id -> "Covercross", [label_attr_name,id]
| CrossItem id -> "CrossItem", [ident_attr_name,id]
| Iff -> "Iff", []
| BinsList -> "BinsList", []
| BinsEmpty -> "BinsEmpty", []
| SelectBins -> "SelectBins", []
| SelectBinsEmpty -> "SelectBinsEmpty", []
| Bins(bspec, id) -> "Bins", [spec_attr_name,BinsSpec.to_rep bspec;ident_attr_name,id]
| BinsSelection(bspec, id) -> "BinsSelection", [spec_attr_name,BinsSpec.to_rep bspec;ident_attr_name,id]
| BinsExpressionVar id -> "BinsExprVar", [ident_attr_name,id]
| BinsExpression(id1, id2) -> "BinsExpr", ["cover_point",id1;"bins",id2]
| NBins -> "NBins", []
| SelCondBinsof -> "SelCondBinsof", []
| SelExprNot -> "SelExprNot", []
| SelExprAnd -> "SelExprAnd", []
| SelExprOr -> "SelExprOr", []
| SelExprParen -> "SelExprParen", []
| Intersect -> "Intersect", []
| Wildcard -> "Wildcard", []
| TransSet -> "TransSet", []
| TransRangeList -> "TransRangeList", []
| RepeatRange -> "RepeatRange", []
| TransItem -> "TransItem", []
| TransRepetitionConsecutive -> "TransRepetitionConsecutive", []
| TransRepetitionNonconsecutive -> "TransRepetitionNonconsecutive", []
| TransRepetitionGoto -> "TransRepetitionGoto", []
| Default -> "Default", []
| DefaultSequence -> "DefaultSequence", []
| OpenRangeList -> "OpenRangeList", []
| CoverageEventWith id -> "CoverageEventWith", [ident_attr_name,id]
| CoverageEventBlockEvent -> "CoverageEventBlockEvent", []
| BlockEventExpression -> "BlockEventExpr", []
| BlockEventExpressionBegin -> "BlockEventExprBegin", []
| BlockEventExpressionEnd -> "BlockEventExprEnd", []
| HierarchicalBtfIdentifier id -> "HierarchicalBtfId", [ident_attr_name,id]
| PackageExportDeclarationStar -> "PackageExportDeclStar", []
| PackageExportDeclaration -> "PackageExportDecl", []
| DpiImport s -> "DpiImport", ["dpi_spec",XML.encode_string s]
| DpiExportFunc(s, id) -> "DpiExportFunc", ["dpi_spec",XML.encode_string s;ident_attr_name,id]
| DpiExportTask(s, id) -> "DpiExportTask", ["dpi_spec",XML.encode_string s;ident_attr_name,id]
| DpiImportLabel id -> "DpiImportLabel", [label_attr_name,id]
| DpiTfImportPropertyContext -> "DpiTfImportPropertyContext", []
| DpiTfImportPropertyPure -> "DpiTfImportPropertyPure", []
| ExternConstraintDeclaration -> "ExternConstraintDecl", []
| Static -> "Static", []
| Virtual -> "Virtual", []
| ClassDeclaration id -> "ClassDecl", [ident_attr_name,id]
| ClassExtends -> "ClassExtends", []
| ClassItemEmpty -> "ClassItemEmpty", []
| ClassMethod -> "ClassMethod", []
| Qualifier q -> Qualifier.to_tag q
| ClassBody -> "ClassBody", []
| ClassConstraint id -> "ClassConstraint", [ident_attr_name,id]
| Pure -> "Pure", []
| ClassProperty -> "ClassProperty", []
| PackageOrGenerateItemEmpty -> "PackageOrGenerateItemEmpty", []
| Forkjoin -> "Forkjoin", []
| ExternTfDeclaration id -> "ExternTfDecl", [ident_attr_name,id]
| TimingCheck tc -> TimingCheck.to_tag tc
| SystemTimingCheck -> "SystemTimingCheck", []
| Notifier id -> "Notifier", [ident_attr_name,id]
| Delayed id -> "Delayed", [ident_attr_name,id]
| TimingCheckEvent -> "TimingCheckEvent", []
| TimingCheckEventControlPosedge -> "TimingCheckEventControlPosedge", []
| TimingCheckEventControlNegedge -> "TimingCheckEventControlNegedge", []
| TimingCheckEventControl -> "TimingCheckEventControl", []
| EdgeDescriptor s -> "EdgeDesc", ["desc",s]
| OverloadDeclaration(oo, id) -> "OverloadDecl", ["op",OverloadOperator.to_tag_name oo;ident_attr_name,id]
| Params -> "Params", []
| ClockingDeclaration id -> "ClockingDecl", [ident_attr_name,id]
| Global -> "Global", []
| ClockingBody -> "Clockingbody", []
| ClockingItemDefault -> "ClockingItemDefault", []
| ClockingItem -> "ClockingItem", []
| DefaultSkewInput -> "defaultSkewInput", []
| DefaultSkewOutput -> "defaultSkewOutput", []
| DefaultSkewInputOutput -> "defaultSkewInputOutput", []
| ClockingDirectionInput -> "ClockingDirInput", []
| ClockingDirectionInputOutput -> "ClockingDirInputOutput", []
| ClockingDirectionInout -> "ClockingDirInout", []
| ClockingSkewPosedge -> "ClockingSkewPosedge", []
| ClockingSkewNegedge -> "ClockingSkewNegedge", []
| ClockingSkewEdge -> "ClockingSkewEdge", []
| ClockingSkew -> "ClockingSkew", []
| ClockingDeclAssign id -> "ClockingDeclAssign", [ident_attr_name,id]
| Production id -> "Production", [ident_attr_name,id]
| ProductionItem id -> "ProductionItem", [ident_attr_name,id]
| RsCodeBlock -> "RsCode_block", []
| RsRule -> "RsRule", []
| RsProductionList -> "RsProductionList", []
| RsProductionListRandJoin -> "RsProductionListRandJoin", []
| WeightSpecInt s -> "WeightSpecInt", [value_attr_name,XML.encode_string s]
| WeightSpecId -> "WeightSpecId", []
| WeightSpec -> "WeightSpec", []
| RsProdIf -> "RsProdIf", []
| RsProdRepeat -> "RsProdRepeat", []
| RsProdCase -> "RsProdCase", []
| RsCaseItem -> "RsCaseItem", []
| RsCaseItemDefault -> "RsCaseItemDefault", []
| CheckerOrGenerateItemEmpty -> "CheckerOrGenerateItemEmpty", []
| ConditionalGenerateConstructCase -> "ConditionalGenerateConstructCase", []
| ConditionalGenerateConstructIf -> "ConditionalGenerateConstructIf", []
| ElaborationSystemTask st -> "ElaborationSystemTask", ["task",SystemTask.to_simple_string st]
| CaseGenerateItem -> "CaseGenerateItem", []
| CaseGenerateItemDefault -> "CaseGenerateItemDefault", []
| AssignmentPattern -> "AssignmentPattern", []
| AssignmentPatternExpr -> "AssignmentPatternExpr", []
| PatternKey -> "PatternKey", []
| PatternKeyDefault -> "PatternKeyDefault", []
| PatternMember -> "PatternMember", []
| SimplePathDeclaration -> "SimplePathDecl", []
| ParallelPathDescription -> "ParallelPathDesc", []
| FullPathDescription -> "FullPathDesc", []
| PathInputs -> "PathInputs", []
| PathOutputs -> "PathOutputs", []
| PathDelayValue -> "PathDelayValue", []
| PolarityPlus -> "PolarityPlus", []
| PolarityMinus -> "PolarityMinus", []
| EdgePosedge -> "Posedge", []
| EdgeNegedge -> "Negedge", []
| EdgeSensitivePathDeclaration -> "EdgeSensitivePathDecl", []
| ParallelEdgeSensitivePathDescription -> "ParallelEdgeSensitivePathDesc", []
| FullEdgeSensitivePathDescription -> "FullEdgeSensitivePathDesc", []
| ParallelEdgeSensitivePathDescriptionSub -> "ParallelEdgeSensitivePathDesc_sub", []
| FullEdgeSensitivePathDescriptionSub -> "FullEdgeSensitivePathDesc_sub", []
| StateDependentPathDeclarationIf -> "StateDependentPathDeclIf", []
| StateDependentPathDeclarationIfnone -> "StateDependentPathDeclIfnone", []
| VariableLvalue -> "VariableLvalue", []
| AttributeInstance -> "AttributeInstance", []
| AttrSpec id -> "AttrSpec", [ident_attr_name,id]
| UdpPort id -> "UdpPort", [ident_attr_name,id]
| UdpPortDeclaration -> "UdpPortDecl", []
| UdpOutputDeclaration id -> "UdpOutputDecl", [ident_attr_name,id]
| UdpOutputDeclarationReg id -> "UdpOutputDeclReg", [ident_attr_name,id]
| UdpInputDeclaration -> "UdpInputDecl", []
| UdpRegDeclaration id -> "UdpRegDecl", [ident_attr_name,id]
| SequentialBody -> "SequentialBody", []
| CombinationalBody -> "CombinationalBody", []
| UdpInitialStmt(id, s) -> "UdpInitialStmt", ["output_port",id;value_attr_name,XML.encode_string s]
| SequentialEntry -> "SequentialEntry", []
| EdgeIndicator -> "EdgeIndicator", []
| EdgeSymbol s -> "EdgeSymbol", ["symbol",XML.encode_string s]
| LevelSymbol s -> "LevelSymbol", ["symbol",XML.encode_string s]
| OutputSymbol s -> "OutputSymbol", ["symbol",XML.encode_string s]
| CombinationalEntry -> "CombinationalEntry", []
| NextStateMinus -> "NextStateMinus", []
| UdpPortsStar -> "UdpPortsStar", []
| UdpPorts -> "UdpPorts", []
| UdpPortDecls -> "UdpPortDecls", []
| UdpDeclarationPorts -> "UdpDeclPorts", []
| AttributeInstances -> "AttributeInstances", []
| ConfigDeclaration id -> "ConfigDecl", [ident_attr_name,id]
| DesignStatement -> "DesignStmt", []
| CellId id -> "CellId", [ident_attr_name,id]
| LibraryIdentifier id -> "LibraryId", [ident_attr_name,id]
| LiblistClause -> "LiblistClause", []
| CellClause id -> "CellClause", [ident_attr_name,id]
| UseClause -> "UseClause", []
| ColonConfig -> "ColonConfig", []
| InstanceName -> "InstanceName", []
| InstanceIdentifier id -> "InstanceId", [ident_attr_name,id]
| TopModuleIdentifier id -> "TopModuleId", [ident_attr_name,id]
| InstClause -> "InstClause", []
| ConfigRuleStatementDefault -> "ConfigRuleStmtDefault", []
| ConfigRuleStatement -> "ConfigRuleStmt", []
| LibraryDeclaration id -> "LibraryDecl", [ident_attr_name,id]
| Incdir -> "Incdir", []
| FilePathSpec s -> "FilePathSpec", [path_attr_name,strlit_to_encoded_path s]
| IncludeStatement s -> "IncludeStmt", [path_attr_name,strlit_to_encoded_path s]
| PragmaExpression id -> "PragmaExpr", [ident_attr_name,id]
| PragmaValueTuple -> "PragmaValueTuple", []
| PragmaValueNum s -> "PragmaValueNumber", [value_attr_name,XML.encode_string s]
| PragmaValueStr s -> "PragmaValueString", [value_attr_name,XML.encode_string s]
| PragmaValueId id -> "PragmaValueId", [ident_attr_name,id]
| PackageImportDecls -> "PackageImportDecls", []
| ParamPorts -> "ParamPorts", []
| Ranges -> "Ranges", []
| VariableDimensions -> "VariableDimensions", []
| CaseConds -> "CaseConds", []
| NetDeclAssignments ids -> "NetDeclAssignments", [ident_attr_name,String.concat ";" ids]
| ParamAssignments ids -> "ParamAssignments", [ident_attr_name,String.concat ";" ids]
| MacroExpr s -> "MacroExpr", [ident_attr_name,s]
| MacroStmt s -> "MacroStmt", [ident_attr_name,s]
| Var -> "Var", []
in
name, attrs
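(* [get_identifiers] returns the identifier list carried by the
   list-bearing constructors below and raises [Not_found] for any other
   label.  Hypothetical example:
   [get_identifiers (NetDeclaration ["clk"; "rst"])] evaluates to
   [["clk"; "rst"]]. *)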
let get_identifiers = function
| NetDeclaration ids
| ModportDeclaration ids
| GenvarDeclaration ids
| NetDeclAssignments ids
| ParamAssignments ids
| LocalParameterDeclaration ids
| ParameterDeclaration ids
-> ids
| _ -> raise Not_found
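(* [get_identifier] extracts the single identifier attached to a label,
   delegating to the sub-label modules (Expression, Statement, DataType,
   EventExpression, ...) where needed; labels without an identifier
   raise [Not_found]. *)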
let get_identifier lab =
DEBUG_MSG "\"%s\"" (to_string lab);
match lab with
| Expr e -> Expression.get_identifier e
| Stmt s -> Statement.get_identifier s
| DataType dt -> DataType.get_identifier dt
| EvExpr ee -> EventExpression.get_identifier ee
| PExpr pe -> PropertyExpression.get_identifier pe
| SExpr se -> SequenceExpression.get_identifier se
| ModuleDeclaration(_, id)
| UdpDeclaration id
| BindDirective id
| Instantiation id
| PpIdentifier id
| ParamAssignment id
| IdSelect id
| Cellpin id
| DelayValue id
| PackageScope id
| PackageImport id
| EndLabel id
| ClassType id
| ArgsDotted id
| ClassScopeId id
| EnumNameDeclaration id
| Variable id
| PackageImportItem id
| VariableDeclAssignment id
| GenBlockId id
| NetSig id
| Port id
| InterfacePort id
| ModportIdentifier id
| PatternId id
| PatternTagged id
| ForInitItemDT id
| CycleDelayId id
| InstName id
| ClockingEvent id
| CycleDelayRangeId id
| ConcurrentAssertionItemLabeled id
| DeferredImmediateAssertionItemLabeled id
| CheckerInstantiation id
| GenvarIterationAssign(_, id)
| GenvarIterationIncOrDec(_, id)
| GenvarIdDecl id
| GenvarInitId id
| SpecparamAssignmentId id
| SpecparamAssignmentPulseControl id
| InputOrOutputId id
| InterfaceIdentifier id
| ProgramDeclaration id
| InterfaceDeclaration id
| InterfaceDeclarationExtern id
| PackageDeclaration id
| FunctionDeclaration id
| FunctionPrototype id
| FuncId id
| FuncIdVoid id
| TfIdScoped id
| TaskDeclaration id
| TaskPrototype id
| TfPortItemAssignment id
| TfVariableIdentifier id
| CheckerDeclaration id
| PropertyDeclaration id
| PropertyPortItemAssignment id
| SequenceDeclaration id
| LetDeclaration id
| TypeDeclaration id
| ScopedType id
| TypeIdentifier id
| VirtualInterfaceDeclaration id
| ModportItem id
| ModportClockingDecl id
| ModportSimplePort id
| ModportSimplePortDot id
| ModportTfPort id
| CovergroupDeclaration id
| CoverageOption(_, id)
| CoverPointLabeled id
| CoverCrossLabeled id
| CrossItem id
| Bins(_, id)
| BinsSelection(_, id)
| BinsExpressionVar id
| BinsExpression(_, id)
| CoverageEventWith id
| HierarchicalBtfIdentifier id
| DpiExportFunc(_, id)
| DpiExportTask(_, id)
| ClassDeclaration id
| ClassConstraint id
| ExternTfDeclaration id
| Notifier id
| Delayed id
| OverloadDeclaration(_, id)
| ClockingDeclaration id
| ClockingDeclAssign id
| Production id
| ProductionItem id
| AttrSpec id
| UdpPort id
| UdpOutputDeclaration id
| UdpOutputDeclarationReg id
| UdpRegDeclaration id
| UdpInitialStmt(id, _)
| ConfigDeclaration id
| CellId id
| LibraryIdentifier id
| CellClause id
| InstanceIdentifier id
| TopModuleIdentifier id
| LibraryDeclaration id
-> id
| FuncIdNew -> "new"
| _ -> raise Not_found
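(* Wrappers that re-label expression-like nodes as statement labels;
   [pexpr_to_stmt] additionally maps property-expression case/if onto the
   corresponding statement constructors.  Both raise [Invalid_argument]
   when handed a label of the wrong kind. *)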
let pexpr_to_stmt = function
| PExpr PropertyExpression.Case -> Stmt Statement.Case
| PExpr PropertyExpression.If -> Stmt Statement.Conditional
| PExpr x -> Stmt (Statement.PExpr x)
| x -> raise (Invalid_argument ("Ast.Label.pexpr_to_stmt: "^(to_string x)))
let expr_to_stmt = function
| Expr x -> Stmt (Statement.Expr x)
| _ -> raise (Invalid_argument "Ast.Label.expr_to_stmt")
let expr_of_integral_number i = Expr (Expression.IntegralNumber i)
let expr e = Expr e
let expr_uo uo = Expr (Expression.UOp uo)
let expr_bo bo = Expr (Expression.BOp bo)
let expr_ao ao = Expr (Expression.OperatorAssignment ao)
let pexpr pe = PExpr pe
let sexpr se = SExpr se
let ev_expr ee = EvExpr ee
let stmt s = Stmt s
let stmt_ao ao = Stmt (Statement.OperatorAssignment ao)
let sia_stmt ss = SimpleImmediateAssertionStmt ss
let dia_stmt ds = DeferredImmediateAssertionStmt ds
let ca_stmt cs = ConcurrentAssertionStmt cs
let qualifier q = Qualifier q
let timing_check tc = TimingCheck tc
let data_type dt = DataType dt
let net_type nt = NetType nt
let compiler_directive cd = CompilerDirective cd
let is_error = function
| Error -> true
| _ -> false
|
8c9188cf5df18f84a889c91c2fbe0edf4257bf9e26f0170fd65cfad97f4ebef1 | mmontone/cl-xul | test.lisp | (in-package :xul-test)
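;; A sample cl-xul application exercising common XUL widgets: menus,
;; boxes, buttons, checkboxes, pickers, grids, a deck, list boxes and a
;; radio group.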
(defparameter *test-app*
(make-instance 'xul-application
:name "testapp"
:javascripts (list (asdf::system-relative-pathname :cl-xul #p"test/test-app.js"))
:xul (xul
(window (:title "Test application"
:width "500"
:height "500")
(menu-bar (:id "sample-menubar")
(menu (:id "action-menu" :label "Action")
(menu-popup (:id "action-popup")
(menu-item (:label "New"))
(menu-item (:label "Save" :disabled "true"))
(menu-item (:label "Close"))
(menu-separator ())
(menu-item (:label "Quit"))))
(menu (:id "edit-menu" :label "Edit")
(menu-popup (:id "edit-popup")
(menu-item (:label "Undo"))
(menu-item (:label "Redo")))))
(vbox (:style "overflow:auto" :height "500")
(label (:control "hello-label"
:accesskey "h"
:value "Hello"))
(label (:control "bye-label"
:accesskey "b"
:value "Bye"))
(button (:label "Hello"
:on-command "alert('Hello!!');"))
(button (:type "menu-button" :label "New")
(menu-popup ()
(menu-item (:label "New Document"))
(menu-item (:label "New Image"))))
(checkbox (:label "Enable JavaScript" :checked t))
(checkbox (:label "Enable Java" :checked nil))
(date-picker (:value "2007/03/26"))
(date-picker (:type :grid))
(date-picker (:type :popup :value "2008/08/24"))
(color-picker (:color "#FF0000"))
(color-picker (:type :button :color "#CC0080"))
(time-picker (:value "12:05"))
(grid ()
(columns ()
(column (:flex "1"))
(column (:flex "2")))
(rows ()
(row ()
(label (:value "User name"))
;(textbox (:id "user"))
(row ()
(label (:value "Group"))
(menu-list ()
(menu-popup ()
(menu-item (:label "Accounts"))
(menu-item (:label "Sales" :selected t))
(menu-item (:label "Support"))))))))
(description (:value "This is a long section of text that will word wrap when displayed."))
(text-box (:id "your-name" :value "John"))
(deck (:selected-index "1")
(description (:value "This is the first page"))
(button (:label "This is the second page"))
(box ()
(description (:value "This is the third page"))
(button (:label "This is also the third page"))))
(list-box ()
(list-item (:label "Butter Pecan"))
(list-item (:label "Chocolate Chip"))
(list-item (:label "Raspberry Ripple"))
(list-item (:label "Squash Swirl")))
(radio-group ()
(radio (:id "orange" :label "Red" :accesskey "R"))
(radio (:id "violet" :label "Green" :accesskey "G" :selected "true"))
(radio (:id "yellow" :label "Blue" :accesskey "B" :disabled "true")))
(list-box ()
(list-head ()
(list-header (:label "Name"))
(list-header (:label "Occupation")))
(list-cols ()
(list-col ())
(list-col (:flex "1")))
(list-item ()
(list-cell (:label "George"))
(list-cell (:label "House Painter")))
(list-item ()
(list-cell (:label "Mary Ellen"))
(list-cell (:label "Candle Maker" )))
(list-item ()
(list-cell (:label "Roger"))
(list-cell (:label "Swashbuckler"))))
)))
:build-id "0001"
:id "TestApplication"))
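;; Launch the application defined above.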
(run-app *test-app*)
| null | https://raw.githubusercontent.com/mmontone/cl-xul/049c8664bb58d177c71d93ac5b5e1a2ba9b9469b/test/test.lisp | lisp | (textbox (:id "user")) | (in-package :xul-test)
|
6f09e7e9395b80e31f7a30b53d7a50d18b107dcc22fdfabbedfa846141ed1c03 | alphagov/govuk-guix | signon.scm | (define-module (gds services govuk signon)
#:use-module (srfi srfi-1)
#:use-module (ice-9 match)
#:use-module (ice-9 rdelim)
#:use-module (srfi srfi-26)
#:use-module (guix records)
#:use-module (guix gexp)
#:use-module (guix build utils)
#:use-module (gnu services)
#:use-module (gnu services shepherd)
#:use-module (gds services)
#:use-module (gds services utils)
#:use-module (gds services utils databases)
#:use-module (gds services utils databases mysql)
#:use-module (gds services rails)
#:use-module (gds services sidekiq)
#:use-module (gds services govuk tailon)
#:use-module (gds services govuk plek)
#:export (<signon-application>
signon-application
signon-application?
signon-application-name
signon-application-description
signon-application-redirect-uri
signon-application-home-uri
signon-application-supported-permissions
signon-application-oauth-id
signon-application-oauth-secret
<signon-user>
signon-user
signon-user?
signon-user-name
signon-user-email
signon-user-passphrase
signon-user-application-permissions
<signon-api-user>
signon-api-user
signon-api-user?
signon-api-user-name
signon-api-user-email
signon-api-user-authorisation-permissions
<signon-authorisation>
signon-authorisation
signon-authorisation?
signon-authorisation-application-name
signon-authorisation-token
signon-authorisation-environment-variable
use-gds-sso-strategy
update-signon-application-with-random-oauth
update-signon-api-user-with-random-authorisation-tokens
filter-signon-user-application-permissions
signon-setup-users-script
signon-setup-api-users-script
signon-setup-applications-script
<signon-config>
signon-config
signon-config?
signon-config-applications
signon-config-users
signon-config-devise-pepper
signon-config-devise-secret-key
signon-config-instance-name
signon-config-with-random-secrets
signon-dev-user-passphrase
update-signon-service-add-users
update-services-with-random-signon-secrets
set-random-devise-secrets-for-the-signon-service
modify-service-extensions-for-signon
modify-service-extensions-for-signon-and-plek))
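;; Record types describing Signon applications, users, API users and
;; authorisations, together with helpers for generating random OAuth
;; credentials and bearer tokens.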
(define-record-type* <signon-application>
signon-application make-signon-application
signon-application?
(name signon-application-name)
(description signon-application-description
(default ""))
(redirect-uri signon-application-redirect-uri
(default #f))
(home-uri signon-application-home-uri
(default #f))
(supported-permissions signon-application-supported-permissions
(default '()))
(oauth-id signon-application-oauth-id
(default #f))
(oauth-secret signon-application-oauth-secret
(default #f)))
(define-record-type* <signon-user>
signon-user make-signon-user
signon-user?
(name signon-user-name)
(email signon-user-email)
(passphrase signon-user-passphrase)
(role signon-user-role)
(application-permissions signon-user-application-permissions
(default '())))
(define-record-type* <signon-api-user>
signon-api-user make-signon-api-user
signon-api-user?
(name signon-api-user-name)
(email signon-api-user-email)
(authorisation-permissions signon-api-user-authorisation-permissions
(default '())))
(define-record-type* <signon-authorisation>
signon-authorisation make-signon-authorisation
signon-authorisation?
(application-name signon-authorisation-application-name)
(token signon-authorisation-token
(default #f))
(environment-variable signon-authorisation-environment-variable
(default #f))) ;; If #f, the default pattern
;; will be used
(define (update-signon-application-with-random-oauth app)
(signon-application
(inherit app)
(oauth-id (random-base16-string 64))
(oauth-secret (random-base16-string 64))))
(define (update-signon-authorisation-with-random-token authorisation)
(signon-authorisation
(inherit authorisation)
(token (random-base16-string 30))))
(define (update-signon-api-user-with-random-authorisation-tokens api-user)
(signon-api-user
(inherit api-user)
(authorisation-permissions
(map
(match-lambda
((authorisation . permissions)
(cons
(update-signon-authorisation-with-random-token authorisation)
permissions)))
(signon-api-user-authorisation-permissions api-user)))))
(define (filter-signon-user-application-permissions user applications)
(signon-user
(inherit user)
(application-permissions
(let ((application-names
(map
(match-lambda (($ <signon-application> name) name)
((and name string) name))
applications)))
(filter
(lambda (permission)
(member (car permission) application-names))
(signon-user-application-permissions user))))))
(define (use-gds-sso-strategy services strategy)
(map
(lambda (s)
(service
(service-kind s)
(if
(list? (service-parameters s))
(map
(lambda (parameter)
(if
(service-startup-config? parameter)
(service-startup-config-with-additional-environment-variables
parameter
`(("GDS_SSO_STRATEGY" . ,strategy)))
parameter))
(service-parameters s))
(service-parameters s))))
services))
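;; The signon-setup-*-script procedures below build Ruby snippets that
;; the signon-setup pre-startup script later loads via "rails runner"
;; to provision the configured applications, users and API users.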
(define (signon-setup-users-script signon-users)
(plain-file
"signon-setup-users.rb"
(string-join
`("users = ["
,(string-join
(map
(lambda (user)
(define sq (cut string-append "'" <> "'"))
(string-append
"["
(string-join
(list
(sq (signon-user-name user))
(sq (signon-user-email user))
(sq (signon-user-passphrase user))
(sq (signon-user-role user))
(string-append
"["
(string-join
(map
(match-lambda
((application . permissions)
(string-append
"[ '" application "', ["
(string-join (map sq permissions) ", ")
"]]")))
(signon-user-application-permissions user))
", ")
"]"))
", ")
"]"))
signon-users)
",\n")
"]"
"
puts \"#{users.length} users to create\"
Devise.deny_old_passwords = false
users.each do |name, email, passphrase, role, application_permissions|
puts \"Creating #{name}\"
u = User.where(name: name, email: email).first_or_initialize
u.password = passphrase
u.role = role
u.skip_invitation = true
u.skip_confirmation!
u.save!
application_permissions.each do |application_name, permissions|
app = Doorkeeper::Application.find_by_name!(application_name)
u.grant_application_permissions(app, permissions)
end
end")
"\n")))
(define (signon-setup-api-users-script signon-api-users)
(plain-file
"signon-setup-api-users.rb"
(string-join
`("users = ["
,(string-join
(map
(lambda (user)
(define sq (cut string-append "'" <> "'"))
(string-append
" ["
(string-join
(list
(sq (signon-api-user-name user))
(sq (signon-api-user-email user))
(string-append
"["
(string-join
(map
(match-lambda
((($ <signon-authorisation> application-name token)
.
permissions)
(string-append
"\n ['" application-name "', '" token "', ["
(string-join (map sq permissions) ", ")
"]]")))
(signon-api-user-authorisation-permissions user))
",")
"]"))
", ")
"]"))
signon-api-users)
",\n")
"]"
"
puts \"#{users.length} api users to create\"
users.each do |name, email, authorisation_permissions|
puts \"Creating #{name}\"
passphrase = SecureRandom.urlsafe_base64
u = ApiUser.where(email: email).first_or_initialize(
name: name,
password: passphrase,
password_confirmation: passphrase
)
u.api_user = true
u.skip_confirmation!
u.save!
authorisation_permissions.each do |application_name, token, permissions|
app = Doorkeeper::Application.find_by_name(application_name)
unless app
puts \"signon-setup-api-users: warning: #{application_name} not found, skipping\"
next
end
u.grant_application_permissions(app, permissions)
authorisation = u.authorisations.where(
application_id: app.id
).first_or_initialize(
application_id: app.id
)
authorisation.expires_in = ApiUser::DEFAULT_TOKEN_LIFE
authorisation.save!
authorisation.token = token
authorisation.save!
end
end")
"\n")))
(define (signon-setup-applications-script signon-applications)
(plain-file
"signon-setup-applications.rb"
(string-join
`("apps = ["
,(string-join
(map
(lambda (app)
(define sq (cut string-append "'" <> "'"))
(string-append
"["
(string-join
(list
(sq (signon-application-name app))
(sq (signon-application-description app))
(sq (signon-application-redirect-uri app))
(sq (signon-application-home-uri app))
(string-append
"["
(string-join
(map sq (signon-application-supported-permissions app))
", ")
"]")
(sq (signon-application-oauth-id app))
(sq (signon-application-oauth-secret app)))
", ")
"]"))
signon-applications)
",\n")
"]"
"
puts \"#{apps.length} applications to create\"
apps.each do |name, description, redirect_uri, home_uri, supported_permissions, oauth_id, oauth_secret|
puts \"Creating #{name}\"
app = Doorkeeper::Application.where(name: name).first_or_create
app.update!(
redirect_uri: redirect_uri,
description: description,
home_uri: home_uri,
uid: oauth_id,
secret: oauth_secret
)
supported_permissions.each do |permission|
SupportedPermission.where(
name: permission,
application_id: app.id
).first_or_create!
end
end")
"\n")))
(define-record-type* <signon-config>
signon-config make-signon-config
signon-config?
(applications signon-config-applications
(default '()))
(users signon-config-users
(default '()))
(api-users signon-config-api-users
(default '()))
(devise-pepper signon-config-devise-pepper
(default #f))
(devise-secret-key signon-config-devise-secret-key
(default #f))
(instance-name signon-config-instance-name
(default #f)))
(define (signon-config-with-random-secrets config)
(signon-config
(inherit config)
(devise-pepper (random-base16-string 30))
(devise-secret-key (random-base16-string 30))))
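;; The Signon service type: a Rails-style service whose startup
;; configuration is extended with the Devise secrets and instance name
;; from <signon-config>, plus a signon-setup pre-startup script that
;; provisions the configured applications, users and API users.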
(define-public signon-service-type
(service-type
(name 'signon)
(description "Single sign-on and user management service for GOV.UK")
(extensions
(service-extensions-modify-parameters
(modify-service-extensions-for-plek
name
(standard-rails-service-type-extensions name))
(lambda (parameters)
(let ((config (find signon-config? parameters)))
(map
(lambda (parameter)
(if (service-startup-config? parameter)
(service-startup-config-add-pre-startup-scripts
(service-startup-config-with-additional-environment-variables
parameter
(let ((pepper (signon-config-devise-pepper config))
(secret-key (signon-config-devise-secret-key config))
(instance-name (signon-config-instance-name config)))
`(,@(if pepper
`(("DEVISE_PEPPER" . ,pepper))
'())
,@(if secret-key
`(("DEVISE_SECRET_KEY" . ,secret-key))
'())
,@(if instance-name
`(("INSTANCE_NAME" . ,instance-name))
'()))))
`((signon-setup
.
,#~(lambda ()
(run-command
"rails" "runner"
(string-join
(map
(lambda (script)
(string-append "load '" script "';"))
(list
#$(signon-setup-applications-script
(signon-config-applications config))
#$(signon-setup-users-script
(map
(cut filter-signon-user-application-permissions
<> (signon-config-applications config))
(signon-config-users config)))
#$(signon-setup-api-users-script
(signon-config-api-users config))))))))))
parameter))
parameters)))))
(compose concatenate)
(extend (lambda (parameters extension-parameters)
(map
(lambda (parameter)
(if (signon-config? parameter)
(signon-config
(inherit parameter)
(applications (append
(signon-config-applications parameter)
(filter signon-application?
extension-parameters)))
(users (append
(signon-config-users parameter)
(filter signon-user?
extension-parameters)))
(api-users (append
(signon-config-api-users parameter)
(filter signon-api-user?
extension-parameters))))
parameter))
parameters)))
(default-value
(list (shepherd-service
(inherit default-shepherd-service)
(provision '(signon))
(requirement '(mysql loopback redis)))
(service-startup-config)
(plek-config) (rails-app-config) (@ (gds packages govuk) signon)
(signon-config)
(sidekiq-config
(file "config/sidekiq.yml"))
(mysql-connection-config
(user "signon")
(database "signon_production")
(password (random-base16-string 30)))
(redis-connection-config)))))
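;; Development helper: reuse GOVUK_GUIX_DEVELOPMENT_PASSPHRASE when set,
;; otherwise read or create a persisted passphrase under the govuk-guix
;; data directory (XDG_DATA_HOME, falling back to ~/.local/share).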
(define (signon-dev-user-passphrase)
(define (new-passphrase)
(random-base16-string 16))
(or (getenv "GOVUK_GUIX_DEVELOPMENT_PASSPHRASE")
(let ((data-dir (or (getenv "XDG_DATA_HOME")
(and=> (getenv "HOME")
(cut string-append <> "/.local/share")))))
(if (file-exists? data-dir)
(let* ((govuk-guix-dir
(string-append data-dir "/govuk-guix"))
(system-dir
(string-append govuk-guix-dir "/systems/development"))
(passphrase-file
(string-append system-dir "/passphrase")))
(if (file-exists? passphrase-file)
(call-with-input-file passphrase-file read-line)
(let ((passphrase (new-passphrase)))
(mkdir-p system-dir)
(call-with-output-file passphrase-file
(cut display passphrase <>))
passphrase)))
(let ((passphrase (new-passphrase)))
(simple-format #t "\nUnable to find directory to place
the Signon Dev user passphrase in\n")
(simple-format #t "The following passphrase will be used, but this will not be persisted: ~A\n\n" passphrase)
passphrase)))))
(define (update-signon-service-add-users users services)
(update-services-parameters
services
(list
(cons
signon-service-type
(list
(cons
signon-config?
(lambda (config)
(signon-config
(inherit config)
(users
(append (signon-config-users config)
users))))))))))
(define (update-services-with-random-signon-secrets services)
(map
(lambda (service)
(update-service-parameters
service
(list
(cons
signon-application?
(lambda (app)
(update-signon-application-with-random-oauth app)))
(cons
signon-api-user?
(lambda (api-user)
(update-signon-api-user-with-random-authorisation-tokens api-user))))))
services))
(define (set-random-devise-secrets-for-the-signon-service services)
(modify-services
services
(signon-service-type
parameters =>
(map
(lambda (parameter)
(if (signon-config? parameter)
(signon-config-with-random-secrets parameter)
parameter))
parameters))))
(define (update-service-startup-config-for-signon-application parameters)
(let ((signon-application (find signon-application? parameters)))
(if signon-application
(map
(lambda (parameter)
(if (service-startup-config? parameter)
(service-startup-config-with-additional-environment-variables
parameter
`(("OAUTH_ID" . ,(signon-application-oauth-id
signon-application))
("OAUTH_SECRET" . ,(signon-application-oauth-secret
signon-application))))
parameter))
parameters)
parameters)))
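;; Expose a bearer-token environment variable for each <signon-authorisation>
;; held by the <signon-api-user> parameters.  When an authorisation has no
;; explicit environment-variable, the name is derived from the application
;; name: upper-cased, spaces replaced by underscores, then suffixed with
;; _BEARER_TOKEN (e.g. "Example API" becomes EXAMPLE_API_BEARER_TOKEN).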
(define (update-service-startup-config-for-signon-api-user parameters)
(map
(lambda (parameter)
(if (service-startup-config? parameter)
(service-startup-config-with-additional-environment-variables
parameter
(map
(match-lambda
(($ <signon-authorisation> application-name token
environment-variable)
(let ((name
(or environment-variable
(string-append
(string-map
(lambda (c)
(if (eq? c #\space) #\_ c))
(string-upcase application-name))
"_BEARER_TOKEN"))))
(cons name token))))
(concatenate
(map
(match-lambda
(($ <signon-api-user> name email authorisation-permissions)
(map car authorisation-permissions)))
(filter signon-api-user? parameters)))))
parameter))
parameters))
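;; Fill in the <signon-application> URIs for this service from the Plek
;; configuration: home-uri becomes the service's URI (authenticating-proxy
;; uses the draft origin instead), and redirect-uri is that URI with
;; /auth/gds/callback appended.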
(define (update-signon-application name parameters)
(let ((plek-config (find plek-config? parameters)))
(if plek-config
(map
(lambda (parameter)
(if (signon-application? parameter)
(let ((service-uri
(if (eq? name 'authenticating-proxy)
(plek-config-draft-origin plek-config)
(service-uri-from-plek-config plek-config
name))))
(signon-application
(inherit parameter)
(home-uri service-uri)
(redirect-uri
(string-append service-uri "/auth/gds/callback"))))
parameter))
parameters)
parameters)))
(define (generic-rails-app-log-files name . rest)
(let*
((string-name (symbol->string name))
(ss (find shepherd-service? rest))
(sidekiq-config (find sidekiq-config? rest))
(sidekiq-service-name
(string-append
(symbol->string
(first (shepherd-service-provision ss)))
"-sidekiq")))
(cons
(string-append "/var/log/" string-name ".log")
(if sidekiq-config
(list
(string-append "/var/log/" sidekiq-service-name ".log"))
'()))))
(define (assert-shepherd-service-requirements-contain-signon parameters)
(and=> (find signon-application? parameters)
(lambda (signon-application)
(and=> (find shepherd-service? parameters)
(lambda (shepherd-service)
(unless (memq 'signon
(shepherd-service-requirement shepherd-service))
(error (string-append
"Missing signon requirement for "
(signon-application-name signon-application)))))))))
(define (modify-service-extensions-for-signon name service-extensions)
(service-extensions-modify-parameters
(cons*
(service-extension signon-service-type
(lambda (parameters)
(assert-shepherd-service-requirements-contain-signon parameters)
(filter
(lambda (parameter)
(or (signon-application? parameter)
(signon-api-user? parameter)
(signon-user? parameter)))
parameters)))
    ;; TODO Ideally this would not be in this module, as it's not
    ;; directly related to signon
    ;; (service-extension govuk-tailon-service-type
    ;;                    (lambda (parameters)
    ;;                      (let ((log-files
    ;;                             (apply
    ;;                              generic-rails-app-log-files
    ;;                              name
    ;;                              parameters)))
    ;;                        (if (eq? (length log-files) 1)
    ;;                            log-files
    ;;                            (list
    ;;                             (cons (symbol->string name)
    ;;                                   log-files))))))
service-extensions)
(lambda (parameters)
(update-service-startup-config-for-signon-application
(update-service-startup-config-for-signon-api-user
(update-signon-application name parameters))))))
(define (modify-service-extensions-for-signon-and-plek name service-extensions)
(modify-service-extensions-for-signon
name
(modify-service-extensions-for-plek name service-extensions)))
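;; Illustrative sketch (the service name is hypothetical) of how a GOV.UK
;; Rails service type might combine these helpers when declaring its
;; extensions:
;;
;; (service-type
;;  (name 'example-service)
;;  (extensions
;;   (modify-service-extensions-for-signon-and-plek
;;    'example-service
;;    (standard-rails-service-type-extensions 'example-service)))
;;  ...)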
| null | https://raw.githubusercontent.com/alphagov/govuk-guix/dea8c26d2ae882d0278be5c745e23abb25d4a4e2/gds/services/govuk/signon.scm | scheme | If #f, the default pattern
will be used
TODO Ideally this would not be in this module, as it's not
(service-extension govuk-tailon-service-type
(lambda (parameters)
(let ((log-files
(apply
generic-rails-app-log-files
name
parameters)))
log-files
(list
(cons (symbol->string name)
log-files)))))) | (define-module (gds services govuk signon)
#:use-module (srfi srfi-1)
#:use-module (ice-9 match)
#:use-module (ice-9 rdelim)
#:use-module (srfi srfi-26)
#:use-module (guix records)
#:use-module (guix gexp)
#:use-module (guix build utils)
#:use-module (gnu services)
#:use-module (gnu services shepherd)
#:use-module (gds services)
#:use-module (gds services utils)
#:use-module (gds services utils databases)
#:use-module (gds services utils databases mysql)
#:use-module (gds services rails)
#:use-module (gds services sidekiq)
#:use-module (gds services govuk tailon)
#:use-module (gds services govuk plek)
#:export (<signon-application>
signon-application
signon-application?
signon-application-name
signon-application-description
signon-application-redirect-uri
signon-application-home-uri
signon-application-supported-permissions
signon-application-oauth-id
signon-application-oauth-secret
<signon-user>
signon-user
signon-user?
signon-user-name
signon-user-email
signon-user-passphrase
signon-user-application-permissions
<signon-api-user>
signon-api-user
signon-api-user?
signon-api-user-name
signon-api-user-email
signon-api-user-authorisation-permissions
<signon-authorisation>
signon-authorisation
signon-authorisation?
signon-authorisation-application-name
signon-authorisation-token
signon-authorisation-environment-variable
use-gds-sso-strategy
update-signon-application-with-random-oauth
update-signon-api-user-with-random-authorisation-tokens
filter-signon-user-application-permissions
signon-setup-users-script
signon-setup-api-users-script
signon-setup-applications-script
<signon-config>
signon-config
signon-config?
signon-config-applications
signon-config-users
signon-config-devise-pepper
signon-config-devise-secret-key
signon-config-instance-name
signon-config-with-random-secrets
signon-dev-user-passphrase
update-signon-service-add-users
update-services-with-random-signon-secrets
set-random-devise-secrets-for-the-signon-service
modify-service-extensions-for-signon
modify-service-extensions-for-signon-and-plek))
(define-record-type* <signon-application>
signon-application make-signon-application
signon-application?
(name signon-application-name)
(description signon-application-description
(default ""))
(redirect-uri signon-application-redirect-uri
(default #f))
(home-uri signon-application-home-uri
(default #f))
(supported-permissions signon-application-supported-permissions
(default '()))
(oauth-id signon-application-oauth-id
(default #f))
(oauth-secret signon-application-oauth-secret
(default #f)))
(define-record-type* <signon-user>
signon-user make-signon-user
signon-user?
(name signon-user-name)
(email signon-user-email)
(passphrase signon-user-passphrase)
(role signon-user-role)
(application-permissions signon-user-application-permissions
(default '())))
(define-record-type* <signon-api-user>
signon-api-user make-signon-api-user
signon-api-user?
(name signon-api-user-name)
(email signon-api-user-email)
(authorisation-permissions signon-api-user-authorisation-permissions
(default '())))
(define-record-type* <signon-authorisation>
signon-authorisation make-signon-authorisation
signon-authorisation?
(application-name signon-authorisation-application-name)
(token signon-authorisation-token
(default #f))
(environment-variable signon-authorisation-environment-variable
(define (update-signon-application-with-random-oauth app)
(signon-application
(inherit app)
(oauth-id (random-base16-string 64))
(oauth-secret (random-base16-string 64))))
(define (update-signon-authorisation-with-random-token authorisation)
(signon-authorisation
(inherit authorisation)
(token (random-base16-string 30))))
(define (update-signon-api-user-with-random-authorisation-tokens api-user)
(signon-api-user
(inherit api-user)
(authorisation-permissions
(map
(match-lambda
((authorisation . permissions)
(cons
(update-signon-authorisation-with-random-token authorisation)
permissions)))
(signon-api-user-authorisation-permissions api-user)))))
(define (filter-signon-user-application-permissions user applications)
(signon-user
(inherit user)
(application-permissions
(let ((application-names
(map
(match-lambda (($ <signon-application> name) name)
((and name string) name))
applications)))
(filter
(lambda (permission)
(member (car permission) application-names))
(signon-user-application-permissions user))))))
(define (use-gds-sso-strategy services strategy)
(map
(lambda (s)
(service
(service-kind s)
(if
(list? (service-parameters s))
(map
(lambda (parameter)
(if
(service-startup-config? parameter)
(service-startup-config-with-additional-environment-variables
parameter
`(("GDS_SSO_STRATEGY" . ,strategy)))
parameter))
(service-parameters s))
(service-parameters s))))
services))
(define (signon-setup-users-script signon-users)
(plain-file
"signon-setup-users.rb"
(string-join
`("users = ["
,(string-join
(map
(lambda (user)
(define sq (cut string-append "'" <> "'"))
(string-append
"["
(string-join
(list
(sq (signon-user-name user))
(sq (signon-user-email user))
(sq (signon-user-passphrase user))
(sq (signon-user-role user))
(string-append
"["
(string-join
(map
(match-lambda
((application . permissions)
(string-append
"[ '" application "', ["
(string-join (map sq permissions) ", ")
"]]")))
(signon-user-application-permissions user))
", ")
"]"))
", ")
"]"))
signon-users)
",\n")
"]"
"
puts \"#{users.length} users to create\"
Devise.deny_old_passwords = false
users.each do |name, email, passphrase, role, application_permissions|
puts \"Creating #{name}\"
u = User.where(name: name, email: email).first_or_initialize
u.password = passphrase
u.role = role
u.skip_invitation = true
u.skip_confirmation!
u.save!
application_permissions.each do |application_name, permissions|
app = Doorkeeper::Application.find_by_name!(application_name)
u.grant_application_permissions(app, permissions)
end
end")
"\n")))
(define (signon-setup-api-users-script signon-api-users)
(plain-file
"signon-setup-api-users.rb"
(string-join
`("users = ["
,(string-join
(map
(lambda (user)
(define sq (cut string-append "'" <> "'"))
(string-append
" ["
(string-join
(list
(sq (signon-api-user-name user))
(sq (signon-api-user-email user))
(string-append
"["
(string-join
(map
(match-lambda
((($ <signon-authorisation> application-name token)
.
permissions)
(string-append
"\n ['" application-name "', '" token "', ["
(string-join (map sq permissions) ", ")
"]]")))
(signon-api-user-authorisation-permissions user))
",")
"]"))
", ")
"]"))
signon-api-users)
",\n")
"]"
"
puts \"#{users.length} api users to create\"
users.each do |name, email, authorisation_permissions|
puts \"Creating #{name}\"
passphrase = SecureRandom.urlsafe_base64
u = ApiUser.where(email: email).first_or_initialize(
name: name,
password: passphrase,
password_confirmation: passphrase
)
u.api_user = true
u.skip_confirmation!
u.save!
authorisation_permissions.each do |application_name, token, permissions|
app = Doorkeeper::Application.find_by_name(application_name)
unless app
puts \"signon-setup-api-users: warning: #{application_name} not found, skipping\"
next
end
u.grant_application_permissions(app, permissions)
authorisation = u.authorisations.where(
application_id: app.id
).first_or_initialize(
application_id: app.id
)
authorisation.expires_in = ApiUser::DEFAULT_TOKEN_LIFE
authorisation.save!
authorisation.token = token
authorisation.save!
end
end")
"\n")))
(define (signon-setup-applications-script signon-applications)
(plain-file
"signon-setup-applications.rb"
(string-join
`("apps = ["
,(string-join
(map
(lambda (app)
(define sq (cut string-append "'" <> "'"))
(string-append
"["
(string-join
(list
(sq (signon-application-name app))
(sq (signon-application-description app))
(sq (signon-application-redirect-uri app))
(sq (signon-application-home-uri app))
(string-append
"["
(string-join
(map sq (signon-application-supported-permissions app))
", ")
"]")
(sq (signon-application-oauth-id app))
(sq (signon-application-oauth-secret app)))
", ")
"]"))
signon-applications)
",\n")
"]"
"
puts \"#{apps.length} applications to create\"
apps.each do |name, description, redirect_uri, home_uri, supported_permissions, oauth_id, oauth_secret|
puts \"Creating #{name}\"
app = Doorkeeper::Application.where(name: name).first_or_create
app.update!(
redirect_uri: redirect_uri,
description: description,
home_uri: home_uri,
uid: oauth_id,
secret: oauth_secret
)
supported_permissions.each do |permission|
SupportedPermission.where(
name: permission,
application_id: app.id
).first_or_create!
end
end")
"\n")))
(define-record-type* <signon-config>
signon-config make-signon-config
signon-config?
(applications signon-config-applications
(default '()))
(users signon-config-users
(default '()))
(api-users signon-config-api-users
(default '()))
(devise-pepper signon-config-devise-pepper
(default #f))
(devise-secret-key signon-config-devise-secret-key
(default #f))
(instance-name signon-config-instance-name
(default #f)))
(define (signon-config-with-random-secrets config)
(signon-config
(inherit config)
(devise-pepper (random-base16-string 30))
(devise-secret-key (random-base16-string 30))))
(define-public signon-service-type
(service-type
(name 'signon)
(description "Single sign-on and user management service for GOV.UK")
(extensions
(service-extensions-modify-parameters
(modify-service-extensions-for-plek
name
(standard-rails-service-type-extensions name))
(lambda (parameters)
(let ((config (find signon-config? parameters)))
(map
(lambda (parameter)
(if (service-startup-config? parameter)
(service-startup-config-add-pre-startup-scripts
(service-startup-config-with-additional-environment-variables
parameter
(let ((pepper (signon-config-devise-pepper config))
(secret-key (signon-config-devise-secret-key config))
(instance-name (signon-config-instance-name config)))
`(,@(if pepper
`(("DEVISE_PEPPER" . ,pepper))
'())
,@(if secret-key
`(("DEVISE_SECRET_KEY" . ,secret-key))
'())
,@(if instance-name
`(("INSTANCE_NAME" . ,instance-name))
'()))))
`((signon-setup
.
,#~(lambda ()
(run-command
"rails" "runner"
(string-join
(map
(lambda (script)
(string-append "load '" script "';"))
(list
#$(signon-setup-applications-script
(signon-config-applications config))
#$(signon-setup-users-script
(map
(cut filter-signon-user-application-permissions
<> (signon-config-applications config))
(signon-config-users config)))
#$(signon-setup-api-users-script
(signon-config-api-users config))))))))))
parameter))
parameters)))))
(compose concatenate)
(extend (lambda (parameters extension-parameters)
(map
(lambda (parameter)
(if (signon-config? parameter)
(signon-config
(inherit parameter)
(applications (append
(signon-config-applications parameter)
(filter signon-application?
extension-parameters)))
(users (append
(signon-config-users parameter)
(filter signon-user?
extension-parameters)))
(api-users (append
(signon-config-api-users parameter)
(filter signon-api-user?
extension-parameters))))
parameter))
parameters)))
(default-value
(list (shepherd-service
(inherit default-shepherd-service)
(provision '(signon))
(requirement '(mysql loopback redis)))
(service-startup-config)
(plek-config) (rails-app-config) (@ (gds packages govuk) signon)
(signon-config)
(sidekiq-config
(file "config/sidekiq.yml"))
(mysql-connection-config
(user "signon")
(database "signon_production")
(password (random-base16-string 30)))
(redis-connection-config)))))
(define (signon-dev-user-passphrase)
(define (new-passphrase)
(random-base16-string 16))
(or (getenv "GOVUK_GUIX_DEVELOPMENT_PASSPHRASE")
(let ((data-dir (or (getenv "XDG_DATA_HOME")
(and=> (getenv "HOME")
(cut string-append <> "/.local/share")))))
(if (file-exists? data-dir)
(let* ((govuk-guix-dir
(string-append data-dir "/govuk-guix"))
(system-dir
(string-append govuk-guix-dir "/systems/development"))
(passphrase-file
(string-append system-dir "/passphrase")))
(if (file-exists? passphrase-file)
(call-with-input-file passphrase-file read-line)
(let ((passphrase (new-passphrase)))
(mkdir-p system-dir)
(call-with-output-file passphrase-file
(cut display passphrase <>))
passphrase)))
(let ((passphrase (new-passphrase)))
(simple-format #t "\nUnable to find directory to place
the Signon Dev user passphrase in\n")
(simple-format #t "The following passphrase will be used, but this will not be persisted: ~A\n\n" passphrase)
passphrase)))))
(define (update-signon-service-add-users users services)
(update-services-parameters
services
(list
(cons
signon-service-type
(list
(cons
signon-config?
(lambda (config)
(signon-config
(inherit config)
(users
(append (signon-config-users config)
users))))))))))
(define (update-services-with-random-signon-secrets services)
(map
(lambda (service)
(update-service-parameters
service
(list
(cons
signon-application?
(lambda (app)
(update-signon-application-with-random-oauth app)))
(cons
signon-api-user?
(lambda (api-user)
(update-signon-api-user-with-random-authorisation-tokens api-user))))))
services))
(define (set-random-devise-secrets-for-the-signon-service services)
(modify-services
services
(signon-service-type
parameters =>
(map
(lambda (parameter)
(if (signon-config? parameter)
(signon-config-with-random-secrets parameter)
parameter))
parameters))))
(define (update-service-startup-config-for-signon-application parameters)
(let ((signon-application (find signon-application? parameters)))
(if signon-application
(map
(lambda (parameter)
(if (service-startup-config? parameter)
(service-startup-config-with-additional-environment-variables
parameter
`(("OAUTH_ID" . ,(signon-application-oauth-id
signon-application))
("OAUTH_SECRET" . ,(signon-application-oauth-secret
signon-application))))
parameter))
parameters)
parameters)))
(define (update-service-startup-config-for-signon-api-user parameters)
(map
(lambda (parameter)
(if (service-startup-config? parameter)
(service-startup-config-with-additional-environment-variables
parameter
(map
(match-lambda
(($ <signon-authorisation> application-name token
environment-variable)
(let ((name
(or environment-variable
(string-append
(string-map
(lambda (c)
(if (eq? c #\space) #\_ c))
(string-upcase application-name))
"_BEARER_TOKEN"))))
(cons name token))))
(concatenate
(map
(match-lambda
(($ <signon-api-user> name email authorisation-permissions)
(map car authorisation-permissions)))
(filter signon-api-user? parameters)))))
parameter))
parameters))
(define (update-signon-application name parameters)
(let ((plek-config (find plek-config? parameters)))
(if plek-config
(map
(lambda (parameter)
(if (signon-application? parameter)
(let ((service-uri
(if (eq? name 'authenticating-proxy)
(plek-config-draft-origin plek-config)
(service-uri-from-plek-config plek-config
name))))
(signon-application
(inherit parameter)
(home-uri service-uri)
(redirect-uri
(string-append service-uri "/auth/gds/callback"))))
parameter))
parameters)
parameters)))
(define (generic-rails-app-log-files name . rest)
(let*
((string-name (symbol->string name))
(ss (find shepherd-service? rest))
(sidekiq-config (find sidekiq-config? rest))
(sidekiq-service-name
(string-append
(symbol->string
(first (shepherd-service-provision ss)))
"-sidekiq")))
(cons
(string-append "/var/log/" string-name ".log")
(if sidekiq-config
(list
(string-append "/var/log/" sidekiq-service-name ".log"))
'()))))
(define (assert-shepherd-service-requirements-contain-signon parameters)
(and=> (find signon-application? parameters)
(lambda (signon-application)
(and=> (find shepherd-service? parameters)
(lambda (shepherd-service)
(unless (memq 'signon
(shepherd-service-requirement shepherd-service))
(error (string-append
"Missing signon requirement for "
(signon-application-name signon-application)))))))))
(define (modify-service-extensions-for-signon name service-extensions)
(service-extensions-modify-parameters
(cons*
(service-extension signon-service-type
(lambda (parameters)
(assert-shepherd-service-requirements-contain-signon parameters)
(filter
(lambda (parameter)
(or (signon-application? parameter)
(signon-api-user? parameter)
(signon-user? parameter)))
parameters)))
directly related to
( if ( eq ? ( length log - files ) 1 )
service-extensions)
(lambda (parameters)
(update-service-startup-config-for-signon-application
(update-service-startup-config-for-signon-api-user
(update-signon-application name parameters))))))
(define (modify-service-extensions-for-signon-and-plek name service-extensions)
(modify-service-extensions-for-signon
name
(modify-service-extensions-for-plek name service-extensions)))
|
1cd82b5c3f90344f5f1f4953b1b85829d43abdd1343c7ed14241bf4b77cf96f0 | basho/rebar | rebar_erlydtl_compiler.erl | -*- erlang - indent - level : 4;indent - tabs - mode : nil -*-
%% ex: ts=4 sw=4 et
%% -------------------------------------------------------------------
%%
rebar : Erlang Build Tools
%%
Copyright ( c ) 2009 ( ) ,
( )
%%
%% Permission is hereby granted, free of charge, to any person obtaining a copy
%% of this software and associated documentation files (the "Software"), to deal
in the Software without restriction , including without limitation the rights
%% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software , and to permit persons to whom the Software is
%% furnished to do so, subject to the following conditions:
%%
%% The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software .
%%
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
%% THE SOFTWARE.
%% -------------------------------------------------------------------
%% The rebar_erlydtl_compiler module is a plugin for rebar that compiles
%% ErlyDTL templates. By default, it compiles all templates/*.dtl
%% to ebin/*_dtl.beam.
%%
%% Configuration options should be placed in rebar.config under
%% 'erlydtl_opts'. It can be a list of name-value tuples or a list of
%% lists of name-value tuples if you have multiple template directories
%% that need to have different settings (see example below).
%%
%% Available options include:
%%
%% doc_root: where to find templates to compile
%% "templates" by default
%%
%% out_dir: where to put compiled template beam files
%% "ebin" by default
%%
%% source_ext: the file extension the template sources have
%% ".dtl" by default
%%
%% module_ext: characters to append to the template's module name
%% "_dtl" by default
%%
%% recursive: boolean that determines if doc_root(s) need to be
%% scanned recursively for matching template file names
%% (default: true).
%% For example, if you had:
%% /t_src/
%% base.html
%% foo.html
%%
%% And you wanted them compiled to:
/priv/
%% base.beam
%% foo.beam
%%
%% You would add to your rebar.config:
%% {erlydtl_opts, [
, " t_src " } ,
%% {out_dir, "priv"},
%% {source_ext, ".html"},
%% {module_ext, ""}
%% ]}.
%%
%% The default settings are the equivalent of:
%% {erlydtl_opts, [
, " templates " } ,
%% {out_dir, "ebin"},
%% {source_ext, ".dtl"},
%% {module_ext, "_dtl"}
%% ]}.
%%
%% The following example will compile the following templates:
%% "src/*.dtl" files into "ebin/*_dtl.beam" and
%% "templates/*.html" into "ebin/*.beam". Note that any tuple option
%% (such as 'out_dir') in the outer list is added to each inner list:
%% {erlydtl_opts, [
%% {out_dir, "ebin"},
%% {recursive, false},
%% [
, " src " } , { module_ext , " _ dtl " }
%% ],
%% [
, " templates " , { module_ext , " " } , { source_ext , " .html " }
%% ]
%% ]}.
-module(rebar_erlydtl_compiler).
-export([compile/2]).
%% for internal use only
-export([info/2]).
-include("rebar.hrl").
%% ===================================================================
%% Public API
%% ===================================================================
compile(Config, _AppFile) ->
MultiDtlOpts = erlydtl_opts(Config),
OrigPath = code:get_path(),
true = code:add_path(rebar_utils:ebin_dir()),
Result = lists:foldl(fun(DtlOpts, _) ->
rebar_base_compiler:run(Config, [],
option(doc_root, DtlOpts),
option(source_ext, DtlOpts),
option(out_dir, DtlOpts),
option(module_ext, DtlOpts) ++ ".beam",
fun(S, T, C) ->
compile_dtl(C, S, T, DtlOpts)
end,
[{check_last_mod, false},
{recursive, option(recursive, DtlOpts)}])
end, ok, MultiDtlOpts),
true = code:set_path(OrigPath),
Result.
%% ===================================================================
Internal functions
%% ===================================================================
info(help, compile) ->
?CONSOLE(
"Build ErlyDtl (*.dtl) sources.~n"
"~n"
"Valid rebar.config options:~n"
" ~p~n",
[
{erlydtl_opts, [{doc_root, "templates"},
{out_dir, "ebin"},
{source_ext, ".dtl"},
{module_ext, "_dtl"},
{recursive, true}]}
]).
erlydtl_opts(Config) ->
Opts = rebar_config:get(Config, erlydtl_opts, []),
Tuples = [{K,V} || {K,V} <- Opts],
case [L || L <- Opts, is_list(L), not io_lib:printable_list(L)] of
[] ->
[lists:keysort(1, Tuples)];
Lists ->
lists:map(
fun(L) ->
lists:keysort(1,
lists:foldl(
fun({K,T}, Acc) ->
lists:keystore(K, 1, Acc, {K, T})
end, Tuples, L))
end, Lists)
end.
option(Opt, DtlOpts) ->
proplists:get_value(Opt, DtlOpts, default(Opt)).
default(doc_root) -> "templates";
default(out_dir) -> "ebin";
default(source_ext) -> ".dtl";
default(module_ext) -> "_dtl";
default(custom_tags_dir) -> "";
default(compiler_options) -> [return];
default(recursive) -> true.
compile_dtl(Config, Source, Target, DtlOpts) ->
case code:which(erlydtl) of
non_existing ->
?ERROR("~n===============================================~n"
" You need to install erlydtl to compile DTL templates~n"
" Download the latest tarball release from github~n"
" /~n"
" and install it into your erlang library dir~n"
"===============================================~n~n", []),
?FAIL;
_ ->
case needs_compile(Source, Target, DtlOpts) of
true ->
do_compile(Config, Source, Target, DtlOpts);
false ->
skipped
end
end.
do_compile(Config, Source, Target, DtlOpts) ->
%% TODO: Check last mod on target and referenced DTLs here..
ensure that and out_dir are defined ,
%% using defaults if necessary
Opts = lists:ukeymerge(1,
DtlOpts,
lists:sort(
[{out_dir, option(out_dir, DtlOpts)},
{doc_root, option(doc_root, DtlOpts)},
{custom_tags_dir, option(custom_tags_dir, DtlOpts)},
{compiler_options, option(compiler_options, DtlOpts)}])),
?INFO("Compiling \"~s\" -> \"~s\" with options:~n ~s~n",
[Source, Target, io_lib:format("~p", [Opts])]),
case erlydtl:compile(Source,
module_name(Target),
Opts) of
ok ->
ok;
error ->
rebar_base_compiler:error_tuple(Config, Source, [], [], Opts);
{error, {_File, _Msgs} = Error} ->
rebar_base_compiler:error_tuple(Config, Source, [Error], [], Opts);
{error, Msg} ->
Es = [{Source, [{erlydtl_parser, Msg}]}],
rebar_base_compiler:error_tuple(Config, Source, Es, [], Opts)
end.
module_name(Target) ->
F = filename:basename(Target),
string:substr(F, 1, length(F)-length(".beam")).
needs_compile(Source, Target, DtlOpts) ->
LM = filelib:last_modified(Target),
LM < filelib:last_modified(Source) orelse
lists:any(fun(D) -> LM < filelib:last_modified(D) end,
referenced_dtls(Source, DtlOpts)).
referenced_dtls(Source, DtlOpts) ->
DtlOpts1 = lists:keyreplace(doc_root, 1, DtlOpts,
{doc_root, filename:dirname(Source)}),
Set = referenced_dtls1([Source], DtlOpts1,
sets:add_element(Source, sets:new())),
sets:to_list(sets:del_element(Source, Set)).
referenced_dtls1(Step, DtlOpts, Seen) ->
ExtMatch = re:replace(option(source_ext, DtlOpts), "\.", "\\\\\\\\.",
[{return, list}]),
ShOpts = [{use_stdout, false}, return_on_error],
AllRefs =
lists:append(
[begin
Cmd = lists:flatten(["grep -o [^\\\"]*\\",
ExtMatch, "[^\\\"]* ", F]),
case rebar_utils:sh(Cmd, ShOpts) of
{ok, Res} ->
string:tokens(Res, "\n");
{error, _} ->
""
end
end || F <- Step]),
DocRoot = option(doc_root, DtlOpts),
WithPaths = [ filename:join([DocRoot, F]) || F <- AllRefs ],
?DEBUG("All deps: ~p\n", [WithPaths]),
Existing = [F || F <- WithPaths, filelib:is_regular(F)],
New = sets:subtract(sets:from_list(Existing), Seen),
case sets:size(New) of
0 -> Seen;
_ -> referenced_dtls1(sets:to_list(New), DtlOpts,
sets:union(New, Seen))
end.
| null | https://raw.githubusercontent.com/basho/rebar/cd55176009df794f506771fd574de9303ff2a42e/src/rebar_erlydtl_compiler.erl | erlang | ex: ts=4 sw=4 et
-------------------------------------------------------------------
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
-------------------------------------------------------------------
The rebar_erlydtl_compiler module is a plugin for rebar that compiles
ErlyDTL templates. By default, it compiles all templates/*.dtl
to ebin/*_dtl.beam.
Configuration options should be placed in rebar.config under
'erlydtl_opts'. It can be a list of name-value tuples or a list of
lists of name-value tuples if you have multiple template directories
that need to have different settings (see example below).
Available options include:
doc_root: where to find templates to compile
"templates" by default
out_dir: where to put compiled template beam files
"ebin" by default
source_ext: the file extension the template sources have
".dtl" by default
module_ext: characters to append to the template's module name
"_dtl" by default
recursive: boolean that determines if doc_root(s) need to be
scanned recursively for matching template file names
(default: true).
For example, if you had:
/t_src/
base.html
foo.html
And you wanted them compiled to:
base.beam
foo.beam
You would add to your rebar.config:
{erlydtl_opts, [
{out_dir, "priv"},
{source_ext, ".html"},
{module_ext, ""}
]}.
The default settings are the equivalent of:
{erlydtl_opts, [
{out_dir, "ebin"},
{source_ext, ".dtl"},
{module_ext, "_dtl"}
]}.
The following example will compile the following templates:
"src/*.dtl" files into "ebin/*_dtl.beam" and
"templates/*.html" into "ebin/*.beam". Note that any tuple option
(such as 'out_dir') in the outer list is added to each inner list:
{erlydtl_opts, [
{out_dir, "ebin"},
{recursive, false},
[
],
[
]
]}.
for internal use only
===================================================================
Public API
===================================================================
===================================================================
===================================================================
TODO: Check last mod on target and referenced DTLs here..
using defaults if necessary | -*- erlang - indent - level : 4;indent - tabs - mode : nil -*-
rebar : Erlang Build Tools
Copyright ( c ) 2009 ( ) ,
( )
in the Software without restriction , including without limitation the rights
copies of the Software , and to permit persons to whom the Software is
all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
/priv/
, " t_src " } ,
, " templates " } ,
, " src " } , { module_ext , " _ dtl " }
, " templates " , { module_ext , " " } , { source_ext , " .html " }
-module(rebar_erlydtl_compiler).
-export([compile/2]).
-export([info/2]).
-include("rebar.hrl").
compile(Config, _AppFile) ->
MultiDtlOpts = erlydtl_opts(Config),
OrigPath = code:get_path(),
true = code:add_path(rebar_utils:ebin_dir()),
Result = lists:foldl(fun(DtlOpts, _) ->
rebar_base_compiler:run(Config, [],
option(doc_root, DtlOpts),
option(source_ext, DtlOpts),
option(out_dir, DtlOpts),
option(module_ext, DtlOpts) ++ ".beam",
fun(S, T, C) ->
compile_dtl(C, S, T, DtlOpts)
end,
[{check_last_mod, false},
{recursive, option(recursive, DtlOpts)}])
end, ok, MultiDtlOpts),
true = code:set_path(OrigPath),
Result.
Internal functions
info(help, compile) ->
?CONSOLE(
"Build ErlyDtl (*.dtl) sources.~n"
"~n"
"Valid rebar.config options:~n"
" ~p~n",
[
{erlydtl_opts, [{doc_root, "templates"},
{out_dir, "ebin"},
{source_ext, ".dtl"},
{module_ext, "_dtl"},
{recursive, true}]}
]).
erlydtl_opts(Config) ->
Opts = rebar_config:get(Config, erlydtl_opts, []),
Tuples = [{K,V} || {K,V} <- Opts],
case [L || L <- Opts, is_list(L), not io_lib:printable_list(L)] of
[] ->
[lists:keysort(1, Tuples)];
Lists ->
lists:map(
fun(L) ->
lists:keysort(1,
lists:foldl(
fun({K,T}, Acc) ->
lists:keystore(K, 1, Acc, {K, T})
end, Tuples, L))
end, Lists)
end.
option(Opt, DtlOpts) ->
proplists:get_value(Opt, DtlOpts, default(Opt)).
default(doc_root) -> "templates";
default(out_dir) -> "ebin";
default(source_ext) -> ".dtl";
default(module_ext) -> "_dtl";
default(custom_tags_dir) -> "";
default(compiler_options) -> [return];
default(recursive) -> true.
compile_dtl(Config, Source, Target, DtlOpts) ->
case code:which(erlydtl) of
non_existing ->
?ERROR("~n===============================================~n"
" You need to install erlydtl to compile DTL templates~n"
" Download the latest tarball release from github~n"
" /~n"
" and install it into your erlang library dir~n"
"===============================================~n~n", []),
?FAIL;
_ ->
case needs_compile(Source, Target, DtlOpts) of
true ->
do_compile(Config, Source, Target, DtlOpts);
false ->
skipped
end
end.
do_compile(Config, Source, Target, DtlOpts) ->
ensure that and out_dir are defined ,
Opts = lists:ukeymerge(1,
DtlOpts,
lists:sort(
[{out_dir, option(out_dir, DtlOpts)},
{doc_root, option(doc_root, DtlOpts)},
{custom_tags_dir, option(custom_tags_dir, DtlOpts)},
{compiler_options, option(compiler_options, DtlOpts)}])),
?INFO("Compiling \"~s\" -> \"~s\" with options:~n ~s~n",
[Source, Target, io_lib:format("~p", [Opts])]),
case erlydtl:compile(Source,
module_name(Target),
Opts) of
ok ->
ok;
error ->
rebar_base_compiler:error_tuple(Config, Source, [], [], Opts);
{error, {_File, _Msgs} = Error} ->
rebar_base_compiler:error_tuple(Config, Source, [Error], [], Opts);
{error, Msg} ->
Es = [{Source, [{erlydtl_parser, Msg}]}],
rebar_base_compiler:error_tuple(Config, Source, Es, [], Opts)
end.
module_name(Target) ->
F = filename:basename(Target),
string:substr(F, 1, length(F)-length(".beam")).
needs_compile(Source, Target, DtlOpts) ->
LM = filelib:last_modified(Target),
LM < filelib:last_modified(Source) orelse
lists:any(fun(D) -> LM < filelib:last_modified(D) end,
referenced_dtls(Source, DtlOpts)).
referenced_dtls(Source, DtlOpts) ->
DtlOpts1 = lists:keyreplace(doc_root, 1, DtlOpts,
{doc_root, filename:dirname(Source)}),
Set = referenced_dtls1([Source], DtlOpts1,
sets:add_element(Source, sets:new())),
sets:to_list(sets:del_element(Source, Set)).
referenced_dtls1(Step, DtlOpts, Seen) ->
ExtMatch = re:replace(option(source_ext, DtlOpts), "\.", "\\\\\\\\.",
[{return, list}]),
ShOpts = [{use_stdout, false}, return_on_error],
AllRefs =
lists:append(
[begin
Cmd = lists:flatten(["grep -o [^\\\"]*\\",
ExtMatch, "[^\\\"]* ", F]),
case rebar_utils:sh(Cmd, ShOpts) of
{ok, Res} ->
string:tokens(Res, "\n");
{error, _} ->
""
end
end || F <- Step]),
DocRoot = option(doc_root, DtlOpts),
WithPaths = [ filename:join([DocRoot, F]) || F <- AllRefs ],
?DEBUG("All deps: ~p\n", [WithPaths]),
Existing = [F || F <- WithPaths, filelib:is_regular(F)],
New = sets:subtract(sets:from_list(Existing), Seen),
case sets:size(New) of
0 -> Seen;
_ -> referenced_dtls1(sets:to_list(New), DtlOpts,
sets:union(New, Seen))
end.
|
6b6d8ac405530db3feb508fa59c4f500d7457273393baeda3e03f878d95fb971 | micahcantor/racket-lox | function.rkt | #lang typed/racket/base
(require "stmt.rkt")
(require "env.rkt")
(provide (all-defined-out))
(struct function ([declaration : FunDecl] [closure : Env] [is-initalizer? : Boolean]))
(define-type Function function)
(struct return exn ([value : Any]))
(define-type Return return)
(: make-return (-> Any Return))
(define (make-return v)
(return "" (current-continuation-marks) v))
| null | https://raw.githubusercontent.com/micahcantor/racket-lox/6bc0b4b3af39977cac34478c9ba8874d1379dcdf/src/function.rkt | racket | #lang typed/racket/base
(require "stmt.rkt")
(require "env.rkt")
(provide (all-defined-out))
(struct function ([declaration : FunDecl] [closure : Env] [is-initalizer? : Boolean]))
(define-type Function function)
(struct return exn ([value : Any]))
(define-type Return return)
(: make-return (-> Any Return))
(define (make-return v)
(return "" (current-continuation-marks) v))
|
|
73bf578be709ea540b769eab9e550490cc8b5b4c87afe9249cfe21da2c225925 | mzp/websocket-ocaml | frame.ml | open Base
open ExtString
type t =
Text of string
let rec unpack s =
match s with parser
[< '\x00' = Stream.next; xs = Parsec.until '\xFF'>] ->
Text (String.implode xs)
| [< >] ->
unpack s
let pack = function
| Text s ->
Printf.sprintf "\x00%s\xFF" s
| null | https://raw.githubusercontent.com/mzp/websocket-ocaml/b584bd20dfe6d95f65bc6e1ba8838b1ecfa8ec0e/webSocket/frame.ml | ocaml | open Base
open ExtString
type t =
Text of string
let rec unpack s =
match s with parser
[< '\x00' = Stream.next; xs = Parsec.until '\xFF'>] ->
Text (String.implode xs)
| [< >] ->
unpack s
let pack = function
| Text s ->
Printf.sprintf "\x00%s\xFF" s
|
|
fe2f5fcc9357b7b15d8a6091c8384e435f4e88bb5ee5eef6f9ffe61e782baef1 | biocad/cobot | Main.hs | module Main where
import Bio.Chain (Chain, fromList)
import Bio.Chain.Alignment (AffineGap (..),
GlobalAlignment (..),
LocalAlignment (..),
SemiglobalAlignment (..),
SimpleGap, align)
import Bio.Chain.Alignment.Scoring (nuc44)
import Control.DeepSeq (NFData (..), deepseq)
import Control.Monad (replicateM)
import Control.Monad.State (State, evalState, state)
import Control.Parallel.Strategies (dot, parListChunk, rdeepseq, rpar,
withStrategy)
import Criterion (bench, bgroup, env, nfIO)
import Criterion.Main (defaultMain)
import GHC.Conc (numCapabilities)
import System.Clock (Clock (Monotonic), diffTimeSpec,
getTime, nsec, sec)
import System.Random (RandomGen, getStdGen, randomR)
makeRandomChain :: RandomGen g => Int -> State g String
makeRandomChain 0 = pure ""
makeRandomChain len = do
c <- ("ATGC" !!) <$> state (randomR (0, 3))
cs <- makeRandomChain (len - 1)
pure (c : cs)
makeRandomChainIO :: Int -> IO (Chain Int Char)
makeRandomChainIO len = do
list <- evalState (makeRandomChain len) <$> getStdGen
pure (fromList list)
measureTime :: NFData a => String -> a -> IO ()
measureTime label value = do
t1 <- getTime Monotonic
t2 <- value `deepseq` getTime Monotonic
let dt = diffTimeSpec t2 t1
let timeInSeconds = fromIntegral (sec dt) + fromIntegral (nsec dt) / 1000000000 :: Double
let padding = " " <> replicate (max 0 (50 - length label)) '.' <> " "
putStrLn $ label <> padding <> show timeInSeconds <> "s"
parMap' :: NFData b => Int -> (a -> b) -> [a] -> [b]
parMap' chunkSize f = withStrategy (parListChunk chunkSize (rdeepseq `dot` rpar)) . map f
setupEnv :: IO (Chain Int Char, [Chain Int Char], Int)
setupEnv = do
a <- makeRandomChainIO 600
bs <- replicateM 20 $ makeRandomChainIO 4500
let chunkSize = length bs `div` numCapabilities
pure (a, bs, chunkSize)
main :: IO ()
main = defaultMain [
env setupEnv $ \ ~(a, bs, chunkSize) -> bgroup "main" [
bench "Local alignment" $
let align' = align (LocalAlignment nuc44 (-10 :: SimpleGap)) a
in nfIO . pure $ parMap' chunkSize align' bs,
bench "Global alignment" $
let align' = align (GlobalAlignment nuc44 (-10 :: SimpleGap)) a
in nfIO . pure $ parMap' chunkSize align' bs,
bench "Semiglobal alignment" $
let align' = align (SemiglobalAlignment nuc44 (-10 :: SimpleGap)) a
in nfIO . pure $ parMap' chunkSize align' bs,
bench "Local alignment with affine gap" $
let align' = align (LocalAlignment nuc44 (AffineGap (-10) (-1))) a
in nfIO . pure $ parMap' chunkSize align' bs,
bench "Global alignment with affine gap" $
let align' = align (GlobalAlignment nuc44 (AffineGap (-10) (-1))) a
in nfIO . pure $ parMap' chunkSize align' bs,
bench "Semiglobal alignment with affine gap" $
let align' = align (SemiglobalAlignment nuc44 (AffineGap (-10) (-1))) a
in nfIO . pure $ parMap' chunkSize align' bs
]
]
| null | https://raw.githubusercontent.com/biocad/cobot/44a3f017a8630812e45f7ca2dba1ca4f20f05c35/bench/Main.hs | haskell | module Main where
import Bio.Chain (Chain, fromList)
import Bio.Chain.Alignment (AffineGap (..),
GlobalAlignment (..),
LocalAlignment (..),
SemiglobalAlignment (..),
SimpleGap, align)
import Bio.Chain.Alignment.Scoring (nuc44)
import Control.DeepSeq (NFData (..), deepseq)
import Control.Monad (replicateM)
import Control.Monad.State (State, evalState, state)
import Control.Parallel.Strategies (dot, parListChunk, rdeepseq, rpar,
withStrategy)
import Criterion (bench, bgroup, env, nfIO)
import Criterion.Main (defaultMain)
import GHC.Conc (numCapabilities)
import System.Clock (Clock (Monotonic), diffTimeSpec,
getTime, nsec, sec)
import System.Random (RandomGen, getStdGen, randomR)
makeRandomChain :: RandomGen g => Int -> State g String
makeRandomChain 0 = pure ""
makeRandomChain len = do
c <- ("ATGC" !!) <$> state (randomR (0, 3))
cs <- makeRandomChain (len - 1)
pure (c : cs)
makeRandomChainIO :: Int -> IO (Chain Int Char)
makeRandomChainIO len = do
list <- evalState (makeRandomChain len) <$> getStdGen
pure (fromList list)
measureTime :: NFData a => String -> a -> IO ()
measureTime label value = do
t1 <- getTime Monotonic
t2 <- value `deepseq` getTime Monotonic
let dt = diffTimeSpec t2 t1
let timeInSeconds = fromIntegral (sec dt) + fromIntegral (nsec dt) / 1000000000 :: Double
let padding = " " <> replicate (max 0 (50 - length label)) '.' <> " "
putStrLn $ label <> padding <> show timeInSeconds <> "s"
parMap' :: NFData b => Int -> (a -> b) -> [a] -> [b]
parMap' chunkSize f = withStrategy (parListChunk chunkSize (rdeepseq `dot` rpar)) . map f
setupEnv :: IO (Chain Int Char, [Chain Int Char], Int)
setupEnv = do
a <- makeRandomChainIO 600
bs <- replicateM 20 $ makeRandomChainIO 4500
let chunkSize = length bs `div` numCapabilities
pure (a, bs, chunkSize)
main :: IO ()
main = defaultMain [
env setupEnv $ \ ~(a, bs, chunkSize) -> bgroup "main" [
bench "Local alignment" $
let align' = align (LocalAlignment nuc44 (-10 :: SimpleGap)) a
in nfIO . pure $ parMap' chunkSize align' bs,
bench "Global alignment" $
let align' = align (GlobalAlignment nuc44 (-10 :: SimpleGap)) a
in nfIO . pure $ parMap' chunkSize align' bs,
bench "Semiglobal alignment" $
let align' = align (SemiglobalAlignment nuc44 (-10 :: SimpleGap)) a
in nfIO . pure $ parMap' chunkSize align' bs,
bench "Local alignment with affine gap" $
let align' = align (LocalAlignment nuc44 (AffineGap (-10) (-1))) a
in nfIO . pure $ parMap' chunkSize align' bs,
bench "Global alignment with affine gap" $
let align' = align (GlobalAlignment nuc44 (AffineGap (-10) (-1))) a
in nfIO . pure $ parMap' chunkSize align' bs,
bench "Semiglobal alignment with affine gap" $
let align' = align (SemiglobalAlignment nuc44 (AffineGap (-10) (-1))) a
in nfIO . pure $ parMap' chunkSize align' bs
]
]
|
|
6f9ed502b74ea6892f167eb12b51e7bffea5d98d95c64dd170310f1377708705 | janestreet/async_smtp | smtp_extension.mli | open! Core
type t =
| Start_tls
| Auth of string list
| Mime_8bit_transport
| Other of string
[@@deriving compare, sexp, enumerate]
include Equal.S with type t := t
val of_string : string -> t
val to_string : t -> string
| null | https://raw.githubusercontent.com/janestreet/async_smtp/c2c1f8b7b27f571a99d2f21e8a31ce150fbd6ced/types/smtp_extension.mli | ocaml | open! Core
type t =
| Start_tls
| Auth of string list
| Mime_8bit_transport
| Other of string
[@@deriving compare, sexp, enumerate]
include Equal.S with type t := t
val of_string : string -> t
val to_string : t -> string
|
|
e3cb55ea703a9d0710e24c13621613d21a9f99381df1ba1102d67bd30948623f | ocaml-community/obus | oBus_bus.mli |
* oBus_bus.mli
* ------------
* Copyright : ( c ) 2008 , < >
* Licence : BSD3
*
* This file is a part of obus , an ocaml implementation of D - Bus .
* oBus_bus.mli
* ------------
* Copyright : (c) 2008, Jeremie Dimino <>
* Licence : BSD3
*
* This file is a part of obus, an ocaml implementation of D-Bus.
*)
(** Message buses management *)
type t = OBus_connection.t
* { 6 Well - known instances }
val session : ?switch : Lwt_switch.t -> unit -> t Lwt.t
* [ session ? switch ( ) ] returns a connection to the user session
message bus . Subsequent calls to { ! session } will return the same
bus . OBus will automatically exit the program when an error
happens on the session bus . You can change this behavior by
calling { ! OBus_connection.set_on_disconnect } .
message bus. Subsequent calls to {!session} will return the same
bus. OBus will automatically exit the program when an error
happens on the session bus. You can change this behavior by
calling {!OBus_connection.set_on_disconnect}. *)
val system : ?switch : Lwt_switch.t -> unit -> t Lwt.t
(** [system ?switch ()] returns a connection to the system message
bus. As for {!session}, subsequent calls to {!system} will
return the same bus. However, if the connection is closed or
crashes, {!system} will try to reopen it. *)
(** {6 Creation} *)
val of_addresses : ?switch : Lwt_switch.t -> OBus_address.t list -> t Lwt.t
* Establish a connection with a message bus . The bus must be
accessible with at least one of the given addresses
accessible with at least one of the given addresses *)
val register_connection : OBus_connection.t -> unit Lwt.t
* Register the given connection to a message bus . It has the side
effect of requesting a name to the message bus if not already
done .
If the connection is a connection to a message bus , created with
one of the function of { ! OBus_connection } then
{ ! register_connection } must be called on it before any other
functions .
effect of requesting a name to the message bus if not already
done.
If the connection is a connection to a message bus, created with
one of the function of {!OBus_connection} then
{!register_connection} must be called on it before any other
functions. *)
val exit_on_disconnect : exn -> 'a
* Function which exit the program as follow :
- if [ exn ] is { ! OBus_connection . Connection_lost } , it exits the
program with a return code of 0
- if [ exn ] is a fatal error , it prints a message on stderr and
exits the program with an exit code of 1
- if [exn] is {!OBus_connection.Connection_lost}, it exits the
program with a return code of 0
- if [exn] is a fatal error, it prints a message on stderr and
exits the program with an exit code of 1
*)
* { 6 Peer / proxy helpers }
val get_peer : t -> OBus_name.bus -> OBus_peer.t Lwt.t
(** [get_peer bus name] returns the peer owning the bus name
[name]. If the service is not activated and is activable, then
it is started *)
val get_proxy : t -> OBus_name.bus -> OBus_path.t -> OBus_proxy.t Lwt.t
(** [get_proxy bus name path] resolves [name] with {!get_peer} and
returns a proxy for the object with path [path] on this
service *)
(** {6 Bus names} *)
val name : t -> OBus_name.bus
(** Same as {!OBus_connection.name}. *)
val names : t -> Set.Make(String).t React.signal
(** [names bus] is the signal holding the set of all names we
currently own. It raises [Invalid_argument] if the connection is
not a connection to a message bus. *)
val hello : t -> OBus_name.bus Lwt.t
(** [hello connection] sends an hello message to the message bus,
which returns the unique connection name of the connection. Note
that if the hello message has already been sent, it will
fail. *)
exception Access_denied of string
(** Exception raised when a name cannot be owned due to security
policies *)
type request_name_result =
[ `Primary_owner
(** You are now the primary owner of the connection *)
| `In_queue
(** You will get the name when it will be available *)
| `Exists
(** Somebody else already have the name and nobody specified
what to do in this case *)
| `Already_owner
(** You already have the name *) ]
val request_name : t ->
?allow_replacement:bool ->
?replace_existing:bool ->
?do_not_queue:bool ->
OBus_name.bus -> request_name_result Lwt.t
(** Request a name to the bus. This is the way to acquire a
well-know name.
All optional parameters default to [false], their meaning are:
- [allow_replacement]: allow other application to steal this name from you
- [replace_existing]: replace any existing owner of the name
- [do_not_queue]: do not queue if not available
*)
type release_name_result =
[ `Released
| `Non_existent
| `Not_owner ]
val release_name : t -> OBus_name.bus -> release_name_result Lwt.t
(** {6 Service starting/discovering} *)
exception Service_unknown of string
(** Exception raised when a service is not present on a message bus
and can not be started automatically *)
type start_service_by_name_result =
[ `Success
| `Already_running ]
val start_service_by_name : t -> OBus_name.bus -> start_service_by_name_result Lwt.t
(** Start a service on the given bus by its name *)
val name_has_owner : t -> OBus_name.bus -> bool Lwt.t
(** Returns [true] if the service is currently running, i.e. some
application offers it on the message bus *)
val list_names : t -> OBus_name.bus list Lwt.t
(** List names currently running on the message bus *)
val list_activatable_names : t -> OBus_name.bus list Lwt.t
(** List services that can be activated. A service is automatically
activated when you call one of its method or when you use
[start_service_by_name] *)
exception Name_has_no_owner of string
val get_name_owner : t -> OBus_name.bus -> OBus_name.bus Lwt.t
(** Return the connection unique name of the given service. Raise a
[Name_has_no_owner] if the given name does not have an owner. *)
val list_queued_owners : t -> OBus_name.bus -> OBus_name.bus list Lwt.t
(** Return the connection unique names of the applications waiting for a
name *)
* { 6 Messages routing }
(** Note that you should prefer using {!OBus_match.export} and
{!OBus_match.remove} since they do not add duplicated rules
several times. *)
exception Match_rule_invalid of string
(** Exception raised when the program tries to send an invalid match
rule. This should never happen since values of type
{!OBus_match.rule} are always valid. *)
val add_match : t -> OBus_match.rule -> unit Lwt.t
* Add a matching rule on a message bus . This means that every
message routed on the message bus matching this rule will be
sent to us .
It can raise { ! OBus_error . } .
message routed on the message bus matching this rule will be
sent to us.
It can raise {!OBus_error.No_memory}.
*)
exception Match_rule_not_found of string
val remove_match : t -> OBus_match.rule -> unit Lwt.t
* Remove a match rule from the message bus . It raises
{ ! } if the rule does not exists
{!Match_rule_not_found} if the rule does not exists *)
* { 6 Other }
(** These functions are also offered by the message bus *)
exception Adt_audit_data_unknown of string
exception Selinux_security_context_unknown of string
val update_activation_environment : t -> (string * string) list -> unit Lwt.t
val get_connection_unix_user : t -> OBus_name.bus -> int Lwt.t
val get_connection_unix_process_id : t -> OBus_name.bus -> int Lwt.t
val get_adt_audit_session_data : t -> OBus_name.bus -> string Lwt.t
val get_connection_selinux_security_context : t -> OBus_name.bus -> string Lwt.t
val reload_config : t -> unit Lwt.t
val get_id : t -> OBus_uuid.t Lwt.t
* { 6 Signals }
val name_owner_changed : t -> (OBus_name.bus * OBus_name.bus * OBus_name.bus) OBus_signal.t
(** This signal is emitted each time the owner of a name (unique
connection name or service name) changes. *)
val name_lost : t -> OBus_name.bus OBus_signal.t
val name_acquired : t -> OBus_name.bus OBus_signal.t
| null | https://raw.githubusercontent.com/ocaml-community/obus/8d38ee6750587ae6519644630b75d53a0a011acd/src/protocol/oBus_bus.mli | ocaml | * Message buses management
* [system ?switch ()] returns a connection to the system message
bus. As for {!session}, subsequent calls to {!system} will
return the same bus. However, if the connection is closed or
crashes, {!system} will try to reopen it.
* {6 Creation}
* [get_peer bus name] returns the peer owning the bus name
[name]. If the service is not activated and is activable, then
it is started
* [get_proxy bus name path] resolves [name] with {!get_peer} and
returns a proxy for the object with path [path] on this
service
* {6 Bus names}
* Same as {!OBus_connection.name}.
* [names bus] is the signal holding the set of all names we
currently own. It raises [Invalid_argument] if the connection is
not a connection to a message bus.
* [hello connection] sends an hello message to the message bus,
which returns the unique connection name of the connection. Note
that if the hello message has already been sent, it will
fail.
* Exception raised when a name cannot be owned due to security
policies
* You are now the primary owner of the connection
* You will get the name when it will be available
* Somebody else already have the name and nobody specified
what to do in this case
* You already have the name
* Request a name to the bus. This is the way to acquire a
well-know name.
All optional parameters default to [false], their meaning are:
- [allow_replacement]: allow other application to steal this name from you
- [replace_existing]: replace any existing owner of the name
- [do_not_queue]: do not queue if not available
* {6 Service starting/discovering}
* Exception raised when a service is not present on a message bus
and can not be started automatically
* Start a service on the given bus by its name
* Returns [true] if the service is currently running, i.e. some
application offers it on the message bus
* List names currently running on the message bus
* List services that can be activated. A service is automatically
activated when you call one of its method or when you use
[start_service_by_name]
* Return the connection unique name of the given service. Raise a
[Name_has_no_owner] if the given name does not have an owner.
* Return the connection unique names of the applications waiting for a
name
* Note that you should prefer using {!OBus_match.export} and
{!OBus_match.remove} since they do not add duplicated rules
several times.
* Exception raised when the program tries to send an invalid match
rule. This should never happen since values of type
{!OBus_match.rule} are always valid.
* These functions are also offered by the message bus
* This signal is emitted each time the owner of a name (unique
connection name or service name) changes. |
* oBus_bus.mli
* ------------
* Copyright : ( c ) 2008 , < >
* Licence : BSD3
*
* This file is a part of obus , an ocaml implementation of D - Bus .
* oBus_bus.mli
* ------------
* Copyright : (c) 2008, Jeremie Dimino <>
* Licence : BSD3
*
* This file is a part of obus, an ocaml implementation of D-Bus.
*)
type t = OBus_connection.t
* { 6 Well - known instances }
val session : ?switch : Lwt_switch.t -> unit -> t Lwt.t
* [ session ? switch ( ) ] returns a connection to the user session
message bus . Subsequent calls to { ! session } will return the same
bus . OBus will automatically exit the program when an error
happens on the session bus . You can change this behavior by
calling { ! OBus_connection.set_on_disconnect } .
message bus. Subsequent calls to {!session} will return the same
bus. OBus will automatically exit the program when an error
happens on the session bus. You can change this behavior by
calling {!OBus_connection.set_on_disconnect}. *)
val system : ?switch : Lwt_switch.t -> unit -> t Lwt.t
val of_addresses : ?switch : Lwt_switch.t -> OBus_address.t list -> t Lwt.t
* Establish a connection with a message bus . The bus must be
accessible with at least one of the given addresses
accessible with at least one of the given addresses *)
val register_connection : OBus_connection.t -> unit Lwt.t
* Register the given connection to a message bus. It has the side
effect of requesting a name to the message bus if not already
done.
If the connection is a connection to a message bus, created with
one of the functions of {!OBus_connection}, then
{!register_connection} must be called on it before any other
function. *)
val exit_on_disconnect : exn -> 'a
* Function which exits the program as follows:
- if [exn] is {!OBus_connection.Connection_lost}, it exits the
program with a return code of 0
- if [exn] is a fatal error, it prints a message on stderr and
exits the program with an exit code of 1
*)
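(* Illustrative only: the usual way to opt into this behaviour is to install
   it as the disconnect handler of a connection, along the lines of
   [OBus_connection.set_on_disconnect conn OBus_bus.exit_on_disconnect];
   the exact signature of [set_on_disconnect] is documented in
   {!OBus_connection}, not here. *)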
* {6 Peer/proxy helpers}
val get_peer : t -> OBus_name.bus -> OBus_peer.t Lwt.t
val get_proxy : t -> OBus_name.bus -> OBus_path.t -> OBus_proxy.t Lwt.t
val name : t -> OBus_name.bus
val names : t -> Set.Make(String).t React.signal
val hello : t -> OBus_name.bus Lwt.t
exception Access_denied of string
type request_name_result =
[ `Primary_owner
| `In_queue
| `Exists
| `Already_owner ]
val request_name : t ->
?allow_replacement:bool ->
?replace_existing:bool ->
?do_not_queue:bool ->
OBus_name.bus -> request_name_result Lwt.t
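(* Usage sketch (illustrative; "org.example.Demo" is a made-up name and the
   [Lwt.Infix] open is an assumption of the example):
   let own_demo_name bus =
     let open Lwt.Infix in
     OBus_bus.request_name bus ~do_not_queue:true "org.example.Demo" >>= function
     | `Primary_owner | `Already_owner -> Lwt.return true
     | `In_queue | `Exists -> Lwt.return false
*)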
type release_name_result =
[ `Released
| `Non_existent
| `Not_owner ]
val release_name : t -> OBus_name.bus -> release_name_result Lwt.t
exception Service_unknown of string
type start_service_by_name_result =
[ `Success
| `Already_running ]
val start_service_by_name : t -> OBus_name.bus -> start_service_by_name_result Lwt.t
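(* Illustrative sketch combining the calls declared here: start a service
   only when nobody currently offers it on the bus. Helper name and the
   [Lwt.Infix] open are assumptions of the example.
   let ensure_running bus name =
     let open Lwt.Infix in
     OBus_bus.name_has_owner bus name >>= function
     | true -> Lwt.return `Already_running
     | false -> OBus_bus.start_service_by_name bus name
*)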
val name_has_owner : t -> OBus_name.bus -> bool Lwt.t
val list_names : t -> OBus_name.bus list Lwt.t
val list_activatable_names : t -> OBus_name.bus list Lwt.t
exception Name_has_no_owner of string
val get_name_owner : t -> OBus_name.bus -> OBus_name.bus Lwt.t
val list_queued_owners : t -> OBus_name.bus -> OBus_name.bus list Lwt.t
* {6 Messages routing}
exception Match_rule_invalid of string
val add_match : t -> OBus_match.rule -> unit Lwt.t
* Add a matching rule on a message bus. This means that every
message routed on the message bus matching this rule will be
sent to us.
It can raise {!OBus_error.No_memory}.
*)
exception Match_rule_not_found of string
val remove_match : t -> OBus_match.rule -> unit Lwt.t
* Remove a match rule from the message bus. It raises
{!Match_rule_not_found} if the rule does not exist *)
* {6 Other}
exception Adt_audit_data_unknown of string
exception Selinux_security_context_unknown of string
val update_activation_environment : t -> (string * string) list -> unit Lwt.t
val get_connection_unix_user : t -> OBus_name.bus -> int Lwt.t
val get_connection_unix_process_id : t -> OBus_name.bus -> int Lwt.t
val get_adt_audit_session_data : t -> OBus_name.bus -> string Lwt.t
val get_connection_selinux_security_context : t -> OBus_name.bus -> string Lwt.t
val reload_config : t -> unit Lwt.t
val get_id : t -> OBus_uuid.t Lwt.t
* {6 Signals}
val name_owner_changed : t -> (OBus_name.bus * OBus_name.bus * OBus_name.bus) OBus_signal.t
val name_lost : t -> OBus_name.bus OBus_signal.t
val name_acquired : t -> OBus_name.bus OBus_signal.t
|
857e5e88c14b3e0a3ca5b28f5b831dce91cfe221f96dcfaf438c8d3ce49a1c2f | ailisp/Graphic-Forms | progress-bar.lisp | (in-package :graphic-forms.uitoolkit.widgets)
;;;
;;; helper functions
;;;
(declaim (inline pb-get-pos))
(defun pb-get-pos (p-bar)
"Returns the current position of a progress bar."
(gfs::send-message (gfs:handle p-bar) gfs::+pbm-getpos+ 0 0))
(defun pb-get-range (p-bar)
"Returns the range of a progress bar."
(cffi:with-foreign-object (r-ptr '(:struct gfs::pbrange))
(gfs::send-message (gfs:handle p-bar) gfs::+pbm-getrange+ 0 (cffi:pointer-address r-ptr))
(cffi:with-foreign-slots ((gfs::low gfs::high) r-ptr (:struct gfs::pbrange))
(gfs:make-span :start gfs::low :end gfs::high))))
(declaim (inline pb-get-step))
(defun pb-get-step (p-bar)
"Returns the step increment for a progress bar."
(gfs::send-message (gfs:handle p-bar) gfs::+pbm-getstep+ 0 0))
(declaim (inline pb-horz-flags))
(defun pb-horz-flags (flags)
(logand flags (lognot gfs::+pbs-vertical+)))
(declaim (inline pb-set-pos-absolute))
(defun pb-set-pos-absolute (p-bar pos)
"Sets the absolute position of a progress bar and redraws it; returns the previous position."
(gfs::send-message (gfs:handle p-bar) gfs::+pbm-setpos+ (logand pos #xFFFF) 0))
(declaim (inline pb-set-pos-delta))
(defun pb-set-pos-delta (p-bar delta)
"Updates the position of a progress bar by delta and redraws it; returns the previous position."
(gfs::send-message (gfs:handle p-bar) gfs::+pbm-deltapos+ (logand delta #xFFFF) 0))
(defun pb-set-range (p-bar span)
"Sets the range of a progress bar; returns the previous range."
(let ((result (gfs::send-message (gfs:handle p-bar)
gfs::+pbm-setrange32+
(logand (gfs:span-start span) #xFFFFFFFF)
(logand (gfs:span-end span) #xFFFFFFFF))))
(gfs:make-span :start (gfs::lparam-low-word result)
:end (gfs::lparam-high-word result))))
(declaim (inline pb-set-step))
(defun pb-set-step (p-bar increment)
"Sets the step increment for a progress bar; returns the previous increment."
(gfs::send-message (gfs:handle p-bar) gfs::+pbm-setstep+ (logand increment #xFFFF) 0))
(declaim (inline pb-smooth-flags))
(defun pb-smooth-flags (flags)
(logior flags gfs::+pbs-smooth+))
(declaim (inline pb-stepit))
(defun pb-stepit (p-bar)
"Advances the progress bar's position by its step increment and redraws it; returns the previous position."
(gfs::send-message (gfs:handle p-bar) gfs::+pbm-stepit+ 0 0))
(declaim (inline pb-vert-flags))
(defun pb-vert-flags (flags)
(logior flags gfs::+pbs-vertical+))
;;;
;;; methods
;;;
(defmethod bar-position ((p-bar progress-bar))
(if (gfs:disposed-p p-bar)
(error 'gfs:disposed-error))
(pb-get-pos p-bar))
(defmethod (setf bar-position) (pos (p-bar progress-bar))
(if (gfs:disposed-p p-bar)
(error 'gfs:disposed-error))
(pb-set-pos-absolute p-bar pos))
(defmethod compute-style-flags ((p-bar progress-bar) &rest extra-data)
(declare (ignore extra-data))
(let ((std-flags +default-child-style+)
(style (style-of p-bar)))
(loop for sym in style
do (ecase sym
;; primary progress-bar styles
;;
(:horizontal (setf std-flags (pb-horz-flags std-flags)))
(:vertical (setf std-flags (pb-vert-flags std-flags)))
;; styles that can be combined
;;
(:smooth (setf std-flags (pb-smooth-flags std-flags)))))
(values std-flags 0)))
(defmethod initialize-instance :after ((p-bar progress-bar) &key parent &allow-other-keys)
(create-control p-bar parent "" gfs::+icc-win95-classes+))
(defmethod inner-limits ((p-bar progress-bar))
(if (gfs:disposed-p p-bar)
(error 'gfs:disposed-error))
(pb-get-range p-bar))
(defmethod (setf inner-limits) (limits (p-bar progress-bar))
(if (gfs:disposed-p p-bar)
(error 'gfs:disposed-error))
(pb-set-range p-bar limits))
(defmethod preferred-size ((p-bar progress-bar) width-hint height-hint)
(let ((size (gfs:make-size :width width-hint :height height-hint))
(b-width (* (border-width p-bar) 2)))
(if (<= width-hint 0)
(setf (gfs:size-width size) +default-widget-width+))
(incf (gfs:size-width size) b-width)
(if (<= height-hint 0)
(setf (gfs:size-height size)
(floor (* (gfs::get-system-metrics gfs::+sm-cyvscroll+) 3) 4)))
(incf (gfs:size-height size) b-width)
size))
(defmethod step ((p-bar progress-bar))
(if (gfs:disposed-p p-bar)
(error 'gfs:disposed-error))
(pb-stepit p-bar))
(defmethod step-increment ((p-bar progress-bar))
(if (gfs:disposed-p p-bar)
(error 'gfs:disposed-error))
(pb-get-step p-bar))
(defmethod (setf step-increment) (increment (p-bar progress-bar))
(if (gfs:disposed-p p-bar)
(error 'gfs:disposed-error))
(pb-set-step p-bar increment))
| null | https://raw.githubusercontent.com/ailisp/Graphic-Forms/1e0723d07e1e4e02b8ae375db8f3d65d1b444f11/src/uitoolkit/widgets/progress-bar.lisp | lisp |
helper functions
methods
primary progress-bar styles
styles that can be combined
| (in-package :graphic-forms.uitoolkit.widgets)
(declaim (inline pb-get-pos))
(defun pb-get-pos (p-bar)
"Returns the current position of a progress bar."
(gfs::send-message (gfs:handle p-bar) gfs::+pbm-getpos+ 0 0))
(defun pb-get-range (p-bar)
"Returns the range of a progress bar."
(cffi:with-foreign-object (r-ptr '(:struct gfs::pbrange))
(gfs::send-message (gfs:handle p-bar) gfs::+pbm-getrange+ 0 (cffi:pointer-address r-ptr))
(cffi:with-foreign-slots ((gfs::low gfs::high) r-ptr (:struct gfs::pbrange))
(gfs:make-span :start gfs::low :end gfs::high))))
(declaim (inline pb-get-step))
(defun pb-get-step (p-bar)
"Returns the step increment for a progress bar."
(gfs::send-message (gfs:handle p-bar) gfs::+pbm-getstep+ 0 0))
(declaim (inline pb-horz-flags))
(defun pb-horz-flags (flags)
(logand flags (lognot gfs::+pbs-vertical+)))
(declaim (inline pb-set-pos-absolute))
(defun pb-set-pos-absolute (p-bar pos)
"Sets the absolute position of a progress bar and redraws it; returns the previous position."
(gfs::send-message (gfs:handle p-bar) gfs::+pbm-setpos+ (logand pos #xFFFF) 0))
(declaim (inline pb-set-pos-delta))
(defun pb-set-pos-delta (p-bar delta)
"Updates the position of a progress bar by delta and redraws it; returns the previous position."
(gfs::send-message (gfs:handle p-bar) gfs::+pbm-deltapos+ (logand delta #xFFFF) 0))
(defun pb-set-range (p-bar span)
"Sets the range of a progress bar; returns the previous range."
(let ((result (gfs::send-message (gfs:handle p-bar)
gfs::+pbm-setrange32+
(logand (gfs:span-start span) #xFFFFFFFF)
(logand (gfs:span-end span) #xFFFFFFFF))))
(gfs:make-span :start (gfs::lparam-low-word result)
:end (gfs::lparam-high-word result))))
(declaim (inline pb-set-step))
(defun pb-set-step (p-bar increment)
"Sets the step increment for a progress bar; returns the previous increment."
(gfs::send-message (gfs:handle p-bar) gfs::+pbm-setstep+ (logand increment #xFFFF) 0))
(declaim (inline pb-smooth-flags))
(defun pb-smooth-flags (flags)
(logior flags gfs::+pbs-smooth+))
(declaim (inline pb-stepit))
(defun pb-stepit (p-bar)
"Advances the progress bar's position by its step increment and redraws it; returns the previous position."
(gfs::send-message (gfs:handle p-bar) gfs::+pbm-stepit+ 0 0))
(declaim (inline pb-vert-flags))
(defun pb-vert-flags (flags)
(logior flags gfs::+pbs-vertical+))
(defmethod bar-position ((p-bar progress-bar))
(if (gfs:disposed-p p-bar)
(error 'gfs:disposed-error))
(pb-get-pos p-bar))
(defmethod (setf bar-position) (pos (p-bar progress-bar))
(if (gfs:disposed-p p-bar)
(error 'gfs:disposed-error))
(pb-set-pos-absolute p-bar pos))
(defmethod compute-style-flags ((p-bar progress-bar) &rest extra-data)
(declare (ignore extra-data))
(let ((std-flags +default-child-style+)
(style (style-of p-bar)))
(loop for sym in style
do (ecase sym
(:horizontal (setf std-flags (pb-horz-flags std-flags)))
(:vertical (setf std-flags (pb-vert-flags std-flags)))
(:smooth (setf std-flags (pb-smooth-flags std-flags)))))
(values std-flags 0)))
(defmethod initialize-instance :after ((p-bar progress-bar) &key parent &allow-other-keys)
(create-control p-bar parent "" gfs::+icc-win95-classes+))
(defmethod inner-limits ((p-bar progress-bar))
(if (gfs:disposed-p p-bar)
(error 'gfs:disposed-error))
(pb-get-range p-bar))
(defmethod (setf inner-limits) (limits (p-bar progress-bar))
(if (gfs:disposed-p p-bar)
(error 'gfs:disposed-error))
(pb-set-range p-bar limits))
(defmethod preferred-size ((p-bar progress-bar) width-hint height-hint)
(let ((size (gfs:make-size :width width-hint :height height-hint))
(b-width (* (border-width p-bar) 2)))
(if (<= width-hint 0)
(setf (gfs:size-width size) +default-widget-width+))
(incf (gfs:size-width size) b-width)
(if (<= height-hint 0)
(setf (gfs:size-height size)
(floor (* (gfs::get-system-metrics gfs::+sm-cyvscroll+) 3) 4)))
(incf (gfs:size-height size) b-width)
size))
(defmethod step ((p-bar progress-bar))
(if (gfs:disposed-p p-bar)
(error 'gfs:disposed-error))
(pb-stepit p-bar))
(defmethod step-increment ((p-bar progress-bar))
(if (gfs:disposed-p p-bar)
(error 'gfs:disposed-error))
(pb-get-step p-bar))
(defmethod (setf step-increment) (increment (p-bar progress-bar))
(if (gfs:disposed-p p-bar)
(error 'gfs:disposed-error))
(pb-set-step p-bar increment))
|
0f1cacd13bef81e6710120acedb8fe56204a7b5ebff49e7d65962d4a08752333 | batebobo/fp1819 | 15-repeat-lists.rkt | #lang racket
(require rackunit)
(require rackunit/text-ui)
ΠΡΠΊΠ°ΠΌΠ΅ Π΄Π° ΡΡΠ·Π΄Π°Π΄Π΅ΠΌ ΠΏΠΎΡΠΎΠΊ , ΠΊΠ°ΡΠΎ ΠΏΠΎΠ²ΡΠ°ΡΡΠΌΠ΅ Π½ΡΠΊΠΎΠ»ΠΊΠΎ ΡΠΏΠΈΡΡΠΊΠ°
(define tests
(test-suite "repeat lists tests"
(test-case "" (check-equal? (stream->list (stream-take (repeat-lists '(1 2) '(3 4)) 7))
'(1 2 3 4 1 2 3)))
)
)
(run-tests tests 'verbose)
| null | https://raw.githubusercontent.com/batebobo/fp1819/2061b7e62a1a9ade3a5fff9753f9fe0da5684275/scheme/7/15-repeat-lists.rkt | racket | #lang racket
(require rackunit)
(require rackunit/text-ui)
ΠΡΠΊΠ°ΠΌΠ΅ Π΄Π° ΡΡΠ·Π΄Π°Π΄Π΅ΠΌ ΠΏΠΎΡΠΎΠΊ , ΠΊΠ°ΡΠΎ ΠΏΠΎΠ²ΡΠ°ΡΡΠΌΠ΅ Π½ΡΠΊΠΎΠ»ΠΊΠΎ ΡΠΏΠΈΡΡΠΊΠ°
(define tests
(test-suite "repeat lists tests"
(test-case "" (check-equal? (stream->list (stream-take (repeat-lists '(1 2) '(3 4)) 7))
'(1 2 3 4 1 2 3)))
)
)
(run-tests tests 'verbose)
|
|
53ff78a5504d5cea50ede26890819a20a6fb3086fd860b7bfcbf1387eec7b132 | yakaz/yamerl | yamerl_constr.erl | %-
Copyright ( c ) 2012 - 2014 Yakaz
Copyright ( c ) 2016 - 2022 < >
% All rights reserved.
%
% Redistribution and use in source and binary forms, with or without
% modification, are permitted provided that the following conditions
% are met:
1 . Redistributions of source code must retain the above copyright
% notice, this list of conditions and the following disclaimer.
2 . Redistributions in binary form must reproduce the above copyright
% notice, this list of conditions and the following disclaimer in the
% documentation and/or other materials provided with the distribution.
%
THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ` ` AS IS '' AND
% ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
% ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL
% DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
% OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT
% LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
% OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
% SUCH DAMAGE.
@author < >
2012 - 2014 Yakaz ,
2016 - 2022 < >
%%
%% @doc {@module} implements a YAML constructor. It uses {@link
%% yamerl_parser} as the underlying parser. The parser emits YAML nodes
%% which are assembled as structured YAML documents by the constructor.
%%
%% It is able to construct YAML documents from in-memory strings (see
%% {@link string/1} and {@link string/2}), regular files (see {@link
%% file/1} and {@link file/2}) or streams (see {@link new/1}, {@link
} and { @link next_chunk/3 } ) .
%%
%% YAML documents can be constructed in simple or detailed modes. In
simple mode , they are made of simple builting Erlang types . In
%% detailed mode, they are made of records, holding more information
%% about YAML nodes and their presentation.
%%
%% The `yamerl' application must be started to use the constructor.
%%
< strong > Example : parse a string in simple >
%% ```
%% yamerl_constr:string("Hello!").
%% '''
%%
%% It returns:
%% ```
% List of documents ; here , only one .
%% [
%% % Document root node: a string.
%% "Hello!"
%% ].
%% '''
%%
< strong > Example : parse a stream in detailed >
%% ```
Stream_St1 = yamerl_constr : new({file , " < stdin > " } , [ { detailed_constr , true } ] ) ,
%% {continue, Stream_St2} = yamerl_constr:next_chunk(Stream_St1, <<"He">>),
%% {continue, Stream_St3} = yamerl_constr:next_chunk(Stream_St2, <<"ll">>),
%% yamerl_constr:last_chunk(Stream_St3, <<"o!">>).
%% '''
%%
%% It returns:
%% ```
% List of documents ; here , only one .
%% [
% Document # 1 .
%% {yamerl_doc,
%% % Document root node: a string.
{ yamerl_str , yamerl_node_str , " tag : yaml.org,2002 : str " ,
[ { line , 1 } , { column , 1 } ] , % Node location in the original string .
%% "Hello!" % String value.
%% }
%% }
%% ].
%% '''
-module(yamerl_constr).
-include("yamerl_errors.hrl").
-include("yamerl_tokens.hrl").
-include("yamerl_nodes.hrl").
-include("yamerl_constr.hrl").
%% Public API.
-export([
new/1,
new/2,
string/1,
string/2,
file/1,
file/2,
next_chunk/3,
next_chunk/2,
last_chunk/2,
get_pres_details/1,
node_line/1,
node_column/1,
option_names/0
]).
%% -------------------------------------------------------------------
%% Exported types.
%% -------------------------------------------------------------------
%% FIXME:
This type should be " -opaque " . However , up - to Erlang R15B03 , an issue
with either this code or Dialyzer prevents us from declaring it
properly : reports warning regarding the stream_state_fun ( )
%% type and several guard expression which will never match.
-type yamerl_constr() :: #yamerl_constr{}.
-export_type([
yamerl_constr/0,
yamerl_constr_option/0,
yamerl_node/0,
yamerl_seq/0,
yamerl_map/0,
yamerl_str/0,
yamerl_null/0,
yamerl_bool/0,
yamerl_int/0,
yamerl_float/0,
yamerl_binary/0,
yamerl_timestamp/0,
yamerl_erlang_atom/0,
yamerl_erlang_fun/0,
yamerl_user_node/0,
yamerl_doc/0,
yamerl_simple_node/0,
yamerl_simple_seq/0,
yamerl_simple_map/0,
yamerl_simple_str/0,
yamerl_simple_null/0,
yamerl_simple_bool/0,
yamerl_simple_int/0,
yamerl_simple_float/0,
yamerl_simple_timestamp/0,
yamerl_simple_erlang_atom/0,
yamerl_simple_erlang_fun/0,
yamerl_user_simple_node/0,
yamerl_simple_doc/0
]).
%% -------------------------------------------------------------------
%% Public API: chunked stream scanning.
%% -------------------------------------------------------------------
%% @equiv new(Source, [])
-spec new(Source) ->
Constr | no_return() when
Source :: term(),
Constr :: yamerl_parser:yamerl_parser().
new(Source) ->
new(Source, []).
%% @doc Creates and returns a new YAML construction state.
%%
%% When you want to parse a stream (as opposed to in-memory strings or
regular files ) , this is the first function you call before feeding
%% the constructor with stream "chunks".
%%
%% `Source' can be any term describing the stream. {@link string/1} and
%% {@link string/2} sets it to the atom `string'. {@link file/1} and
%% {@link file/2} sets it to `{file, Filename}'. The constructor doesn't
%% use that value.
%%
%% `Options' is a list of options for the parser and the constructor.
%% Valid options are:
%%
%% <dl>
< , boolean()}'</dt >
%% <dd>Flag to enable/disable the detailed construction mode. In simple
construction mode , YAML nodes are returned as Erlang integers ,
strings , lists , proplists , etc . In other words , only simple builtin
%% types. In detailed construction mode, YAML nodes are returned using
%% records. Those records gives additional information such as the YAML
%% node type, the location in the stream (line and column number) and so
%% on.</dd>
%% <dt>`{ignore_unrecognized_tags, boolean()}'</dt>
%% <dd>Indicate if unrecognized tags should be ignored. When `false'
%% (the default), a node with an unrecognized tag can't be constructed
%% because yamerl doesn't know how to interpret the node. When this
%% happens an exception is raised. When set to `true', the node is
%% constructed as if it was a plain YAML node without any tag.</dd>
%% <dd>Default: `false'.</dd>
%% <dt>`{keep_duplicate_keys, boolean()}'</dt>
%% <dd>Flag to keep duplicate keys in maps. By default all duplicate keys
%% in maps/proplists will be ignored and the last occurence of a key will
%% prevail. If this flag is enabled all keys will remain. This flag only
works when the ` detailed_constr ' flag is set to ` true ' or proplists
%% are used instead of maps.</dd>
%% <dd>Default: `false'</dd>
%% <dt>`{node_mods, Mods_List}'</dt>
< dd > List of Erlang modules to extend support node types.</dd >
%% <dd>Default: `[]'.</dd>
< dt>`{schema , failsafe | json | core | yaml11}'</dt >
%% <dd>Name of the official schema to use.</dd>
%% <dd>Default: `core'.</dd>
%% </dl>
%%
%% The returned state is opaque value. You then pass it to {@link
%% next_chunk/2}, {@link next_chunk/3} and {@link last_chunk/2}.
%%
%% If an option is invalid, an exception is thrown.
%%
%% <strong>Example: parse a valid stream</strong>
%% ```
Stream_St1 = yamerl_constr : new({file , " < stdin > " } ) ,
%% {continue, Stream_St2} = yamerl_constr:next_chunk(Stream_St1, <<"He">>),
%% {continue, Stream_St3} = yamerl_constr:next_chunk(Stream_St2, <<"ll">>),
%% yamerl_constr:last_chunk(Stream_St3, <<"o!">>).
%% '''
%% It returns:
%%
%% ```
% List of documents ; here , only one .
%% [
%% % Document root node: a string.
%% "Hello!"
%% ].
%% '''
%%
%% <strong>Example: parse an invalid stream</strong>
%% ```
Stream_St1 = yamerl_constr : new({file , " < stdin > " } ) ,
%% {continue, Stream_St2} = yamerl_constr:next_chunk(Stream_St1, <<"'He">>),
%% {continue, Stream_St3} = yamerl_constr:next_chunk(Stream_St2, <<"ll">>),
%% yamerl_constr:last_chunk(Stream_St3, <<"o!">>) % Unfinished single-quoted scalar.
%% '''
%%
%% It throws:
%% ```
%% {yamerl_exception,
% List of warnings and errors ; here , one fatal error .
%% [
% Error # 1 .
%% {yamerl_parsing_error, error,
%% "Unexpected end-of-stream while parsing flow scalar", % Human-readable message.
1 , 8 , % Error location .
%% unexpected_eos,
{ yamerl_scalar , 1 , 1 , { yamerl_tag , 1 , 1 , { non_specific , " ! " } } , % Token being parsed .
%% flow, single_quoted,
%% "Hello!"},
%% []
%% }
%% ]
%% }
%% '''
%%
@see new/1 .
-spec new(Source, Options) ->
Constr | no_return() when
Source :: term(),
Options :: [
yamerl_constr_option() |
yamerl_parser:yamerl_parser_option() |
proplists:property()
],
Constr :: yamerl_parser:yamerl_parser().
new(Source, Options) ->
Parser_Options = initialize(Options),
yamerl_parser:new(Source, Parser_Options).
@equiv next_chunk(Constr , Chunk , false )
-spec next_chunk(Constr, Chunk) ->
Ret | no_return() when
Constr :: yamerl_parser:yamerl_parser(),
Chunk :: unicode_binary(),
Ret :: {continue, New_Constr},
New_Constr :: yamerl_parser:yamerl_parser().
next_chunk(Constr, Chunk) ->
next_chunk(Constr, Chunk, false).
%% @doc Feeds the constructor with the next chunk from the YAML stream.
%%
%% `Constr' is the constructor state returned by a previous call
to { @link new/1 } , { @link } , { @link next_chunk/2 } or { @link
%% next_chunk/3}.
%%
` Chunk ' must be an Erlang binary using the UTF-8 , UTF-16 or UTF-32
Unicode encoding . A leading BOM character in the first chunk is used
to determine the encoding and endianness . If no BOM is present , UTF-8
%% is assumed.
%%
` EOS ' indicates the constructor if this is the last chunk from the
%% stream.
%%
If this is not the last chunk ( ` EOS = false ' ) , it returns ` { continue ,
New_Constr } ' where ` New_Constr ' is an updated state which replaces
%% `Constr'. The new state is to be passed to future calls to {@link
%% next_chunk/2}, {@link next_chunk/3} or {@link last_chunk/2}.
%%
If this is the last chunk ( ` EOS = true ' ) , it returns a list of YAML
documents . Documents are made of simple builtin Erlang types if the
%% detailed construction mode is disabled, or records if the detailed
%% construction mode is enabled (`{detailed_constr, boolean()}' passed
%% as an option; default is `false').
%%
%% It throws an exception if there is a parsing or construction error.
-spec next_chunk(Constr, Chunk, false) ->
Ret | no_return() when
Constr :: yamerl_parser:yamerl_parser(),
Chunk :: unicode_binary(),
Ret :: {continue, New_Constr},
New_Constr :: yamerl_parser:yamerl_parser();
(Constr, Chunk, true) ->
Result | no_return() when
Constr :: yamerl_parser:yamerl_parser(),
Chunk :: unicode_binary(),
Result :: [yamerl_doc()]
| [yamerl_simple_doc()].
next_chunk(Constr, Chunk, EOS) ->
Ret = yamerl_parser:next_chunk(Constr, Chunk, EOS),
if
EOS -> get_docs(Ret);
true -> Ret
end.
@equiv next_chunk(Constr , Chunk , true )
-spec last_chunk(Constr, Chunk) ->
Result | no_return() when
Constr :: yamerl_parser:yamerl_parser(),
Chunk :: unicode_binary(),
Result :: [yamerl_doc()]
| [yamerl_simple_doc()].
last_chunk(Constr, Chunk) ->
next_chunk(Constr, Chunk, true).
-spec get_docs(Constr) ->
Docs | no_return() when
Constr :: yamerl_parser:yamerl_parser(),
Docs :: [yamerl_doc()]
| [yamerl_simple_doc()].
get_docs(Constr) ->
case yamerl_parser:get_token_fun(Constr) of
Not_Fun when Not_Fun == acc orelse Not_Fun == drop ->
Error = #yamerl_parsing_error{
name = token_fun_cleared
},
yamerl_errors:throw(Error);
Token_Fun ->
Token_Fun(get_docs)
end.
%% -------------------------------------------------------------------
%% Public API: common stream sources.
%% -------------------------------------------------------------------
%% @equiv string(String, [])
-spec string(String) ->
Result | no_return() when
String :: unicode_data(),
Result :: [yamerl_doc()]
| [yamerl_simple_doc()]
| term().
string(String) ->
string(String, []).
%% @doc Constructs a YAML document from an in-memory YAML string.
%%
` String ' must be an Erlang list or binary containing one or more YAML
documents . If it is a binary , it must be encoded using UTF-8 , UTF-16
%% or UTF-32. A leading BOM character is used to determine the encoding
and endianness . If no BOM is present , UTF-8 is assumed .
%%
%% `Options' is a list of options for the parser and the constructor.
See { @link } for valid options .
%%
%% It returns a list of YAML documents. See {@link next_chunk/3} for
%% more details about the returned documents.
%%
%% It throws an exception if there is a parsing or construction error.
%%
< strong > Example : parse an Erlang list</strong >
%% ```
%% yamerl_constr:string("This is a string").
%% '''
%%
< strong > Example : parse an UTF-8 - encoded >
%% ```
yamerl_constr : string(<<50,32,226,130,172 > > ) . % The string " 2 β¬ " encoded in UTF-8 .
%% '''
%%
< strong > Example : parse a string in simple >
%% ```
%% yamerl_constr:string("Hello!").
%% '''
%%
%% It returns:
%% ```
% List of documents ; here , only one .
%% [
%% % Document root node: a string.
%% "Hello!"
%% ].
%% '''
%%
< strong > Example : parse a string in detailed >
%% ```
yamerl_constr : string("Hello ! " , [ { detailed_constr , true } ] ) .
%% '''
%%
%% It returns:
%% ```
% List of documents ; here , only one .
%% [
% Document # 1 .
%% {yamerl_doc,
%% % Document root node: a string.
{ yamerl_str , yamerl_node_str , " tag : yaml.org,2002 : str " ,
[ { line , 1 } , { column , 1 } ] , % Node location in the original string .
%% "Hello!" % String value.
%% }
%% }
%% ].
%% '''
%%
%% <strong>Example: parse an invalid document</strong>
%% ```
%% yamerl_constr:string(<<"'Oh-oh...">>). % Unfinished single-quoted scalar.
%% '''
%%
%% It throws:
%% ```
%% {yamerl_exception,
% List of warnings and errors ; here , one fatal error .
%% [
% Error # 1 .
%% {yamerl_parsing_error, error,
%% "Unexpected end-of-stream while parsing flow scalar", % Human-readable message.
1 , 10 , % Error location .
%% unexpected_eos,
{ yamerl_scalar , 1 , 1 , { yamerl_tag , 1 , 1 , { non_specific , " ! " } } , % Token being parsed .
%% flow, single_quoted,
%% "Oh-oh..."},
%% []
%% }
%% ]
%% }.
%% '''
-spec string(String, Options) ->
Result | no_return() when
String :: unicode_data(),
Options :: [ yamerl_parser:yamerl_parser_option()
| yamerl_constr_option()
| proplists:property()],
Result :: [yamerl_doc()]
| [yamerl_simple_doc()]
| term().
string(String, Options) ->
Parser_Options = initialize(Options),
Constr = yamerl_parser:string(String, Parser_Options),
get_docs(Constr).
-spec file(Filename) ->
Result | no_return() when
Filename :: string(),
Result :: [yamerl_doc()]
| [yamerl_simple_doc()]
| term().
%% @equiv file(Filename, [])
file(Filename) ->
file(Filename, []).
%% @doc Constructs a YAML document from a regular file.
%%
%% `Filename' must be a string indicating the filename. The file must
contain one or more YAML documents . The file must be encoded using
UTF-8 , UTF-16 or UTF-32 . A leading BOM character is used to determine
the encoding and endianness . If no BOM is present , UTF-8 is assumed .
%%
%% `Options' is a list of options for the parser and the constructor.
See { @link } for valid options .
%%
%% It returns a list of YAML documents. See {@link next_chunk/3} for
%% more details about the returned documents.
%%
%% It throws an exception if there is a parsing or construction error.
%%
%% See {@link string/2} for some examples.
-spec file(Filename, Options) ->
Result | no_return() when
Filename :: string(),
Options :: [ yamerl_parser:yamerl_parser_option()
| yamerl_constr_option()
| proplists:property()],
Result :: [yamerl_doc()]
| [yamerl_simple_doc()]
| term().
file(Filename, Options) ->
Parser_Options = initialize(Options),
Constr = yamerl_parser:file(Filename, Parser_Options),
get_docs(Constr).
%% -------------------------------------------------------------------
%% Presentation details.
%% -------------------------------------------------------------------
%% @doc Returns presentation information in the stream for the given
%% node.
%%
%% This only makes sense when the detailed construction mode is enabled
%% (ie. `{detailed_constr, true}' was passed as an option to {@link
} , { @link file/2 } or { @link string/2 } ) .
get_pres_details(Token) ->
Line = ?TOKEN_LINE(Token),
Column = ?TOKEN_COLUMN(Token),
[{line, Line}, {column, Column}].
%% -------------------------------------------------------------------
%% Node information.
%% -------------------------------------------------------------------
%% @doc Returns the line number in the stream for the given node.
%%
%% This only makes sense when the detailed construction mode is enabled
%% (ie. `{detailed_constr, true}' was passed as an option to {@link
} , { @link file/2 } or { @link string/2 } ) .
node_line(Node) ->
case node_pres(Node) of
undefined -> undefined;
Pres -> proplists:get_value(line, Pres)
end.
%% @doc Returns the column number in the stream for the given node.
%%
%% This only makes sense when the detailed construction mode is enabled
%% (ie. `{detailed_constr, true}' was passed as an option to {@link
} , { @link file/2 } or { @link string/2 } ) .
node_column(Node) ->
case node_pres(Node) of
undefined -> undefined;
Pres -> proplists:get_value(column, Pres)
end.
node_pres(Node) when
is_record(Node, yamerl_seq) orelse
is_record(Node, yamerl_map) orelse
is_record(Node, yamerl_str) orelse
is_record(Node, yamerl_null) orelse
is_record(Node, yamerl_bool) orelse
is_record(Node, yamerl_int) orelse
is_record(Node, yamerl_binary) orelse
is_record(Node, yamerl_timestamp) orelse
is_record(Node, yamerl_erlang_atom) orelse
is_record(Node, yamerl_erlang_fun) ->
?NODE_PRES(Node);
node_pres(Node) when is_tuple(Node) ->
%% For user-defined nodes, we call the module responsible for it.
Mod = ?NODE_MOD(Node),
try
Mod:node_pres(Node)
catch
error:undef ->
undefined
end.
%% -------------------------------------------------------------------
%% Construction.
%% -------------------------------------------------------------------
construct(Constr, #yamerl_doc_start{version = Version}) ->
%% Select schema and associated modules, possibly based on the
%% document version.
Constr1 = setup_node_mods(Constr, Version),
%% Prepare a document node.
Doc = #yamerl_doc{},
Constr2 = Constr1#yamerl_constr{
current_doc = [Doc]
},
return_new_fun(Constr2);
construct(_, Token) when
is_record(Token, yamerl_stream_start) orelse
is_record(Token, yamerl_stream_end) orelse
is_record(Token, yamerl_yaml_directive) orelse
is_record(Token, yamerl_tag_directive) orelse
is_record(Token, yamerl_reserved_directive) orelse
is_record(Token, yamerl_doc_end) ->
%% This token doesn't start a node: ignore it.
ok;
construct(
#yamerl_constr{current_doc = Doc, current_node_is_leaf = false,
mods = Mods, tags = Tags} = Constr,
Token) when Doc /= undefined andalso
(is_record(Token, yamerl_collection_start) orelse
is_record(Token, yamerl_scalar)) ->
%% This token starts a node. We must determine the module to use to
%% construct this node.
Tag = case Token of
#yamerl_collection_start{tag = T} -> T;
#yamerl_scalar{tag = T} -> T
end,
Ret = case Tag of
#yamerl_tag{uri = {non_specific, _}} ->
%% The node has a non-specific tag. We let each module
%% decides if they want to construct the node.
try_construct(Constr, Mods, Token);
#yamerl_tag{uri = URI} ->
We look up this URI in the tag 's index .
IgnoreUnrecognizedTags = proplists:get_value(
ignore_unrecognized_tags, Constr#yamerl_constr.options, false),
case proplists:get_value(URI, Tags) of
Mod when Mod /= undefined ->
Mod:construct_token(Constr, undefined, Token);
undefined when IgnoreUnrecognizedTags ->
try_construct(Constr, Mods, Token);
undefined ->
%% This tag isn't handled by anything!
Error = #yamerl_parsing_error{
name = unrecognized_node,
token = Tag,
line = ?TOKEN_LINE(Tag),
column = ?TOKEN_COLUMN(Tag)
},
Error1 = yamerl_errors:format(Error,
"Tag \"~s\" unrecognized by any module", [URI]),
yamerl_errors:throw(Error1)
end
end,
handle_construct_return(Constr, Doc, Ret);
construct(
#yamerl_constr{current_doc = Doc, current_node_is_leaf = false} = Constr,
#yamerl_anchor{name = Anchor}) when Doc /= undefined ->
handle_construct_return(Constr, Doc, #node_anchor{name = Anchor});
construct(
#yamerl_constr{current_doc = Doc, anchors = Anchors} = Constr,
#yamerl_alias{name = Alias} = Token) when Doc /= undefined ->
try
Node = dict:fetch(Alias, Anchors),
handle_construct_return(Constr, Doc, {finished, Node})
catch
_:_ ->
%% This alias references a non-existent anchor!
Error = #yamerl_parsing_error{
name = no_matching_anchor,
token = Token,
line = ?TOKEN_LINE(Token),
column = ?TOKEN_COLUMN(Token)
},
Error1 = yamerl_errors:format(Error,
"No anchor corresponds to alias \"~s\"", [Alias]),
yamerl_errors:throw(Error1)
end;
construct(
#yamerl_constr{current_doc =
[#unfinished_node{module = Mod} = Node | Doc]} = Constr,
Token) ->
%% This token continues a node. We call the current node's module to
%% handle it.
Ret = Mod:construct_token(Constr, Node, Token),
handle_construct_return(Constr, Doc, Ret).
try_construct(Constr, [Mod | Rest], Token) ->
case Mod:try_construct_token(Constr, undefined, Token) of
unrecognized -> try_construct(Constr, Rest, Token);
Ret -> Ret
end;
try_construct(_, [], Token) ->
Error = #yamerl_parsing_error{
name = unrecognized_node,
token = Token,
text = "No module found to handle node",
line = ?TOKEN_LINE(Token),
column = ?TOKEN_COLUMN(Token)
},
yamerl_errors:throw(Error).
construct_parent(#yamerl_constr{anchors = Anchors} = Constr,
[#node_anchor{name = Anchor} | Doc], Child) ->
Anchors1 = dict:store(Anchor, Child, Anchors),
Constr1 = Constr#yamerl_constr{
anchors = Anchors1
},
construct_parent(Constr1, Doc, Child);
construct_parent(#yamerl_constr{docs = Docs, docs_count = Count} = Constr,
[#yamerl_doc{} = Doc], Root) ->
%% This node is the root of the document.
Doc1 = Doc#yamerl_doc{
root = Root
},
Constr1 = Constr#yamerl_constr{
docs = Docs ++ [Doc1],
docs_count = Count + 1,
current_doc = undefined,
current_node_is_leaf = false,
anchors = dict:new()
},
return_new_fun(Constr1);
construct_parent(Constr, [#unfinished_node{module = Mod} = Node | Doc],
Child) ->
%% We call the parent node's module to handle this new child node.
Ret = Mod:construct_node(Constr, Node, Child),
handle_construct_return(Constr, Doc, Ret).
handle_construct_return(Constr, Doc, {finished, Node}) ->
%% Give this node to the parent node.
construct_parent(Constr, Doc, Node);
handle_construct_return(Constr, Doc, {unfinished, Node, Is_Leaf}) ->
%% Unfinished node, wait for the next tokens.
Constr1 = Constr#yamerl_constr{
current_doc = [Node | Doc],
current_node_is_leaf = Is_Leaf
},
return_new_fun(Constr1);
handle_construct_return(Constr, Doc, #node_anchor{} = Anchor) ->
%% Anchor before a (not-yet-started) node, wait this node.
Constr1 = Constr#yamerl_constr{
current_doc = [Anchor | Doc]
},
return_new_fun(Constr1).
return_new_fun(#yamerl_constr{detailed_constr = Detailed} = Constr) ->
Fun = fun
(get_docs) when not Detailed ->
[Doc#yamerl_doc.root || Doc <- Constr#yamerl_constr.docs];
(get_docs) ->
Constr#yamerl_constr.docs;
(get_constr) ->
Constr;
(T) ->
construct(Constr, T)
end,
{ok, Fun}.
%% -------------------------------------------------------------------
Node modules .
%% -------------------------------------------------------------------
setup_node_mods(Constr, Version) ->
Mods1 = umerge_unsorted(
proplists:get_value(node_mods, Constr#yamerl_constr.options, []),
yamerl_app:get_param(node_mods)
),
DefaultSchema = case Version of
{1, 0} -> ?YAML11_SCHEMA_MODS;
{1, 1} -> ?YAML11_SCHEMA_MODS;
_ -> ?CORE_SCHEMA_MODS
end,
Schema = proplists:get_value(schema, Constr#yamerl_constr.options, auto),
Mods = case Schema of
failsafe -> umerge_unsorted(Mods1, ?FAILSAFE_SCHEMA_MODS);
json -> umerge_unsorted(Mods1, ?JSON_SCHEMA_MODS);
core -> umerge_unsorted(Mods1, ?CORE_SCHEMA_MODS);
yaml11 -> umerge_unsorted(Mods1, ?YAML11_SCHEMA_MODS);
auto -> umerge_unsorted(Mods1, DefaultSchema)
end,
Auto = filter_autodetection_capable_mods(Mods, []),
Tags = index_tags(Mods, []),
Constr#yamerl_constr{
mods = Auto,
tags = Tags
}.
umerge_unsorted(List1, List2) ->
Fun = fun(Mod, List) ->
case lists:member(Mod, List) of
true -> List;
false -> List ++ [Mod]
end
end,
lists:foldl(Fun, List1, List2).
filter_autodetection_capable_mods([Mod | Rest], Auto) ->
catch Mod:module_info(),
Auto1 = case erlang:function_exported(Mod, try_construct_token, 3) of
true -> [Mod | Auto];
false -> Auto
end,
filter_autodetection_capable_mods(Rest, Auto1);
filter_autodetection_capable_mods([], Auto) ->
lists:reverse(Auto).
index_tags([Mod | Rest], Tags) ->
try
Tags1 = index_tags2(Tags, Mod:tags(), Mod),
index_tags(Rest, Tags1)
catch
_:_ ->
index_tags(Rest, Tags)
end;
index_tags([], Tags) ->
Tags.
index_tags2(Tags, [Tag | Rest], Mod) ->
Tags1 = case lists:keymember(Tag, 1, Tags) of
false -> [{Tag, Mod} | Tags];
true -> Tags
end,
index_tags2(Tags1, Rest, Mod);
index_tags2(Tags, [], _) ->
Tags.
%% -------------------------------------------------------------------
Internal functions .
%% -------------------------------------------------------------------
initialize(Options) ->
Options0 = proplists:unfold(Options),
{Constr_Options, Parser_Options, Ext_Options} = filter_options(Options0),
check_options(Constr_Options),
Detailed = proplists:get_value(detailed_constr, Constr_Options, false),
Constr = #yamerl_constr{
options = Constr_Options,
ext_options = Ext_Options,
detailed_constr = Detailed
},
{ok, Token_Fun} = return_new_fun(Constr),
[{token_fun, Token_Fun} | Parser_Options].
filter_options(Options) ->
Constr_Option_Names = option_names(),
Parser_Option_Names = yamerl_parser:option_names(),
filter_options2(Options, Constr_Option_Names, Parser_Option_Names,
[], [], []).
filter_options2([{Name, _} = Option | Rest],
Constr_Option_Names, Parser_Option_Names,
Constr_Options, Parser_Options, Ext_Options) ->
case lists:member(Name, Constr_Option_Names) of
true ->
filter_options2(Rest,
Constr_Option_Names, Parser_Option_Names,
[Option | Constr_Options], Parser_Options, Ext_Options);
false ->
case lists:member(Name, Parser_Option_Names) of
true ->
filter_options2(Rest,
Constr_Option_Names, Parser_Option_Names,
Constr_Options, [Option | Parser_Options], Ext_Options);
false ->
filter_options2(Rest,
Constr_Option_Names, Parser_Option_Names,
Constr_Options, Parser_Options, [Option | Ext_Options])
end
end;
filter_options2([], _, _, Constr_Options, Parser_Options, Ext_Options) ->
{
lists:reverse(Constr_Options),
lists:reverse(Parser_Options),
lists:reverse(Ext_Options)
}.
@private
option_names() ->
[
node_mods,
schema,
detailed_constr,
ignore_unrecognized_tags
].
check_options([Option | Rest]) ->
case is_option_valid(Option) of
true -> check_options(Rest);
false -> invalid_option(Option)
end;
check_options([]) ->
ok.
is_option_valid({detailed_constr, Flag}) when is_boolean(Flag) ->
true;
is_option_valid({node_mods, Mods}) when is_list(Mods) ->
Fun = fun(Mod) ->
not yamerl_app:is_node_mod(Mod)
end,
case lists:filter(Fun, Mods) of
[] -> true;
_ -> false
end;
is_option_valid({schema, Schema})
when Schema == failsafe
orelse Schema == json
orelse Schema == core
orelse Schema == yaml11
orelse Schema == auto ->
true;
is_option_valid({ignore_unrecognized_tags, Flag}) when is_boolean(Flag) ->
true;
is_option_valid(_) ->
false.
invalid_option(Option) ->
Error = #yamerl_invalid_option{
option = Option
},
Error1 = case Option of
{detailed_constr, _} ->
Error#yamerl_invalid_option{
text = "Invalid value for option \"detailed_constr\": "
"it must be a boolean"
};
{node_mods, _} ->
Error#yamerl_invalid_option{
text = "Invalid value for option \"node_mods\": "
"it must be a list of modules"
};
_ ->
yamerl_errors:format(Error, "Unknown option \"~w\"", [Option])
end,
yamerl_errors:throw(Error1).
| null | https://raw.githubusercontent.com/yakaz/yamerl/bf9d8b743bfc9775f2ddad9fb8d18ba5dc29d3e1/src/yamerl_constr.erl | erlang | -
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
notice, this list of conditions and the following disclaimer.
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
SUCH DAMAGE.
@doc {@module} implements a YAML constructor. It uses {@link
yamerl_parser} as the underlying parser. The parser emits YAML nodes
which are assembled as structured YAML documents by the constructor.
It is able to construct YAML documents from in-memory strings (see
{@link string/1} and {@link string/2}), regular files (see {@link
file/1} and {@link file/2}) or streams (see {@link new/1}, {@link
YAML documents can be constructed in simple or detailed modes. In
detailed mode, they are made of records, holding more information
about YAML nodes and their presentation.
The `yamerl' application must be started to use the constructor.
```
yamerl_constr:string("Hello!").
'''
It returns:
```
List of documents ; here , only one .
[
% Document root node: a string.
"Hello!"
].
'''
```
{continue, Stream_St2} = yamerl_constr:next_chunk(Stream_St1, <<"He">>),
{continue, Stream_St3} = yamerl_constr:next_chunk(Stream_St2, <<"ll">>),
yamerl_constr:last_chunk(Stream_St3, <<"o!">>).
'''
It returns:
```
List of documents ; here , only one .
[
Document # 1 .
{yamerl_doc,
% Document root node: a string.
Node location in the original string .
"Hello!" % String value.
}
}
].
'''
Public API.
-------------------------------------------------------------------
Exported types.
-------------------------------------------------------------------
FIXME:
type and several guard expression which will never match.
-------------------------------------------------------------------
Public API: chunked stream scanning.
-------------------------------------------------------------------
@equiv new(Source, [])
@doc Creates and returns a new YAML construction state.
When you want to parse a stream (as opposed to in-memory strings or
the constructor with stream "chunks".
`Source' can be any term describing the stream. {@link string/1} and
{@link string/2} sets it to the atom `string'. {@link file/1} and
{@link file/2} sets it to `{file, Filename}'. The constructor doesn't
use that value.
`Options' is a list of options for the parser and the constructor.
Valid options are:
<dl>
<dd>Flag to enable/disable the detailed construction mode. In simple
types. In detailed construction mode, YAML nodes are returned using
records. Those records gives additional information such as the YAML
node type, the location in the stream (line and column number) and so
on.</dd>
<dt>`{ignore_unrecognized_tags, boolean()}'</dt>
<dd>Indicate if unrecognized tags should be ignored. When `false'
(the default), a node with an unrecognized tag can't be constructed
because yamerl doesn't know how to interpret the node. When this
happens an exception is raised. When set to `true', the node is
constructed as if it was a plain YAML node without any tag.</dd>
<dd>Default: `false'.</dd>
<dt>`{keep_duplicate_keys, boolean()}'</dt>
<dd>Flag to keep duplicate keys in maps. By default all duplicate keys
in maps/proplists will be ignored and the last occurence of a key will
prevail. If this flag is enabled all keys will remain. This flag only
are used instead of maps.</dd>
<dd>Default: `false'</dd>
<dt>`{node_mods, Mods_List}'</dt>
<dd>Default: `[]'.</dd>
<dd>Name of the official schema to use.</dd>
<dd>Default: `core'.</dd>
</dl>
The returned state is opaque value. You then pass it to {@link
next_chunk/2}, {@link next_chunk/3} and {@link last_chunk/2}.
If an option is invalid, an exception is thrown.
<strong>Example: parse a valid stream</strong>
```
{continue, Stream_St2} = yamerl_constr:next_chunk(Stream_St1, <<"He">>),
{continue, Stream_St3} = yamerl_constr:next_chunk(Stream_St2, <<"ll">>),
yamerl_constr:last_chunk(Stream_St3, <<"o!">>).
'''
It returns:
```
List of documents ; here , only one .
[
% Document root node: a string.
"Hello!"
].
'''
<strong>Example: parse an invalid stream</strong>
```
{continue, Stream_St2} = yamerl_constr:next_chunk(Stream_St1, <<"'He">>),
{continue, Stream_St3} = yamerl_constr:next_chunk(Stream_St2, <<"ll">>),
yamerl_constr:last_chunk(Stream_St3, <<"o!">>) % Unfinished single-quoted scalar.
'''
It throws:
```
{yamerl_exception,
List of warnings and errors ; here , one fatal error .
[
Error # 1 .
{yamerl_parsing_error, error,
"Unexpected end-of-stream while parsing flow scalar", % Human-readable message.
Error location .
unexpected_eos,
Token being parsed .
flow, single_quoted,
"Hello!"},
[]
}
]
}
'''
@doc Feeds the constructor with the next chunk from the YAML stream.
`Constr' is the constructor state returned by a previous call
next_chunk/3}.
is assumed.
stream.
`Constr'. The new state is to be passed to future calls to {@link
next_chunk/2}, {@link next_chunk/3} or {@link last_chunk/2}.
detailed construction mode is disabled, or records if the detailed
construction mode is enabled (`{detailed_constr, boolean()}' passed
as an option; default is `false').
It throws an exception if there is a parsing or construction error.
-------------------------------------------------------------------
Public API: common stream sources.
-------------------------------------------------------------------
@equiv string(String, [])
@doc Constructs a YAML document from an in-memory YAML string.
or UTF-32. A leading BOM character is used to determine the encoding
`Options' is a list of options for the parser and the constructor.
It returns a list of YAML documents. See {@link next_chunk/3} for
more details about the returned documents.
It throws an exception if there is a parsing or construction error.
```
yamerl_constr:string("This is a string").
'''
```
The string " 2 β¬ " encoded in UTF-8 .
'''
```
yamerl_constr:string("Hello!").
'''
It returns:
```
List of documents ; here , only one .
[
% Document root node: a string.
"Hello!"
].
'''
```
'''
It returns:
```
List of documents ; here , only one .
[
Document # 1 .
{yamerl_doc,
% Document root node: a string.
Node location in the original string .
"Hello!" % String value.
}
}
].
'''
<strong>Example: parse an invalid document</strong>
```
yamerl_constr:string(<<"'Oh-oh...">>). % Unfinished single-quoted scalar.
'''
It throws:
```
{yamerl_exception,
List of warnings and errors ; here , one fatal error .
[
Error # 1 .
{yamerl_parsing_error, error,
"Unexpected end-of-stream while parsing flow scalar", % Human-readable message.
Error location .
unexpected_eos,
Token being parsed .
flow, single_quoted,
"Oh-oh..."},
[]
}
]
}.
'''
@equiv file(Filename, [])
@doc Constructs a YAML document from a regular file.
`Filename' must be a string indicating the filename. The file must
`Options' is a list of options for the parser and the constructor.
It returns a list of YAML documents. See {@link next_chunk/3} for
more details about the returned documents.
It throws an exception if there is a parsing or construction error.
See {@link string/2} for some examples.
-------------------------------------------------------------------
Presentation details.
-------------------------------------------------------------------
@doc Returns presentation information in the stream for the given
node.
This only makes sense when the detailed construction mode is enabled
(ie. `{detailed_constr, true}' was passed as an option to {@link
-------------------------------------------------------------------
Node information.
-------------------------------------------------------------------
@doc Returns the line number in the stream for the given node.
This only makes sense when the detailed construction mode is enabled
(ie. `{detailed_constr, true}' was passed as an option to {@link
@doc Returns the column number in the stream for the given node.
This only makes sense when the detailed construction mode is enabled
(ie. `{detailed_constr, true}' was passed as an option to {@link
For user-defined nodes, we call the module responsible for it.
-------------------------------------------------------------------
Construction.
-------------------------------------------------------------------
Select schema and associated modules, possibly based on the
document version.
Prepare a document node.
This token doesn't start a node: ignore it.
This token starts a node. We must determine the module to use to
construct this node.
The node has a non-specific tag. We let each module
decides if they want to construct the node.
This tag isn't handled by anything!
This alias references a non-existent anchor!
This token continues a node. We call the current node's module to
handle it.
This node is the root of the document.
We call the parent node's module to handle this new child node.
Give this node to the parent node.
Unfinished node, wait for the next tokens.
Anchor before a (not-yet-started) node, wait this node.
-------------------------------------------------------------------
-------------------------------------------------------------------
-------------------------------------------------------------------
------------------------------------------------------------------- | Copyright ( c ) 2012 - 2014 Yakaz
Copyright ( c ) 2016 - 2022 < >
1 . Redistributions of source code must retain the above copyright
2 . Redistributions in binary form must reproduce the above copyright
THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ` ` AS IS '' AND
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT
@author < >
2012 - 2014 Yakaz ,
2016 - 2022 < >
} and { @link next_chunk/3 } ) .
simple mode , they are made of simple builting Erlang types . In
< strong > Example : parse a string in simple >
< strong > Example : parse a stream in detailed >
Stream_St1 = yamerl_constr : new({file , " < stdin > " } , [ { detailed_constr , true } ] ) ,
{ yamerl_str , yamerl_node_str , " tag : yaml.org,2002 : str " ,
-module(yamerl_constr).
-include("yamerl_errors.hrl").
-include("yamerl_tokens.hrl").
-include("yamerl_nodes.hrl").
-include("yamerl_constr.hrl").
-export([
new/1,
new/2,
string/1,
string/2,
file/1,
file/2,
next_chunk/3,
next_chunk/2,
last_chunk/2,
get_pres_details/1,
node_line/1,
node_column/1,
option_names/0
]).
This type should be " -opaque " . However , up - to Erlang R15B03 , an issue
with either this code or Dialyzer prevents us from declaring it
properly : reports warning regarding the stream_state_fun ( )
-type yamerl_constr() :: #yamerl_constr{}.
-export_type([
yamerl_constr/0,
yamerl_constr_option/0,
yamerl_node/0,
yamerl_seq/0,
yamerl_map/0,
yamerl_str/0,
yamerl_null/0,
yamerl_bool/0,
yamerl_int/0,
yamerl_float/0,
yamerl_binary/0,
yamerl_timestamp/0,
yamerl_erlang_atom/0,
yamerl_erlang_fun/0,
yamerl_user_node/0,
yamerl_doc/0,
yamerl_simple_node/0,
yamerl_simple_seq/0,
yamerl_simple_map/0,
yamerl_simple_str/0,
yamerl_simple_null/0,
yamerl_simple_bool/0,
yamerl_simple_int/0,
yamerl_simple_float/0,
yamerl_simple_timestamp/0,
yamerl_simple_erlang_atom/0,
yamerl_simple_erlang_fun/0,
yamerl_user_simple_node/0,
yamerl_simple_doc/0
]).
-spec new(Source) ->
Constr | no_return() when
Source :: term(),
Constr :: yamerl_parser:yamerl_parser().
new(Source) ->
new(Source, []).
regular files ) , this is the first function you call before feeding
< , boolean()}'</dt >
construction mode , YAML nodes are returned as Erlang integers ,
strings , lists , proplists , etc . In other words , only simple builtin
works when the ` detailed_constr ' flag is set to ` true ' or proplists
< dd > List of Erlang modules to extend support node types.</dd >
< dt>`{schema , failsafe | json | core | yaml11}'</dt >
Stream_St1 = yamerl_constr : new({file , " < stdin > " } ) ,
Stream_St1 = yamerl_constr : new({file , " < stdin > " } ) ,
@see new/1 .
-spec new(Source, Options) ->
Constr | no_return() when
Source :: term(),
Options :: [
yamerl_constr_option() |
yamerl_parser:yamerl_parser_option() |
proplists:property()
],
Constr :: yamerl_parser:yamerl_parser().
new(Source, Options) ->
Parser_Options = initialize(Options),
yamerl_parser:new(Source, Parser_Options).
@equiv next_chunk(Constr , Chunk , false )
-spec next_chunk(Constr, Chunk) ->
Ret | no_return() when
Constr :: yamerl_parser:yamerl_parser(),
Chunk :: unicode_binary(),
Ret :: {continue, New_Constr},
New_Constr :: yamerl_parser:yamerl_parser().
next_chunk(Constr, Chunk) ->
next_chunk(Constr, Chunk, false).
to { @link new/1 } , { @link } , { @link next_chunk/2 } or { @link
` Chunk ' must be an Erlang binary using the UTF-8 , UTF-16 or UTF-32
Unicode encoding . A leading BOM character in the first chunk is used
to determine the encoding and endianness . If no BOM is present , UTF-8
` EOS ' indicates the constructor if this is the last chunk from the
If this is not the last chunk ( ` EOS = false ' ) , it returns ` { continue ,
New_Constr } ' where ` New_Constr ' is an updated state which replaces
If this is the last chunk ( ` EOS = true ' ) , it returns a list of YAML
documents . Documents are made of simple builtin Erlang types if the
-spec next_chunk(Constr, Chunk, false) ->
Ret | no_return() when
Constr :: yamerl_parser:yamerl_parser(),
Chunk :: unicode_binary(),
Ret :: {continue, New_Constr},
New_Constr :: yamerl_parser:yamerl_parser();
(Constr, Chunk, true) ->
Result | no_return() when
Constr :: yamerl_parser:yamerl_parser(),
Chunk :: unicode_binary(),
Result :: [yamerl_doc()]
| [yamerl_simple_doc()].
next_chunk(Constr, Chunk, EOS) ->
Ret = yamerl_parser:next_chunk(Constr, Chunk, EOS),
if
EOS -> get_docs(Ret);
true -> Ret
end.
@equiv next_chunk(Constr , Chunk , true )
-spec last_chunk(Constr, Chunk) ->
Result | no_return() when
Constr :: yamerl_parser:yamerl_parser(),
Chunk :: unicode_binary(),
Result :: [yamerl_doc()]
| [yamerl_simple_doc()].
last_chunk(Constr, Chunk) ->
next_chunk(Constr, Chunk, true).
-spec get_docs(Constr) ->
Docs | no_return() when
Constr :: yamerl_parser:yamerl_parser(),
Docs :: [yamerl_doc()]
| [yamerl_simple_doc()].
get_docs(Constr) ->
case yamerl_parser:get_token_fun(Constr) of
Not_Fun when Not_Fun == acc orelse Not_Fun == drop ->
Error = #yamerl_parsing_error{
name = token_fun_cleared
},
yamerl_errors:throw(Error);
Token_Fun ->
Token_Fun(get_docs)
end.
-spec string(String) ->
Result | no_return() when
String :: unicode_data(),
Result :: [yamerl_doc()]
| [yamerl_simple_doc()]
| term().
string(String) ->
string(String, []).
` String ' must be an Erlang list or binary containing one or more YAML
documents . If it is a binary , it must be encoded using UTF-8 , UTF-16
and endianness . If no BOM is present , UTF-8 is assumed .
See { @link } for valid options .
< strong > Example : parse an Erlang list</strong >
< strong > Example : parse an UTF-8 - encoded >
< strong > Example : parse a string in simple >
< strong > Example : parse a string in detailed >
yamerl_constr : string("Hello ! " , [ { detailed_constr , true } ] ) .
{ yamerl_str , yamerl_node_str , " tag : yaml.org,2002 : str " ,
-spec string(String, Options) ->
Result | no_return() when
String :: unicode_data(),
Options :: [ yamerl_parser:yamerl_parser_option()
| yamerl_constr_option()
| proplists:property()],
Result :: [yamerl_doc()]
| [yamerl_simple_doc()]
| term().
string(String, Options) ->
Parser_Options = initialize(Options),
Constr = yamerl_parser:string(String, Parser_Options),
get_docs(Constr).
-spec file(Filename) ->
Result | no_return() when
Filename :: string(),
Result :: [yamerl_doc()]
| [yamerl_simple_doc()]
| term().
file(Filename) ->
file(Filename, []).
%% contain one or more YAML documents. The file must be encoded using
%% UTF-8, UTF-16 or UTF-32. A leading BOM character is used to determine
%% the encoding and endianness. If no BOM is present, UTF-8 is assumed.
%% See {@link} for valid options.
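%% For example (the filename below is illustrative):
%%   Docs = yamerl_constr:file("input.yaml", [{detailed_constr, false}]).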
-spec file(Filename, Options) ->
Result | no_return() when
Filename :: string(),
Options :: [ yamerl_parser:yamerl_parser_option()
| yamerl_constr_option()
| proplists:property()],
Result :: [yamerl_doc()]
| [yamerl_simple_doc()]
| term().
file(Filename, Options) ->
Parser_Options = initialize(Options),
Constr = yamerl_parser:file(Filename, Parser_Options),
get_docs(Constr).
%% }, {@link file/2} or {@link string/2}).
get_pres_details(Token) ->
Line = ?TOKEN_LINE(Token),
Column = ?TOKEN_COLUMN(Token),
[{line, Line}, {column, Column}].
%% }, {@link file/2} or {@link string/2}).
node_line(Node) ->
case node_pres(Node) of
undefined -> undefined;
Pres -> proplists:get_value(line, Pres)
end.
%% }, {@link file/2} or {@link string/2}).
node_column(Node) ->
case node_pres(Node) of
undefined -> undefined;
Pres -> proplists:get_value(column, Pres)
end.
node_pres(Node) when
is_record(Node, yamerl_seq) orelse
is_record(Node, yamerl_map) orelse
is_record(Node, yamerl_str) orelse
is_record(Node, yamerl_null) orelse
is_record(Node, yamerl_bool) orelse
is_record(Node, yamerl_int) orelse
is_record(Node, yamerl_binary) orelse
is_record(Node, yamerl_timestamp) orelse
is_record(Node, yamerl_erlang_atom) orelse
is_record(Node, yamerl_erlang_fun) ->
?NODE_PRES(Node);
node_pres(Node) when is_tuple(Node) ->
Mod = ?NODE_MOD(Node),
try
Mod:node_pres(Node)
catch
error:undef ->
undefined
end.
construct(Constr, #yamerl_doc_start{version = Version}) ->
Constr1 = setup_node_mods(Constr, Version),
Doc = #yamerl_doc{},
Constr2 = Constr1#yamerl_constr{
current_doc = [Doc]
},
return_new_fun(Constr2);
construct(_, Token) when
is_record(Token, yamerl_stream_start) orelse
is_record(Token, yamerl_stream_end) orelse
is_record(Token, yamerl_yaml_directive) orelse
is_record(Token, yamerl_tag_directive) orelse
is_record(Token, yamerl_reserved_directive) orelse
is_record(Token, yamerl_doc_end) ->
ok;
construct(
#yamerl_constr{current_doc = Doc, current_node_is_leaf = false,
mods = Mods, tags = Tags} = Constr,
Token) when Doc /= undefined andalso
(is_record(Token, yamerl_collection_start) orelse
is_record(Token, yamerl_scalar)) ->
Tag = case Token of
#yamerl_collection_start{tag = T} -> T;
#yamerl_scalar{tag = T} -> T
end,
Ret = case Tag of
#yamerl_tag{uri = {non_specific, _}} ->
try_construct(Constr, Mods, Token);
#yamerl_tag{uri = URI} ->
          %% We look up this URI in the tag's index.
IgnoreUnrecognizedTags = proplists:get_value(
ignore_unrecognized_tags, Constr#yamerl_constr.options, false),
case proplists:get_value(URI, Tags) of
Mod when Mod /= undefined ->
Mod:construct_token(Constr, undefined, Token);
undefined when IgnoreUnrecognizedTags ->
try_construct(Constr, Mods, Token);
undefined ->
Error = #yamerl_parsing_error{
name = unrecognized_node,
token = Tag,
line = ?TOKEN_LINE(Tag),
column = ?TOKEN_COLUMN(Tag)
},
Error1 = yamerl_errors:format(Error,
"Tag \"~s\" unrecognized by any module", [URI]),
yamerl_errors:throw(Error1)
end
end,
handle_construct_return(Constr, Doc, Ret);
construct(
#yamerl_constr{current_doc = Doc, current_node_is_leaf = false} = Constr,
#yamerl_anchor{name = Anchor}) when Doc /= undefined ->
handle_construct_return(Constr, Doc, #node_anchor{name = Anchor});
construct(
#yamerl_constr{current_doc = Doc, anchors = Anchors} = Constr,
#yamerl_alias{name = Alias} = Token) when Doc /= undefined ->
try
Node = dict:fetch(Alias, Anchors),
handle_construct_return(Constr, Doc, {finished, Node})
catch
_:_ ->
Error = #yamerl_parsing_error{
name = no_matching_anchor,
token = Token,
line = ?TOKEN_LINE(Token),
column = ?TOKEN_COLUMN(Token)
},
Error1 = yamerl_errors:format(Error,
"No anchor corresponds to alias \"~s\"", [Alias]),
yamerl_errors:throw(Error1)
end;
construct(
#yamerl_constr{current_doc =
[#unfinished_node{module = Mod} = Node | Doc]} = Constr,
Token) ->
Ret = Mod:construct_token(Constr, Node, Token),
handle_construct_return(Constr, Doc, Ret).
try_construct(Constr, [Mod | Rest], Token) ->
case Mod:try_construct_token(Constr, undefined, Token) of
unrecognized -> try_construct(Constr, Rest, Token);
Ret -> Ret
end;
try_construct(_, [], Token) ->
Error = #yamerl_parsing_error{
name = unrecognized_node,
token = Token,
text = "No module found to handle node",
line = ?TOKEN_LINE(Token),
column = ?TOKEN_COLUMN(Token)
},
yamerl_errors:throw(Error).
construct_parent(#yamerl_constr{anchors = Anchors} = Constr,
[#node_anchor{name = Anchor} | Doc], Child) ->
Anchors1 = dict:store(Anchor, Child, Anchors),
Constr1 = Constr#yamerl_constr{
anchors = Anchors1
},
construct_parent(Constr1, Doc, Child);
construct_parent(#yamerl_constr{docs = Docs, docs_count = Count} = Constr,
[#yamerl_doc{} = Doc], Root) ->
Doc1 = Doc#yamerl_doc{
root = Root
},
Constr1 = Constr#yamerl_constr{
docs = Docs ++ [Doc1],
docs_count = Count + 1,
current_doc = undefined,
current_node_is_leaf = false,
anchors = dict:new()
},
return_new_fun(Constr1);
construct_parent(Constr, [#unfinished_node{module = Mod} = Node | Doc],
Child) ->
Ret = Mod:construct_node(Constr, Node, Child),
handle_construct_return(Constr, Doc, Ret).
handle_construct_return(Constr, Doc, {finished, Node}) ->
construct_parent(Constr, Doc, Node);
handle_construct_return(Constr, Doc, {unfinished, Node, Is_Leaf}) ->
Constr1 = Constr#yamerl_constr{
current_doc = [Node | Doc],
current_node_is_leaf = Is_Leaf
},
return_new_fun(Constr1);
handle_construct_return(Constr, Doc, #node_anchor{} = Anchor) ->
Constr1 = Constr#yamerl_constr{
current_doc = [Anchor | Doc]
},
return_new_fun(Constr1).
return_new_fun(#yamerl_constr{detailed_constr = Detailed} = Constr) ->
Fun = fun
(get_docs) when not Detailed ->
[Doc#yamerl_doc.root || Doc <- Constr#yamerl_constr.docs];
(get_docs) ->
Constr#yamerl_constr.docs;
(get_constr) ->
Constr;
(T) ->
construct(Constr, T)
end,
{ok, Fun}.
%% Node modules.
setup_node_mods(Constr, Version) ->
Mods1 = umerge_unsorted(
proplists:get_value(node_mods, Constr#yamerl_constr.options, []),
yamerl_app:get_param(node_mods)
),
DefaultSchema = case Version of
{1, 0} -> ?YAML11_SCHEMA_MODS;
{1, 1} -> ?YAML11_SCHEMA_MODS;
_ -> ?CORE_SCHEMA_MODS
end,
Schema = proplists:get_value(schema, Constr#yamerl_constr.options, auto),
Mods = case Schema of
failsafe -> umerge_unsorted(Mods1, ?FAILSAFE_SCHEMA_MODS);
json -> umerge_unsorted(Mods1, ?JSON_SCHEMA_MODS);
core -> umerge_unsorted(Mods1, ?CORE_SCHEMA_MODS);
yaml11 -> umerge_unsorted(Mods1, ?YAML11_SCHEMA_MODS);
auto -> umerge_unsorted(Mods1, DefaultSchema)
end,
Auto = filter_autodetection_capable_mods(Mods, []),
Tags = index_tags(Mods, []),
Constr#yamerl_constr{
mods = Auto,
tags = Tags
}.
umerge_unsorted(List1, List2) ->
Fun = fun(Mod, List) ->
case lists:member(Mod, List) of
true -> List;
false -> List ++ [Mod]
end
end,
lists:foldl(Fun, List1, List2).
filter_autodetection_capable_mods([Mod | Rest], Auto) ->
catch Mod:module_info(),
Auto1 = case erlang:function_exported(Mod, try_construct_token, 3) of
true -> [Mod | Auto];
false -> Auto
end,
filter_autodetection_capable_mods(Rest, Auto1);
filter_autodetection_capable_mods([], Auto) ->
lists:reverse(Auto).
index_tags([Mod | Rest], Tags) ->
try
Tags1 = index_tags2(Tags, Mod:tags(), Mod),
index_tags(Rest, Tags1)
catch
_:_ ->
index_tags(Rest, Tags)
end;
index_tags([], Tags) ->
Tags.
index_tags2(Tags, [Tag | Rest], Mod) ->
Tags1 = case lists:keymember(Tag, 1, Tags) of
false -> [{Tag, Mod} | Tags];
true -> Tags
end,
index_tags2(Tags1, Rest, Mod);
index_tags2(Tags, [], _) ->
Tags.
%% Internal functions.
initialize(Options) ->
Options0 = proplists:unfold(Options),
{Constr_Options, Parser_Options, Ext_Options} = filter_options(Options0),
check_options(Constr_Options),
Detailed = proplists:get_value(detailed_constr, Constr_Options, false),
Constr = #yamerl_constr{
options = Constr_Options,
ext_options = Ext_Options,
detailed_constr = Detailed
},
{ok, Token_Fun} = return_new_fun(Constr),
[{token_fun, Token_Fun} | Parser_Options].
filter_options(Options) ->
Constr_Option_Names = option_names(),
Parser_Option_Names = yamerl_parser:option_names(),
filter_options2(Options, Constr_Option_Names, Parser_Option_Names,
[], [], []).
filter_options2([{Name, _} = Option | Rest],
Constr_Option_Names, Parser_Option_Names,
Constr_Options, Parser_Options, Ext_Options) ->
case lists:member(Name, Constr_Option_Names) of
true ->
filter_options2(Rest,
Constr_Option_Names, Parser_Option_Names,
[Option | Constr_Options], Parser_Options, Ext_Options);
false ->
case lists:member(Name, Parser_Option_Names) of
true ->
filter_options2(Rest,
Constr_Option_Names, Parser_Option_Names,
Constr_Options, [Option | Parser_Options], Ext_Options);
false ->
filter_options2(Rest,
Constr_Option_Names, Parser_Option_Names,
Constr_Options, Parser_Options, [Option | Ext_Options])
end
end;
filter_options2([], _, _, Constr_Options, Parser_Options, Ext_Options) ->
{
lists:reverse(Constr_Options),
lists:reverse(Parser_Options),
lists:reverse(Ext_Options)
}.
%% @private
option_names() ->
[
node_mods,
schema,
detailed_constr,
ignore_unrecognized_tags
].
check_options([Option | Rest]) ->
case is_option_valid(Option) of
true -> check_options(Rest);
false -> invalid_option(Option)
end;
check_options([]) ->
ok.
is_option_valid({detailed_constr, Flag}) when is_boolean(Flag) ->
true;
is_option_valid({node_mods, Mods}) when is_list(Mods) ->
Fun = fun(Mod) ->
not yamerl_app:is_node_mod(Mod)
end,
case lists:filter(Fun, Mods) of
[] -> true;
_ -> false
end;
is_option_valid({schema, Schema})
when Schema == failsafe
orelse Schema == json
orelse Schema == core
orelse Schema == yaml11
orelse Schema == auto ->
true;
is_option_valid({ignore_unrecognized_tags, Flag}) when is_boolean(Flag) ->
true;
is_option_valid(_) ->
false.
invalid_option(Option) ->
Error = #yamerl_invalid_option{
option = Option
},
Error1 = case Option of
{detailed_constr, _} ->
Error#yamerl_invalid_option{
text = "Invalid value for option \"detailed_constr\": "
"it must be a boolean"
};
{node_mods, _} ->
Error#yamerl_invalid_option{
text = "Invalid value for option \"node_mods\": "
"it must be a list of modules"
};
_ ->
yamerl_errors:format(Error, "Unknown option \"~w\"", [Option])
end,
yamerl_errors:throw(Error1).
|
894aee4ca43facfdd7b953178a5777faa13182042eab5dbc4ba06d4b2c1a6c7c | masaeedu/selectivemonoidal | Decisive.hs | module Decisive where
import Data.Void (Void)
-- Represents a context with static choice
class Functor f => Decide f
where
decide :: f (Either a b) -> Either (f a) (f b)
-- Choosing among no options
class Decide f => Decisive f
where
force :: f Void -> Void
| null | https://raw.githubusercontent.com/masaeedu/selectivemonoidal/39d32c3743889bd9ce05b04ed4029dc7528dc48d/src/Decisive.hs | haskell | Represents a context with static choice
Choosing among no options | module Decisive where
import Data.Void (Void)
class Functor f => Decide f
where
decide :: f (Either a b) -> Either (f a) (f b)
class Decide f => Decisive f
where
force :: f Void -> Void
|
6cca0b6daa6eda4fe6cfff16b25b45d8eee06338c88d6bd5810e81cc77bf0ebb | janestreet/core | zone_intf.ml | (** Time-zone handling. *)
open! Import
(** The internal time representation of [Zone.t]. This is a tiny subset of [Time0_intf.S],
    see that interface for details such as the meaning of [Span] and [Date_and_ofday].
    The name of the interface reflects the fact that the interface only gives you access
    to the seconds of the [t]. But you can use this interface with types that have higher
    precision than that, hence the rounding implied in the name of
    [to_int63_seconds_round_down_exn].
*)
module type Time_in_seconds = sig
module Span : sig
type t
val of_int63_seconds : Int63.t -> t
val to_int63_seconds_round_down_exn : t -> Int63.t
end
module Date_and_ofday : sig
type t
val of_synthetic_span_since_epoch : Span.t -> t
val to_synthetic_span_since_epoch : t -> Span.t
end
type t
val of_span_since_epoch : Span.t -> t
val to_span_since_epoch : t -> Span.t
end
(** This is the interface of [Zone], but not the interface of [Time.Zone] or
    [Time_ns.Zone]. For those, look at [Time_intf.Zone] *)
module type S = sig
  (** {1 User-friendly interface} *)
(** The type of a time-zone.
bin_io and sexp representations of Zone.t are the name of the zone, and
not the full data that is read from disk when Zone.find is called. The
full Zone.t is reconstructed on the receiving/reading side by reloading
the zone file from disk. Any zone name that is accepted by [find] is
acceptable in the bin_io and sexp representations. *)
type t [@@deriving sexp_of, compare]
(** [input_tz_file ~zonename ~filename] read in [filename] and return [t]
with [name t] = [zonename] *)
val input_tz_file : zonename:string -> filename:string -> t
  (** [likely_machine_zones] is a list of zone names that will be searched
      first when trying to determine the machine zone of a box. Setting this
      to a likely set of zones for your application will speed the very first
      use of the local timezone. *)
val likely_machine_zones : string list ref
(** [of_utc_offset offset] returns a timezone with a static UTC offset (given in
hours). *)
val of_utc_offset : hours:int -> t
val of_utc_offset_explicit_name : name:string -> hours:int -> t
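  (* Usage sketch (the values below are hypothetical, not taken from this
     interface's documentation): [of_utc_offset ~hours:(-5)] builds a fixed-offset
     zone five hours behind UTC, while
     [of_utc_offset_explicit_name ~name:"UTC-05" ~hours:(-5)] does the same and
     also forces the zone's reported name. *)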
(** [utc] the UTC time zone. Included for convenience *)
val utc : t
val name : t -> string
(** [original_filename t] return the filename [t] was loaded from (if any) *)
val original_filename : t -> string option
  (** [digest t] return the MD5 digest of the file the t was created from (if any) *)
val digest : t -> Md5.t option
module Time_in_seconds : Time_in_seconds
(** For performance testing only; [reset_transition_cache t] resets an internal cache in
[t] used to speed up repeated lookups of the same clock shift transition. *)
val reset_transition_cache : t -> unit
  (** A time zone index refers to a range of times delimited by DST transitions at one or
      both ends. Every time belongs to exactly one such range. The times of DST
      transitions themselves belong to the range for which they are the lower bound. *)
module Index : sig
type t [@@immediate]
val next : t -> t
val prev : t -> t
end
(** Gets the index of a time. *)
val index : t -> Time_in_seconds.t -> Index.t
val index_of_date_and_ofday : t -> Time_in_seconds.Date_and_ofday.t -> Index.t
(** Gets the UTC offset of times in a specific range.
This can raise if you use an [Index.t] that is out of bounds for this [t]. *)
val index_offset_from_utc_exn : t -> Index.t -> Time_in_seconds.Span.t
  (** [index_abbreviation_exn t index] returns the abbreviation name (such as EDT, EST,
      JST) of given zone [t] for the range of [index]. This string conversion is one-way
      only, and cannot reliably be turned back into a [t]. This function reads and writes
      the zone's cached index. Raises if [index] is out of bounds for [t]. *)
val index_abbreviation_exn : t -> Index.t -> string
  (** Accessors for the DST transitions delimiting the start and end of a range, if any.
      The [_exn] accessors raise if there is no such transition. These accessors are split
      up to increase performance and improve allocation; they are intended as a low-level
      back-end for commonly-used time conversion functions. See [Time.Zone] and
      [Time_ns.Zone] for higher-level accessors that return an optional tuple for clock
      shifts in either direction. *)
val index_has_prev_clock_shift : t -> Index.t -> bool
val index_prev_clock_shift_time_exn : t -> Index.t -> Time_in_seconds.t
val index_prev_clock_shift_amount_exn : t -> Index.t -> Time_in_seconds.Span.t
val index_has_next_clock_shift : t -> Index.t -> bool
val index_next_clock_shift_time_exn : t -> Index.t -> Time_in_seconds.t
val index_next_clock_shift_amount_exn : t -> Index.t -> Time_in_seconds.Span.t
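  (* Sketch of how a client might combine the index accessors above to find the
     next clock shift after a given time (hypothetical client code, not part of
     this interface):
       let next_shift zone time =
         let idx = index zone time in
         if index_has_next_clock_shift zone idx
         then Some (index_next_clock_shift_time_exn zone idx,
                    index_next_clock_shift_amount_exn zone idx)
         else None *)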
end
module type S_stable = sig
type t
module Full_data : sig
module V1 :
Stable_module_types.With_stable_witness.S0_without_comparator with type t = t
end
end
module type Zone = sig
module type S = S
module type S_stable = S_stable
include S
module Stable : S_stable with type t := t
end
| null | https://raw.githubusercontent.com/janestreet/core/4b6635d206f7adcfac8324820d246299d6f572fe/core/src/zone_intf.ml | ocaml | * Time-zone handling.
* The type of a time-zone.
bin_io and sexp representations of Zone.t are the name of the zone, and
not the full data that is read from disk when Zone.find is called. The
full Zone.t is reconstructed on the receiving/reading side by reloading
the zone file from disk. Any zone name that is accepted by [find] is
acceptable in the bin_io and sexp representations.
* [input_tz_file ~zonename ~filename] read in [filename] and return [t]
with [name t] = [zonename]
* [of_utc_offset offset] returns a timezone with a static UTC offset (given in
hours).
* [utc] the UTC time zone. Included for convenience
* [original_filename t] return the filename [t] was loaded from (if any)
* For performance testing only; [reset_transition_cache t] resets an internal cache in
[t] used to speed up repeated lookups of the same clock shift transition.
* Gets the index of a time.
* Gets the UTC offset of times in a specific range.
This can raise if you use an [Index.t] that is out of bounds for this [t]. |
open! Import
(** The internal time representation of [Zone.t]. This is a tiny subset of [Time0_intf.S],
    see that interface for details such as the meaning of [Span] and [Date_and_ofday].
    The name of the interface reflects the fact that the interface only gives you access
    to the seconds of the [t]. But you can use this interface with types that have higher
    precision than that, hence the rounding implied in the name of
    [to_int63_seconds_round_down_exn].
*)
module type Time_in_seconds = sig
module Span : sig
type t
val of_int63_seconds : Int63.t -> t
val to_int63_seconds_round_down_exn : t -> Int63.t
end
module Date_and_ofday : sig
type t
val of_synthetic_span_since_epoch : Span.t -> t
val to_synthetic_span_since_epoch : t -> Span.t
end
type t
val of_span_since_epoch : Span.t -> t
val to_span_since_epoch : t -> Span.t
end
(** This is the interface of [Zone], but not the interface of [Time.Zone] or
    [Time_ns.Zone]. For those, look at [Time_intf.Zone] *)
module type S = sig
  (** {1 User-friendly interface} *)
type t [@@deriving sexp_of, compare]
val input_tz_file : zonename:string -> filename:string -> t
  (** [likely_machine_zones] is a list of zone names that will be searched
      first when trying to determine the machine zone of a box. Setting this
      to a likely set of zones for your application will speed the very first
      use of the local timezone. *)
val likely_machine_zones : string list ref
val of_utc_offset : hours:int -> t
val of_utc_offset_explicit_name : name:string -> hours:int -> t
val utc : t
val name : t -> string
val original_filename : t -> string option
  (** [digest t] return the MD5 digest of the file the t was created from (if any) *)
val digest : t -> Md5.t option
module Time_in_seconds : Time_in_seconds
val reset_transition_cache : t -> unit
  (** A time zone index refers to a range of times delimited by DST transitions at one or
      both ends. Every time belongs to exactly one such range. The times of DST
      transitions themselves belong to the range for which they are the lower bound. *)
module Index : sig
type t [@@immediate]
val next : t -> t
val prev : t -> t
end
val index : t -> Time_in_seconds.t -> Index.t
val index_of_date_and_ofday : t -> Time_in_seconds.Date_and_ofday.t -> Index.t
val index_offset_from_utc_exn : t -> Index.t -> Time_in_seconds.Span.t
  (** [index_abbreviation_exn t index] returns the abbreviation name (such as EDT, EST,
      JST) of given zone [t] for the range of [index]. This string conversion is one-way
      only, and cannot reliably be turned back into a [t]. This function reads and writes
      the zone's cached index. Raises if [index] is out of bounds for [t]. *)
val index_abbreviation_exn : t -> Index.t -> string
  (** Accessors for the DST transitions delimiting the start and end of a range, if any.
      The [_exn] accessors raise if there is no such transition. These accessors are split
      up to increase performance and improve allocation; they are intended as a low-level
      back-end for commonly-used time conversion functions. See [Time.Zone] and
      [Time_ns.Zone] for higher-level accessors that return an optional tuple for clock
      shifts in either direction. *)
val index_has_prev_clock_shift : t -> Index.t -> bool
val index_prev_clock_shift_time_exn : t -> Index.t -> Time_in_seconds.t
val index_prev_clock_shift_amount_exn : t -> Index.t -> Time_in_seconds.Span.t
val index_has_next_clock_shift : t -> Index.t -> bool
val index_next_clock_shift_time_exn : t -> Index.t -> Time_in_seconds.t
val index_next_clock_shift_amount_exn : t -> Index.t -> Time_in_seconds.Span.t
end
module type S_stable = sig
type t
module Full_data : sig
module V1 :
Stable_module_types.With_stable_witness.S0_without_comparator with type t = t
end
end
module type Zone = sig
module type S = S
module type S_stable = S_stable
include S
module Stable : S_stable with type t := t
end
|
db86946caad54e14828d6ed5f6fb3dbe7278ca593257bc57b206256a593e686f | didierverna/tfm | font.lisp | ;;; font.lisp --- Font Information
;; Copyright (C) 2018, 2019
;; Author: < >
;; This file is part of TFM.
;; Permission to use, copy, modify, and distribute this software for any
;; purpose with or without fee is hereby granted, provided that the above
;; copyright notice and this permission notice appear in all copies.
;; THIS SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
;; WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
;; MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
;; ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
;; WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
;; ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
;; OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
;;; Commentary:
;;; Code:
(in-package :net.didierverna.tfm)
(in-readtable :net.didierverna.tfm)
;; ==========================================================================
;; Ligatures
;; ==========================================================================
(defclass ligature ()
((composite
:documentation "The character to insert between the two original ones."
:initarg :composite
:reader composite)
(delete-before
:documentation "Whether to delete the character before the ligature."
:initarg :delete-before
:reader delete-before)
(delete-after
:documentation "Whether to delete the character after the ligature."
:initarg :delete-after
:reader delete-after)
(pass-over
:documentation
"The number of characters to skip for reaching the next character."
:initarg :pass-over
:reader pass-over))
(:documentation "The Ligature class.
This class represents a decoded ligature program. Within the context of this
library, the term \"ligature\" denotes an instance of this class."))
;; #### NOTE: the pass-over must not exceed the number of original characters
;; retained, but this has been checked already by the (unique) caller when
;; decoding the ligature op-code.
(defun make-ligature (composite delete-before delete-after pass-over)
"Make a new LIGATURE instance, and return it."
(make-instance 'ligature
:composite composite
:delete-before delete-before
:delete-after delete-after
:pass-over pass-over))
(defun apply-ligature (ligature state &aux (new-state (cddr state)))
"Apply LIGATURE to STATE and return the new state.
STATE is a list of characters, the first two being subject to LIGATURE."
(unless (delete-after ligature) (push (cadr state) new-state))
(push (composite ligature) new-state)
(unless (delete-before ligature) (push (car state) new-state))
(cond ((= (pass-over ligature) 2) (cddr new-state))
((= (pass-over ligature) 1) (cdr new-state))
((= (pass-over ligature) 0) new-state)))
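;; For instance, with a hypothetical ligature whose composite is LIG, both
;; deletion flags set, and a pass-over of 0, applying it to the state (A B C ...)
;; drops A and B, pushes LIG, and returns (LIG C ...), so processing resumes at
;; the freshly inserted composite.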
;; ==========================================================================
;; Base Font
;; ==========================================================================
;; -----
;; Class
;; -----
(eval-when (:compile-toplevel :load-toplevel :execute)
(define-constant +font-dimension-accessors+
'(interword-space interword-stretch interword-shrink ex em extra-space)
"The list of dimension accessor names in the FONT class."))
(defmacro map-font-dimension-accessors (var font &body body)
"Map BODY on FONT dimension accessors available as VAR."
`(map-accessors ,var ,font ,+font-dimension-accessors+
,@body))
(defclass font ()
((name
:documentation "The font's name.
When the font is loaded from a file, it defaults to the file's base name,
along with potential scaling information."
:initarg :name
:accessor name)
(file
:documentation "The file from which the font was loaded, or NIL."
:initform nil
:initarg :file
:accessor file)
(checksum
:documentation "The font's checksum, as provided by Metafont."
:accessor checksum)
(frozen
:documentation "Whether the font is frozen."
:initform nil
:accessor frozen)
(design-size
:documentation "The font's design size, in units of TeX points."
:initform nil
:accessor design-size)
(original-design-size
:documentation "The font's original design size, in units of TeX points."
:accessor original-design-size)
(encoding
:documentation "The font's character coding scheme (a BCPL string), or NIL."
:initform nil
:accessor encoding)
(family
:documentation "The font's family (a BCPL string), or NIL."
:initform nil
:accessor family)
(7bits-safe
:documentation "Whether the font is 7 bits safe (0 or 1), or NIL.
When 1, it means that no character of code lesser than 128 can lead to a
character of code greater than 128 by ways of ligatures or extensible
recipes."
:initform nil
:accessor 7bits-safe)
(face-number
:documentation "The font's face number, or NIL."
:initform nil
:accessor face-number)
(weight
:documentation "The font's weight (:medium, :bold, :light), or NIL.
When available, it is decoded from the font face number."
:initform nil
:accessor weight)
(slope
:documentation "The font's slope (:roman, :italic), or NIL.
When available, it is decoded from the font face number."
:initform nil
:accessor slope)
(expansion
:documentation
"The font's expansion (:regular, :condensed, :extended), or NIL.
When available, it is decoded from the font face number."
:initform nil
:accessor expansion)
(face-code
:documentation "The font's 3-letters face code, or NIL.
When available, it is the concatenation of the upcased first letters of the
font's weight, slope, and expansion."
:initform nil
:accessor face-code)
(slant
:documentation "The font's slant (a scalar ratio)."
:initform 0
:accessor slant)
(interword-space
:documentation "The font's normal interword space.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor interword-space)
(interword-stretch
:documentation "The font's interword stretchability.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor interword-stretch)
(interword-shrink
:documentation "The font's interword shrinkability.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor interword-shrink)
(ex
:documentation "The font's ex size.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor ex)
(em
:documentation "The font's em size.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor em)
(extra-space
:documentation "The font's extra space.
It is expressed in design size units, or in TeX point units if the font is
frozen.
This is the additional space to put at the end of sentences."
:initform 0 :accessor extra-space)
(parameters
:documentation "The font's additional parameters array, or NIL.
Parameters are expressed in design size units, or in TeX point units if the
font is frozen."
:initform nil
:accessor parameters)
(min-code
:documentation "The font's smallest character code."
:accessor min-code)
(max-code
:documentation "The font's largest character code."
:accessor max-code)
(characters
:documentation "The font's characters.
This is a hash table associating character codes with characters."
:initform (make-hash-table :test #'eq)
:accessor characters)
(character-count
:documentation "The font's number of characters.
The character count does not include the boundary character, unless that
character really exists in the font (has non-zerop metrics)."
:accessor character-count)
(ligatures
:documentation "The font's ligatures.
This is a hash table associating conses of characters with the corresponding
ligature."
:initform (make-hash-table :test #'equal)
:accessor ligatures)
(kernings
:documentation "The font's kernings.
This is a hash table associating conses of characters with the corresponding
kerning. They are expressed in design size units, or in TeX point units if
the font is frozen."
:initform (make-hash-table :test #'equal)
:accessor kernings)
(boundary-character
:documentation "The font's boundary character, or NIL.
This character is also accessible by code, like normal ones. However, it is
the only character the code of which may be outside [MIN-CODE,MAX-CODE] (see
TeX: the Program [545]). Finally, this character is not included in the
character count, unless it exists for real in the font (has non-zero
metrics)."
:initform nil
:accessor boundary-character))
(:documentation "The TeX Font Metrics class.
This class represents decoded font information. Within the context of this
library, the term \"font\" denotes an instance of this class, or of one of its
subclasses."))
(defmethod (setf design-size) :before (design-size font)
"Unscale FONT if frozen."
(when (frozen font) (scale font (/ 1 (design-size font)))))
(defmethod (setf design-size) :after (design-size font)
"Rescale FONT if frozen."
(when (frozen font) (scale font (design-size font))))
(defmethod (setf design-size) :around (design-size font)
"Check that DESIGN-SIZE is a real greater or equal to 1."
(check-type design-size (real 1))
(call-next-method design-size font))
(defmethod print-object ((font font) stream)
"Print FONT unreadably with its name to STREAM."
(print-unreadable-object (font stream :type t)
(princ (name font) stream)
(unless (= (design-size font) (original-design-size font))
(format stream " at ~Apt" (design-size font)))
(when (frozen font) (princ " (frozen)" stream))))
;; #### NOTE: this error is not currently exported, because it cannot in fact
;; be triggered yet (by the public API).
(define-condition anonymous-font (tfm-usage-error)
()
(:report (lambda (anonymous-font stream)
(declare (ignore anonymous-font))
(princ "All fonts must be named." stream)))
(:documentation "The Anonymous Font usage error.
It signals an attempt at creating a font with no name."))
(defmethod initialize-instance :before ((font font) &key name)
"Check that FONT has a name, or signal an ANONYMOUS-FONT error."
(unless name (error 'anonymous-font)))
(defun make-font (name &rest initargs)
"Make a new NAMEd FONT instance, and return it.
If INITARGS are provided, pass them as-is to MAKE-INSTANCE."
(apply #'make-instance 'font :name name initargs))
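;; A minimal usage sketch (the font name below is hypothetical):
;; (make-font "cmr10") returns a fresh FONT instance named "cmr10", whereas
;; calling it with a null name signals the ANONYMOUS-FONT error defined above.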
;; ----------------
;; Pseudo-accessors
;; ----------------
;; #### NOTE: this is a compliance error. It may only be signalled by the
;; internal API, meaning that the TFM data contains invalid references to
;; characters that don't exist in the font (remember that we do add a fake
;; boundary character if needed, so even this one can be retrieved).
(define-condition invalid-character-code (tfm-compliance-error)
((value
:documentation "The invalid character code."
:initarg :value
:accessor value))
(:report (lambda (invalid-character-code stream)
(report stream "character code ~A is invalid in font ~A."
(value invalid-character-code)
(font invalid-character-code))))
(:documentation "The Invalid Character Code compliance error.
It signals a reference to a character code which does not exist in the font
being loaded."))
;; #### NOTE: this is the internal API, used while loading TFM data.
(defun code-character (code font &optional (errorp t))
"Return FONT's CODE character.
If ERRORP (the default), check that the character exists, or signal an
INVALID-CHARACTER-CODE error. Note that a fake boundary character may be
retrieved by this function."
(or (gethash code (characters font))
      ;; #### NOTE: recovering from here directly would make little sense, so
;; it's rather the job of the callers to provide sensible restarts.
(when errorp (error 'invalid-character-code :value code))))
(defun (setf code-character) (character font)
"Make FONT's CHARACTER accessible by its code."
(setf (gethash (code character) (characters font)) character))
;; #### NOTE: this is the public API.
(defun get-character (code font)
"Return FONT's CODE character, or NIL."
(gethash code (characters font)))
(define-condition different-fonts (tfm-usage-error)
((character1
:documentation "The first character."
:initarg :character1
:accessor character1)
(character2
:documentation "The second character."
:initarg :character2
:accessor character2))
(:report (lambda (different-fonts stream)
(format stream
"Characters ~A and ~A don't belong to the same font."
(character1 different-fonts)
(character2 different-fonts))))
(:documentation "The Different Fonts usage error.
It signals an attempt at retrieving a ligature or kerning for two characters
from different fonts."))
(defun ligature (character1 character2)
"Return ligature for CHARACTER1 and CHARACTER2, or NIL.
If CHARACTER1 and CHARACTER2 don't belong to the same font, signal a
DIFFERENT-FONTS error."
(unless (eq (font character1) (font character2))
(error 'different-fonts :character1 character1 :character2 character2))
(gethash (cons character1 character2) (ligatures (font character1))))
(defun kerning (character1 character2)
"Return kerning for CHARACTER1 and CHARACTER2, or NIL.
If CHARACTER1 and CHARACTER2 don't belong to the same font, signal a
DIFFERENT-FONTS error."
(unless (eq (font character1) (font character2))
(error 'different-fonts :character1 character1 :character2 character2))
(gethash (cons character1 character2) (kernings (font character1))))
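;; For example (the character codes are hypothetical; they depend on the font):
;; (kerning (get-character 65 font) (get-character 86 font)) returns the kern to
;; insert between those two characters, or NIL when the font defines none.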
;; #### NOTE: we don't currently bother to check that the two characters
;; belong to the same font. These functions are internal only (although the
;; symbols are exported, damn you CL), so let's just say I trust my own code
;; for now.
(defun (setf ligature) (ligature character1 character2)
"Set LIGATURE for CHARACTER1 and CHARACTER2."
(setf (gethash (cons character1 character2) (ligatures (font character1)))
ligature))
(defun (setf kerning) (kerning character1 character2)
"Set KERNING for CHARACTER1 and CHARACTER2."
(setf (gethash (cons character1 character2) (kernings (font character1)))
kerning))
;; --------
;; Freezing
;; --------
(defgeneric scale (font factor)
(:documentation "Scale all FONT dimensions by FACTOR.")
(:method (font factor)
"Scaling method for regular FONTs."
(map-font-dimension-accessors slot font
(setf slot (* slot factor)))
(when (parameters font)
(loop :for i :from 0 :upto (1- (length (parameters font)))
:do (setf (aref (parameters font) i)
(* (aref (parameters font) i) factor))))
(maphash (lambda (code character)
(declare (ignore code))
(map-character-metrics-dimension-accessors slot character
(setf slot (* slot factor))))
(characters font))
(maphash (lambda (pair kern)
(setf (kerning (car pair) (cdr pair)) (* kern factor)))
(kernings font))))
(defun freeze (font)
"Freeze FONT.
Freezing a font means that all dimensions normally expressed in design size
units are multiplied by it, so as to lead values in TeX point units.
If FONT is already frozen, this function does nothing and returns NIL.
Otherwise, it returns T."
(unless (frozen font)
(scale font (design-size font))
(setf (frozen font) t)))
(defun unfreeze (font)
"Unfreeze FONT.
Unfreezing means performing the inverse of what FREEZE does.
If FONT is not frozen, this function does nothing and returns NIL. Otherwise,
it returns T."
(when (frozen font)
(scale font (/ 1 (design-size font)))
(setf (frozen font) nil)
t))
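;; For example, for a font whose design size is 10 TeX points (a hypothetical
;; value), an EX of 0.43 design size units reads as 4.3 TeX points once
;; (freeze font) has been called; (unfreeze font) scales everything back down to
;; design size units.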
;; ==========================================================================
;; Math Symbols Font
;; ==========================================================================
(eval-when (:compile-toplevel :load-toplevel :execute)
(define-constant +math-symbols-font-dimension-accessors+
'(num1 num2 num3
denom1 denom2
sup1 sup2 sup3
sub1 sub2
supdrop subdrop
delim1 delim2
axis-height)
"The list of dimension accessor names in the MATH-SYMBOLS-FONT class."))
(defmacro map-math-symbols-font-dimension-accessors (var font &body body)
"Map BODY on FONT dimension accessors available as VAR."
`(map-accessors ,var ,font ,+math-symbols-font-dimension-accessors+
,@body))
(defclass math-symbols-font (font)
((num1
:documentation "The font's NUM1 parameter.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor num1)
(num2
:documentation "The font's NUM2 parameter.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor num2)
(num3
:documentation "The font's NUM2 parameter.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor num3)
(denom1
:documentation "The font's DENOM1 parameter.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor denom1)
(denom2
:documentation "The font's DENOM2 parameter.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor denom2)
(sup1
:documentation "The font's SUP1 parameter.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor sup1)
(sup2
:documentation "The font's SUP2 parameter.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor sup2)
(sup3
:documentation "The font's SUP2 parameter.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor sup3)
(sub1
:documentation "The font's SUB1 parameter.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor sub1)
(sub2
:documentation "The font's SUB2 parameter.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor sub2)
(supdrop
:documentation "The font's SUPDROP parameter.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor supdrop)
(subdrop
:documentation "The font's SUBDROP parameter.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor subdrop)
(delim1
:documentation "The font's DELIM1 parameter.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor delim1)
(delim2
:documentation "The font's DELIM2 parameter.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor delim2)
(axis-height
:documentation "The font's AXIS-HEIGHT parameter.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor axis-height))
(:documentation "The Math Symbols Font class.
This class represents fonts with the \"TeX math symbols\" character coding
scheme."))
(defmethod scale :around ((font math-symbols-font) factor)
"Scaling method for MATH-SYMBOL-FONTs."
(map-math-symbols-font-dimension-accessors slot font
(setf slot (* slot factor)))
(call-next-method))
;; ==========================================================================
;; Math Extension Font
;; ==========================================================================
(eval-when (:compile-toplevel :load-toplevel :execute)
(define-constant +math-extension-font-dimension-accessors+
'(default-rule-thickness
big-op-spacing1 big-op-spacing2 big-op-spacing3 big-op-spacing4
big-op-spacing5)
"The list of dimension accessor names in the MATH-EXTENSION-FONT class."))
(defmacro map-math-extension-font-dimension-accessors (var font &body body)
"Map BODY on math extension FONT dimension accessors available as VAR."
`(map-accessors ,var ,font ,+math-extension-font-dimension-accessors+
,@body))
(defclass math-extension-font (font)
((default-rule-thickness
:documentation "The font's default rule thickness.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor default-rule-thickness)
(big-op-spacing1
:documentation "The font's BIG-OP-SPACING1 parameter.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor big-op-spacing1)
(big-op-spacing2
:documentation "The font's BIG-OP-SPACING2 parameter.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor big-op-spacing2)
(big-op-spacing3
:documentation "The font's BIG-OP-SPACING3 parameter.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor big-op-spacing3)
(big-op-spacing4
:documentation "The font's BIG-OP-SPACING4 parameter.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor big-op-spacing4)
(big-op-spacing5
:documentation "The font's BIG-OP-SPACING5 parameter.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor big-op-spacing5))
(:documentation "The Math Extension Font class.
This class represents fonts with the \"TeX math extension\" character coding
scheme."))
(defmethod scale :around ((font math-extension-font) factor)
"Scaling method for MATH-EXTENSION-FONTs."
(map-math-extension-font-dimension-accessors slot font
(setf slot (* slot factor)))
(call-next-method))
;;; font.lisp ends here
| null | https://raw.githubusercontent.com/didierverna/tfm/192c10b04eaec381638bfcf9bbea66b208141f5a/core/src/font.lisp | lisp | font.lisp --- Font Information
Permission to use, copy, modify, and distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
Commentary:
Code:
==========================================================================
Ligatures
==========================================================================
retained, but this has been checked already by the (unique) caller when
decoding the ligature op-code.
==========================================================================
Base Font
==========================================================================
-----
Class
-----
be triggered yet (by the public API).
----------------
Pseudo-accessors
----------------
characters that don't exist in the font (remember that we do add a fake
boundary character if needed, so even this one can be retrieved).
it's rather the job of the callers to provide sensible restarts.
belong to the same font. These functions are internal only (although the
for now.
--------
Freezing
--------
==========================================================================
Math Symbols Font
==========================================================================
==========================================================================
Math Symbols Font
==========================================================================
font.lisp ends here |
Copyright ( C ) 2018 , 2019
Author : < >
This file is part of TFM .
THIS SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
(in-package :net.didierverna.tfm)
(in-readtable :net.didierverna.tfm)
(defclass ligature ()
((composite
:documentation "The character to insert between the two original ones."
:initarg :composite
:reader composite)
(delete-before
:documentation "Whether to delete the character before the ligature."
:initarg :delete-before
:reader delete-before)
(delete-after
:documentation "Whether to delete the character after the ligature."
:initarg :delete-after
:reader delete-after)
(pass-over
:documentation
"The number of characters to skip for reaching the next character."
:initarg :pass-over
:reader pass-over))
(:documentation "The Ligature class.
This class represents a decoded ligature program. Within the context of this
library, the term \"ligature\" denotes an instance of this class."))
# # # # NOTE : the pass - over must not exceed the number of original characters
(defun make-ligature (composite delete-before delete-after pass-over)
"Make a new LIGATURE instance, and return it."
(make-instance 'ligature
:composite composite
:delete-before delete-before
:delete-after delete-after
:pass-over pass-over))
(defun apply-ligature (ligature state &aux (new-state (cddr state)))
"Apply LIGATURE to STATE and return the new state.
STATE is a list of characters, the first two being subject to LIGATURE."
(unless (delete-after ligature) (push (cadr state) new-state))
(push (composite ligature) new-state)
(unless (delete-before ligature) (push (car state) new-state))
(cond ((= (pass-over ligature) 2) (cddr new-state))
((= (pass-over ligature) 1) (cdr new-state))
((= (pass-over ligature) 0) new-state)))
(eval-when (:compile-toplevel :load-toplevel :execute)
(define-constant +font-dimension-accessors+
'(interword-space interword-stretch interword-shrink ex em extra-space)
"The list of dimension accessor names in the FONT class."))
(defmacro map-font-dimension-accessors (var font &body body)
"Map BODY on FONT dimension accessors available as VAR."
`(map-accessors ,var ,font ,+font-dimension-accessors+
,@body))
(defclass font ()
((name
:documentation "The font's name.
When the font is loaded from a file, it defaults to the file's base name,
along with potential scaling information."
:initarg :name
:accessor name)
(file
:documentation "The file from which the font was loaded, or NIL."
:initform nil
:initarg :file
:accessor file)
(checksum
:documentation "The font's checksum, as provided by Metafont."
:accessor checksum)
(frozen
:documentation "Whether the font is frozen."
:initform nil
:accessor frozen)
(design-size
:documentation "The font's design size, in units of TeX points."
:initform nil
:accessor design-size)
(original-design-size
:documentation "The font's original design size, in units of TeX points."
:accessor original-design-size)
(encoding
:documentation "The font's character coding scheme (a BCPL string), or NIL."
:initform nil
:accessor encoding)
(family
:documentation "The font's family (a BCPL string), or NIL."
:initform nil
:accessor family)
(7bits-safe
:documentation "Whether the font is 7 bits safe (0 or 1), or NIL.
When 1, it means that no character of code lesser than 128 can lead to a
character of code greater than 128 by ways of ligatures or extensible
recipes."
:initform nil
:accessor 7bits-safe)
(face-number
:documentation "The font's face number, or NIL."
:initform nil
:accessor face-number)
(weight
:documentation "The font's weight (:medium, :bold, :light), or NIL.
When available, it is decoded from the font face number."
:initform nil
:accessor weight)
(slope
:documentation "The font's slope (:roman, :italic), or NIL.
When available, it is decoded from the font face number."
:initform nil
:accessor slope)
(expansion
:documentation
"The font's expansion (:regular, :condensed, :extended), or NIL.
When available, it is decoded from the font face number."
:initform nil
:accessor expansion)
(face-code
:documentation "The font's 3-letters face code, or NIL.
When available, it is the concatenation of the upcased first letters of the
font's weight, slope, and expansion."
:initform nil
:accessor face-code)
(slant
:documentation "The font's slant (a scalar ratio)."
:initform 0
:accessor slant)
(interword-space
:documentation "The font's normal interword space.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor interword-space)
(interword-stretch
:documentation "The font's interword stretchability.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor interword-stretch)
(interword-shrink
:documentation "The font's interword shrinkability.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor interword-shrink)
(ex
:documentation "The font's ex size.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor ex)
(em
:documentation "The font's em size.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor em)
(extra-space
:documentation "The font's extra space.
It is expressed in design size units, or in TeX point units if the font is
frozen.
This is the additional space to put at the end of sentences."
:initform 0 :accessor extra-space)
(parameters
:documentation "The font's additional parameters array, or NIL.
Parameters are expressed in design size units, or in TeX point units if the
font is frozen."
:initform nil
:accessor parameters)
(min-code
:documentation "The font's smallest character code."
:accessor min-code)
(max-code
:documentation "The font's largest character code."
:accessor max-code)
(characters
:documentation "The font's characters.
This is a hash table associating character codes with characters."
:initform (make-hash-table :test #'eq)
:accessor characters)
(character-count
:documentation "The font's number of characters.
The character count does not include the boundary character, unless that
character really exists in the font (has non-zerop metrics)."
:accessor character-count)
(ligatures
:documentation "The font's ligatures.
This is a hash table associating conses of characters with the corresponding
ligature."
:initform (make-hash-table :test #'equal)
:accessor ligatures)
(kernings
:documentation "The font's kernings.
This is a hash table associating conses of characters with the corresponding
kerning. They are expressed in design size units, or in TeX point units if
the font is frozen."
:initform (make-hash-table :test #'equal)
:accessor kernings)
(boundary-character
:documentation "The font's boundary character, or NIL.
This character is also accessible by code, like normal ones. However, it is
the only character the code of which may be outside [MIN-CODE,MAX-CODE] (see
TeX: the Program [545]). Finally, this character is not included in the
character count, unless it exists for real in the font (has non-zero
metrics)."
:initform nil
:accessor boundary-character))
(:documentation "The TeX Font Metrics class.
This class represents decoded font information. Within the context of this
library, the term \"font\" denotes an instance of this class, or of one of its
subclasses."))
(defmethod (setf design-size) :before (design-size font)
"Unscale FONT if frozen."
(when (frozen font) (scale font (/ 1 (design-size font)))))
(defmethod (setf design-size) :after (design-size font)
"Rescale FONT if frozen."
(when (frozen font) (scale font (design-size font))))
(defmethod (setf design-size) :around (design-size font)
"Check that DESIGN-SIZE is a real greater or equal to 1."
(check-type design-size (real 1))
(call-next-method design-size font))
(defmethod print-object ((font font) stream)
"Print FONT unreadably with its name to STREAM."
(print-unreadable-object (font stream :type t)
(princ (name font) stream)
(unless (= (design-size font) (original-design-size font))
(format stream " at ~Apt" (design-size font)))
(when (frozen font) (princ " (frozen)" stream))))
# # # # NOTE : this error is not currently exported , because it can not in fact
(define-condition anonymous-font (tfm-usage-error)
()
(:report (lambda (anonymous-font stream)
(declare (ignore anonymous-font))
(princ "All fonts must be named." stream)))
(:documentation "The Anonymous Font usage error.
It signals an attempt at creating a font with no name."))
(defmethod initialize-instance :before ((font font) &key name)
"Check that FONT has a name, or signal an ANONYMOUS-FONT error."
(unless name (error 'anonymous-font)))
(defun make-font (name &rest initargs)
"Make a new NAMEd FONT instance, and return it.
If INITARGS are provided, pass them as-is to MAKE-INSTANCE."
(apply #'make-instance 'font :name name initargs))
# # # # NOTE : this is a compliance error . It may only be signalled by the
internal API , meaning that the TFM data contains invalid references to
(define-condition invalid-character-code (tfm-compliance-error)
((value
:documentation "The invalid character code."
:initarg :value
:accessor value))
(:report (lambda (invalid-character-code stream)
(report stream "character code ~A is invalid in font ~A."
(value invalid-character-code)
(font invalid-character-code))))
(:documentation "The Invalid Character Code compliance error.
It signals a reference to a character code which does not exist in the font
being loaded."))
# # # # NOTE : this is the internal API , used while loading TFM data .
(defun code-character (code font &optional (errorp t))
"Return FONT's CODE character.
If ERRORP (the default), check that the character exists, or signal an
INVALID-CHARACTER-CODE error. Note that a fake boundary character may be
retrieved by this function."
(or (gethash code (characters font))
;; #### NOTE: recovering from here directly would make little sense, so we
;; simply signal an INVALID-CHARACTER-CODE error.
(when errorp (error 'invalid-character-code :value code))))
(defun (setf code-character) (character font)
"Make FONT's CHARACTER accessible by its code."
(setf (gethash (code character) (characters font)) character))
# # # # NOTE : this is the public API .
(defun get-character (code font)
"Return FONT's CODE character, or NIL."
(gethash code (characters font)))
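;; Example sketch (added for illustration; LOAD-FONT is hypothetical, the rest
;; comes from this file): GET-CHARACTER is the public lookup and simply
;; returns NIL for unknown codes, whereas CODE-CHARACTER may signal.
;; (let ((font (load-font "cmr10.tfm")))
;;   (get-character (char-code #\A) font)) ; => a character metrics object or NIL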
(define-condition different-fonts (tfm-usage-error)
((character1
:documentation "The first character."
:initarg :character1
:accessor character1)
(character2
:documentation "The second character."
:initarg :character2
:accessor character2))
(:report (lambda (different-fonts stream)
(format stream
"Characters ~A and ~A don't belong to the same font."
(character1 different-fonts)
(character2 different-fonts))))
(:documentation "The Different Fonts usage error.
It signals an attempt at retrieving a ligature or kerning for two characters
from different fonts."))
(defun ligature (character1 character2)
"Return ligature for CHARACTER1 and CHARACTER2, or NIL.
If CHARACTER1 and CHARACTER2 don't belong to the same font, signal a
DIFFERENT-FONTS error."
(unless (eq (font character1) (font character2))
(error 'different-fonts :character1 character1 :character2 character2))
(gethash (cons character1 character2) (ligatures (font character1))))
(defun kerning (character1 character2)
"Return kerning for CHARACTER1 and CHARACTER2, or NIL.
If CHARACTER1 and CHARACTER2 don't belong to the same font, signal a
DIFFERENT-FONTS error."
(unless (eq (font character1) (font character2))
(error 'different-fonts :character1 character1 :character2 character2))
(gethash (cons character1 character2) (kernings (font character1))))
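;; Example sketch (added for illustration; LOAD-FONT is hypothetical, the rest
;; comes from this file): looking up the kerning between two characters of the
;; same font. KERNING returns NIL when the pair has no kerning, and signals a
;; DIFFERENT-FONTS error when the characters come from different fonts.
;; (let* ((font (load-font "cmr10.tfm"))
;;        (a (get-character (char-code #\A) font))
;;        (v (get-character (char-code #\V) font)))
;;   (when (and a v) (kerning a v)))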
;; #### NOTE: we don't currently bother to check that the two characters
;; belong to the same font here (the SETF symbols are exported, damn you CL),
;; so let's just say I trust my own code.
(defun (setf ligature) (ligature character1 character2)
"Set LIGATURE for CHARACTER1 and CHARACTER2."
(setf (gethash (cons character1 character2) (ligatures (font character1)))
ligature))
(defun (setf kerning) (kerning character1 character2)
"Set KERNING for CHARACTER1 and CHARACTER2."
(setf (gethash (cons character1 character2) (kernings (font character1)))
kerning))
(defgeneric scale (font factor)
(:documentation "Scale all FONT dimensions by FACTOR.")
(:method (font factor)
"Scaling method for regular FONTs."
(map-font-dimension-accessors slot font
(setf slot (* slot factor)))
(when (parameters font)
(loop :for i :from 0 :upto (1- (length (parameters font)))
:do (setf (aref (parameters font) i)
(* (aref (parameters font) i) factor))))
(maphash (lambda (code character)
(declare (ignore code))
(map-character-metrics-dimension-accessors slot character
(setf slot (* slot factor))))
(characters font))
(maphash (lambda (pair kern)
(setf (kerning (car pair) (cdr pair)) (* kern factor)))
(kernings font))))
(defun freeze (font)
"Freeze FONT.
Freezing a font means that all dimensions normally expressed in design size
units are multiplied by it, so as to lead to values in TeX point units.
If FONT is already frozen, this function does nothing and returns NIL.
Otherwise, it returns T."
(unless (frozen font)
(scale font (design-size font))
(setf (frozen font) t)))
(defun unfreeze (font)
"Unfreeze FONT.
Unfreezing means performing the inverse of what FREEZE does.
If FONT is not frozen, this function does nothing and returns NIL. Otherwise,
it returns T."
(when (frozen font)
(scale font (/ 1 (design-size font)))
(setf (frozen font) nil)
t))
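;; Example sketch (added for illustration; LOAD-FONT is hypothetical):
;; FREEZE multiplies every dimension by the design size, turning design-size
;; units into TeX point units, and UNFREEZE undoes it. Both return NIL when
;; there is nothing to do.
;; (let ((font (load-font "cmr10.tfm")))
;;   (freeze font)    ; => T, dimensions now in TeX points
;;   (freeze font)    ; => NIL, already frozen
;;   (unfreeze font)) ; => T, back to design size units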
(eval-when (:compile-toplevel :load-toplevel :execute)
(define-constant +math-symbols-font-dimension-accessors+
'(num1 num2 num3
denom1 denom2
sup1 sup2 sup3
sub1 sub2
supdrop subdrop
delim1 delim2
axis-height)
"The list of dimension accessor names in the MATH-SYMBOLS-FONT class."))
(defmacro map-math-symbols-font-dimension-accessors (var font &body body)
"Map BODY on FONT dimension accessors available as VAR."
`(map-accessors ,var ,font ,+math-symbols-font-dimension-accessors+
,@body))
(defclass math-symbols-font (font)
((num1
:documentation "The font's NUM1 parameter.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor num1)
(num2
:documentation "The font's NUM2 parameter.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor num2)
(num3
:documentation "The font's NUM3 parameter.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor num3)
(denom1
:documentation "The font's DENOM1 parameter.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor denom1)
(denom2
:documentation "The font's DENOM2 parameter.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor denom2)
(sup1
:documentation "The font's SUP1 parameter.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor sup1)
(sup2
:documentation "The font's SUP2 parameter.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor sup2)
(sup3
:documentation "The font's SUP3 parameter.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor sup3)
(sub1
:documentation "The font's SUB1 parameter.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor sub1)
(sub2
:documentation "The font's SUB2 parameter.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor sub2)
(supdrop
:documentation "The font's SUPDROP parameter.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor supdrop)
(subdrop
:documentation "The font's SUBDROP parameter.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor subdrop)
(delim1
:documentation "The font's DELIM1 parameter.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor delim1)
(delim2
:documentation "The font's DELIM2 parameter.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor delim2)
(axis-height
:documentation "The font's AXIS-HEIGHT parameter.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor axis-height))
(:documentation "The Math Symbols Font class.
This class represents fonts with the \"TeX math symbols\" character coding
scheme."))
(defmethod scale :around ((font math-symbols-font) factor)
"Scaling method for MATH-SYMBOL-FONTs."
(map-math-symbols-font-dimension-accessors slot font
(setf slot (* slot factor)))
(call-next-method))
(eval-when (:compile-toplevel :load-toplevel :execute)
(define-constant +math-extension-font-dimension-accessors+
'(default-rule-thickness
big-op-spacing1 big-op-spacing2 big-op-spacing3 big-op-spacing4
big-op-spacing5)
"The list of dimension accessor names in the MATH-EXTENSION-FONT class."))
(defmacro map-math-extension-font-dimension-accessors (var font &body body)
"Map BODY on math extension FONT dimension accessors available as VAR."
`(map-accessors ,var ,font ,+math-extension-font-dimension-accessors+
,@body))
(defclass math-extension-font (font)
((default-rule-thickness
:documentation "The font's default rule thickness.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor default-rule-thickness)
(big-op-spacing1
:documentation "The font's BIG-OP-SPACING1 parameter.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor big-op-spacing1)
(big-op-spacing2
:documentation "The font's BIG-OP-SPACING2 parameter.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor big-op-spacing2)
(big-op-spacing3
:documentation "The font's BIG-OP-SPACING3 parameter.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor big-op-spacing3)
(big-op-spacing4
:documentation "The font's BIG-OP-SPACING4 parameter.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor big-op-spacing4)
(big-op-spacing5
:documentation "The font's BIG-OP-SPACING5 parameter.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initform 0
:accessor big-op-spacing5))
(:documentation "The Math Extension Font class.
This class represents fonts with the \"TeX math extension\" character coding
scheme."))
(defmethod scale :around ((font math-extension-font) factor)
"Scaling method for MATH-EXTENSION-FONTs."
(map-math-extension-font-dimension-accessors slot font
(setf slot (* slot factor)))
(call-next-method))
|
43ce3fd5fe925f3bce772939b9f930f68ca8277a51da059944c54f2d00652d70 | moonpolysoft/dynomite | dmerkle_test.erl | -include_lib("eunit/include/eunit.hrl").
test_cleanup() ->
file:delete(data_file()),
file:delete(data_file(1)).
open_and_close_test() ->
test_cleanup(),
{ok, Pid} = open(data_file(), 256),
Merkle = get_state(Pid),
Root = Merkle#dmerkle.root,
error_logger:info_msg("root ~p~n", [Root]),
?assertEqual(?headersize_from_blocksize(256), Root#leaf.offset),
?assertEqual(0, Root#leaf.m),
close(Pid).
open_and_insert_one_test() ->
test_cleanup(),
{ok, Pid} = open(data_file(), 256),
update("mykey", <<"myvalue">>, Pid),
Tree = get_state(Pid),
Root = Tree#dmerkle.root,
error_logger:info_msg("root w/ one ~p merkle~p~n", [Root, Tree]),
?assertEqual(1, Root#leaf.m),
Hash = hash(<<"myvalue">>),
Hash = find("mykey", Pid),
close(Pid).
open_and_reopen_test() ->
test_cleanup(),
{ok, Pid} = open(data_file(), 256),
update("mykey", <<"myvalue">>, Pid),
close(Pid),
{ok, NewPid} = open(data_file(), 256),
Hash = hash(<<"myvalue">>),
Hash = find("mykey", NewPid),
close(NewPid).
open_and_insert_20_test() ->
open_and_insert_n(20).
open_and_insert_260_test() ->
open_and_insert_n(260).
open_and_insert_1000_test() ->
open_and_insert_n(1000).
open_and_insert_3000_test() ->
open_and_insert_n(3000).
insert_500_both_ways_test() ->
test_cleanup(),
{ok, Pid} = open(data_file(), 256),
lists:foldl(fun(N, Tree) ->
update(lists:concat(["key", N]), lists:concat(["value", N]), Tree)
end, Pid, lists:seq(1,500)),
{ok, Pid2} = open(data_file(1), 256),
lists:foldl(fun(N, Tree) ->
update(lists:concat(["key", N]), lists:concat(["value", N]), Tree)
end, Pid2, lists:reverse(lists:seq(1,500))),
TreeA = get_state(Pid),
TreeB = get_state(Pid2),
?infoFmt("leaves A: ~p~n", [leaves(Pid)]),
?infoFmt("leaves B: ~p~n", [leaves(Pid2)]),
LeafHashA = lists:foldl(fun({_,Hash}, Sum) ->
(Hash + Sum) rem (2 bsl 31)
end, 0, leaves(Pid)),
LeafHashB = lists:foldl(fun({_,Hash}, Sum) ->
(Hash + Sum) rem (2 bsl 31)
end, 0, leaves(Pid2)),
?assertEqual(leaves(Pid), leaves(Pid2)),
?assertEqual(true, equals(Pid, Pid2)),
close(Pid),
close(Pid2).
insert_realistic_scenario_equality_test() ->
test_cleanup(),
{ok, Pid} = open(data_file(), 256),
TreeA = lists:foldl(fun(N, Tree) ->
update(lists:concat(["key", N]), lists:concat(["value", N]), Tree)
end, Pid, lists:seq(1,500)),
{ok, Pid2} = open(data_file(1), 256),
TreeB = lists:foldl(fun(N, Tree) ->
update(lists:concat(["key", N]), lists:concat(["value", N]), Tree)
end, Pid2, lists:seq(1,505)),
false = equals(TreeA, TreeB).
insert_realistic_scenario_diff_test() ->
test_cleanup(),
{ok, Pid} = open(data_file(), 256),
TreeA = lists:foldl(fun(N, Tree) ->
update(lists:concat(["key", N]), lists:concat(["value", N]), Tree)
end, Pid, lists:seq(1,495)),
{ok, Pid2} = open(data_file(1), 256),
TreeB = lists:foldl(fun(N, Tree) ->
update(lists:concat(["key", N]), lists:concat(["value", N]), Tree)
end, Pid2, lists:seq(1,500)),
Diff = key_diff(TreeA, TreeB),
Keys = lists:map(fun(N) -> lists:concat(["key", N]) end, lists:seq(496, 500)),
error_logger:info_msg("realistic diff: ~p~n", [Diff]),
?assertEqual(Keys, Diff),
close(Pid),
close(Pid2).
insert_500_both_ways_diff_test() ->
test_cleanup(),
{ok, PidA} = open(data_file(), 256),
TreeA = lists:foldl(fun(N, Tree) ->
update(lists:concat(["key", N]), lists:concat(["value", N]), Tree)
end, PidA, lists:seq(1,500)),
{ok, PidB} = open(data_file(1), 256),
TreeB = lists:foldl(fun(N, Tree) ->
update(lists:concat(["key", N]), lists:concat(["value", N]), Tree)
end, PidB, lists:reverse(lists:seq(1,500))),
error_logger:info_msg("both ways diff: ~p~n", [key_diff(TreeA, TreeB)]),
?assertEqual([], key_diff(TreeA, TreeB)),
close(PidA),
close(PidB).
insert_overwrite_test() ->
test_cleanup(),
{ok, PidA} = open(data_file(), 256),
TreeA = lists:foldl(fun(N, Tree) ->
update(lists:concat(["key", N]), lists:concat(["value", N]), Tree)
end, PidA, lists:seq(1,500)),
{ok, PidB} = open(data_file(1), 256),
TreeB = lists:foldl(fun(N, Tree) ->
update(lists:concat(["key", N]), lists:concat(["different", N]), Tree)
end, lists:foldl(fun(N, Tree) ->
update(lists:concat(["key", N]), lists:concat(["value", N]), Tree)
end, PidB, lists:seq(1,500)), lists:seq(1, 500)),
Diff = key_diff(TreeA, TreeB),
500 = length(Diff),
500 = length(leaves(TreeB)).
insert_overwrite2_test() ->
test_cleanup(),
{ok, PidA} = open(data_file(), 256),
TreeA = lists:foldl(fun(N, Tree) ->
update(lists:concat(["key", N]), lists:concat(["value", N]), Tree)
end, PidA, lists:seq(1,3000)),
{ok, PidB} = open(data_file(1), 256),
TreeB = lists:foldl(fun(N, Tree) ->
update(lists:concat(["key"]), lists:concat(["value", N]), Tree)
end, lists:foldl(fun(N, Tree) ->
update(lists:concat(["key"]), lists:concat(["value", N]), Tree)
end, PidB, lists:seq(1,3000)), lists:seq(1, 3000)),
Diff = key_diff(TreeA, TreeB),
% [] = Diff,
1 = length(leaves(TreeB)).
%% swapping trees may not be something we want to support nomo
% swap_tree_test() ->
% test_cleanup(),
% {ok, PidA} = open(data_file(), 256),
% TreeA = lists:foldl(fun(N, Tree) ->
% update(lists:concat(["key", N]), lists:concat(["value", N]), Tree)
% end, PidA, lists:seq(1,250)),
% {ok, PidB} = open(data_file(1), 256),
% TreeB = lists:foldl(fun(N, Tree) ->
% update(lists:concat(["key", N]), lists:concat(["value", N]), Tree)
% end, PidB, lists:reverse(lists:seq(1,250))),
% {ok, NewTree} = swap_tree(TreeA, TreeB),
% true = lists:all(fun(N) ->
% Hash = hash(lists:concat(["value", N])),
% Result = Hash == find(lists:concat(["key", N]), NewTree),
% if
% Result -> Result;
% true ->
% error_logger:info_msg("could not get ~p was ~p~n", [N, find(lists:concat(["key", N]), NewTree)]),
% Result
% end
% end, lists:seq(1, 500)).
%
leaves_test() ->
test_cleanup(),
{ok, PidA} = open(data_file(), 256),
Tree = lists:foldl(fun(N, Tree) ->
update(lists:concat(["key", N]), lists:concat(["value", N]), Tree)
end, PidA, lists:seq(1,500)),
500 = length(leaves(Tree)).
empty_diff_test() ->
test_cleanup(),
{ok, PidA} = open(data_file(), 256),
TreeA = lists:foldl(fun(N, Tree) ->
update(lists:concat(["key", N]), lists:concat(["value", N]), Tree)
end, PidA, lists:seq(1,500)),
{ok, TreeB} = open(data_file(1), 256),
500 = length(key_diff(TreeA, TreeB)).
live_test_() ->
{timeout, 120, ?_test(test_live())}.
test_live() ->
{ok, TreeA} = open(data_file(410), 4096),
{ok, TreeB} = open(data_file(42), 4096),
KeyDiff = key_diff(TreeA, TreeB),
error_logger:info_msg("key_diff: ~p~n", [KeyDiff]),
LeavesA = leaves(TreeA),
LeavesB = leaves(TreeB),
LeafDiff = LeavesA -- LeavesB,
error_logger:info_msg("leaf_diff: ~p~n", [LeafDiff]),
timer:sleep(100),
KeyDiff = LeafDiff.
insert_variable_keys_under_block_size_test() ->
test_cleanup(),
{ok, Pid} = open(data_file(), 256),
Keys = lists:map(fun(N) ->
lists:duplicate(N, $a)
end, lists:seq(1,255)),
lists:foreach(fun(Key) ->
update(Key, "value", Pid)
end, Keys),
{LK, _} = lists:unzip(leaves(Pid)),
?assertEqual(Keys, lists:sort(LK)),
close(Pid).
insert_variable_keys_over_a_block_size_test() ->
test_cleanup(),
{ok, Pid} = open(data_file(), 256),
Keys = lists:map(fun(N) ->
lists:duplicate(N, $a)
end, lists:seq(1,512)),
lists:foreach(fun(Key) ->
update(Key, "value", Pid)
end, Keys),
{LK, _} = lists:unzip(leaves(Pid)),
?assertEqual(Keys, lists:sort(LK)),
close(Pid).
simple_deletion_test() ->
test_cleanup(),
{ok, Pid} = open(data_file(), 256),
update("key", "value", Pid),
delete("key", Pid),
Tree = get_state(Pid),
Root = Tree#dmerkle.root,
error_logger:info_msg("Root ~p~n", [Root]),
?assertEqual(0, Root#leaf.m),
close(Pid).
full_deletion_with_single_split_test() ->
test_cleanup(),
{ok, Pid} = open(data_file(), 256),
lists:foldl(fun(N, Tree) ->
update(lists:concat(["key", N]), lists:concat(["value", N]), Tree)
end, Pid, lists:seq(1,20)),
lists:foreach(fun(N) ->
Key = lists:concat(["key", N]),
delete(Key, Pid)
end, lists:seq(1,20)),
Tree = get_state(Pid),
Root = Tree#dmerkle.root,
?assertMatch(#leaf{}, Root),
?assertEqual(0, Root#leaf.m),
close(Pid).
compare_trees_with_delete_test() ->
test_cleanup(),
{ok, PidA} = open(data_file(), 256),
{ok, PidB} = open(data_file(1), 256),
lists:foldl(fun(N, Tree) ->
update(lists:concat(["key", N]), lists:concat(["value", N]), Tree)
end, PidA, lists:seq(1,20)),
lists:foldl(fun(N, Tree) ->
update(lists:concat(["key", N]), lists:concat(["value", N]), Tree)
end, PidB, lists:seq(1,20)),
delete("key1", PidB),
?assertEqual(["key1"], key_diff(PidA, PidB)),
close(PidA),
close(PidB).
full_deletion_with_multiple_split_test_() ->
{timeout, 120, ?_test(test_full_deletion_with_multiple_split())}.
test_full_deletion_with_multiple_split() ->
test_cleanup(),
{ok, Pid} = open(data_file(), 256),
lists:foldl(fun(N, Tree) ->
update(lists:concat(["key", N]), lists:concat(["value", N]), Tree)
end, Pid, lists:seq(1,300)),
lists:foldl(fun(N, Tree) ->
Key = lists:concat(["key", N]),
% ?infoFmt("deleting ~p~n", [Key]),
delete(Key, Tree),
?assertEqual(300-N, length(leaves(Tree))),
Tree
end, Pid, lists:seq(1,300)),
Tree = get_state(Pid),
Root = Tree#dmerkle.root,
?infoFmt("root: ~p~n", [Tree#dmerkle.root]),
?assertMatch(#leaf{}, Root),
?assertEqual(0, Root#leaf.m),
close(Pid).
partial_deletion_with_multiple_split_test_() ->
{timeout, 120, ?_test(test_partial_deletion_with_multiple_split())}.
test_partial_deletion_with_multiple_split() ->
test_cleanup(),
{ok, Pid1} = open(data_file(), 256),
{ok, Pid2} = open(data_file(1), 256),
Keys = lists:map(fun(I) ->
lib_misc:rand_str(10)
end, lists:seq(1,300)),
lists:foreach(fun(Key) ->
update(Key, "vallllllueeee" ++ Key, Pid1),
update(Key, "vallllllueeee" ++ Key, Pid2)
end, Keys),
lists:foreach(fun(Key) ->
delete(Key, Pid2)
end, lists:sublist(Keys, 50)),
?assertEqual(lists:sort(lists:sublist(Keys, 50)), key_diff(Pid1, Pid2)),
close(Pid1),
close(Pid2).
partial_deletion_and_rebuild_test() ->
test_cleanup(),
{ok, Pid} = open(data_file(), 256),
Keys = lists:map(fun(I) ->
lib_misc:rand_str(10)
end, lists:seq(1,300)),
lists:foreach(fun(Key) ->
update(Key, "valuuueeeee" ++ Key, Pid)
end, Keys),
IdxSize = filelib:file_size(data_file()),
lists:foreach(fun(Key) ->
delete(Key, Pid)
end, lists:sublist(Keys, 100)),
lists:foreach(fun(Key) ->
update(Key, "valuuueeeee" ++ Key, Pid)
end, lists:sublist(Keys, 100)),
?assertEqual(IdxSize, filelib:file_size(data_file())),
close(Pid).
partial_delete_and_rebuild_var_keysize_small_test() ->
test_cleanup(),
{ok, Pid} = open(data_file(), 256),
Keys = lists:map(fun(N) ->
lists:duplicate(N, $a)
end, lists:seq(1,255)),
lists:foreach(fun(Key) ->
update(Key, "value", Pid)
end, Keys),
Size = filelib:file_size(data_file()),
lists:foreach(fun(Key) ->
delete(Key, Pid)
end, lists:sublist(Keys, 50)),
lists:foreach(fun(Key) ->
update(Key, "Value", Pid)
end, lists:sublist(Keys, 50)),
?assertEqual(Size, filelib:file_size(data_file())),
close(Pid).
open_and_insert_n(N) ->
test_cleanup(),
{ok, Pid} = open(data_file(), 256),
lists:foreach(fun(N) ->
Key = lists:concat(["key", N]),
Value = lists:concat(["value", N]),
update(Key, Value, Pid)
% ?infoFmt("leaves ~p~n", [leaves(Pid)])
end, lists:seq(1,N)),
?assertEqual(true, lists:all(fun(N) ->
Hash = hash(lists:concat(["value", N])),
Result = Hash == find(lists:concat(["key", N]), Pid),
if
Result -> Result;
true ->
error_logger:info_msg("could not get ~p was ~p~n", [N, find(lists:concat(["key", N]), Pid)]),
Result
end
end, lists:seq(1, N))),
close(Pid).
stress() ->
test_cleanup(),
spawn_link(
fun() -> lists:foldl(fun(N, Tree) ->
update(lists:concat(["key", N]), lists:concat(["value", N]), Tree)
end, open(data_file(), 4096), lists:seq(1,100000))
end),
receive _ -> timer:sleep(1) end.
priv_dir() ->
Dir = filename:join(t:config(priv_dir), "data"),
filelib:ensure_dir(filename:join(Dir, "dmerkle")),
Dir.
data_file() ->
filename:join(priv_dir(), "dmerkle").
data_file(N) ->
filename:join(priv_dir(), "dmerkle" ++ integer_to_list(N)).
| null | https://raw.githubusercontent.com/moonpolysoft/dynomite/a5618dcbe17b16cefdc9c567f27a1f4445aee005/etest/dmerkle_test.erl | erlang | [] = Diff,
swapping trees may not be something we want to support nomo
swap_tree_test() ->
test_cleanup(),
update(lists:concat(["key", N]), lists:concat(["value", N]), Tree)
update(lists:concat(["key", N]), lists:concat(["value", N]), Tree)
true = lists:all(fun(N) ->
Hash = hash(lists:concat(["value", N])),
if
Result -> Result;
true ->
Result
end
?infoFmt("deleting ~p~n", [Key]), | -include_lib("eunit/include/eunit.hrl").
test_cleanup() ->
file:delete(data_file()),
file:delete(data_file(1)).
open_and_close_test() ->
test_cleanup(),
{ok, Pid} = open(data_file(), 256),
Merkle = get_state(Pid),
Root = Merkle#dmerkle.root,
error_logger:info_msg("root ~p~n", [Root]),
?assertEqual(?headersize_from_blocksize(256), Root#leaf.offset),
?assertEqual(0, Root#leaf.m),
close(Pid).
open_and_insert_one_test() ->
test_cleanup(),
{ok, Pid} = open(data_file(), 256),
update("mykey", <<"myvalue">>, Pid),
Tree = get_state(Pid),
Root = Tree#dmerkle.root,
error_logger:info_msg("root w/ one ~p merkle~p~n", [Root, Tree]),
?assertEqual(1, Root#leaf.m),
Hash = hash(<<"myvalue">>),
Hash = find("mykey", Pid),
close(Pid).
open_and_reopen_test() ->
test_cleanup(),
{ok, Pid} = open(data_file(), 256),
update("mykey", <<"myvalue">>, Pid),
close(Pid),
{ok, NewPid} = open(data_file(), 256),
Hash = hash(<<"myvalue">>),
Hash = find("mykey", NewPid),
close(NewPid).
open_and_insert_20_test() ->
open_and_insert_n(20).
open_and_insert_260_test() ->
open_and_insert_n(260).
open_and_insert_1000_test() ->
open_and_insert_n(1000).
open_and_insert_3000_test() ->
open_and_insert_n(3000).
insert_500_both_ways_test() ->
test_cleanup(),
{ok, Pid} = open(data_file(), 256),
lists:foldl(fun(N, Tree) ->
update(lists:concat(["key", N]), lists:concat(["value", N]), Tree)
end, Pid, lists:seq(1,500)),
{ok, Pid2} = open(data_file(1), 256),
lists:foldl(fun(N, Tree) ->
update(lists:concat(["key", N]), lists:concat(["value", N]), Tree)
end, Pid2, lists:reverse(lists:seq(1,500))),
TreeA = get_state(Pid),
TreeB = get_state(Pid2),
?infoFmt("leaves A: ~p~n", [leaves(Pid)]),
?infoFmt("leaves B: ~p~n", [leaves(Pid2)]),
LeafHashA = lists:foldl(fun({_,Hash}, Sum) ->
(Hash + Sum) rem (2 bsl 31)
end, 0, leaves(Pid)),
LeafHashB = lists:foldl(fun({_,Hash}, Sum) ->
(Hash + Sum) rem (2 bsl 31)
end, 0, leaves(Pid2)),
?assertEqual(leaves(Pid), leaves(Pid2)),
?assertEqual(true, equals(Pid, Pid2)),
close(Pid),
close(Pid2).
insert_realistic_scenario_equality_test() ->
test_cleanup(),
{ok, Pid} = open(data_file(), 256),
TreeA = lists:foldl(fun(N, Tree) ->
update(lists:concat(["key", N]), lists:concat(["value", N]), Tree)
end, Pid, lists:seq(1,500)),
{ok, Pid2} = open(data_file(1), 256),
TreeB = lists:foldl(fun(N, Tree) ->
update(lists:concat(["key", N]), lists:concat(["value", N]), Tree)
end, Pid2, lists:seq(1,505)),
false = equals(TreeA, TreeB).
insert_realistic_scenario_diff_test() ->
test_cleanup(),
{ok, Pid} = open(data_file(), 256),
TreeA = lists:foldl(fun(N, Tree) ->
update(lists:concat(["key", N]), lists:concat(["value", N]), Tree)
end, Pid, lists:seq(1,495)),
{ok, Pid2} = open(data_file(1), 256),
TreeB = lists:foldl(fun(N, Tree) ->
update(lists:concat(["key", N]), lists:concat(["value", N]), Tree)
end, Pid2, lists:seq(1,500)),
Diff = key_diff(TreeA, TreeB),
Keys = lists:map(fun(N) -> lists:concat(["key", N]) end, lists:seq(496, 500)),
error_logger:info_msg("realistic diff: ~p~n", [Diff]),
?assertEqual(Keys, Diff),
close(Pid),
close(Pid2).
insert_500_both_ways_diff_test() ->
test_cleanup(),
{ok, PidA} = open(data_file(), 256),
TreeA = lists:foldl(fun(N, Tree) ->
update(lists:concat(["key", N]), lists:concat(["value", N]), Tree)
end, PidA, lists:seq(1,500)),
{ok, PidB} = open(data_file(1), 256),
TreeB = lists:foldl(fun(N, Tree) ->
update(lists:concat(["key", N]), lists:concat(["value", N]), Tree)
end, PidB, lists:reverse(lists:seq(1,500))),
error_logger:info_msg("both ways diff: ~p~n", [key_diff(TreeA, TreeB)]),
?assertEqual([], key_diff(TreeA, TreeB)),
close(PidA),
close(PidB).
insert_overwrite_test() ->
test_cleanup(),
{ok, PidA} = open(data_file(), 256),
TreeA = lists:foldl(fun(N, Tree) ->
update(lists:concat(["key", N]), lists:concat(["value", N]), Tree)
end, PidA, lists:seq(1,500)),
{ok, PidB} = open(data_file(1), 256),
TreeB = lists:foldl(fun(N, Tree) ->
update(lists:concat(["key", N]), lists:concat(["different", N]), Tree)
end, lists:foldl(fun(N, Tree) ->
update(lists:concat(["key", N]), lists:concat(["value", N]), Tree)
end, PidB, lists:seq(1,500)), lists:seq(1, 500)),
Diff = key_diff(TreeA, TreeB),
500 = length(Diff),
500 = length(leaves(TreeB)).
insert_overwrite2_test() ->
test_cleanup(),
{ok, PidA} = open(data_file(), 256),
TreeA = lists:foldl(fun(N, Tree) ->
update(lists:concat(["key", N]), lists:concat(["value", N]), Tree)
end, PidA, lists:seq(1,3000)),
{ok, PidB} = open(data_file(1), 256),
TreeB = lists:foldl(fun(N, Tree) ->
update(lists:concat(["key"]), lists:concat(["value", N]), Tree)
end, lists:foldl(fun(N, Tree) ->
update(lists:concat(["key"]), lists:concat(["value", N]), Tree)
end, PidB, lists:seq(1,3000)), lists:seq(1, 3000)),
Diff = key_diff(TreeA, TreeB),
1 = length(leaves(TreeB)).
% {ok, PidA} = open(data_file(), 256),
% TreeA = lists:foldl(fun(N, Tree) ->
% end, PidA, lists:seq(1,250)),
% {ok, PidB} = open(data_file(1), 256),
% TreeB = lists:foldl(fun(N, Tree) ->
% end, PidB, lists:reverse(lists:seq(1,250))),
% {ok, NewTree} = swap_tree(TreeA, TreeB),
% Result = Hash == find(lists:concat(["key", N]), NewTree),
% error_logger:info_msg("could not get ~p was ~p~n", [N, find(lists:concat(["key", N]), NewTree)]),
% end, lists:seq(1, 500)).
leaves_test() ->
test_cleanup(),
{ok, PidA} = open(data_file(), 256),
Tree = lists:foldl(fun(N, Tree) ->
update(lists:concat(["key", N]), lists:concat(["value", N]), Tree)
end, PidA, lists:seq(1,500)),
500 = length(leaves(Tree)).
empty_diff_test() ->
test_cleanup(),
{ok, PidA} = open(data_file(), 256),
TreeA = lists:foldl(fun(N, Tree) ->
update(lists:concat(["key", N]), lists:concat(["value", N]), Tree)
end, PidA, lists:seq(1,500)),
{ok, TreeB} = open(data_file(1), 256),
500 = length(key_diff(TreeA, TreeB)).
live_test_() ->
{timeout, 120, ?_test(test_live())}.
test_live() ->
{ok, TreeA} = open(data_file(410), 4096),
{ok, TreeB} = open(data_file(42), 4096),
KeyDiff = key_diff(TreeA, TreeB),
error_logger:info_msg("key_diff: ~p~n", [KeyDiff]),
LeavesA = leaves(TreeA),
LeavesB = leaves(TreeB),
LeafDiff = LeavesA -- LeavesB,
error_logger:info_msg("leaf_diff: ~p~n", [LeafDiff]),
timer:sleep(100),
KeyDiff = LeafDiff.
insert_variable_keys_under_block_size_test() ->
test_cleanup(),
{ok, Pid} = open(data_file(), 256),
Keys = lists:map(fun(N) ->
lists:duplicate(N, $a)
end, lists:seq(1,255)),
lists:foreach(fun(Key) ->
update(Key, "value", Pid)
end, Keys),
{LK, _} = lists:unzip(leaves(Pid)),
?assertEqual(Keys, lists:sort(LK)),
close(Pid).
insert_variable_keys_over_a_block_size_test() ->
test_cleanup(),
{ok, Pid} = open(data_file(), 256),
Keys = lists:map(fun(N) ->
lists:duplicate(N, $a)
end, lists:seq(1,512)),
lists:foreach(fun(Key) ->
update(Key, "value", Pid)
end, Keys),
{LK, _} = lists:unzip(leaves(Pid)),
?assertEqual(Keys, lists:sort(LK)),
close(Pid).
simple_deletion_test() ->
test_cleanup(),
{ok, Pid} = open(data_file(), 256),
update("key", "value", Pid),
delete("key", Pid),
Tree = get_state(Pid),
Root = Tree#dmerkle.root,
error_logger:info_msg("Root ~p~n", [Root]),
?assertEqual(0, Root#leaf.m),
close(Pid).
full_deletion_with_single_split_test() ->
test_cleanup(),
{ok, Pid} = open(data_file(), 256),
lists:foldl(fun(N, Tree) ->
update(lists:concat(["key", N]), lists:concat(["value", N]), Tree)
end, Pid, lists:seq(1,20)),
lists:foreach(fun(N) ->
Key = lists:concat(["key", N]),
delete(Key, Pid)
end, lists:seq(1,20)),
Tree = get_state(Pid),
Root = Tree#dmerkle.root,
?assertMatch(#leaf{}, Root),
?assertEqual(0, Root#leaf.m),
close(Pid).
compare_trees_with_delete_test() ->
test_cleanup(),
{ok, PidA} = open(data_file(), 256),
{ok, PidB} = open(data_file(1), 256),
lists:foldl(fun(N, Tree) ->
update(lists:concat(["key", N]), lists:concat(["value", N]), Tree)
end, PidA, lists:seq(1,20)),
lists:foldl(fun(N, Tree) ->
update(lists:concat(["key", N]), lists:concat(["value", N]), Tree)
end, PidB, lists:seq(1,20)),
delete("key1", PidB),
?assertEqual(["key1"], key_diff(PidA, PidB)),
close(PidA),
close(PidB).
full_deletion_with_multiple_split_test_() ->
{timeout, 120, ?_test(test_full_deletion_with_multiple_split())}.
test_full_deletion_with_multiple_split() ->
test_cleanup(),
{ok, Pid} = open(data_file(), 256),
lists:foldl(fun(N, Tree) ->
update(lists:concat(["key", N]), lists:concat(["value", N]), Tree)
end, Pid, lists:seq(1,300)),
lists:foldl(fun(N, Tree) ->
Key = lists:concat(["key", N]),
delete(Key, Tree),
?assertEqual(300-N, length(leaves(Tree))),
Tree
end, Pid, lists:seq(1,300)),
Tree = get_state(Pid),
Root = Tree#dmerkle.root,
?infoFmt("root: ~p~n", [Tree#dmerkle.root]),
?assertMatch(#leaf{}, Root),
?assertEqual(0, Root#leaf.m),
close(Pid).
partial_deletion_with_multiple_split_test_() ->
{timeout, 120, ?_test(test_partial_deletion_with_multiple_split())}.
test_partial_deletion_with_multiple_split() ->
test_cleanup(),
{ok, Pid1} = open(data_file(), 256),
{ok, Pid2} = open(data_file(1), 256),
Keys = lists:map(fun(I) ->
lib_misc:rand_str(10)
end, lists:seq(1,300)),
lists:foreach(fun(Key) ->
update(Key, "vallllllueeee" ++ Key, Pid1),
update(Key, "vallllllueeee" ++ Key, Pid2)
end, Keys),
lists:foreach(fun(Key) ->
delete(Key, Pid2)
end, lists:sublist(Keys, 50)),
?assertEqual(lists:sort(lists:sublist(Keys, 50)), key_diff(Pid1, Pid2)),
close(Pid1),
close(Pid2).
partial_deletion_and_rebuild_test() ->
test_cleanup(),
{ok, Pid} = open(data_file(), 256),
Keys = lists:map(fun(I) ->
lib_misc:rand_str(10)
end, lists:seq(1,300)),
lists:foreach(fun(Key) ->
update(Key, "valuuueeeee" ++ Key, Pid)
end, Keys),
IdxSize = filelib:file_size(data_file()),
lists:foreach(fun(Key) ->
delete(Key, Pid)
end, lists:sublist(Keys, 100)),
lists:foreach(fun(Key) ->
update(Key, "valuuueeeee" ++ Key, Pid)
end, lists:sublist(Keys, 100)),
?assertEqual(IdxSize, filelib:file_size(data_file())),
close(Pid).
partial_delete_and_rebuild_var_keysize_small_test() ->
test_cleanup(),
{ok, Pid} = open(data_file(), 256),
Keys = lists:map(fun(N) ->
lists:duplicate(N, $a)
end, lists:seq(1,255)),
lists:foreach(fun(Key) ->
update(Key, "value", Pid)
end, Keys),
Size = filelib:file_size(data_file()),
lists:foreach(fun(Key) ->
delete(Key, Pid)
end, lists:sublist(Keys, 50)),
lists:foreach(fun(Key) ->
update(Key, "Value", Pid)
end, lists:sublist(Keys, 50)),
?assertEqual(Size, filelib:file_size(data_file())),
close(Pid).
open_and_insert_n(N) ->
test_cleanup(),
{ok, Pid} = open(data_file(), 256),
lists:foreach(fun(N) ->
Key = lists:concat(["key", N]),
Value = lists:concat(["value", N]),
update(Key, Value, Pid)
% ?infoFmt("leaves ~p~n", [leaves(Pid)])
end, lists:seq(1,N)),
?assertEqual(true, lists:all(fun(N) ->
Hash = hash(lists:concat(["value", N])),
Result = Hash == find(lists:concat(["key", N]), Pid),
if
Result -> Result;
true ->
error_logger:info_msg("could not get ~p was ~p~n", [N, find(lists:concat(["key", N]), Pid)]),
Result
end
end, lists:seq(1, N))),
close(Pid).
stress() ->
test_cleanup(),
spawn_link(
fun() -> lists:foldl(fun(N, Tree) ->
update(lists:concat(["key", N]), lists:concat(["value", N]), Tree)
end, open(data_file(), 4096), lists:seq(1,100000))
end),
receive _ -> timer:sleep(1) end.
priv_dir() ->
Dir = filename:join(t:config(priv_dir), "data"),
filelib:ensure_dir(filename:join(Dir, "dmerkle")),
Dir.
data_file() ->
filename:join(priv_dir(), "dmerkle").
data_file(N) ->
filename:join(priv_dir(), "dmerkle" ++ integer_to_list(N)).
|
a70a84337536b08df2b7445fe3d0c86a407b9faba80049d6bbb7bd5d9f62fcb6 | orx/ocaml-orx | orx.mli | type camera
type obj
module Status : sig
(** {1 Specialization of {!Stdlib.result} values for orx} *)
type 'ok result = ('ok, [ `Orx ]) Stdlib.result
(** Errors are all grouped as [`Orx]. *)
type t = unit result
(** Status of a side effect only operation. *)
val ok : t
(** Success! *)
val error : t
(** Not success! *)
val open_error : 'ok result -> ('ok, [> `Orx ]) Stdlib.result
(** Convenience function to open the [`Orx] type to make composing results
from other libraries easier. *)
val get : t -> unit
(** [get result] is [()] if [result] is [Ok ()], otherwise it raises
{!Invalid_argument}. *)
val get_ok : 'ok result -> 'ok
(** [get_ok result] is [o] if [result] is [Ok o], otherwise it raises
{!Invalid_argument}. *)
val ignore : t -> unit
(** [ignore result] is {!Stdlib.ignore} constrained for more precise type
checking. *)
end
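(* Usage sketch (added for illustration, not part of the original interface):
   a [Status.t] is just a [result], so it composes with ordinary matching.
   [o] is some previously created object and "Blink" a hypothetical FX
   section name from config.
   match Object.add_fx o "Blink" with
   | Ok () -> ()
   | Error `Orx -> Log.log "could not add the Blink FX"
*)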
module Log : sig
(** {1 Logging using the orx engine's logger}
These functions use orx's logging functionality. Log output will only be
shown when building against a debug build of orx. Release builds disable
logging. *)
type 'a format_logger =
('a, Format.formatter, unit, unit, unit, unit) format6 -> 'a
(** All formatting functions act as standard {!Stdlib.Format} formatters. *)
val log : 'a format_logger
(** Log with output going to all of orx's log targets. *)
val terminal : 'a format_logger
(** Log with output going to the terminal. *)
val file : 'a format_logger
(** Log with output going to orx's log file(s). *)
val console : 'a format_logger
(** Log with output going to the console. *)
end
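(* Sketch: because of [format_logger], these functions behave like
   [Format.printf]. For example, in a debug build of orx:
   Log.log "loaded %d objects in %fms" count elapsed
   where [count] and [elapsed] are ordinary OCaml values supplied by the
   caller (hypothetical names here). *)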
module String_id : sig
(** {1 Locally unique IDs for registered strings} *)
type t
(** A unique ID for a registered string. *)
val undefined : t
(** ID used to represent undefined/unregistered strings. *)
val get_id : string -> t
(** [get_id s] registers [s] if it has not been registered already and returns
the ID associated with [s]. *)
val get_from_id : t -> string
(** [get_from_id id] returns the string associated with [id]. If no string is
associated with [id] then the return value is an empty string. *)
end
module Parent : sig
(** {1 Parent values for nesting structures} *)
(** Possible parents *)
type t =
| Camera of camera
| Object of obj
end
module Structure : sig
(** {1 General orx engine structures}
From https://orx-project.org/orx/doc/html/group__orx_structure.html *)
type t
(** A structure *)
module Guid : sig
type t
(** {1 Unique IDs for structures}
These IDs are unique for a single process. They are a safe way to track
structures such as objects across time. *)
val compare : t -> t -> int
(** A total order comparison for {!t} values. The actual order does not hold
important semantic meaning but this does allow for easy use of
{!Stdlib.Set.Make} and {!Stdlib.Map.Make}. *)
val equal : t -> t -> bool
(** Equality for {!t} values. *)
val pp : Format.formatter -> t -> unit
(** Pretty-printer for {!t} values with an unspecified representation. *)
val to_string : t -> string
(** [to_string id] is a string representation of [id]. It can be used for
logging, storing in config, commands or anywhere else a {!t} value might
be persisted. *)
val of_string : string -> t
(** [of_string s] is {!t} parsed from [s].
@raise Failure
if [s] is not a valid {!t}. Note that a valid {!t} does not
necessarily mean that value is an active GUID in the current orx
session. *)
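(* Sketch: GUIDs survive round-tripping through their string form, which is
   how they would typically be stored in config or sent elsewhere:
   let s = Guid.to_string guid in
   assert (Guid.equal guid (Guid.of_string s))
   [guid] is assumed to come from a live structure; [of_string] raises
   [Failure] on malformed input. *)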
end
end
module Vector : sig
(** {1 Three dimensional vectors}
From https://orx-project.org/orx/doc/html/group__orx_vector.html *)
type t
(** A three dimensional [(x, y, z)] vector *)
val pp : Format.formatter -> t -> unit
(** Pretty-printer for vector values *)
val equal : t -> t -> bool
(** Equality across all three dimensions *)
val equal_2d : t -> t -> bool
(** Equality in [(x, y)] only. [z] is ignored. *)
val get_x : t -> float
(** [get_x v] is the [x] element of [v]. *)
val get_y : t -> float
(** [get_y v] is the [y] element of [v]. *)
val get_z : t -> float
(** [get_z v] is the [z] element of [v]. *)
val get_size : t -> float
(** [get_size v] is the vector magnitude of [v]. *)
val make : x:float -> y:float -> z:float -> t
(** [make ~x ~y ~z] is the vector [(x, y, z)]. *)
val set_x : t -> float -> unit
(** [set_x v x'] modifies [v] in place by assigning the magnitude of [v]'s [x]
as [x']. *)
val set_y : t -> float -> unit
(** [set_y v y'] modifies [v] in place by assigning the magnitude of [v]'s [y]
as [y']. *)
val set_z : t -> float -> unit
(** [set_z v z'] modifies [v] in place by assigning the magnitude of [v]'s [z]
as [z']. *)
(** {2 Vector operations}
Each of the following operations has a [f] and [f'] form. The [f] form
returns a freshly allocated vector with the result of the specified
operation. The [f'] form takes a [target] which will be modified to
contain the results of the operation performed by [f'].
In the case of [f'] functions, the target and source vector can be the
same value, in which case the source vector will be modified in place. *)
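(* Sketch of the [f] vs [f'] convention documented above:
   let sum = Vector.add a b in       (* allocates a fresh vector *)
   Vector.add' ~target:a a b;        (* writes a + b back into a *)
   ignore sum
   [a] and [b] are assumed to be existing [Vector.t] values. *)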
val copy' : target:t -> t -> unit
val copy : t -> t
val normalize' : target:t -> t -> unit
val normalize : t -> t
val reciprocal' : target:t -> t -> unit
val reciprocal : t -> t
val round' : target:t -> t -> unit
val round : t -> t
val floor' : target:t -> t -> unit
val floor : t -> t
val neg' : target:t -> t -> unit
val neg : t -> t
val add' : target:t -> t -> t -> unit
val add : t -> t -> t
val sub' : target:t -> t -> t -> unit
val sub : t -> t -> t
val mul' : target:t -> t -> t -> unit
val mul : t -> t -> t
val div' : target:t -> t -> t -> unit
val div : t -> t -> t
val dot : t -> t -> float
val dot_2d : t -> t -> float
val cross' : target:t -> t -> t -> unit
val cross : t -> t -> t
val mulf' : target:t -> t -> float -> unit
val mulf : t -> float -> t
val divf' : target:t -> t -> float -> unit
val divf : t -> float -> t
val rotate_2d' : target:t -> t -> float -> unit
val rotate_2d : t -> float -> t
val lerp' : target:t -> t -> t -> float -> unit
val lerp : t -> t -> float -> t
val clamp' : target:t -> t -> min:t -> max:t -> unit
val clamp : t -> min:t -> max:t -> t
val clamp_size' : target:t -> t -> min:float -> max:float -> unit
val clamp_size : t -> min:float -> max:float -> t
val move_x : t -> float -> unit
val move_y : t -> float -> unit
val move_z : t -> float -> unit
val of_rotation : float -> t
val to_rotation : t -> float
end
module Obox : sig
(** {1 Oriented boxes}
From https://orx-project.org/orx/doc/html/group__orx_o_box.html *)
type t
(** Type of an oriented box *)
val make : pos:Vector.t -> pivot:Vector.t -> size:Vector.t -> angle:float -> t
(** [make ~pos ~pivot ~size ~angle] creates an oriented box with the given
specs. *)
val set_2d :
t -> pos:Vector.t -> pivot:Vector.t -> size:Vector.t -> angle:float -> unit
(** [set_2d obox ~pos ~pivot ~size ~angle] sets [obox] to the given specs. *)
val copy : t -> t
(** [copy obox] is a fresh {!t} with the same specs as [obox]. *)
val get_center : t -> Vector.t
(** [get_center obox] is the center of [obox]. *)
val move : t -> Vector.t -> t
(** [move obox vec] is a fresh {!t} which is [obox] moved by [vec]. *)
val move' : t -> Vector.t -> unit
(** [move' obox vec] moves [obox] by [vec]. *)
val rotate_2d : t -> float -> t
(** [rotate_2d obox angle] is a fresh {!t} which is [obox] rotated by [angle].
[angle] is in radians. *)
val rotate_2d' : t -> float -> unit
(** [rotate_2d' obox angle] rotates [obox] by [angle]. [angle] is in radians. *)
val is_inside : t -> Vector.t -> bool
(** [is_inside obox vec] is [true] if [vec] is inside [obox]. *)
val is_inside_2d : t -> Vector.t -> bool
(** [is_inside_2d obox vec] is [true] if [vec] is inside [obox]. The [z] of
the [obox] and [vec] are ignored. *)
end
module Module_id : sig
(** {1 Engine module IDs}
From https://orx-project.org/orx/doc/html/group__orx_module.html *)
type t =
| Clock
| Main
end
module Clock_modifier : sig
(** {1 Game clock modifiers}
From https://orx-project.org/orx/doc/html/group__orx_clock.html *)
type t =
| Fixed
| Multiply
| Maxed
| Average
end
module Clock_priority : sig
(** {1 Clock callback priorities}
From https://orx-project.org/orx/doc/html/group__orx_clock.html *)
type t =
| Lowest
| Lower
| Low
| Normal
| High
| Higher
| Highest
end
module Clock : sig
(** {1 Engine clocks}
From https://orx-project.org/orx/doc/html/group__orx_clock.html *)
type t
(** A game clock. Clocks are unique by name within a process. *)
module Info : sig
(** {1 Clock information}
From https://orx-project.org/orx/doc/html/group__orx_clock.html *)
type clock = t
type t
(** Clock information passed to a clock's callback function *)
val get_tick_size : t -> float
(** Tick size of the clock associated with this clock info *)
val get_dt : t -> float
(** Time since the last clock tick *)
val get_time : t -> float
(** Current overall time for a clock *)
val get_clock : t -> clock option
(** Get the clock associated with a clock info value *)
end
val compare : t -> t -> int
(** A total order comparison for {!t} values. The actual order does not hold
important semantic meaning but this does allow for easy use of
{!Stdlib.Set.Make} and {!Stdlib.Map.Make}. *)
val equal : t -> t -> bool
(** Equality for {!t} values *)
val create_from_config : string -> t option
(** [create_from_config name] creates and returns the clock under config
section [name], or [None] if a valid clock is not defined under [name]. *)
val create_from_config_exn : string -> t
(** [create_from_config_exn name] creates and returns the clock under config
section [name].
@raise Invalid_argument if a valid clock is not defined under [name]. *)
val create : float -> t
(** [create tick_size] creates a new clock with [tick_size] defined in
seconds. *)
val get : string -> t option
(** [get name] gets the clock named [name] if it exists. *)
val get_exn : string -> t
(** [get_exn name] gets the clock named [name].
@raise Invalid_argument if [name] is not a valid clock. *)
val get_core : unit -> t
* [ ( ) ] returns the core engine clock .
val get_name : t -> string
* [ clock ] is [ clock ] 's config name .
val get_info : t -> Info.t
val get_modifier : t -> Clock_modifier.t -> float
val set_modifier : t -> Clock_modifier.t -> float -> unit
val set_tick_size : t -> float -> unit
val restart : t -> Status.t
val pause : t -> unit
val unpause : t -> unit
val is_paused : t -> bool
(** {2 Callbacks}
Clock callbacks fire on each tick of a clock. *)
module Callback_handle : sig
(** {1 Callback handles}
Callbacks are associated with handles. These handles may be used to
unregister callbacks associated with them. *)
type t
val default : t
(** The default handle for all callbacks registered to a clock without an
explicitly provided callback handle. *)
val make : unit -> t
(** [make ()] is a fresh callback handle with no callbacks associated with
it. *)
end
val register :
?handle:Callback_handle.t ->
?module_id:Module_id.t ->
?priority:Clock_priority.t ->
t ->
(Info.t -> unit) ->
unit
(** [register ?handle ?module_id ?priority clock callback] registers
[callback] so that it will be called on each tick of [clock].
@param handle
Can be provided when a callback should not exist for the entire
remaining lifetime of a clock, allowing callbacks to be unregistered.
Defaults to {!Callback_handle.default}.
@param orx_module
ID of the module related to this callback. Defaults to
{!Module_id.Main}.
@param priority
Priority of callback. Defaults to {!Clock_priority.Normal}. *)
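(* Sketch: run per-frame game logic on a clock defined in config.
   "GameClock" and [update_world] are placeholders for your own config
   section and update function.
   let clock = Clock.create_from_config_exn "GameClock" in
   Clock.register clock (fun info -> update_world ~dt:(Clock.Info.get_dt info))
*)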
val unregister : t -> Callback_handle.t -> unit
(** [unregister clock handle] unregisters all callbacks associated with
[clock] and [handle]. *)
val unregister_all : t -> unit
(** [unregister_all clock] unregisters all callbacks associated with [clock]. *)
(** {2 Timers}
Timers fire one or more times, after a specified delay. *)
module Timer_handle : sig
(** {1 Callback handles}
Callbacks are associated with handles. These handles may be used to
unregister callbacks associated with them. *)
type t
val default : t
(** The default handle for all callbacks registered to a clock without an
explicitly provided callback handle. *)
val make : unit -> t
(** [make ()] is a fresh callback handle with no callbacks associated with
it. *)
end
val add_timer :
?handle:Timer_handle.t -> t -> (Info.t -> unit) -> float -> int -> unit
(** [add_timer ?handle clock callback delay repetition] registers [callback]
with [clock] as a timer callback.
@param delay Specifies the delay between calls to [callback]
@param repetition
Specifies the number of times [callback] should be called before it's
deregistered. Use [-1] to specify that [timer] should keep being called
forever. *)
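(* Sketch: a one-shot timer on the core clock; [spawn_wave] is a placeholder
   callback and [2.5] the delay in seconds. Use a repetition count of [-1]
   to keep the timer firing forever.
   Clock.add_timer (Clock.get_core ()) (fun _info -> spawn_wave ()) 2.5 1
*)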
val remove_timer : t -> Timer_handle.t -> unit
(** [remove_timer clock handle] removes the timers associated with [handle]
from [clock]. Timers with a finite number of repetitions will be
automatically removed once they have run out of repetitions. *)
val remove_all_timers : t -> unit
(** [remove_all_timers clock] removes all timers associated with [clock]. *)
end
module Texture : sig
(** {1 Textures for in game graphics}
From https://orx-project.org/orx/doc/html/group__orx_texture.html *)
type t
(** A single texture *)
val load : string -> bool -> t option
(** [load path keep_in_cache] creates a texture from the file at [path].
@param keep_in_cache
Specifies if a texture should be kept active in orx's cache even when
there are no more active references to it. *)
val delete : t -> Status.t
(** [delete texture] deletes [texture]. *)
val clear_cache : unit -> Status.t
(** [clear_cache ()] will clear any unreferenced textures from orx's cache. *)
val get_size : t -> float * float
(** [get_size texture] retrieves the dimensions of [texture]. *)
end
module Graphic : sig
(** {1 Graphic module for 2D graphics}
From https://orx-project.org/orx/doc/html/group__orx_graphic.html *)
type t
(** An in engine graphic *)
val create : unit -> t option
(** [create ()] creates a fresh graphic. *)
val create_from_config : string -> t option
(** [create_from_config section_name] creates the graphic defined under
[section_name] in config if it's properly defined. *)
val delete : t -> Status.t
val set_size : t -> Vector.t -> unit
val get_size : t -> Vector.t
val set_origin : t -> Vector.t -> unit
val get_origin : t -> Vector.t
val set_flip : t -> x:bool -> y:bool -> unit
val set_pivot : t -> Vector.t -> unit
val set_data : t -> Structure.t -> Status.t
val to_structure : t -> Structure.t
end
module Sound_status : sig
type t =
| Play
| Pause
| Stop
end
module Sound : sig
(** {1 Sound playback}
From https://orx-project.org/orx/doc/html/group__orx_sound.html *)
type t
(** A sound *)
val create_from_config : string -> t option
(** [create_from_config section_name] creates a sound from the configuration
in [section_name] from config if it defines a valid sound. *)
val get_name : t -> string
(** [get_name sound] is the config section name of [sound]. *)
val get_status : t -> Sound_status.t
(** [get_status sound] is the playback status of [sound]. *)
val play : t -> unit
val pause : t -> unit
val stop : t -> unit
val get_duration : t -> float
val get_pitch : t -> float
val set_pitch : t -> float -> unit
val get_volume : t -> float
val set_volume : t -> float -> unit
end
module Resource : sig
(** {1 Engine resources}
From https://orx-project.org/orx/doc/html/group__orx_resource.html *)
type group =
| Config
| Sound
| Texture
| Custom of string
val group_of_string : string -> group
val string_of_group : group -> string
val add_storage : group -> string -> bool -> Status.t
(** [add_storage group description add_first] adds [description] as a storage
source for [group]. Storage sources depend on the type of storage being
used. By default this will be a filesystem path, but other resource
systems can be defined and used with orx.
@param add_first
If [true] then [description] will be checked before all previously
defined storage systems. If [false] then [description] will be checked
after. *)
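(* Sketch: make an extra directory visible to the Texture resource group,
   checked before the storages already declared in config. The path is an
   arbitrary example.
   Resource.add_storage Resource.Texture "data/skins" true |> Status.ignore
*)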
val remove_storage : group option -> string option -> Status.t
(** [remove_storage group description] removes [description] from [group].
@param group
If [group] is [None] then [description] will be removed from all groups.
@param description
If [description] is [None] then all storages will be removed from
[group]. *)
val reload_storage : unit -> Status.t
(** [reload_storage ()] forces orx to reload all storages from config. *)
val sync : group option -> Status.t
(** [sync group] synchronizes all storages associated with [group] with their
source material.
@param group
If [group] is [None] then all resource groups are synchronized. *)
end
module Mouse_button : sig
type t =
| Left
| Right
| Middle
| Extra_1
| Extra_2
| Extra_3
| Extra_4
| Extra_5
| Wheel_up
| Wheel_down
end
module Mouse_axis : sig
type t =
| X
| Y
end
module Mouse : sig
(** {1 Read mouse state}
From https://orx-project.org/orx/doc/html/group__orx_mouse.html *)
val is_button_pressed : Mouse_button.t -> bool
val get_position : unit -> Vector.t option
(** [get_position ()] is the current mouse screen position. *)
val get_position_exn : unit -> Vector.t
(** [get_position_exn ()] is the current mouse screen position.
@raise Invalid_argument if no mouse position can be retrieved. *)
val get_move_delta : unit -> Vector.t option
(** [get_move_delta ()] is the position change since the last call to this
function. *)
val get_wheel_delta : unit -> float
(** [get_wheel_delta ()] is the position change since the last call to this
function. *)
val show_cursor : bool -> Status.t
val set_cursor : string -> Vector.t option -> Status.t
(** [set_cursor name pivot] sets the mouse's cursor display to [name] and its
hotspot to [pivot].
@param name
Can be standard names (arrow, ibeam, hand, crosshair, hresize or
vresize) or a file name
@param pivot
Can be an offset for the hotspot or [None] to default to [(0, 0)] *)
val get_button_name : Mouse_button.t -> string
(** [get_button_name button] is a canonical name for [button] if one exists. *)
val get_axis_name : Mouse_axis.t -> string
(** [get_axis_name axis] is a canonical name for [axis] if one exists. *)
end
module Input_type : sig
type t =
| Keyboard_key
| Mouse_button
| Mouse_axis
| Joystick_button
| Joystick_axis
| External
| No_input
end
module Input_mode : sig
type t =
| Full
| Positive
| Negative
end
module Input : sig
(** {1 General user input handling}
Orx inputs are defined by name in config. This module allows querying the
state of inputs.
From https://orx-project.org/orx/doc/html/group__orx_input.html *)
val is_active : string -> bool
(** [is_active input] is [true] if [input] is currently active. *)
val has_new_status : string -> bool
(** [has_new_status input] is [true] if [input] has changed status since the
last time it was checked. *)
val has_been_activated : string -> bool
(** [has_been_activated input] is [true] if [input] has been activated since
the last time it was checked. *)
val has_been_deactivated : string -> bool
(** [has_been_deactivated input] is [true] if [input] has been deactivated
since the last time it was checked. *)
val get_value : string -> float
(** [get_value input] is the current value of [input]. For keypresses, this
will generally be [0.0] or [1.0]. For a joystick the value will scale
according to the position of the stick along the queried axis. *)
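(* Sketch: polling inputs from a per-frame callback. "Jump" and "Throttle"
   are hypothetical input names defined in the current input set in config.
   if Input.has_been_activated "Jump" then jump ();
   let amount = Input.get_value "Throttle" in
   ignore amount
*)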
val set_value : string -> float -> Status.t
(** [set_value input value] sets [input] to [value]. This impacts only one
input for [input]. *)
val set_permanent_value : string -> float -> Status.t
(** [set_permanent_value input value] sets [input] to [value] until reset. Set
[value] to [0.0] to reset. *)
val get_binding :
string -> int -> (Input_type.t * int * Input_mode.t) Status.result
(** [get_binding input index] gives information on [input]'s type and mode. *)
val get_binding_name : Input_type.t -> int -> Input_mode.t -> string
(** [get_binding_name input_type binding_id mode] give the name associated
with [input_type], [binding_id] and [mode]. *)
(** {2 Input sets} *)
val get_current_set : unit -> string
(** [get_current_set ()] returns the currently selected input set. *)
val select_set : string -> Status.t
(** [select_set input_set] selects [input_set] as the currently active input
set. *)
val enable_set : string -> bool -> Status.t
(** [enable_set input_set enabled] enables/disables [input_set] without
selecting it. *)
val is_set_enabled : string -> bool
(** [is_set_enabled input_set] is [true] if [input_set] is enabled, otherwise
[false]. *)
end
module Physics : sig
(** {1 General physics engine settings and values}
From https://orx-project.org/orx/doc/html/group__orx_physics.html *)
val get_collision_flag_name : Unsigned.UInt32.t -> string
(** [get_collision_flag_name flag] is the name defined in config matching
[flag] if one exists, otherwise an empty string. *)
val get_collision_flag_value : string -> Unsigned.UInt32.t
(** [get_collision_flag_value name] is the value associated with the named
collision flag [name] or {!Unsigned.UInt32.zero} if [name] is not a
defined collision flag. *)
val check_collision_flag :
mask:Unsigned.UInt32.t -> flag:Unsigned.UInt32.t -> bool
(** [check_collision_flag ~mask ~flag] indicates if [mask] and [flag] would
collide. *)
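(* Sketch: combining config-defined collision flags into a check mask.
   "wall" and "ball" are hypothetical flag names.
   let wall = Physics.get_collision_flag_value "wall" in
   let ball = Physics.get_collision_flag_value "ball" in
   let mask = Unsigned.UInt32.logor wall ball in
   assert (Physics.check_collision_flag ~mask ~flag:ball)
*)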
val get_gravity : unit -> Vector.t
(** [get_gravity ()] is the current world gravity. *)
val set_gravity : Vector.t -> unit
(** [set_gravity v] sets the current world gravity to [v]. *)
val enable_simulation : bool -> unit
(** [enable_simulation enabled] enables or disables the world physics
simulation. Can be used when the game simulation is paused, for example. *)
end
module Body_part : sig
(** {1 Body parts for physics simulation}
For physics body parts, flags specify the collision bitmask for a part. A
mask specifies the flags for other bodies which a part should collide
with.
From https://orx-project.org/orx/doc/html/group__orx_body.html *)
type t
(** A single body part *)
val get_name : t -> string
(** [get_name part] is the config name associated with [part]. *)
val set_self_flags : t -> int -> unit
(** [set_self_flags part flags] sets the collision flags for [part] to
[flags]. *)
val get_self_flags : t -> int
(** [get_self_flags part] is the current collision flags for [part]. *)
val set_check_mask : t -> int -> unit
(** [set_check_mask part mask] sets the check mask for [part]. *)
val get_check_mask : t -> int
(** [get_check_mask part] is the current check mask for [part]. *)
end
module Body : sig
(** {1 Physics bodies}
A physics body may be made up of one or more parts as defined in
{!Body_part}.
From https://orx-project.org/orx/doc/html/group__orx_body.html *)
type t
(** A single physics body *)
val get_parts : t -> Body_part.t Seq.t
(** [get_parts body] is the sequence of parts which make up [body]. *)
end
module Object : sig
(** {1 Objects in the orx engine world}
From https://orx-project.org/orx/doc/html/group__orx_object.html *)
type t = obj
(** An Orx object *)
val compare : t -> t -> int
(** Comparison defining a total ordering over objects. This is primarily
useful for defining containers like {!Stdlib.Map} and {!Stdlib.Set}. *)
val equal : t -> t -> bool
(** Object equality *)
(** {2 Object creation} *)
val create_from_config : string -> t option
val create_from_config_exn : string -> t
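(* Creation sketch: "Hero" is a hypothetical config section describing an
   object; the [_exn] variant raises if the section does not define a valid
   object.
   {[
     let spawn_hero () : Orx.Object.t = Orx.Object.create_from_config_exn "Hero"
   ]} *)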
(** {2 Enabling/disabling objects} *)
val enable : t -> bool -> unit
val enable_recursive : t -> bool -> unit
val is_enabled : t -> bool
val pause : t -> bool -> unit
val is_paused : t -> bool
(** {2 Object ownership} *)
val set_owner : t -> Parent.t option -> unit
val get_owner : t -> Parent.t option
val set_parent : t -> Parent.t option -> unit
val get_parent : t -> Parent.t option
type _ child =
| Child_object : t child
| Owned_object : t child
| Child_camera : camera child
val get_children : t -> 'a child -> 'a Seq.t
val get_first_child : t -> 'a child -> 'a option
val get_children_recursive : t -> t child -> t Seq.t
val iter_children_recursive : (t -> unit) -> t -> t child -> unit
val iter_recursive : (t -> unit) -> t -> t child -> unit
(** {2 Basic object properties} *)
val get_name : t -> string
val get_bounding_box : t -> Obox.t
(** {2 Clock association} *)
val set_clock : t -> Clock.t option -> Status.t
val set_clock_recursive : t -> Clock.t option -> unit
val get_clock : t -> Clock.t option
(** {2 FX} *)
val add_fx : t -> string -> Status.t
val add_fx_exn : t -> string -> unit
val add_unique_fx : t -> string -> Status.t
val add_unique_fx_exn : t -> string -> unit
val add_fx_recursive : t -> string -> float -> unit
val add_unique_fx_recursive : t -> string -> float -> unit
val remove_fx : t -> string -> Status.t
val remove_fx_exn : t -> string -> unit
val remove_fx_recursive : t -> string -> unit
val remove_all_fxs : t -> Status.t
val remove_all_fxs_exn : t -> unit
val remove_all_fxs_recursive : t -> Status.t
val remove_all_fxs_recursive_exn : t -> unit
(** {2 Shaders} *)
val add_shader : t -> string -> Status.t
val add_shader_exn : t -> string -> unit
val add_shader_recursive : t -> string -> unit
val remove_shader : t -> string -> Status.t
val remove_shader_exn : t -> string -> unit
val remove_shader_recursive : t -> string -> unit
(** {2 Placement and dimensions} *)
val get_rotation : t -> float
val set_rotation : t -> float -> unit
val get_world_position : t -> Vector.t
val set_world_position : t -> Vector.t -> unit
val get_position : t -> Vector.t
val set_position : t -> Vector.t -> unit
val get_scale : t -> Vector.t
val set_scale : t -> Vector.t -> unit
(** {2 Repetition} *)
val get_repeat : t -> float * float
val set_repeat : t -> float -> float -> unit
(** {2 Text} *)
val set_text_string : t -> string -> unit
val get_text_string : t -> string
(** {2 Lifetime} *)
val set_life_time : t -> float -> unit
val get_life_time : t -> float
val get_active_time : t -> float
(** {2 Timeline tracks} *)
val add_time_line_track : t -> string -> Status.t
val add_time_line_track_exn : t -> string -> unit
val add_time_line_track_recursive : t -> string -> unit
val remove_time_line_track : t -> string -> Status.t
val remove_time_line_track_exn : t -> string -> unit
val remove_time_line_track_recursive : t -> string -> unit
val enable_time_line : t -> bool -> unit
val is_time_line_enabled : t -> bool
(** {2 Speed} *)
val set_speed : t -> Vector.t -> unit
val get_speed : t -> Vector.t
val set_relative_speed : t -> Vector.t -> unit
val get_relative_speed : t -> Vector.t
(** {2 Physics} *)
val apply_force : ?location:Vector.t -> t -> Vector.t -> unit
val apply_impulse : ?location:Vector.t -> t -> Vector.t -> unit
val apply_torque : t -> float -> unit
val set_angular_velocity : t -> float -> unit
val get_angular_velocity : t -> float
val set_custom_gravity : t -> Vector.t option -> unit
val get_custom_gravity : t -> Vector.t option
val get_mass : t -> float
val get_mass_center : t -> Vector.t
type collision = {
colliding_object : t;
contact : Vector.t;
normal : Vector.t;
}
val raycast :
?self_flags:int ->
?check_mask:int ->
?early_exit:bool ->
Vector.t ->
Vector.t ->
collision option
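(* Raycast sketch: cast from [origin] to [target] and report the first hit.
   The collision flag name "Wall" is hypothetical; the mask is built from the
   config-defined flag via {!Physics.get_collision_flag_value}.
   {[
     let first_wall_hit ~(origin : Orx.Vector.t) ~(target : Orx.Vector.t) =
       let wall =
         Unsigned.UInt32.to_int (Orx.Physics.get_collision_flag_value "Wall")
       in
       match
         Orx.Object.raycast ~check_mask:wall ~early_exit:true origin target
       with
       | None -> None
       | Some { Orx.Object.colliding_object; contact; _ } ->
         Some (colliding_object, contact)
   ]} *)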
(** {2 Color} *)
val set_rgb : t -> Vector.t -> unit
val set_rgb_recursive : t -> Vector.t -> unit
val set_alpha : t -> float -> unit
val set_alpha_recursive : t -> float -> unit
(** {2 Animation} *)
val set_target_anim : t -> string -> Status.t
val set_target_anim_exn : t -> string -> unit
val set_target_anim_recursive : t -> string -> unit
val get_target_anim : t -> string
val set_current_anim : t -> string -> Status.t
val set_current_anim_exn : t -> string -> unit
val set_current_anim_recursive : t -> string -> unit
val get_current_anim : t -> string
(** {2 Sound} *)
val add_sound : t -> string -> Status.t
val add_sound_exn : t -> string -> unit
val remove_sound : t -> string -> Status.t
val remove_sound_exn : t -> string -> unit
val get_last_added_sound : t -> Sound.t option
val set_volume : t -> float -> unit
val set_pitch : t -> float -> unit
val set_panning : t -> float -> bool -> unit
val play : t -> unit
val stop : t -> unit
(** {2 Associated structures} *)
type 'a associated_structure =
| Body : Body.t associated_structure
| Graphic : Graphic.t associated_structure
| Sound : Sound.t associated_structure
val link_structure : t -> Structure.t -> unit
val get_structure : t -> 'a associated_structure -> 'a option
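(* Sketch tying {!get_structure} to the {!Body} and {!Body_part} modules above:
   fetch an object's physics body, if any, and give every part the same check
   mask (the mask value itself is up to the game).
   {[
     let set_check_mask_on_all_parts (o : Orx.Object.t) (mask : int) =
       match Orx.Object.get_structure o Orx.Object.Body with
       | None -> ()
       | Some body ->
         Seq.iter
           (fun part -> Orx.Body_part.set_check_mask part mask)
           (Orx.Body.get_parts body)
   ]} *)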
(** {2 Spatial selection} *)
type group =
| All_groups
| Group of string
| Group_id of String_id.t
val get_neighbor_list : Obox.t -> group -> t list
val get_group : group -> t Seq.t
val get_enabled : group -> t Seq.t
val pick : Vector.t -> group -> t option
val box_pick : Obox.t -> group -> t option
(** {2 Groups} *)
val get_default_group_id : unit -> String_id.t
val get_group_id : t -> String_id.t
val set_group_id : t -> group -> unit
val set_group_id_recursive : t -> group -> unit
(** {2 Object GUIDs} *)
val to_guid : t -> Structure.Guid.t
val get_guid : t -> Structure.Guid.t
val of_guid : Structure.Guid.t -> t option
val of_guid_exn : Structure.Guid.t -> t
(** {2 Structure conversion} *)
val of_structure : Structure.t -> t option
end
module Shader_param_type : sig
type t =
| Float
| Texture
| Vector
| Time
end
module Shader : sig
(** {1 Shaders}
From https://orx-project.org/orx/doc/html/group__orx_shader.html *)
type t
(** Runtime representation of a shader *)
val set_float_param_exn : t -> string -> float -> unit
(** [set_float_param_exn shader name value] sets the parameter [name] to
[value] for [shader]. *)
val set_vector_param_exn : t -> string -> Vector.t -> unit
(** [set_vector_param_exn shader name value] sets the parameter [name] to
[value] for [shader]. *)
val get_name : t -> string
(** [get_name shader] gets the config name for [shader]. *)
end
module Shader_pointer : sig
(** {1 Pointers to shaders}
From https://orx-project.org/orx/doc/html/group__orx_shader_pointer.html *)
type t
val get_shader : t -> int -> Shader.t option
(** [get_shader ptr index] gets the shader associated with [ptr] at index
[index]. *)
end
module Anim : sig
type t
end
module Anim_event : sig
type t =
| Start
| Stop
| Cut
| Loop
| Update
| Custom_event
val compare : t -> t -> int
type payload
(* TODO: Update this module *)
val get_animation : payload -> Anim.t
val get_name : payload -> string
(* TODO: Union values *)
(* val get_count : payload -> int *)
(* val get_time : payload -> float *)
(* val get_custom_event : payload -> Custom_event *)
end
module Config_event : sig
(** {1 Configuration events} *)
type t =
| Reload_start
| Reload_stop
val compare : t -> t -> int
end
module Fx_event : sig
(** {1 FX events} *)
type t =
| Start
| Stop
| Add
| Remove
| Loop
val compare : t -> t -> int
type payload
(** Event payload *)
val get_name : payload -> string
(** [get_name payload] is the name of the event which sent [payload]. *)
end
module Input_event : sig
(** {1 Input events} *)
type t =
| On
| Off
| Select_set
val compare : t -> t -> int
type payload
(** Event payload *)
val get_set_name : payload -> string
(** [get_set_name payload] is the input set name for the input event which
sent [payload]. *)
val get_input_name : payload -> string
(** [get_input_name payload] is the input name for the input event which sent
[payload]. *)
(** {2 Input event details}
Each of the following functions has a singular form for convenience when
inputs only have one key involved and a plural form for compound inputs
with multiple keys, buttons or axes involved.
Calling the singular form of the [get] functions below is slightly more
efficient than and functionally equivalent to using the plural form when
only a single input is involved. *)
val get_input_type : ?i:int -> payload -> Input_type.t
val get_input_types : payload -> Input_type.t array
val get_input_id : ?i:int -> payload -> int
val get_input_ids : payload -> int array
val get_input_mode : ?i:int -> payload -> Input_mode.t
val get_input_modes : payload -> Input_mode.t array
val get_input_value : ?i:int -> payload -> float
val get_input_values : payload -> float array
end
module Object_event : sig
(** {1 Object events} *)
type t =
| Create
| Delete
| Prepare
| Enable
| Disable
| Pause
| Unpause
val compare : t -> t -> int
type payload
(** Event payload *)
end
module Physics_event : sig
(** {1 Physics events} *)
type t =
| Contact_add
| Contact_remove
val compare : t -> t -> int
type payload
(** Event payload *)
val get_position : payload -> Vector.t
(** [get_position payload] is the location of the contact for the physics
event that sent [payload]. *)
val get_normal : payload -> Vector.t
(** [get_normal payload] is the normal vector of the contact for the physics
event that sent [payload]. *)
val get_sender_part : payload -> Body_part.t
(** [get_sender_part payload] is the body part which sent the event which sent
[payload]. *)
val get_recipient_part : payload -> Body_part.t
(** [get_recipient_part payload] is the body part which received the event
which sent [payload]. *)
end
module Shader_event : sig
(** {1 Shader events}
Shader events can be used to set dynamic parameters for shaders. *)
type t = Set_param
val compare : t -> t -> int
type payload
(** Event payload *)
val get_shader : payload -> Shader.t
(** [get_shader payload] is the shader associated with the event. *)
val get_shader_name : payload -> string
(** [get_shader_name payload] is the name of the shader associated with the
event. *)
val get_param_name : payload -> string
(** [get_param_name payload] is the name of the shader parameter associated
with the event. *)
val get_param_type : payload -> Shader_param_type.t
(** [get_param_type payload] is the type of the shader parameter associated
with the event. *)
val get_param_index : payload -> int
(** [get_param_index payload] is the index of the shader parameter associated
with the event. *)
val set_param_float : payload -> float -> unit
(** [set_param_float payload v] sets the shader parameter for this event to
[v]. *)
val set_param_vector : payload -> Vector.t -> unit
(** [set_param_vector payload v] sets the shader parameter for this event to
[v]. *)
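(* Sketch of answering [Set_param] events to drive a dynamic shader parameter.
   The parameter name "time" is hypothetical, [elapsed] would be advanced from
   a clock callback, {!Event.add_handler} is declared further below, and
   {!Status.ok} is assumed to be the success value of {!Status.t}.
   {[
     let elapsed = ref 0.0

     let install_shader_clock () =
       Orx.Event.add_handler Orx.Event.Event_type.Shader
         (fun _event _set_param payload ->
           if String.equal (Orx.Shader_event.get_param_name payload) "time" then
             Orx.Shader_event.set_param_float payload !elapsed;
           Orx.Status.ok)
   ]} *)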
end
module Sound_event : sig
(** {1 Sound events} *)
type t =
| Start
| Stop
| Add
| Remove
val compare : t -> t -> int
type payload
(** Event payload *)
val get_sound : payload -> Sound.t
(** [get_sound payload] is the sound associated with this event. *)
end
module Time_line_event : sig
(** {1 Time line track events} *)
type t =
| Track_start
| Track_stop
| Track_add
| Track_remove
| Loop
| Trigger
val compare : t -> t -> int
type payload
(** Event payload *)
val get_track_name : payload -> string
(** [get_track_name payload] is the config name of the track associated with
the event. *)
val get_event : payload -> string
(** [get_event payload] is the event text associated with the event. *)
val get_time_stamp : payload -> float
(** [get_time_stamp payload] is the time associated with the event. *)
end
module Event : sig
(** {1 Events} *)
type t
(** Engine events *)
module Event_type : sig
type ('event, 'payload) t =
| Anim : (Anim_event.t, Anim_event.payload) t
| Fx : (Fx_event.t, Fx_event.payload) t
| Input : (Input_event.t, Input_event.payload) t
| Object : (Object_event.t, Object_event.payload) t
| Physics : (Physics_event.t, Physics_event.payload) t
| Shader : (Shader_event.t, Shader_event.payload) t
| Sound : (Sound_event.t, Sound_event.payload) t
| Time_line : (Time_line_event.t, Time_line_event.payload) t
type any = Any : (_, _) t -> any
end
val to_type : t -> Event_type.any
val to_event : t -> ('event, _) Event_type.t -> 'event
val get_sender_object : t -> Object.t option
(** [get_sender_object t] is the sending object for the event [t] if there is
one. *)
val get_recipient_object : t -> Object.t option
(** [get_recipient_object t] is the receiving object for the event [t] if
there is one. *)
val get_sender_structure : t -> Structure.t option
(** [get_sender_structure t] is the sending structure for the event [t] if
there is one. *)
val get_recipient_structure : t -> Structure.t option
(** [get_recipient_structure t] is the receiving structure for the event [t]
if there is one. *)
module Handle : sig
(** {1 Callback/handler handles}
Handles track registered callbacks/handlers so they can be explicitly
released. *)
type t
(** Handle for tracking callbacks/handlers *)
val default : t
(** Default handle when none is specified *)
val make : unit -> t
(** [make ()] is a fresh handle with no associated callbacks/handlers *)
end
val add_handler :
?handle:Handle.t ->
?events:'event list ->
('event, 'payload) Event_type.t ->
(t -> 'event -> 'payload -> Status.t) ->
unit
(** [add_handler ?events event_type handler_callback] associates
[handler_callback] with [events] from [event_type].
@param events defaults to all events matching [event_type]. *)
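(* Registration sketch: log every new physics contact. {!Status.ok} is assumed
   to be the success value of {!Status.t} and the {!Vector.get_x}/{!Vector.get_y}
   accessors are assumed to exist on the vector module declared earlier.
   {[
     let watch_contacts () =
       Orx.Event.add_handler Orx.Event.Event_type.Physics
         ~events:[ Orx.Physics_event.Contact_add ]
         (fun event _contact_add payload ->
           let pos = Orx.Physics_event.get_position payload in
           let name =
             match Orx.Event.get_sender_object event with
             | Some o -> Orx.Object.get_name o
             | None -> "<unknown>"
           in
           Printf.printf "%s contact at (%g, %g)\n" name
             (Orx.Vector.get_x pos) (Orx.Vector.get_y pos);
           Orx.Status.ok)
   ]} *)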
val remove_handler : (_, _) Event_type.t -> Handle.t -> unit
(** [remove_handler event_type handle] removes and releases all handlers for
[event_type] associated with [handle]. *)
val remove_all_handlers : (_, _) Event_type.t -> unit
(** [remove_all_handlers event_type] removes and releases all handlers for
[event_type]. *)
end
module Camera : sig
(** {1 In-game cameras}
From https://orx-project.org/orx/doc/html/group__orx_camera.html *)
type t = camera
(** Game camera *)
val create_from_config : string -> t option
(** [create_from_config section] creates the camera under config [section] if
[section] exists and correctly defines a camera. *)
val create_from_config_exn : string -> t
(** [create_from_config section] creates the camera under config [section].
@raise Invalid_argument
if [section] does not exist or does not correctly define a camera. *)
val get : string -> t option
(** [get name] gets the camera [name] if one exists. *)
val get_name : t -> string
(** [get_name camera] is the name of [camera]. *)
val get_parent : t -> Parent.t option
(** [get_parent camera] gets the parent of [camera] if it has one. *)
val set_parent : t -> Parent.t option -> unit
(** [set_parent camera parent] sets the parent of [camera] to [parent]. If
[parent] is [None] then the parent is [cleared]. *)
val get_position : t -> Vector.t
(** [get_position camera] is the position of [camera]. *)
val set_position : t -> Vector.t -> unit
(** [set_position camera pos] sets [camera]'s position to [pos]. *)
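(* Sketch: keep a camera centered on an object by copying the object's world
   position once per frame, e.g. from a clock callback.
   {[
     let follow (camera : Orx.Camera.t) (target : Orx.Object.t) =
       Orx.Camera.set_position camera (Orx.Object.get_world_position target)
   ]} *)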
val get_rotation : t -> float
(** [get_rotation camera] is the rotation of [camera] in radians. *)
val set_rotation : t -> float -> unit
(** [set_rotation camera angle] sets the rotation of [camera] to [angle].
@param angle Angle in radians *)
val get_zoom : t -> float
(** [get_zoom camera] is the zoom multiplier for [camera]. *)
val set_zoom : t -> float -> unit
(** [set_zoom camera zoom] sets [camera]'s zoom multiplier to [zoom]. *)
val set_frustum :
t -> width:float -> height:float -> near:float -> far:float -> unit
(** [set_frustum camera ~width ~height ~near ~far] sets the frustum - the
visible volume - for [camera]. *)
end
module Viewport : sig
(** {1 Game world viewports}
From https://orx-project.org/orx/doc/html/group__orx_viewport.html *)
type t
(** Viewport *)
val create_from_config : string -> t option
(** [create_from_config section] creates the viewport under config [section]
if [section] exists and correctly defines a viewport. *)
val create_from_config_exn : string -> t
(** [create_from_config section] creates the viewport under config [section].
@raise Invalid_argument
if [section] does not exist or does not correctly define a viewport. *)
val get_camera : t -> Camera.t option
(** [get_camera viewport] is the camera associated with [viewport] if one
exists. *)
val get_shader_pointer : t -> Shader_pointer.t option
(** [get_shader_pointer viewport] is the shader pointer associated with
[viewport] if one exists. *)
val get_shader_exn : ?index:int -> t -> Shader.t
(** [get_shader_exn ?index viewport] is the shader associated with [viewport]. *)
val get_name : t -> string
(** [get_name viewport] is the name of [viewport]. *)
val get : string -> t option
(** [get name] is the viewport associated with [name] if one exists. *)
val get_exn : string -> t
(** [get_exn name] is the viewport associated with [name].
@raise Invalid_argument if [name] does not match a valid viewport. *)
val of_structure : Structure.t -> t option
(** [of_structure s] casts a {!t} from [s] if [s] is a viewport. *)
end
module Render : sig
(** {1 Rendering}
From https://orx-project.org/orx/doc/html/group__orx_render.html *)
val get_world_position : Vector.t -> Viewport.t -> Vector.t option
(** [get_world_position screen_position viewport] is the world position
matching [screen_position] in [viewport] if [screen_position] falls within
the display surface. Otherwise, [None]. *)
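(* Picking sketch: find the object under the mouse cursor, assuming
   {!Mouse.get_position} (declared earlier in this interface) returns the
   screen position as an option.
   {[
     let object_under_mouse (viewport : Orx.Viewport.t) : Orx.Object.t option =
       match Orx.Mouse.get_position () with
       | None -> None
       | Some screen ->
         (match Orx.Render.get_world_position screen viewport with
         | None -> None
         | Some world -> Orx.Object.pick world Orx.Object.All_groups)
   ]} *)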
val get_screen_position : Vector.t -> Viewport.t -> Vector.t option
(** [get_screen_position world_position viewport] is the screen position
matching [world_position] in [viewport] if [world_position] is found. The
result may be offscreen. Otherwise, [None]. *)
end
module Config : sig
(** {1 Config values} *)
module Value : sig
(** {1 Config convenience get/set functions} *)
type _ t =
| String : string t
| Int : int t
| Float : float t
| Bool : bool t
| Vector : Vector.t t
| Guid : Structure.Guid.t t
val to_string : _ t -> string
val to_proper_string : _ t -> string
val set : 'a t -> 'a -> section:string -> key:string -> unit
(** [set value_type value ~section ~key] sets the config [section] [key] to
[value].
@param value_type
indicates the type of value to store under [section] [key] *)
val get : 'a t -> section:string -> key:string -> 'a
(** [get value_type ~section ~key] is the value under [section] [key]. *)
val find : 'a t -> section:string -> key:string -> 'a option
(** [find value_type ~section ~key] is the value under [section] [key] if it
exists, else [None]. *)
val clear : section:string -> key:string -> unit
(** [clear ~section ~key] clears any value under [section] [key]. *)
val update :
'a t -> ('a option -> 'a option) -> section:string -> key:string -> unit
(** [update value_type f ~section ~key] sets [section] [key] to
[f old_value]. If [f old_value] is [None] then the value is cleared. *)
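(* Typed access sketch: keep the best of an old and a new score under a
   hypothetical "Save" section and "HighScore" key.
   {[
     let record_score (score : int) =
       Orx.Config.Value.update Orx.Config.Value.Int
         (fun previous ->
           match previous with
           | Some best when best >= score -> Some best
           | _ -> Some score)
         ~section:"Save" ~key:"HighScore"
   ]} *)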
end
val set_basename : string -> unit
(** [set_basename name] sets [name] as the base name for the default config
file. *)
val load : string -> Status.t
(** [load name] loads config from the file [name]. *)
val load_from_memory : string -> Status.t
(** [load_from_memory config] loads config from the config in buffer [config]. *)
val push_section : string -> unit
(** [push_section section] pushes [section] to the top of the active section
stack. *)
val pop_section : unit -> unit
(** [pop_section ()] pops the top active section from the active section
stack. *)
val get_current_section : unit -> string
(** [get_current_section ()] is the currently active config section. *)
val select_section : string -> unit
(** [select_section section] makes [section] the currently active config
section without modifying the stack. *)
val get_section_count : unit -> int
(** [get_section_count ()] gets the total number of config sections. *)
val get_section : int -> string
(** [get_section i] gets the name of the section at index [i]. *)
val get_key_count : unit -> int
(** [get_key_count ()] gets the number of keys from the current section. *)
val get_key : int -> string
(** [get_key i] gets the key at index [i] from the current section. *)
val get_parent : string -> string option
(** [get_parent section] gets the parent of [section] if it has one. *)
val has_section : string -> bool
(** [has_section name] indicates if [name] exists as a config section. *)
val has_value : string -> bool
(** [has_value name] indicates if [name] is a key in the current config
section. *)
val clear_section : string -> Status.t
(** [clear_section name] clears the section [name]. *)
val clear_value : string -> Status.t
(** [clear_value key] clears [key] from the currently active section. *)
(** {2 Get/set values in the current section} *)
val get_string : string -> string
val set_string : string -> string -> unit
val get_bool : string -> bool
val set_bool : string -> bool -> unit
val get_float : string -> float
val set_float : string -> float -> unit
val get_int : string -> int
val set_int : string -> int -> unit
val get_vector : string -> Vector.t
val set_vector : string -> Vector.t -> unit
val get_list_vector : string -> int option -> Vector.t
val set_list_string : string -> string list -> unit
val append_list_string : string -> string list -> unit
val if_has_value : string -> (string -> 'a) -> 'a option
(** [if_has_value key getter] is [Some (getter key)] if [key] exists in the
currently selected config section or [None] if [key] does not exist in the
current section. *)
val exists : section:string -> key:string -> bool
(** [exists ~section ~key] is [true] if [key] exists in [section]. *)
val get : (string -> 'a) -> section:string -> key:string -> 'a
val set : (string -> 'a -> unit) -> 'a -> section:string -> key:string -> unit
val get_seq : (string -> 'a) -> section:string -> key:string -> 'a Seq.t
(** [get_seq getter ~section ~key] is a sequence of values pulled repeatedly
from the same [section] and [key].
If the values are random then a new random value will be returned for
every element of the sequence.
If the [section] and [key] represent a constant value then the sequence
will return the same value for every element.
If [section] and [key] do not exist then the result will be [Seq.empty]. *)
val get_list_item :
(string -> int option -> 'a) ->
int option ->
section:string ->
key:string ->
'a
val get_list :
(string -> int option -> 'a) -> section:string -> key:string -> 'a list
val is_list : string -> bool
val get_sections : unit -> string list
(** [get_sections ()] is all section names defined in config. *)
val get_current_section_keys : unit -> string list
(** [get_current_section_keys ()] is all keys in the active section. *)
val get_section_keys : string -> string list
(** [get_section_keys section] is all the keys in [section]. *)
val get_guid : string -> Structure.Guid.t
val set_guid : string -> Structure.Guid.t -> unit
val with_section : string -> (unit -> 'a) -> 'a
(** [with_section section f] calls [f ()] with [section] as the active
section, then restores the previously active section. *)
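(* Sketch: read a couple of keys from one section without disturbing the
   section stack; "Ball", "Speed" and "Direction" are hypothetical names.
   {[
     let ball_settings () =
       Orx.Config.with_section "Ball" (fun () ->
           let speed = Orx.Config.get_float "Speed" in
           let direction = Orx.Config.get_vector "Direction" in
           (speed, direction))
   ]} *)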
val set_bootstrap : (unit -> Status.t) -> unit
(** [set_bootstrap f] sets [f] as the config bootstrap function. *)
end
module Command : sig
(** {1 Define and run custom engine commands} *)
module Var_def : sig
(** {1 Command variable definitions} *)
type t
(** Command variable definition *)
val make : string -> _ Config.Value.t -> t
(** [make name value_type] creates a new {!t} named [name] of type
[value_type]. *)
end
module Var : sig
(** {1 Command variables} *)
type t
(** Command variable *)
val make : 'a Config.Value.t -> 'a -> t
(** [make value_type value] creates a command variable containing [value]. *)
val set : t -> 'a Config.Value.t -> 'a -> unit
(** [set v value_type value] sets [v] to [value]. *)
val get : t -> 'a Config.Value.t -> 'a
(** [get v value_type] is the value in [v]. *)
end
val register :
string ->
(Var.t array -> Var.t -> unit) ->
Var_def.t list * Var_def.t list ->
Var_def.t ->
Status.t
val register_exn :
string ->
(Var.t array -> Var.t -> unit) ->
Var_def.t list * Var_def.t list ->
Var_def.t ->
unit
val unregister : string -> Status.t
val unregister_exn : string -> unit
val unregister_all : unit -> unit
(** [unregister_all ()] will unregister all custom orx commands registered
from OCaml. *)
val is_registered : string -> bool
val evaluate : string -> Var.t option
val evaluate_with_guid : string -> Structure.Guid.t -> Var.t option
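(* Registration sketch for a small custom command, assuming the first list in
   the pair holds required parameter definitions and the second optional ones;
   "AddInts", "A", "B" and "Sum" are hypothetical names.
   {[
     let register_add_ints () =
       Orx.Command.register_exn "AddInts"
         (fun args return ->
           let a = Orx.Command.Var.get args.(0) Orx.Config.Value.Int in
           let b = Orx.Command.Var.get args.(1) Orx.Config.Value.Int in
           Orx.Command.Var.set return Orx.Config.Value.Int (a + b))
         ( [ Orx.Command.Var_def.make "A" Orx.Config.Value.Int;
             Orx.Command.Var_def.make "B" Orx.Config.Value.Int ],
           [] )
         (Orx.Command.Var_def.make "Sum" Orx.Config.Value.Int)
   ]} *)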
end
module Orx_thread : sig
(** {1 OCaml support for orx's threading} *)
val set_ocaml_callbacks : unit -> unit
(** [set_ocaml_callbacks ()] initializes the support necessary to have OCaml
play well with callbacks from other orx threads. This is currently only
required when manipulating audio in OCaml callbacks from audio packet
events. *)
end
module Main : sig
val execute :
init:(unit -> Status.t) ->
run:(unit -> Status.t) ->
exit:(unit -> unit) ->
unit ->
unit
(** [execute ~init ~run ~exit ()] starts the Orx engine loop.
Many games will be able to use {!start} instead of [execute] for slightly
simpler application code. *)
val start :
?config_dir:string ->
?exit:(unit -> unit) ->
init:(unit -> (unit, [ `Orx ]) result) ->
run:(unit -> (unit, [ `Orx ]) result) ->
string ->
unit
(** [start ?config_dir ?exit ~init ~run name] starts the Orx engine loop.
[start] automates a few common steps a game will often need when getting
ready to call {!execute}. [start] defines a custom bootstrap function to
specify where the game engine configuration resides and calls
{!Config.set_basename} with [name] to define the root configuration file
for a game.
@param config_dir
specifies the directory holding engine configuration files. The current
working directory will be used if this is not provided.
@param exit
specifies a function to be run when the engine loop exits. It can be
used to clean up game data which is not managed by or within the game
engine.
@param init
specifies a function to run after the engine has initialized and before
the game loop begins.
@param run specifies a function that will be run once per frame.
@param name
specifies the name of the root configuration file without an extension. *)
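(* A minimal entry point built on [start]; "my_game" is a hypothetical root
   config name and the "Viewport" section is assumed to exist in that config.
   {[
     let () =
       Orx.Main.start
         ~init:(fun () ->
           match Orx.Viewport.create_from_config "Viewport" with
           | Some _viewport -> Ok ()
           | None -> Error `Orx)
         ~run:(fun () -> Ok ())
         "my_game"
   ]} *)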
end
| null | https://raw.githubusercontent.com/orx/ocaml-orx/169db0178c7bdce8e4e916050150aa3323760c25/src/lib/orx.mli | ocaml | * Errors are all grouped as [`Orx].
* Status of a side effect only operation.
* Success!
* Not success!
* Convenience function to open the [`Orx] type to make composing results
from other libraries easier.
* [get result] is [()] if [result] is [Ok ()], otherwise it raises
{!Invalid_argument}.
* [get_ok result] is [o] if [result] is [Ok o], otherwise it raises
{!Invalid_argument}.
* [ignore result] is {!Stdlib.ignore} constrained for more precise type
checking.
* {!Logging using the orx engine's logger}
These functions use orx's logging functionality. Log output will only be
shown when building against a debug build of orx. Release builds disable
logging.
* Log with output going to the terminal.
* {1 Locally unique IDs for registered strings}
* A unique ID for a registered string.
* ID used to represent undefined/unregistered strings.
* [get_id s] registers [s] if it has not been registered already and returns
the ID associated with [s].
* [get_from_id id] returns the string associated with [id]. If no string is
associated with [id] then the return value is an empty string.
* Possible parents
* A structure
* {1 Unique IDs for structures}
These IDs are unique for a single process. They are a safe way to track
stuctures such as objects across time.
* Equality for {!t} values.
* Pretty-printer for {!t} values with an unspecified respresentation.
* [to_string id] is a string representation of [id]. It can be used for
logging, storing in config, commands or anywhere else a {!t} value might
be persisted.
* Pretty-printer for vector values
* Equality in [(x, y)] only. [z] is ignored.
* [get_y v] is the [y] element of [v].
* [get_z v] is the [z] element of [v].
* [get_size v] is the vector magnitude of [v].
* [make ~x ~y ~z] is the vector [(x, y, z)].
* [set_x v x'] modifies [v] in place by assigning the magnitude of [v]'s [x]
as [x'].
* [set_y v y'] modifies [v] in place by assigning the magnitude of [v]'s [y]
as [y'].
* Type of an oriented box
* [make ~pos ~pivot ~size ~angle] creates an oriented box with the given
specs.
* [set_2d obox ~pos ~pivot ~size ~angle] sets [obox] to the given specs.
* [copy obox] is a fresh {!t} with the same specs as [obox].
* [get_center obox] is the center of [obox].
* [rotate_2d obox angle] is a fresh {!t} which is [obox] rotated by [angle].
[angle] is in radiians.
* [rotate_2d' obox angle] rotates [obox] by [angle]. [angle] is in radiians.
* [is_inside obox vec] is [true] if [vec] is inside [obox].
* {1 Clock callback priorities}
From -project.org/orx/doc/html/group__orx_clock.html
* A game clock. Clocks are unique by name within a process.
* Clock information passed to a clock's callback function
* Tick size of the clock associated with this clock info
* Time since the last clock tick
* Current overall time for a clock
* Get the clock associated with a clock info value
* Equality for {!t} values
* [create_from_config name] creates and returns the clock under config
section [name], or [None] if a valid clock is not defined under [name].
* [create_from_config_exn name] creates and returns the clock under config
section [name].
@raise Invalid_argument if a valid clock is not defined under [name].
* [create tick_size] creates a new clock with [tick_size] defined in
seconds.
* [get name] gets the clock named [name] if it exists.
* [get_exn name] gets the clock named [name].
@raise Invalid_argument if [name] is not a valid clock.
* {1 Callback handles}
Callbacks are associated with handles. These handles may be used to
unregister callbacks associated with them.
* The default handle for all callbacks registered to a clock without an
explicitly provided callback handle.
* [make ()] is a fresh callback handle with no callbacks associated with
it.
* [register ?handle ?module_id ?priority clock callback] registers
[callback] so that it will be called on each tick of [clock].
@param handle
Can be provided when a callback should not exist for the entire
remaining lifetime of a clock, allowing callbacks to be unregistered.
Defaults to {!Callback_handle.default}.
@param orx_module
ID of the module related to this callback. Defaults to
{!Module_id.Main}.
@param priority
Priority of callback. Defaults to {!Clock_priority.Normal}.
* [unregister clock handle] unregisters all callbacks associated with
[clock] and [handle].
* [unregister_all clock] unregisters all callbacks associated with [clock].
* {1 Callback handles}
Callbacks are associated with handles. These handles may be used to
unregister callbacks associated with them.
* The default handle for all callbacks registered to a clock without an
explicitly provided callback handle.
* [make ()] is a fresh callback handle with no callbacks associated with
it.
* [add_timer ?handle clock callback delay repetition] registers [callback]
with [clock] as a timer callback.
@param delay Specifies the delay between calls to [callback]
@param repetition
Specifies the number of times [callback] should be called before it's
deregistered. Use [-1] to specify that [timer] should keep being called
forever.
* [remove_timer clock handle] removes the timers associated with [handle]
from [clock]. Timers with a finite number of repetitions will be
automatically removed once they have run out of repetitions.
* [remove_all_timers clock] removes all timers associated with [clock].
* {1 Textures for in game graphics}
From -project.org/orx/doc/html/group__orx_texture.html
* A single texture
* [load path keep_in_cache] creates a texture from the file at [path].
@param keep_in_cache
Specifies if a texture should be kept active in orx's cache even when
there are no more active references to it.
* [delete texture] deletes [texture].
* [clear_cache ()] will clear any unreferenced textures from orx's cache.
* [get_size texture] retrieves the dimensions of [texture].
* {1 Graphic module for 2D graphics}
From -project.org/orx/doc/html/group__orx_graphic.html
* An in engine graphic
* [create ()] creates a fresh graphic.
* [create_from_config section_name] createes the graphic defined under
[section_name] in config if it's properly defined.
* A sound
* [create_from_config section_name] creates a sound from the configuration
in [section_name] from config if it defines a valid sound.
* [get_status sound] is the playback status of [sound].
* [add_storage group description add_first] adds [description] as a storage
source for [group]. Storage sources depend on the type of storage being
used. By default this will be a filesystem path, but other resource
systems can be defined and used with orx.
@param add_first
If [true] then [description] will be checked before all previously
defined storage systems. If [false] then [description] will be checked
after.
* [remove_storage group description] removes [description] from [group].
@param group
If [group] is [None] then [description] will be removed from all groups.
@param description
If [description] is [None] then all storages will be removed from
[group].
* [reload_storage ()] forces orx to reload all storages from config.
* [sync group] synchronizes all storages associated with [group] with their
source material.
@param group
If [group] is [None] then all resource groups are synchronized.
* [get_position ()] is the current mouse screen position.
* [get_position_exn ()] is the current mouse screen position.
@raise Invalid_argument if no mouse position can be retrieved.
* [get_move_delta ()] is the position change since the last call to this
function.
* [get_wheel_delta ()] is the position change since the last call to this
function.
* [get_button_name button] is a canonical name for [button] if one exists.
* [get_axis_name axis] is a canonical name for [axis] if one exists.
* [is_active input] is [true] if [input] is currently active.
* [has_new_status input] is [true] if [input] has changed status since the
last time it was checked.
* [has_been_activated input] is [true] if [input] has been activated since
the last time it was checked.
* [has_been_deactivated input] is [true] if [input] has been deactivated
since the last time it was checked.
* [get_binding input index] gives information on [input]'s type and mode.
* [get_binding_name input_type binding_id mode] give the name associated
with [input_type], [binding_id] and [mode].
* [get_current_set ()] returns the currently selected input set.
* [select_set input_set] selects [input_set] as the currently active input
set.
* [is_set_enabled input_set] is [true] if [input_set] is enabled, otherwise
[false].
* [check_collision_flag ~mask ~flag] indicates if [mask] and [flag] would
collide.
* [get_gravity ()] is the current world gravity.
* [set_gravity v] sets the current world gravity to [v].
* [enable_simulation enabled] enables or disables the world physics
simulation. Can be used when the game simulation is paused, for example.
* A single body part
* [set_self_flags part flags] sets the collision flags for [part] to
[flags].
* [get_self_flags part] is the current collision flags for [part].
* [set_check_mask part mask] sets the check mask for [part].
* [get_check_mask part] is the current check mask for [part].
* A single physics body
* {1 Objects in the orx engine world}
From -project.org/orx/doc/html/group__orx_object.html
* An Orx object
* Object equality
** {2 Shaders}
* {2 Speed}
* {2 Physics}
* {2 Color}
* {2 Animation}
* {2 Sound}
* {1 Shaders}
From -project.org/orx/doc/html/group__orx_shader.html
* Runtime representation of a shader
* [set_float_param_exn shader name value] sets the parameter [name] to
[value] for [shader].
* [set_vector_param_exn shade name value] sets the parameter [name] to
[value] for [shader].
* {1 Pointers to shaders}
From -project.org/orx/doc/html/group__orx_shader_pointer.html
* [get_shader ptr index] gets the shader associated with [ptr] at index
[index].
TODO: Update this module
TODO: Union values
val get_count : payload -> int
val get_time : payload -> float
val get_custom_event : payload -> Custom_event
* Event payload
* {1 Input events}
* Event payload
* [get_set_name payload] is the input set name for the input event which
sent [payload].
* [get_input_name payload] is the input name for the input event which sent
[payload].
* Event payload
* {1 Physics events}
* Event payload
* [get_position payload] is the location of the contact for the physics
event that sent [payload].
* [get_normal payload] is the normal vector of the contact for the physics
event that sent [payload].
* [get_sender_part payload] is the body part which sent the event which sent
[payload].
* {1 Shader events}
Shader events can be used to set dynamic parameters for shaders.
* Event payload
* [get_shader payload] is the shader associated with the event.
* [get_shader_name payload] is the name of the shader associated with the
event.
* [get_param_name payload] is the name of the shader parameter associated
with the event.
* [get_param_index payload] is the index of the shader parameter associated
with the event.
* [set_param_float payload v] sets the shader parameter for this event to
[v].
* [set_param_vector payload v] sets the shader parameter for this event to
[v].
* Event payload
* [get_sound payload] is the sound associated with this event.
* Event payload
* [get_track_name payload] is the config name of the track associated with
the event.
* [get_event payload] is the event text associated with the event.
* [get_time_stamp payload] is the time associated with the event.
* {1 Events}
* Engine events
* [get_sender_object t] is the sending object for the event [t] if there is
one.
* [get_recipient_object t] is the receiving object for the event [t] if
there is one.
* [get_sender_structure t] is the sending structure for the event [t] if
there is one.
* [get_recipient_structure t] is the receiving structure for the event [t]
if there is one.
* {1 Callback/handler handles}
Handles track registered callbacks/handlers so they can be explicitly
released.
* Handle for tracking callbacks/handlers
* Default handle when none is specified
* [make ()] is a fresh handle with no associated callbacks/handlers
* [add_handler ?events event_type handler_callback] associates
[handler_callback] with [events] from [event_type].
@param events defaults to all events matching [event_type].
* [remove_handler event_type handle] removes and releases all handlers for
[event_type] associated with [handler].
* [remove_all_handlers event_type] removes and releases all handlers for
[event_type].
* Game camera
* [create_from_config section] creates the camera under config [section] if
[section] exists and correctly defines a camera.
* [create_from_config section] creates the camera under config [section].
@raise Invalid_argument
if [section] does not exist or does not correctly define a camera.
* [get name] gets the camera [name] if one exists.
* [get_parent camera] gets the parent of [camera] if it has one.
* [set_parent camera parent] sets the parent of [camera] to [parent]. If
[parent] is [None] then the parent is [cleared].
* [get_position camera] is the position of [camera].
* [set_position camera pos] sets [camera]'s position to [pos].
* [get_rotation camera] is the rotation of [camera] in radians.
* [set_rotation camera angle] sets the rotation of [camera] to [angle].
@param angle Angle in radians
* Viewport
* [create_from_config section] creates the viewport under config [section]
if [section] exists and correctly defines a viewport.
* [create_from_config section] creates the viewport under config [section].
@raise Invalid_argument
if [section] does not exist or does not correctly define a viewport.
* [get_shader_pointer viewport] is the shader pointer associated with
[viewport] if one exists.
* [get_shader_exn ?index viewport] is the shader associated with [viewport].
* [get name] is the viewport associated with [name] if one exists.
* [get_exn name] is the viewport associated with [name].
@raise Invalid_argument if [name] does name match a valid viewport.
* [of_structure s] casts a {!t} from [s] if [s] is a viewport.
* {1 Rendering}
From -project.org/orx/doc/html/group__orx_render.html
* [get_world_position screen_position viewport] is the world position
matching [screen_position] in [viewport] if [screen_position] falls within
the display surface. Otherwise, [None].
* [get_screen_position world_position viewport] is the screen position
matching [world_position] in [viewport] if [world_position] is found. The
result may be offscreen. Otherwise, [None].
* [set value_type value ~section ~key] sets the config [section] [key] to
[value].
@param value_type
indicates the type of value to store under [section] [key]
* [get value_type ~section ~key] is the value under [section] [key].
* [find value_type ~section ~key] is the value under [section] [key] if it
exists, else [None].
* [clear ~section ~key] clears any value under [section] [key].
* [update value_type f ~section ~key] sets [section] [key] to
[f old_value]. If [f old_value] is [None] then the value is cleared.
* [set_basename name] set [name] as the base name for the default config
file.
* [load name] loads config from the file [name].
* [load_from_memory config] loads config from the config in buffer [config].
* [push_section section] pushes [section] to the top of the active section
stack.
* [pop_section section] pops the top active section from the active section
stack.
* [get_current_section ()] is the currently active config section.
* [select_section section] makes [section] the currently active config
section without modifying the stack.
* [get_section_count ()] gets the total number of config sections.
* [get_section i] gets the name of the section at index [i].
* [get_key_count ()] gets the number of keys from the current section.
* [get_key i] gets the key at index [i] from the current section.
* [get_parent section] gets the parent of [section] if it has one.
* [has_section name] indicates if [name] exists as a config section.
* [has_value name] indicates if [name] is a key in the current config
section.
* [clear_section name] clears the section [name].
* [clear_value key] clears [key] from the currently active section.
* [if_has_value key getter] is [Some (getter key)] if [key] exists in the
currently selected config section or [None] if [key] does not exist in the
current section.
* [exists ~section ~key] is [true] if [key] exists in [section].
* [get_seq getter ~section ~key] is a sequence of values pulled repeatedly
from the same [section] and [key].
If the values are random then a new random value will be returned for
every element of the sequence.
If the [section] and [key] represent a constant value then the sequence
will return the same value for every element.
If [section] and [key] do not exist then the result will be [Seq.empty].
* [get_sections ()] is all section names defined in config.
* [get_current_section_keys ()] is all keys in the active section.
* [get_section_keys section] is all the keys in [section].
* [with_section section f] calls [f ()] with [section] as the active
section, then restores the previously active section.
* [set_bootstrap f] sets [f] as the config bootstrap function.
* {1 Define and run custom engine commands}
* Command variable definition
* [make name value_type] creates a new {!t} named [name] of type
[value_type].
* {1 Command variables}
* Command variable
* [make value_type value] creates a command variable containing [value].
* [get v value_type] is the value in [v].
* [unregister_all ()] will unregister all custom orx commands registered
from OCaml.
* {1 OCaml support for orx's threading}
* [set_ocaml_callbacks ()] initializes the support necessary to have OCaml
play well with callbacks from other orx threads. This is currently only
required when manipulating audio in OCaml callbacks from audio packet
events.
* [execute ~init ~run ~exit ()] starts the Orx engine loop.
Many games will be able to use {!start} instead of [execute] for slightly
simpler application code. | type camera
type obj
module Status : sig
* { 1 Specialization of { ! } values for orx }
type 'ok result = ('ok, [ `Orx ]) Stdlib.result
type t = unit result
val ok : t
val error : t
val open_error : 'ok result -> ('ok, [> `Orx ]) Stdlib.result
val get : t -> unit
val get_ok : 'ok result -> 'ok
val ignore : t -> unit
end
module Log : sig
type 'a format_logger =
('a, Format.formatter, unit, unit, unit, unit) format6 -> 'a
* All formatting functions act as standard { ! . Format } formatters .
val log : 'a format_logger
* Log with output going to all of 's log targets .
val terminal : 'a format_logger
val file : 'a format_logger
* Log with output going to 's log file(s ) .
val console : 'a format_logger
* Log with output going to the console .
end
module String_id : sig
type t
val undefined : t
val get_id : string -> t
val get_from_id : t -> string
end
module Parent : sig
* { 1 Parent values for nesting structures }
type t =
| Camera of camera
| Object of obj
end
module Structure : sig
* { 1 General orx engine structures }
From -project.org/orx/doc/html/group__orx_structure.html
From -project.org/orx/doc/html/group__orx_structure.html *)
type t
module Guid : sig
type t
val compare : t -> t -> int
* A total order comparison for { ! t } values . The actual order does not hold
important semantic meaning but this does allow for easy use of
{ ! . Set . Make } and { ! . Map . Make } .
important semantic meaning but this does allow for easy use of
{!Stdlib.Set.Make} and {!Stdlib.Map.Make}. *)
val equal : t -> t -> bool
val pp : Format.formatter -> t -> unit
val to_string : t -> string
val of_string : string -> t
* [ of_string s ] is { ! t } parsed from [ s ] .
@raise Failure
if [ s ] is not a valid { ! t } . Note that a valid { ! t } does not
necessarily mean that value is an active GUID in the current orx
session .
@raise Failure
if [s] is not a valid {!t}. Note that a valid {!t} does not
necessarily mean that value is an active GUID in the current orx
session. *)
end
end
module Vector : sig
* { 1 Three dimensional vectors }
From -project.org/orx/doc/html/group__orx_vector.html
From -project.org/orx/doc/html/group__orx_vector.html *)
type t
* A three dimensional [ ( x , y , z ) ] vector
val pp : Format.formatter -> t -> unit
val equal : t -> t -> bool
* Equality across all three dimensions
val equal_2d : t -> t -> bool
val get_x : t -> float
* [ get_x v ] is the [ x ] element of [ v ] .
val get_y : t -> float
val get_z : t -> float
val get_size : t -> float
val make : x:float -> y:float -> z:float -> t
val set_x : t -> float -> unit
val set_y : t -> float -> unit
val set_z : t -> float -> unit
* [ set_z v z ' ] modifies [ v ] in place by assigning the magnitude of [ v ] 's [ z ]
as [ z ' ] .
as [z']. *)
* { 2 Vector operations }
Each of the following operations has a [ f ] and [ f ' ] form . The [ f ] form
returns a freshly allocated vector with the result of the specified
operation . The [ f ' ] form takes a [ target ] which will be modified to
contain the results of the operation performed by [ f ' ] .
In the case of [ f ' ] functions , the target and source vector can be the
same value , in which case the source vector will be modified in place .
Each of the following operations has a [f] and [f'] form. The [f] form
returns a freshly allocated vector with the result of the specified
operation. The [f'] form takes a [target] which will be modified to
contain the results of the operation performed by [f'].
In the case of [f'] functions, the target and source vector can be the
same value, in which case the source vector will be modified in place. *)
val copy' : target:t -> t -> unit
val copy : t -> t
val normalize' : target:t -> t -> unit
val normalize : t -> t
val reciprocal' : target:t -> t -> unit
val reciprocal : t -> t
val round' : target:t -> t -> unit
val round : t -> t
val floor' : target:t -> t -> unit
val floor : t -> t
val neg' : target:t -> t -> unit
val neg : t -> t
val add' : target:t -> t -> t -> unit
val add : t -> t -> t
val sub' : target:t -> t -> t -> unit
val sub : t -> t -> t
val mul' : target:t -> t -> t -> unit
val mul : t -> t -> t
val div' : target:t -> t -> t -> unit
val div : t -> t -> t
val dot : t -> t -> float
val dot_2d : t -> t -> float
val cross' : target:t -> t -> t -> unit
val cross : t -> t -> t
val mulf' : target:t -> t -> float -> unit
val mulf : t -> float -> t
val divf' : target:t -> t -> float -> unit
val divf : t -> float -> t
val rotate_2d' : target:t -> t -> float -> unit
val rotate_2d : t -> float -> t
val lerp' : target:t -> t -> t -> float -> unit
val lerp : t -> t -> float -> t
val clamp' : target:t -> t -> min:t -> max:t -> unit
val clamp : t -> min:t -> max:t -> t
val clamp_size' : target:t -> t -> min:float -> max:float -> unit
val clamp_size : t -> min:float -> max:float -> t
val move_x : t -> float -> unit
val move_y : t -> float -> unit
val move_z : t -> float -> unit
val of_rotation : float -> t
val to_rotation : t -> float
end
module Obox : sig
* { 1 Oriented boxes }
From -project.org/orx/doc/html/group__orx_o_box.html
From -project.org/orx/doc/html/group__orx_o_box.html *)
type t
val make : pos:Vector.t -> pivot:Vector.t -> size:Vector.t -> angle:float -> t
val set_2d :
t -> pos:Vector.t -> pivot:Vector.t -> size:Vector.t -> angle:float -> unit
val copy : t -> t
val get_center : t -> Vector.t
val move : t -> Vector.t -> t
* [ move ] is a fresh { ! t } which is [ obox ] moved by [ ] .
val move' : t -> Vector.t -> unit
* [ move ' obox vec ] moves [ obox ] by [ ] .
val rotate_2d : t -> float -> t
val rotate_2d' : t -> float -> unit
val is_inside : t -> Vector.t -> bool
val is_inside_2d : t -> Vector.t -> bool
* [ ] is [ true ] if [ vec ] is inside [ obox ] . The [ z ] of
the [ obox ] and [ vec ] are ignored .
the [obox] and [vec] are ignored. *)
end
module Module_id : sig
* { 1 Engine module IDs }
From -project.org/orx/doc/html/group__orx_module.html
From -project.org/orx/doc/html/group__orx_module.html *)
type t =
| Clock
| Main
end
module Clock_modifier : sig
* { 1 Game clock modifiers }
From -project.org/orx/doc/html/group__orx_clock.html
From -project.org/orx/doc/html/group__orx_clock.html *)
type t =
| Fixed
| Multiply
| Maxed
| Average
end
module Clock_priority : sig
type t =
| Lowest
| Lower
| Low
| Normal
| High
| Higher
| Highest
end
module Clock : sig
* { 1 Engine clocks }
From -project.org/orx/doc/html/group__orx_clock.html
From -project.org/orx/doc/html/group__orx_clock.html *)
type t
module Info : sig
* { 1 Clock information }
From -project.org/orx/doc/html/group__orx_clock.html
From -project.org/orx/doc/html/group__orx_clock.html *)
type clock = t
type t
val get_tick_size : t -> float
val get_dt : t -> float
val get_time : t -> float
val get_clock : t -> clock option
end
val compare : t -> t -> int
* A total order comparison for { ! t } values . The actual order does not hold
important semantic meaning but this does allow for easy use of
{ ! . Set . Make } and { ! . Map . Make } .
important semantic meaning but this does allow for easy use of
{!Stdlib.Set.Make} and {!Stdlib.Map.Make}. *)
val equal : t -> t -> bool
val create_from_config : string -> t option
val create_from_config_exn : string -> t
val create : float -> t
val get : string -> t option
val get_exn : string -> t
val get_core : unit -> t
* [ ( ) ] returns the core engine clock .
val get_name : t -> string
* [ clock ] is [ clock ] 's config name .
val get_info : t -> Info.t
val get_modifier : t -> Clock_modifier.t -> float
val set_modifier : t -> Clock_modifier.t -> float -> unit
val set_tick_size : t -> float -> unit
val restart : t -> Status.t
val pause : t -> unit
val unpause : t -> unit
val is_paused : t -> bool
* { 2 Callbacks }
Clock callbacks fire on each tick of a clock .
Clock callbacks fire on each tick of a clock. *)
module Callback_handle : sig
type t
val default : t
val make : unit -> t
end
val register :
?handle:Callback_handle.t ->
?module_id:Module_id.t ->
?priority:Clock_priority.t ->
t ->
(Info.t -> unit) ->
unit
val unregister : t -> Callback_handle.t -> unit
val unregister_all : t -> unit
* { 2 Timers }
Timers fire one or more times , after a specified delay .
Timers fire one or more times, after a specified delay. *)
module Timer_handle : sig
type t
val default : t
val make : unit -> t
end
val add_timer :
?handle:Timer_handle.t -> t -> (Info.t -> unit) -> float -> int -> unit
val remove_timer : t -> Timer_handle.t -> unit
val remove_all_timers : t -> unit
end
module Texture : sig
type t
val load : string -> bool -> t option
val delete : t -> Status.t
val clear_cache : unit -> Status.t
val get_size : t -> float * float
end
module Graphic : sig
type t
val create : unit -> t option
val create_from_config : string -> t option
val delete : t -> Status.t
val set_size : t -> Vector.t -> unit
val get_size : t -> Vector.t
val set_origin : t -> Vector.t -> unit
val get_origin : t -> Vector.t
val set_flip : t -> x:bool -> y:bool -> unit
val set_pivot : t -> Vector.t -> unit
val set_data : t -> Structure.t -> Status.t
val to_structure : t -> Structure.t
end
module Sound_status : sig
type t =
| Play
| Pause
| Stop
end
module Sound : sig
* { 1 Sound playback }
From -project.org/orx/doc/html/group__orx_sound.html
From -project.org/orx/doc/html/group__orx_sound.html *)
type t
val create_from_config : string -> t option
val get_name : t -> string
* [ sound ] is the config section name of [ sound ] .
val get_status : t -> Sound_status.t
val play : t -> unit
val pause : t -> unit
val stop : t -> unit
val get_duration : t -> float
val get_pitch : t -> float
val set_pitch : t -> float -> unit
val get_volume : t -> float
val set_volume : t -> float -> unit
end
module Resource : sig
* { 1 Engine resources }
From -project.org/orx/doc/html/group__orx_resource.html
From -project.org/orx/doc/html/group__orx_resource.html *)
type group =
| Config
| Sound
| Texture
| Custom of string
val group_of_string : string -> group
val string_of_group : group -> string
val add_storage : group -> string -> bool -> Status.t
val remove_storage : group option -> string option -> Status.t
val reload_storage : unit -> Status.t
val sync : group option -> Status.t
end
module Mouse_button : sig
type t =
| Left
| Right
| Middle
| Extra_1
| Extra_2
| Extra_3
| Extra_4
| Extra_5
| Wheel_up
| Wheel_down
end
module Mouse_axis : sig
type t =
| X
| Y
end
module Mouse : sig
(** {1 Read mouse state}

    From https://orx-project.org/orx/doc/html/group__orx_mouse.html *)
val is_button_pressed : Mouse_button.t -> bool
val get_position : unit -> Vector.t option
val get_position_exn : unit -> Vector.t
val get_move_delta : unit -> Vector.t option
val get_wheel_delta : unit -> float
val show_cursor : bool -> Status.t
val set_cursor : string -> Vector.t option -> Status.t
(** [set_cursor name pivot] sets the mouse's cursor display to [name] and its
    hotspot to [pivot].

    @param name
      Can be standard names (arrow, ibeam, hand, crosshair, hresize or
      vresize) or a file name
    @param pivot
      Can be an offset for the hotspot or [None] to default to [(0, 0)] *)
val get_button_name : Mouse_button.t -> string
val get_axis_name : Mouse_axis.t -> string
end
module Input_type : sig
type t =
| Keyboard_key
| Mouse_button
| Mouse_axis
| Joystick_button
| Joystick_axis
| External
| No_input
end
module Input_mode : sig
type t =
| Full
| Positive
| Negative
end
module Input : sig
(** {1 General user input handling}

    Orx inputs are defined by name in config. This module allows querying the
    state of inputs.

    From https://orx-project.org/orx/doc/html/group__orx_input.html *)
val is_active : string -> bool
val has_new_status : string -> bool
val has_been_activated : string -> bool
val has_been_deactivated : string -> bool
val get_value : string -> float
(** [get_value input] is the current value of [input]. For keypresses, this
    will generally be [0.0] or [1.0]. For a joystick the value will scale
    according to the position of the stick along the queried axis. *)
val set_value : string -> float -> Status.t
(** [set_value input value] sets [input] to [value]. This impacts only one
    input for [input]. *)
val set_permanent_value : string -> float -> Status.t
(** [set_permanent_value input value] sets [input] to [value] until reset. Set
    [value] to [0.0] to reset. *)
val get_binding :
string -> int -> (Input_type.t * int * Input_mode.t) Status.result
val get_binding_name : Input_type.t -> int -> Input_mode.t -> string
(** {2 Input sets} *)
val get_current_set : unit -> string
val select_set : string -> Status.t
val enable_set : string -> bool -> Status.t
(** [enable_set input_set enabled] enables/disables [input_set] without
    selecting it. *)
val is_set_enabled : string -> bool
end
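(* Illustrative sketch: polling a config-defined input once per frame. The
   input names "Jump" and "Throttle" are made-up examples; real inputs are
   declared in config, and [Orx] is the assumed top-level module name.

     let poll_input () =
       if Orx.Input.has_been_activated "Jump" then
         print_endline "jump pressed";
       (* Analog inputs scale between 0.0 and 1.0. *)
       Orx.Input.get_value "Throttle" *)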
module Physics : sig
(** {1 General physics engine settings and values}

    From https://orx-project.org/orx/doc/html/group__orx_physics.html *)
val get_collision_flag_name : Unsigned.UInt32.t -> string
(** [get_collision_flag_name flag] is the name defined in config matching
    [flag] if one exists, otherwise an empty string. *)
val get_collision_flag_value : string -> Unsigned.UInt32.t
(** [get_collision_flag_value name] is the value associated with the named
    collision flag [name] or {!Unsigned.UInt32.zero} if [name] is not a
    defined collision flag. *)
val check_collision_flag :
mask:Unsigned.UInt32.t -> flag:Unsigned.UInt32.t -> bool
val get_gravity : unit -> Vector.t
val set_gravity : Vector.t -> unit
val enable_simulation : bool -> unit
end
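(* Illustrative sketch: combining named collision flags from config into a
   mask and testing membership. The flag names "wall" and "ball" are
   assumptions, as is the [Orx] module path.

     let ball_hits_walls () =
       let wall = Orx.Physics.get_collision_flag_value "wall" in
       let ball = Orx.Physics.get_collision_flag_value "ball" in
       let mask = Unsigned.UInt32.logor wall ball in
       Orx.Physics.check_collision_flag ~mask ~flag:ball *)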
module Body_part : sig
(** {1 Body parts for physics simulation}

    For physics body parts, flags specify the collision bitmask for a part. A
    mask specifies the flags for other bodies which a part should collide
    with.

    From https://orx-project.org/orx/doc/html/group__orx_body.html *)
type t
val get_name : t -> string
(** [get_name part] is the config name associated with [part]. *)
val set_self_flags : t -> int -> unit
val get_self_flags : t -> int
val set_check_mask : t -> int -> unit
val get_check_mask : t -> int
end
module Body : sig
(** {1 Physics bodies}

    A physics body may be made up of one or more parts as defined in
    {!Body_part}.

    From https://orx-project.org/orx/doc/html/group__orx_body.html *)
type t
val get_parts : t -> Body_part.t Seq.t
(** [get_parts body] is the sequence of parts which make up [body]. *)
end
module Object : sig
type t = obj
val compare : t -> t -> int
(** Comparison defining a total ordering over objects. This is primarily
    useful for defining containers like {!Stdlib.Map} and {!Stdlib.Set}. *)
val equal : t -> t -> bool
(** {2 Object creation} *)
val create_from_config : string -> t option
val create_from_config_exn : string -> t
(** {2 Enabling/disabling objects} *)
val enable : t -> bool -> unit
val enable_recursive : t -> bool -> unit
val is_enabled : t -> bool
val pause : t -> bool -> unit
val is_paused : t -> bool
(** {2 Object ownership} *)
val set_owner : t -> Parent.t option -> unit
val get_owner : t -> Parent.t option
val set_parent : t -> Parent.t option -> unit
val get_parent : t -> Parent.t option
type _ child =
| Child_object : t child
| Owned_object : t child
| Child_camera : camera child
val get_children : t -> 'a child -> 'a Seq.t
val get_first_child : t -> 'a child -> 'a option
val get_children_recursive : t -> t child -> t Seq.t
val iter_children_recursive : (t -> unit) -> t -> t child -> unit
val iter_recursive : (t -> unit) -> t -> t child -> unit
(** {2 Basic object properties} *)
val get_name : t -> string
val get_bounding_box : t -> Obox.t
(** {2 Clock association} *)
val set_clock : t -> Clock.t option -> Status.t
val set_clock_recursive : t -> Clock.t option -> unit
val get_clock : t -> Clock.t option
(** {2 FX} *)
val add_fx : t -> string -> Status.t
val add_fx_exn : t -> string -> unit
val add_unique_fx : t -> string -> Status.t
val add_unique_fx_exn : t -> string -> unit
val add_fx_recursive : t -> string -> float -> unit
val add_unique_fx_recursive : t -> string -> float -> unit
val remove_fx : t -> string -> Status.t
val remove_fx_exn : t -> string -> unit
val remove_fx_recursive : t -> string -> unit
val remove_all_fxs : t -> Status.t
val remove_all_fxs_exn : t -> unit
val remove_all_fxs_recursive : t -> Status.t
val remove_all_fxs_recursive_exn : t -> unit
val add_shader : t -> string -> Status.t
val add_shader_exn : t -> string -> unit
val add_shader_recursive : t -> string -> unit
val remove_shader : t -> string -> Status.t
val remove_shader_exn : t -> string -> unit
val remove_shader_recursive : t -> string -> unit
(** {2 Placement and dimensions} *)
val get_rotation : t -> float
val set_rotation : t -> float -> unit
val get_world_position : t -> Vector.t
val set_world_position : t -> Vector.t -> unit
val get_position : t -> Vector.t
val set_position : t -> Vector.t -> unit
val get_scale : t -> Vector.t
val set_scale : t -> Vector.t -> unit
(** {2 Repetition} *)
val get_repeat : t -> float * float
val set_repeat : t -> float -> float -> unit
(** {2 Text} *)
val set_text_string : t -> string -> unit
val get_text_string : t -> string
(** {2 Lifetime} *)
val set_life_time : t -> float -> unit
val get_life_time : t -> float
val get_active_time : t -> float
(** {2 Timeline tracks} *)
val add_time_line_track : t -> string -> Status.t
val add_time_line_track_exn : t -> string -> unit
val add_time_line_track_recursive : t -> string -> unit
val remove_time_line_track : t -> string -> Status.t
val remove_time_line_track_exn : t -> string -> unit
val remove_time_line_track_recursive : t -> string -> unit
val enable_time_line : t -> bool -> unit
val is_time_line_enabled : t -> bool
val set_speed : t -> Vector.t -> unit
val get_speed : t -> Vector.t
val set_relative_speed : t -> Vector.t -> unit
val get_relative_speed : t -> Vector.t
val apply_force : ?location:Vector.t -> t -> Vector.t -> unit
val apply_impulse : ?location:Vector.t -> t -> Vector.t -> unit
val apply_torque : t -> float -> unit
val set_angular_velocity : t -> float -> unit
val get_angular_velocity : t -> float
val set_custom_gravity : t -> Vector.t option -> unit
val get_custom_gravity : t -> Vector.t option
val get_mass : t -> float
val get_mass_center : t -> Vector.t
type collision = {
colliding_object : t;
contact : Vector.t;
normal : Vector.t;
}
val raycast :
?self_flags:int ->
?check_mask:int ->
?early_exit:bool ->
Vector.t ->
Vector.t ->
collision option
val set_rgb : t -> Vector.t -> unit
val set_rgb_recursive : t -> Vector.t -> unit
val set_alpha : t -> float -> unit
val set_alpha_recursive : t -> float -> unit
val set_target_anim : t -> string -> Status.t
val set_target_anim_exn : t -> string -> unit
val set_target_anim_recursive : t -> string -> unit
val get_target_anim : t -> string
val set_current_anim : t -> string -> Status.t
val set_current_anim_exn : t -> string -> unit
val set_current_anim_recursive : t -> string -> unit
val get_current_anim : t -> string
val add_sound : t -> string -> Status.t
val add_sound_exn : t -> string -> unit
val remove_sound : t -> string -> Status.t
val remove_sound_exn : t -> string -> unit
val get_last_added_sound : t -> Sound.t option
val set_volume : t -> float -> unit
val set_pitch : t -> float -> unit
val set_panning : t -> float -> bool -> unit
val play : t -> unit
val stop : t -> unit
(** {2 Associated structures} *)
type 'a associated_structure =
| Body : Body.t associated_structure
| Graphic : Graphic.t associated_structure
| Sound : Sound.t associated_structure
val link_structure : t -> Structure.t -> unit
val get_structure : t -> 'a associated_structure -> 'a option
(** {2 Spatial selection} *)
type group =
| All_groups
| Group of string
| Group_id of String_id.t
val get_neighbor_list : Obox.t -> group -> t list
val get_group : group -> t Seq.t
val get_enabled : group -> t Seq.t
val pick : Vector.t -> group -> t option
val box_pick : Obox.t -> group -> t option
(** {2 Groups} *)
val get_default_group_id : unit -> String_id.t
val get_group_id : t -> String_id.t
val set_group_id : t -> group -> unit
val set_group_id_recursive : t -> group -> unit
(** {2 Object GUIDs} *)
val to_guid : t -> Structure.Guid.t
val get_guid : t -> Structure.Guid.t
val of_guid : Structure.Guid.t -> t option
val of_guid_exn : Structure.Guid.t -> t
(** {2 Structure conversion} *)
val of_structure : Structure.t -> t option
end
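(* Illustrative sketch: creating an object from a config section and adjusting
   a few basic properties. The section name "Hero" and the [Orx] module path
   are assumptions for the example.

     let spawn_hero () =
       let hero = Orx.Object.create_from_config_exn "Hero" in
       Orx.Object.set_rotation hero (Float.pi /. 2.0);
       Orx.Object.enable hero true;
       hero *)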
module Shader_param_type : sig
type t =
| Float
| Texture
| Vector
| Time
end
module Shader : sig
type t
val set_float_param_exn : t -> string -> float -> unit
val set_vector_param_exn : t -> string -> Vector.t -> unit
val get_name : t -> string
(** [get_name shader] gets the config name for [shader]. *)
end
module Shader_pointer : sig
type t
val get_shader : t -> int -> Shader.t option
end
module Anim : sig
type t
end
module Anim_event : sig
type t =
| Start
| Stop
| Cut
| Loop
| Update
| Custom_event
val compare : t -> t -> int
type payload
val get_animation : payload -> Anim.t
val get_name : payload -> string
end
module Config_event : sig
(** {1 Configuration events} *)
type t =
| Reload_start
| Reload_stop
val compare : t -> t -> int
end
module Fx_event : sig
(** {1 FX events} *)
type t =
| Start
| Stop
| Add
| Remove
| Loop
val compare : t -> t -> int
type payload
val get_name : payload -> string
(** [get_name payload] is the name of the event which sent [payload]. *)
end
module Input_event : sig
type t =
| On
| Off
| Select_set
val compare : t -> t -> int
type payload
val get_set_name : payload -> string
val get_input_name : payload -> string
(** {2 Input event details}

    Each of the following functions has a singular form for convenience when
    inputs only have one key involved and a plural form for compound inputs
    with multiple keys, buttons or axes involved.

    Calling the singular form of the [get] functions below is slightly more
    efficient than, and functionally equivalent to, using the plural form when
    only a single input is involved. *)
val get_input_type : ?i:int -> payload -> Input_type.t
val get_input_types : payload -> Input_type.t array
val get_input_id : ?i:int -> payload -> int
val get_input_ids : payload -> int array
val get_input_mode : ?i:int -> payload -> Input_mode.t
val get_input_modes : payload -> Input_mode.t array
val get_input_value : ?i:int -> payload -> float
val get_input_values : payload -> float array
end
module Object_event : sig
(** {1 Object events} *)
type t =
| Create
| Delete
| Prepare
| Enable
| Disable
| Pause
| Unpause
val compare : t -> t -> int
type payload
end
module Physics_event : sig
type t =
| Contact_add
| Contact_remove
val compare : t -> t -> int
type payload
val get_position : payload -> Vector.t
val get_normal : payload -> Vector.t
val get_sender_part : payload -> Body_part.t
val get_recipient_part : payload -> Body_part.t
(** [get_recipient_part payload] is the body part which received the event
    which sent [payload]. *)
end
module Shader_event : sig
type t = Set_param
val compare : t -> t -> int
type payload
val get_shader : payload -> Shader.t
val get_shader_name : payload -> string
val get_param_name : payload -> string
val get_param_type : payload -> Shader_param_type.t
(** [get_param_type payload] is the type of the shader parameter associated
    with the event. *)
val get_param_index : payload -> int
val set_param_float : payload -> float -> unit
val set_param_vector : payload -> Vector.t -> unit
end
module Sound_event : sig
(** {1 Sound events} *)
type t =
| Start
| Stop
| Add
| Remove
val compare : t -> t -> int
type payload
val get_sound : payload -> Sound.t
end
module Time_line_event : sig
(** {1 Time line track events} *)
type t =
| Track_start
| Track_stop
| Track_add
| Track_remove
| Loop
| Trigger
val compare : t -> t -> int
type payload
val get_track_name : payload -> string
val get_event : payload -> string
val get_time_stamp : payload -> float
end
module Event : sig
type t
module Event_type : sig
type ('event, 'payload) t =
| Anim : (Anim_event.t, Anim_event.payload) t
| Fx : (Fx_event.t, Fx_event.payload) t
| Input : (Input_event.t, Input_event.payload) t
| Object : (Object_event.t, Object_event.payload) t
| Physics : (Physics_event.t, Physics_event.payload) t
| Shader : (Shader_event.t, Shader_event.payload) t
| Sound : (Sound_event.t, Sound_event.payload) t
| Time_line : (Time_line_event.t, Time_line_event.payload) t
type any = Any : (_, _) t -> any
end
val to_type : t -> Event_type.any
val to_event : t -> ('event, _) Event_type.t -> 'event
val get_sender_object : t -> Object.t option
val get_recipient_object : t -> Object.t option
val get_sender_structure : t -> Structure.t option
val get_recipient_structure : t -> Structure.t option
module Handle : sig
type t
val default : t
val make : unit -> t
end
val add_handler :
?handle:Handle.t ->
?events:'event list ->
('event, 'payload) Event_type.t ->
(t -> 'event -> 'payload -> Status.t) ->
unit
val remove_handler : (_, _) Event_type.t -> Handle.t -> unit
val remove_all_handlers : (_, _) Event_type.t -> unit
end
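(* Illustrative sketch: reacting to physics contact events. The [Orx] module
   path is an assumption, and the example assumes {!Status.t} is a [result]
   whose success case is [Ok ()].

     let watch_contacts () =
       Orx.Event.add_handler Orx.Event.Event_type.Physics
         (fun _event kind _payload ->
           (match kind with
            | Orx.Physics_event.Contact_add -> print_endline "contact added"
            | Orx.Physics_event.Contact_remove -> print_endline "contact removed");
           Ok ())

   Passing [~handle] to [add_handler] allows the handler to be removed later
   with [remove_handler]. *)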
module Camera : sig
(** {1 In-game cameras}

    From https://orx-project.org/orx/doc/html/group__orx_camera.html *)
type t = camera
val create_from_config : string -> t option
val create_from_config_exn : string -> t
val get : string -> t option
val get_name : t -> string
(** [get_name camera] is the name of [camera]. *)
val get_parent : t -> Parent.t option
val set_parent : t -> Parent.t option -> unit
val get_position : t -> Vector.t
val set_position : t -> Vector.t -> unit
val get_rotation : t -> float
val set_rotation : t -> float -> unit
val get_zoom : t -> float
(** [get_zoom camera] is the zoom multiplier for [camera]. *)
val set_zoom : t -> float -> unit
(** [set_zoom camera zoom] sets [camera]'s zoom multiplier to [zoom]. *)
val set_frustum :
t -> width:float -> height:float -> near:float -> far:float -> unit
(** [set_frustum camera ~width ~height ~near ~far] sets the frustum - the
    visible volume - for [camera]. *)
end
module Viewport : sig
(** {1 Game world viewports}

    From https://orx-project.org/orx/doc/html/group__orx_viewport.html *)
type t
val create_from_config : string -> t option
val create_from_config_exn : string -> t
val get_camera : t -> Camera.t option
(** [get_camera viewport] is the camera associated with [viewport] if one
    exists. *)
val get_shader_pointer : t -> Shader_pointer.t option
val get_shader_exn : ?index:int -> t -> Shader.t
val get_name : t -> string
(** [get_name viewport] is the name of [viewport]. *)
val get : string -> t option
val get_exn : string -> t
val of_structure : Structure.t -> t option
end
module Render : sig
val get_world_position : Vector.t -> Viewport.t -> Vector.t option
val get_screen_position : Vector.t -> Viewport.t -> Vector.t option
end
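(* Illustrative sketch: converting the mouse position from screen space to
   world space through a viewport. The [Orx] module path is an assumption.

     let mouse_world_position viewport =
       match Orx.Mouse.get_position () with
       | None -> None
       | Some screen -> Orx.Render.get_world_position screen viewport *)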
module Config : sig
(** {1 Config values} *)
module Value : sig
(** {1 Config convenience get/set functions} *)
type _ t =
| String : string t
| Int : int t
| Float : float t
| Bool : bool t
| Vector : Vector.t t
| Guid : Structure.Guid.t t
val to_string : _ t -> string
val to_proper_string : _ t -> string
val set : 'a t -> 'a -> section:string -> key:string -> unit
val get : 'a t -> section:string -> key:string -> 'a
val find : 'a t -> section:string -> key:string -> 'a option
val clear : section:string -> key:string -> unit
val update :
'a t -> ('a option -> 'a option) -> section:string -> key:string -> unit
end
val set_basename : string -> unit
val load : string -> Status.t
val load_from_memory : string -> Status.t
val push_section : string -> unit
val pop_section : unit -> unit
val get_current_section : unit -> string
val select_section : string -> unit
val get_section_count : unit -> int
val get_section : int -> string
val get_key_count : unit -> int
val get_key : int -> string
val get_parent : string -> string option
val has_section : string -> bool
val has_value : string -> bool
val clear_section : string -> Status.t
val clear_value : string -> Status.t
(** {2 Get/set values in the current section} *)
val get_string : string -> string
val set_string : string -> string -> unit
val get_bool : string -> bool
val set_bool : string -> bool -> unit
val get_float : string -> float
val set_float : string -> float -> unit
val get_int : string -> int
val set_int : string -> int -> unit
val get_vector : string -> Vector.t
val set_vector : string -> Vector.t -> unit
val get_list_vector : string -> int option -> Vector.t
val set_list_string : string -> string list -> unit
val append_list_string : string -> string list -> unit
val if_has_value : string -> (string -> 'a) -> 'a option
val exists : section:string -> key:string -> bool
val get : (string -> 'a) -> section:string -> key:string -> 'a
val set : (string -> 'a -> unit) -> 'a -> section:string -> key:string -> unit
val get_seq : (string -> 'a) -> section:string -> key:string -> 'a Seq.t
val get_list_item :
(string -> int option -> 'a) ->
int option ->
section:string ->
key:string ->
'a
val get_list :
(string -> int option -> 'a) -> section:string -> key:string -> 'a list
val is_list : string -> bool
val get_sections : unit -> string list
val get_current_section_keys : unit -> string list
val get_section_keys : string -> string list
val get_guid : string -> Structure.Guid.t
val set_guid : string -> Structure.Guid.t -> unit
val with_section : string -> (unit -> 'a) -> 'a
val set_bootstrap : (unit -> Status.t) -> unit
end
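(* Illustrative sketch: reading and updating typed values with the GADT-based
   [Value] API. The section and key names are assumptions, as is the [Orx]
   module path.

     let read_speed () =
       Orx.Config.Value.(get Float ~section:"Hero" ~key:"Speed")

     let bump_lives () =
       Orx.Config.Value.(
         update Int
           (fun v -> Some (1 + Option.value v ~default:0))
           ~section:"Game" ~key:"Lives") *)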
module Command : sig
module Var_def : sig
(** {1 Command variable definitions} *)
type t
val make : string -> _ Config.Value.t -> t
end
module Var : sig
type t
val make : 'a Config.Value.t -> 'a -> t
val set : t -> 'a Config.Value.t -> 'a -> unit
(** [set v value_type value] sets [v] to [value]. *)
val get : t -> 'a Config.Value.t -> 'a
end
val register :
string ->
(Var.t array -> Var.t -> unit) ->
Var_def.t list * Var_def.t list ->
Var_def.t ->
Status.t
val register_exn :
string ->
(Var.t array -> Var.t -> unit) ->
Var_def.t list * Var_def.t list ->
Var_def.t ->
unit
val unregister : string -> Status.t
val unregister_exn : string -> unit
val unregister_all : unit -> unit
val is_registered : string -> bool
val evaluate : string -> Var.t option
val evaluate_with_guid : string -> Structure.Guid.t -> Var.t option
end
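(* Illustrative sketch: registering a console command that doubles an integer
   argument. The command and parameter names are assumptions, the [Orx] module
   path is an assumption, and the first list in the pair is assumed to hold the
   required parameters.

     let register_double () =
       Orx.Command.register_exn "Double"
         (fun args ret ->
           let n = Orx.Command.Var.get args.(0) Orx.Config.Value.Int in
           Orx.Command.Var.set ret Orx.Config.Value.Int (n * 2))
         ([ Orx.Command.Var_def.make "N" Orx.Config.Value.Int ], [])
         (Orx.Command.Var_def.make "Result" Orx.Config.Value.Int) *)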
module Orx_thread : sig
val set_ocaml_callbacks : unit -> unit
end
module Main : sig
val execute :
init:(unit -> Status.t) ->
run:(unit -> Status.t) ->
exit:(unit -> unit) ->
unit ->
unit
val start :
?config_dir:string ->
?exit:(unit -> unit) ->
init:(unit -> (unit, [ `Orx ]) result) ->
run:(unit -> (unit, [ `Orx ]) result) ->
string ->
unit
(** [start ?config_dir ?exit ~init ~run name] starts the Orx engine loop.

    [start] automates a few common steps a game will often need when getting
    ready to call {!execute}. [start] defines a custom bootstrap function to
    specify where the game engine configuration resides and calls
    {!Config.set_basename} with [name] to define the root configuration file
    for a game.

    @param config_dir
      specifies the directory holding engine configuration files. The current
      working directory will be used if this is not provided.
    @param exit
      specifies a function to be run when the engine loop exits. It can be
      used to clean up game data which is not managed by or within the game
      engine.
    @param init
      specifies a function to run after the engine has initialized and before
      the game loop begins.
    @param run specifies a function that will be run once per frame.
    @param name
      specifies the name of the root configuration file without an extension. *)
end
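(* Illustrative sketch: a minimal entry point using [Main.start]. The config
   directory, game name and the [Orx] module path are assumptions.

     let () =
       Orx.Main.start ~config_dir:"data/config"
         ~init:(fun () -> Ok ())
         ~run:(fun () -> Ok ())
         "breakout" *)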
|
0445c87dcabe49eafd619c453b51f5add098c1adec1d6fe81f310d23073396f5 | clojure-interop/java-jdk | ZoneView.clj | (ns javax.swing.text.ZoneView
"ZoneView is a View implementation that creates zones for which
the child views are not created or stored until they are needed
for display or model/view translations. This enables a substantial
reduction in memory consumption for situations where the model
being represented is very large, by building view objects only for
the region being actively viewed/edited. The size of the children
can be estimated in some way, or calculated asynchronously with
only the result being saved.
ZoneView extends BoxView to provide a box that implements
zones for its children. The zones are special View implementations
(the children of an instance of this class) that represent only a
portion of the model that an instance of ZoneView is responsible
for. The zones don't create child views until an attempt is made
to display them. A box shaped view is well suited to this because:
Boxes are a heavily used view, and having a box that
provides this behavior gives substantial opportunity
to plug the behavior into a view hierarchy from the
view factory.
Boxes are tiled in one direction, so it is easy to
divide them into zones in a reliable way.
Boxes typically have a simple relationship to the model (i.e. they
create child views that directly represent the child elements).
Boxes are easier to estimate the size of than some other shapes.
The default behavior is controlled by two properties, maxZoneSize
and maxZonesLoaded. Setting maxZoneSize to Integer.MAX_VALUE would
have the effect of causing only one zone to be created. This would
effectively turn the view into an implementation of the decorator
pattern. Setting maxZonesLoaded to a value of Integer.MAX_VALUE would
cause zones to never be unloaded. For simplicity, zones are created on
boundaries represented by the child elements of the element the view is
responsible for. The zones can be any View implementation, but the
default implementation is based upon AsyncBoxView which supports fairly
large zones efficiently."
(:refer-clojure :only [require comment defn ->])
(:import [javax.swing.text ZoneView]))
(defn ->zone-view
"Constructor.
Constructs a ZoneView.
elem - the element this view is responsible for - `javax.swing.text.Element`
axis - either View.X_AXIS or View.Y_AXIS - `int`"
(^ZoneView [^javax.swing.text.Element elem ^Integer axis]
(new ZoneView elem axis)))
(defn get-maximum-zone-size
"Get the current maximum zone size.
returns: `int`"
(^Integer [^ZoneView this]
(-> this (.getMaximumZoneSize))))
(defn set-maximum-zone-size
"Set the desired maximum zone size. A
zone may get larger than this size if
a single child view is larger than this
size since zones are formed on child view
boundaries.
size - the number of characters the zone may represent before attempting to break the zone into a smaller size. - `int`"
([^ZoneView this ^Integer size]
(-> this (.setMaximumZoneSize size))))
(defn get-max-zones-loaded
"Get the current setting of the number of zones
allowed to be loaded at the same time.
returns: `int`"
(^Integer [^ZoneView this]
(-> this (.getMaxZonesLoaded))))
(defn set-max-zones-loaded
"Sets the current setting of the number of zones
allowed to be loaded at the same time. This will throw an
IllegalArgumentException if mzl is less
than 1.
mzl - the desired maximum number of zones to be actively loaded, must be greater than 0 - `int`
throws: java.lang.IllegalArgumentException - if mzl is < 1"
([^ZoneView this ^Integer mzl]
(-> this (.setMaxZonesLoaded mzl))))
(defn insert-update
"Gives notification that something was inserted into the document
in a location that this view is responsible for. This is largely
delegated to the superclass, but is reimplemented to update the
relevant zone (i.e. determine if a zone needs to be split into a
set of 2 or more zones).
changes - the change information from the associated document - `javax.swing.event.DocumentEvent`
a - the current allocation of the view - `java.awt.Shape`
f - the factory to use to rebuild if the view has children - `javax.swing.text.ViewFactory`"
([^ZoneView this ^javax.swing.event.DocumentEvent changes ^java.awt.Shape a ^javax.swing.text.ViewFactory f]
(-> this (.insertUpdate changes a f))))
(defn remove-update
"Gives notification that something was removed from the document
in a location that this view is responsible for. This is largely
delegated to the superclass, but is reimplemented to update the
relevant zones (i.e. determine if zones need to be removed or
joined with another zone).
changes - the change information from the associated document - `javax.swing.event.DocumentEvent`
a - the current allocation of the view - `java.awt.Shape`
f - the factory to use to rebuild if the view has children - `javax.swing.text.ViewFactory`"
([^ZoneView this ^javax.swing.event.DocumentEvent changes ^java.awt.Shape a ^javax.swing.text.ViewFactory f]
(-> this (.removeUpdate changes a f))))
| null | https://raw.githubusercontent.com/clojure-interop/java-jdk/8d7a223e0f9a0965eb0332fad595cf7649d9d96e/javax.swing/src/javax/swing/text/ZoneView.clj | clojure | (ns javax.swing.text.ZoneView
"ZoneView is a View implementation that creates zones for which
the child views are not created or stored until they are needed
for display or model/view translations. This enables a substantial
reduction in memory consumption for situations where the model
being represented is very large, by building view objects only for
the region being actively viewed/edited. The size of the children
can be estimated in some way, or calculated asynchronously with
only the result being saved.
ZoneView extends BoxView to provide a box that implements
zones for its children. The zones are special View implementations
(the children of an instance of this class) that represent only a
portion of the model that an instance of ZoneView is responsible
for. The zones don't create child views until an attempt is made
to display them. A box shaped view is well suited to this because:
Boxes are a heavily used view, and having a box that
provides this behavior gives substantial opportunity
to plug the behavior into a view hierarchy from the
view factory.
Boxes are tiled in one direction, so it is easy to
divide them into zones in a reliable way.
Boxes typically have a simple relationship to the model (i.e. they
create child views that directly represent the child elements).
Boxes are easier to estimate the size of than some other shapes.
The default behavior is controlled by two properties, maxZoneSize
and maxZonesLoaded. Setting maxZoneSize to Integer.MAX_VALUE would
have the effect of causing only one zone to be created. This would
effectively turn the view into an implementation of the decorator
pattern. Setting maxZonesLoaded to a value of Integer.MAX_VALUE would
cause zones to never be unloaded. For simplicity, zones are created on
boundaries represented by the child elements of the element the view is
responsible for. The zones can be any View implementation, but the
default implementation is based upon AsyncBoxView which supports fairly
large zones efficiently."
(:refer-clojure :only [require comment defn ->])
(:import [javax.swing.text ZoneView]))
(defn ->zone-view
"Constructor.
Constructs a ZoneView.
elem - the element this view is responsible for - `javax.swing.text.Element`
axis - either View.X_AXIS or View.Y_AXIS - `int`"
(^ZoneView [^javax.swing.text.Element elem ^Integer axis]
(new ZoneView elem axis)))
(defn get-maximum-zone-size
"Get the current maximum zone size.
returns: `int`"
(^Integer [^ZoneView this]
(-> this (.getMaximumZoneSize))))
(defn set-maximum-zone-size
"Set the desired maximum zone size. A
zone may get larger than this size if
a single child view is larger than this
size since zones are formed on child view
boundaries.
size - the number of characters the zone may represent before attempting to break the zone into a smaller size. - `int`"
([^ZoneView this ^Integer size]
(-> this (.setMaximumZoneSize size))))
(defn get-max-zones-loaded
"Get the current setting of the number of zones
allowed to be loaded at the same time.
returns: `int`"
(^Integer [^ZoneView this]
(-> this (.getMaxZonesLoaded))))
(defn set-max-zones-loaded
"Sets the current setting of the number of zones
allowed to be loaded at the same time. This will throw an
IllegalArgumentException if mzl is less
than 1.
mzl - the desired maximum number of zones to be actively loaded, must be greater than 0 - `int`
throws: java.lang.IllegalArgumentException - if mzl is < 1"
([^ZoneView this ^Integer mzl]
(-> this (.setMaxZonesLoaded mzl))))
(defn insert-update
"Gives notification that something was inserted into the document
in a location that this view is responsible for. This is largely
delegated to the superclass, but is reimplemented to update the
relevant zone (i.e. determine if a zone needs to be split into a
set of 2 or more zones).
changes - the change information from the associated document - `javax.swing.event.DocumentEvent`
a - the current allocation of the view - `java.awt.Shape`
f - the factory to use to rebuild if the view has children - `javax.swing.text.ViewFactory`"
([^ZoneView this ^javax.swing.event.DocumentEvent changes ^java.awt.Shape a ^javax.swing.text.ViewFactory f]
(-> this (.insertUpdate changes a f))))
(defn remove-update
"Gives notification that something was removed from the document
in a location that this view is responsible for. This is largely
delegated to the superclass, but is reimplemented to update the
relevant zones (i.e. determine if zones need to be removed or
joined with another zone).
changes - the change information from the associated document - `javax.swing.event.DocumentEvent`
a - the current allocation of the view - `java.awt.Shape`
f - the factory to use to rebuild if the view has children - `javax.swing.text.ViewFactory`"
([^ZoneView this ^javax.swing.event.DocumentEvent changes ^java.awt.Shape a ^javax.swing.text.ViewFactory f]
(-> this (.removeUpdate changes a f))))
|
|
2f5498dc4cf9502f424dac53592374dbb284d8bb3a4d770c036c3471de89e98f | 1Jajen1/Brokkr | Word24.hs | {-# LANGUAGE MagicHash #-}
module Util.Word24 (
Word24(..)
, byteSwap24
) where
import GHC.Exts
newtype Word24 = Word24 Word
deriving newtype (Eq, Show, Enum, Ord, Num, Real, Integral)
byteSwap24 :: Word24 -> Word24
byteSwap24 (Word24 (W# w)) = Word24 $ W# (byteSwap24# w)
byteSwap24# :: Word# -> Word#
byteSwap24# w =
let b1 = uncheckedShiftL# (w `and#` 0x0000ff##) 16#
b2 = w `and#` 0x00ff00##
b3 = uncheckedShiftRL# (w `and#` 0xff0000##) 16#
in b1 `or#` b2 `or#` b3
| null | https://raw.githubusercontent.com/1Jajen1/Brokkr/fe56efaf450f29a5571cc34fa01f7301678f3eaf/src/Util/Word24.hs | haskell | # LANGUAGE MagicHash #
module Util.Word24 (
Word24(..)
, byteSwap24
) where
import GHC.Exts
newtype Word24 = Word24 Word
deriving newtype (Eq, Show, Enum, Ord, Num, Real, Integral)
byteSwap24 :: Word24 -> Word24
byteSwap24 (Word24 (W# w)) = Word24 $ W# (byteSwap24# w)
byteSwap24# :: Word# -> Word#
byteSwap24# w =
let b1 = uncheckedShiftL# (w `and#` 0x0000ff##) 16#
b2 = w `and#` 0x00ff00##
b3 = uncheckedShiftRL# (w `and#` 0xff0000##) 16#
in b1 `or#` b2 `or#` b3
|
|
2558f2e3a10072e40fc55dc2e281d15065f8b9d4ffb25fabf8985ee99e0e52bd | solidsnack/shell-escape | Bench.hs |
{-# LANGUAGE OverloadedStrings
#-}
import Data.ByteString.Char8 (ByteString)
import Criterion.Main
import Data.ByteString.ShellEscape
strings :: [ByteString]
strings =
[ "echo * $PWD"
, ""
, "~/Music/M.I.A. & Diplo - Piracy Funds Terrorism Vol. 1 (2004)"
, "abcds"
, "\x00\n\204\DEL"
, "\x00\n\204\DELecho * $PWD" ]
main = (defaultMain . concat . fmap (`fmap` strings)) [benchBash, benchSh]
where
bench' d f s = bench (d ++ show s) (whnf f s)
benchBash = bench' "bash:" (escape :: ByteString -> Bash)
benchSh = bench' "sh:" (escape :: ByteString -> Sh)
| null | https://raw.githubusercontent.com/solidsnack/shell-escape/8a4a0e8153c5e2bcc0dc30773b0009e8f4d5e6cc/bench/Bench.hs | haskell | # LANGUAGE OverloadedStrings
# |
import Data.ByteString.Char8 (ByteString)
import Criterion.Main
import Data.ByteString.ShellEscape
strings :: [ByteString]
strings =
[ "echo * $PWD"
, ""
, "~/Music/M.I.A. & Diplo - Piracy Funds Terrorism Vol. 1 (2004)"
, "abcds"
, "\x00\n\204\DEL"
, "\x00\n\204\DELecho * $PWD" ]
main = (defaultMain . concat . fmap (`fmap` strings)) [benchBash, benchSh]
where
bench' d f s = bench (d ++ show s) (whnf f s)
benchBash = bench' "bash:" (escape :: ByteString -> Bash)
benchSh = bench' "sh:" (escape :: ByteString -> Sh)
|
68056c4b2642f41dc5b6aa5a98baa5aa858a78ea7f3041801f3d68ff4c08204f | ConsumerDataStandardsAustralia/validation-prototype | AccountsTest.hs | # LANGUAGE QuasiQuotes #
module Web.ConsumerData.Au.LambdaBank.Banking.AccountsTest where
import Control.Lens
import Web.ConsumerData.Au.Api.Client
import Web.ConsumerData.Au.Api.Types
import Control.Monad.IO.Class (liftIO)
import Data.Text (pack)
import Test.Tasty (TestTree)
import Test.Tasty.HUnit (testCase, (@?=))
import Text.URI.QQ (uri)
import Web.ConsumerData.Au.LambdaBank.FakeData
import Web.ConsumerData.Au.LambdaBank.Model
(filterBalancesByAccountIds, filterDirectDebitsByAccountIds,
filterTransactionsByAccountIds)
import Web.ConsumerData.Au.LambdaBank.WithServer
test_accounts :: [TestTree]
test_accounts =
[ testCase "/banking/accounts test" . withServer 1337 $ do
res <- apiClient ^. bankingClient . bankingAccountsClient . accountsGet . to (\f -> f Nothing Nothing Nothing Nothing Nothing)
liftIO $ res @?= Response testAccounts
(LinksPaginated
[uri|:1337/banking/accounts?page=1|]
Nothing
Nothing
Nothing
Nothing
)
(MetaPaginated 0 1)
, testCase "/banking/accounts test with page 2" . withServer 1337 $ do
res <- apiClient ^. bankingClient . bankingAccountsClient . accountsGet . to (\f -> f Nothing Nothing Nothing (Just (PageNumber 2)) Nothing)
liftIO $ res @?= Response testAccounts
(LinksPaginated
[uri|:1337/banking/accounts?page=2|]
(Just [uri|:1337/banking/accounts?page=1|])
(Just [uri|:1337/banking/accounts?page=1|])
Nothing
Nothing
)
(MetaPaginated 0 2)
, testCase "/banking/accounts/{accountId} test" . withServer 1337 $ do
res <- apiClient ^. bankingClient . bankingAccountsClient . accountsByIdClient . to ($ AccountId (AsciiString (pack "12345"))) . accountGet
liftIO $ res @?= Response testAccountDetail
(LinksStandard [uri|:1337/banking/accounts/12345|])
MetaStandard
, testCase "/banking/accounts/balances test with page" . withServer 1337 $ do
res <- apiClient ^. bankingClient . bankingAccountsClient . accountsBalancesGet . to (\f -> f Nothing Nothing Nothing (Just (PageNumber 2)) Nothing)
liftIO $ res @?= Response testBalances
(LinksPaginated
[uri|:1337/banking/accounts/balances?page=2|]
(Just [uri|:1337/banking/accounts/balances?page=1|])
(Just [uri|:1337/banking/accounts/balances?page=1|])
Nothing
Nothing
)
(MetaPaginated 0 2)
, testCase "/banking/accounts/balances test" . withServer 1337 $ do
res <- apiClient ^. bankingClient . bankingAccountsClient . accountsBalancesGet . to (\f -> f Nothing Nothing Nothing Nothing Nothing)
liftIO $ res @?= Response testBalances
(LinksPaginated
[uri|:1337/banking/accounts/balances?page=1|]
Nothing
Nothing
Nothing
Nothing
)
(MetaPaginated 0 1)
, testCase "/banking/accounts/transactions test with page 2" . withServer 1337 $ do
res <- apiClient ^. bankingClient . bankingAccountsClient . accountsTransactionsGet . to (\f -> f Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing (Just (PageNumber 2)) Nothing)
liftIO $ res @?= Response testTransactions
(LinksPaginated
[uri|:1337/banking/accounts/transactions?page=2|]
(Just [uri|:1337/banking/accounts/transactions?page=1|])
(Just [uri|:1337/banking/accounts/transactions?page=1|])
Nothing
Nothing
)
(MetaPaginated 0 2)
, testCase "/banking/accounts/{accountId}/transactions test" . withServer 1337 $ do
res <- apiClient ^. bankingClient . bankingAccountsClient . accountsByIdClient . to ($ AccountId (AsciiString (pack "12345"))) . accountTransactionsGet . to (\f -> f Nothing Nothing Nothing Nothing Nothing Nothing Nothing)
liftIO $ res @?= Response testTransactions
(LinksPaginated
[uri|:1337/banking/accounts/12345/transactions?page=1|]
Nothing
Nothing
Nothing
Nothing
)
(MetaPaginated 0 1)
, testCase "/banking/accounts/{accountId}/transactions test with page 2" . withServer 1337 $ do
res <- apiClient ^. bankingClient . bankingAccountsClient . accountsByIdClient . to ($ AccountId (AsciiString (pack "12345"))) . accountTransactionsGet . to (\f -> f Nothing Nothing Nothing Nothing Nothing (Just (PageNumber 2)) Nothing)
liftIO $ res @?= Response testTransactions
(LinksPaginated
[uri|:1337/banking/accounts/12345/transactions?page=2|]
(Just [uri|:1337/banking/accounts/12345/transactions?page=1|])
(Just [uri|:1337/banking/accounts/12345/transactions?page=1|])
Nothing
Nothing
)
(MetaPaginated 0 2)
, testCase "/banking/accounts/{accountId}/transactions/{transactionId} test" . withServer 1337 $ do
res <- apiClient ^. bankingClient . bankingAccountsClient . accountsByIdClient . to ($ AccountId (AsciiString (pack "12345"))) . accountTransactionByIdGet . to ($ TransactionId (AsciiString (pack "6789")))
liftIO $ res @?= Response testTransactionDetailResponse
(LinksStandard [uri|:1337/banking/accounts/12345/transactions/6789|])
MetaStandard
]
test_accountsPost :: [TestTree]
test_accountsPost =
[ testCase "POST /banking/accounts/balances test" . withServer 1337 $ do
res <- apiClient ^. bankingClient . bankingAccountsClient . accountsBalancesPost . to (\f -> f requestAccountIds Nothing Nothing)
liftIO $ res @?= Response (filterBalancesByAccountIds testAccountIds testBalances)
(LinksPaginated
[uri|:1337/banking/accounts/balances?page=1|]
Nothing
Nothing
Nothing
Nothing
)
(MetaPaginated 0 1)
, testCase "POST /banking/accounts/transactions test" . withServer 1337 $ do
res <- apiClient ^. bankingClient . bankingAccountsClient . accountsTransactionsPost . to (\f -> f Nothing Nothing Nothing Nothing Nothing requestAccountIds Nothing Nothing)
liftIO $ res @?= Response (filterTransactionsByAccountIds testAccountIds testTransactions)
(LinksPaginated
[uri|:1337/banking/accounts/transactions?page=1|]
Nothing
Nothing
Nothing
Nothing
)
(MetaPaginated 0 1)
, testCase "POST /banking/accounts/transactions test with page 2" . withServer 1337 $ do
res <- apiClient ^. bankingClient . bankingAccountsClient . accountsTransactionsPost . to (\f -> f Nothing Nothing Nothing Nothing Nothing requestAccountIds (Just (PageNumber 2)) Nothing)
liftIO $ res @?= Response (filterTransactionsByAccountIds testAccountIds testTransactions)
(LinksPaginated
[uri|:1337/banking/accounts/transactions?page=2|]
(Just [uri|:1337/banking/accounts/transactions?page=1|])
(Just [uri|:1337/banking/accounts/transactions?page=1|])
Nothing
Nothing
)
(MetaPaginated 0 2)
, testCase "POST /banking/accounts/direct-debits test" . withServer 1337 $ do
res <- apiClient ^. bankingClient . bankingAccountsClient . accountsDirectDebitsPost . to (\f -> f requestAccountIds Nothing Nothing)
liftIO $ res @?= Response (filterDirectDebitsByAccountIds testAccountIds testDirectDebitAuthorisations)
(LinksPaginated
[uri|:1337/banking/accounts/direct-debits?page=1|]
Nothing
Nothing
Nothing
Nothing
)
(MetaPaginated 0 1)
]
where
requestAccountIds :: RequestAccountIds
requestAccountIds = mkStandardRequest (AccountIds [AccountId (AsciiString (pack "12345")), AccountId (AsciiString (pack "12347"))])
| null | https://raw.githubusercontent.com/ConsumerDataStandardsAustralia/validation-prototype/ff63338b77339ee49fa3e0be5bb9d7f74e50c28b/consumer-data-au-lambdabank/tests/Web/ConsumerData/Au/LambdaBank/Banking/AccountsTest.hs | haskell | # LANGUAGE QuasiQuotes #
module Web.ConsumerData.Au.LambdaBank.Banking.AccountsTest where
import Control.Lens
import Web.ConsumerData.Au.Api.Client
import Web.ConsumerData.Au.Api.Types
import Control.Monad.IO.Class (liftIO)
import Data.Text (pack)
import Test.Tasty (TestTree)
import Test.Tasty.HUnit (testCase, (@?=))
import Text.URI.QQ (uri)
import Web.ConsumerData.Au.LambdaBank.FakeData
import Web.ConsumerData.Au.LambdaBank.Model
(filterBalancesByAccountIds, filterDirectDebitsByAccountIds,
filterTransactionsByAccountIds)
import Web.ConsumerData.Au.LambdaBank.WithServer
test_accounts :: [TestTree]
test_accounts =
[ testCase "/banking/accounts test" . withServer 1337 $ do
res <- apiClient ^. bankingClient . bankingAccountsClient . accountsGet . to (\f -> f Nothing Nothing Nothing Nothing Nothing)
liftIO $ res @?= Response testAccounts
(LinksPaginated
[uri|:1337/banking/accounts?page=1|]
Nothing
Nothing
Nothing
Nothing
)
(MetaPaginated 0 1)
, testCase "/banking/accounts test with page 2" . withServer 1337 $ do
res <- apiClient ^. bankingClient . bankingAccountsClient . accountsGet . to (\f -> f Nothing Nothing Nothing (Just (PageNumber 2)) Nothing)
liftIO $ res @?= Response testAccounts
(LinksPaginated
[uri|:1337/banking/accounts?page=2|]
(Just [uri|:1337/banking/accounts?page=1|])
(Just [uri|:1337/banking/accounts?page=1|])
Nothing
Nothing
)
(MetaPaginated 0 2)
, testCase "/banking/accounts/{accountId} test" . withServer 1337 $ do
res <- apiClient ^. bankingClient . bankingAccountsClient . accountsByIdClient . to ($ AccountId (AsciiString (pack "12345"))) . accountGet
liftIO $ res @?= Response testAccountDetail
(LinksStandard [uri|:1337/banking/accounts/12345|])
MetaStandard
, testCase "/banking/accounts/balances test with page" . withServer 1337 $ do
res <- apiClient ^. bankingClient . bankingAccountsClient . accountsBalancesGet . to (\f -> f Nothing Nothing Nothing (Just (PageNumber 2)) Nothing)
liftIO $ res @?= Response testBalances
(LinksPaginated
[uri|:1337/banking/accounts/balances?page=2|]
(Just [uri|:1337/banking/accounts/balances?page=1|])
(Just [uri|:1337/banking/accounts/balances?page=1|])
Nothing
Nothing
)
(MetaPaginated 0 2)
, testCase "/banking/accounts/balances test" . withServer 1337 $ do
res <- apiClient ^. bankingClient . bankingAccountsClient . accountsBalancesGet . to (\f -> f Nothing Nothing Nothing Nothing Nothing)
liftIO $ res @?= Response testBalances
(LinksPaginated
[uri|:1337/banking/accounts/balances?page=1|]
Nothing
Nothing
Nothing
Nothing
)
(MetaPaginated 0 1)
, testCase "/banking/accounts/transactions test with page 2" . withServer 1337 $ do
res <- apiClient ^. bankingClient . bankingAccountsClient . accountsTransactionsGet . to (\f -> f Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing (Just (PageNumber 2)) Nothing)
liftIO $ res @?= Response testTransactions
(LinksPaginated
[uri|:1337/banking/accounts/transactions?page=2|]
(Just [uri|:1337/banking/accounts/transactions?page=1|])
(Just [uri|:1337/banking/accounts/transactions?page=1|])
Nothing
Nothing
)
(MetaPaginated 0 2)
, testCase "/banking/accounts/{accountId}/transactions test" . withServer 1337 $ do
res <- apiClient ^. bankingClient . bankingAccountsClient . accountsByIdClient . to ($ AccountId (AsciiString (pack "12345"))) . accountTransactionsGet . to (\f -> f Nothing Nothing Nothing Nothing Nothing Nothing Nothing)
liftIO $ res @?= Response testTransactions
(LinksPaginated
[uri|:1337/banking/accounts/12345/transactions?page=1|]
Nothing
Nothing
Nothing
Nothing
)
(MetaPaginated 0 1)
, testCase "/banking/accounts/{accountId}/transactions test with page 2" . withServer 1337 $ do
res <- apiClient ^. bankingClient . bankingAccountsClient . accountsByIdClient . to ($ AccountId (AsciiString (pack "12345"))) . accountTransactionsGet . to (\f -> f Nothing Nothing Nothing Nothing Nothing (Just (PageNumber 2)) Nothing)
liftIO $ res @?= Response testTransactions
(LinksPaginated
[uri|:1337/banking/accounts/12345/transactions?page=2|]
(Just [uri|:1337/banking/accounts/12345/transactions?page=1|])
(Just [uri|:1337/banking/accounts/12345/transactions?page=1|])
Nothing
Nothing
)
(MetaPaginated 0 2)
, testCase "/banking/accounts/{accountId}/transactions/{transactionId} test" . withServer 1337 $ do
res <- apiClient ^. bankingClient . bankingAccountsClient . accountsByIdClient . to ($ AccountId (AsciiString (pack "12345"))) . accountTransactionByIdGet . to ($ TransactionId (AsciiString (pack "6789")))
liftIO $ res @?= Response testTransactionDetailResponse
(LinksStandard [uri|:1337/banking/accounts/12345/transactions/6789|])
MetaStandard
]
test_accountsPost :: [TestTree]
test_accountsPost =
[ testCase "POST /banking/accounts/balances test" . withServer 1337 $ do
res <- apiClient ^. bankingClient . bankingAccountsClient . accountsBalancesPost . to (\f -> f requestAccountIds Nothing Nothing)
liftIO $ res @?= Response (filterBalancesByAccountIds testAccountIds testBalances)
(LinksPaginated
[uri|:1337/banking/accounts/balances?page=1|]
Nothing
Nothing
Nothing
Nothing
)
(MetaPaginated 0 1)
, testCase "POST /banking/accounts/transactions test" . withServer 1337 $ do
res <- apiClient ^. bankingClient . bankingAccountsClient . accountsTransactionsPost . to (\f -> f Nothing Nothing Nothing Nothing Nothing requestAccountIds Nothing Nothing)
liftIO $ res @?= Response (filterTransactionsByAccountIds testAccountIds testTransactions)
(LinksPaginated
[uri|:1337/banking/accounts/transactions?page=1|]
Nothing
Nothing
Nothing
Nothing
)
(MetaPaginated 0 1)
, testCase "POST /banking/accounts/transactions test with page 2" . withServer 1337 $ do
res <- apiClient ^. bankingClient . bankingAccountsClient . accountsTransactionsPost . to (\f -> f Nothing Nothing Nothing Nothing Nothing requestAccountIds (Just (PageNumber 2)) Nothing)
liftIO $ res @?= Response (filterTransactionsByAccountIds testAccountIds testTransactions)
(LinksPaginated
[uri|:1337/banking/accounts/transactions?page=2|]
(Just [uri|:1337/banking/accounts/transactions?page=1|])
(Just [uri|:1337/banking/accounts/transactions?page=1|])
Nothing
Nothing
)
(MetaPaginated 0 2)
, testCase "POST /banking/accounts/direct-debits test" . withServer 1337 $ do
res <- apiClient ^. bankingClient . bankingAccountsClient . accountsDirectDebitsPost . to (\f -> f requestAccountIds Nothing Nothing)
liftIO $ res @?= Response (filterDirectDebitsByAccountIds testAccountIds testDirectDebitAuthorisations)
(LinksPaginated
[uri|:1337/banking/accounts/direct-debits?page=1|]
Nothing
Nothing
Nothing
Nothing
)
(MetaPaginated 0 1)
]
where
requestAccountIds :: RequestAccountIds
requestAccountIds = mkStandardRequest (AccountIds [AccountId (AsciiString (pack "12345")), AccountId (AsciiString (pack "12347"))])
|
|
baa1d577ad416ac19d9211ba434a02bff2922361eb4193f3c29425c7f16eee16 | avsm/platform | recent.mli | (* These tests are run on only the most recent version of the compiler that is
explicitly supported by odoc. This allows us to test doc generation for new
language features. *)
module type S = sig end
module type S1 = S -> S
type variant =
| A
| B of int
| C (** foo *)
| D (** {e bar} *)
| E of {a : int}
type _ gadt =
| A : int gadt
| B : int -> string gadt (** foo *)
| C : {a : int} -> unit gadt
type polymorphic_variant = [
| `A
| `B of int
| `C (** foo *)
| `D (** bar *)
]
type nonrec nonrec_ = int
(* Conjunctive types: dune compilation scheme exposes a bug in old
versions of the compiler *)
type empty_conj= X: [< `X of & 'a & int * float ] -> empty_conj
type conj = X: [< `X of int & [< `B of int & float ] ] -> conj
val empty_conj: [< `X of & 'a & int * float ]
val conj : [< `X of int & [< `B of int & float ] ]
module Z : sig
module Y : sig
module X : sig
type 'a t
end
end
end
module X : sig
module L := Z.Y
type t = int L.X.t
type u := int
type v = u L.X.t
end
| null | https://raw.githubusercontent.com/avsm/platform/b254e3c6b60f3c0c09dfdcde92eb1abdc267fa1c/duniverse/odoc.1.4.2/test/html/cases/recent.mli | ocaml | These tests are run on only the most recent version of the compiler that is
explicitly supported by odoc. This allows us to test doc generation for new
language features.
* foo
* {e bar}
* foo
* foo
* bar
Conjunctive types: dune compilation scheme exposes a bug in old
versions of the compiler |
module type S = sig end
module type S1 = S -> S
type variant =
| A
| B of int
| E of {a : int}
type _ gadt =
| A : int gadt
| C : {a : int} -> unit gadt
type polymorphic_variant = [
| `A
| `B of int
]
type nonrec nonrec_ = int
type empty_conj= X: [< `X of & 'a & int * float ] -> empty_conj
type conj = X: [< `X of int & [< `B of int & float ] ] -> conj
val empty_conj: [< `X of & 'a & int * float ]
val conj : [< `X of int & [< `B of int & float ] ]
module Z : sig
module Y : sig
module X : sig
type 'a t
end
end
end
module X : sig
module L := Z.Y
type t = int L.X.t
type u := int
type v = u L.X.t
end
|
217668a5e42d0bd6dd14cc01654896dc6981bb06437bbbc8adb2c66ac8b7f58d | Tim-ats-d/Laius | text.mli | type t = DotPoint of bullet * t list | Text of string
and bullet = string
val text : string -> t
val dot_point : ?bullet:string -> t list -> t
val eval : t list -> string list
| null | https://raw.githubusercontent.com/Tim-ats-d/Laius/feb8336fe72751b28fa574ed9e9d3d06c9e13843/src/text.mli | ocaml | type t = DotPoint of bullet * t list | Text of string
and bullet = string
val text : string -> t
val dot_point : ?bullet:string -> t list -> t
val eval : t list -> string list
|
|
b20071d242f4c5854628f6587083eb8e0c8398fd1789efe12bb989560fcf93e6 | sellout/Kilns | unification.lisp | (in-package #:kilns)
This file extends the CL - Unification package with the missing
pieces to allow it to unify terms in the Kell calculus
(defmethod unify
((pattern process-variable) agent
&optional (substitutions (make-empty-environment))
&key &allow-other-keys)
(unify (intern (format nil "?~a" (name pattern))) agent substitutions))
(defmethod unify
((pattern kell) (agent kell)
&optional (substitutions (make-empty-environment))
&key &allow-other-keys)
(unify (state pattern) (state agent)
(unify (name pattern) (name agent) substitutions)))
(defmethod unify
((pattern parallel-composition) (agent parallel-composition)
&optional (substitutions (make-empty-environment))
&key &allow-other-keys)
(match-local (process-variables pattern) (process-variables agent)
(match-local (messages pattern) (messages agent)
(match-local (kells pattern) (kells agent)
(match-local (triggers pattern)
(triggers agent)
(match-local (primitives
pattern)
(primitives
agent)
substitutions))))))
;;; We should only get here if we know that both messages are relative to the same
(defmethod unify
((pattern message) (agent message)
&optional (substitutions (make-empty-environment))
&key &allow-other-keys)
(unify (argument pattern) (argument agent)
(unify (name pattern) (name agent) substitutions)))
(defgeneric name-equal (a b)
(:method (a b) (equal a b))
(:method ((a symbol) (b symbol)) (equal (symbol-name a) (symbol-name b))))
;;; this is probably not necessary, but I don't understand these environments well
(defun duplicate-environment (env)
(unify::make-environment
:frames (list (copy-structure (unify::first-frame env)))))
;;; ----------------------------------------------------------------------------
;;; This set of methods (with PATTERN specialized on PATTERN) represents the
;;; top-level match that sends off all the sub-matches, so it works a little
;;; differently. EG, we already know that the pattern has a 1-1 correspondence
;;; with messages.
(defmethod unify
((pattern pattern) (agent message)
&optional (substitutions (make-empty-environment))
&key &allow-other-keys)
(unify (car (append (local-message-pattern pattern)
(up-message-pattern pattern)
(down-message-pattern pattern)))
agent
substitutions))
(defmethod unify
((pattern pattern) (agent kell)
&optional (substitutions (make-empty-environment))
&key &allow-other-keys)
(unify (car (kell-message-pattern pattern)) agent substitutions))
;; NOTE: FIND-VARIABLE-VALUE isn't generic, so we use a different name
(defun find-symbol-value (variable &optional env errorp)
(find-variable-value (intern (format nil "?~a" variable)) env errorp))
(defun find-process-variable-value (variable &optional env errorp)
(find-variable-value (intern (format nil "?~a" (name variable))) env errorp))
;;; It turns out that occurs-in-p is used to make sure that variables don't
;;; match things that contain the same variable - but we don't care, that's
;;; fine in our calculus.
(defmethod unify::occurs-in-p ((var symbol) (pat process) env)
(declare (ignore env))
nil)
;;; Unification for abstractions and concretions
(defmethod unify
((pattern concretion) (agent concretion)
&optional (substitutions (make-empty-environment))
&key &allow-other-keys)
(unify (continuation pattern) (continuation agent)
(unify (messages pattern) (messages agent)
(unify (restricted-names pattern) (restricted-names agent)
substitutions))))
(defmethod unify
((pattern kell-abstraction) (agent kell-abstraction)
&optional (substitutions (make-empty-environment))
&key &allow-other-keys)
(unify (abstraction pattern) (abstraction agent)
(unify (continuation pattern) (continuation agent)
(unify (name pattern) (name agent) substitutions))))
(defmethod unify
((pattern application-abstraction) (agent application-abstraction)
&optional (substitutions (make-empty-environment))
&key &allow-other-keys)
(unify (abstraction pattern) (abstraction agent)
(unify (concretion pattern) (concretion agent) substitutions)))
(defmethod unify
((pattern restriction-abstraction) (agent restriction-abstraction)
&optional (substitutions (make-empty-environment))
&key &allow-other-keys)
(unify (abstraction pattern) (abstraction agent)
(unify (names pattern) (names agent) substitutions)))
(defmethod unify
((pattern pattern-abstraction) (agent pattern-abstraction)
&optional (substitutions (make-empty-environment))
&key &allow-other-keys)
(unify (process pattern) (process agent)
(unify (pattern pattern) (pattern agent) substitutions)))
(unify (abstraction pattern) (abstraction agent)
(unify (continuation pattern) (continuation agent)
(unify (name pattern) (name agent) substitutions))))
(defmethod unify
((pattern application-abstraction) (agent application-abstraction)
&optional (substitutions (make-empty-environment))
&key &allow-other-keys)
(unify (abstraction pattern) (abstraction agent)
(unify (concretion pattern) (concretion agent) substitutions)))
(defmethod unify
((pattern restriction-abstraction) (agent restriction-abstraction)
&optional (substitutions (make-empty-environment))
&key &allow-other-keys)
(unify (abstraction pattern) (abstraction agent)
(unify (names pattern) (names agent) substitutions)))
(defmethod unify
((pattern pattern-abstraction) (agent pattern-abstraction)
&optional (substitutions (make-empty-environment))
&key &allow-other-keys)
(unify (process pattern) (process agent)
(unify (pattern pattern) (pattern agent) substitutions)))
|
0cd3c7aa9d4498931379b277bfb0f2a920cfd081b55d3667ea06375ac991085a | danielfm/bencode | list.clj | (ns bencode.type.list
(:use [bencode.protocol]
[bencode.utils])
(:import [java.util Set]
[java.io InputStream OutputStream]))
(defn- bencode-seq!
"Bencodes the given sequence."
[seq ^OutputStream out opts]
(.write out (int \l))
(doall (map #(bencode! % out opts) seq))
(.write out (int \e)))
(extend-protocol Bencodable
java.util.Set
(bencode! [self out opts]
(bencode-seq! self out opts))
java.util.List
(bencode! [self out opts]
(bencode-seq! self out opts)))
(defmethod bdecode-type! :list [^InputStream in opts]
(loop [data []]
(.mark in 1)
(if (= \e (char (.read in)))
data
(do
(.reset in)
(let [item (bdecode-type! in opts)]
(recur (conj data item)))))))
(if (= \e (char (.read in)))
data
(do
(.reset in)
(let [item (bdecode-type! in opts)]
(recur (conj data item)))))))
|
|
6b6d53c4521484185fe96ac92e9ae15598a247dfb99d5663431e1803311922c1 | imrehg/ypsilon | shape.scm | #!nobacktrace
Ypsilon Scheme System
Copyright ( c ) 2004 - 2009 Y.FUJITA / LittleWing Company Limited .
See license.txt for terms and conditions of use .
(library (ypsilon pango shape)
(export pango_shape)
(import (rnrs) (ypsilon ffi))
(define lib-name
(cond (on-linux "libpango-1.0.so.0")
(on-sunos "libpango-1.0.so.0")
(on-freebsd "libpango-1.0.so.0")
(on-openbsd "libpango-1.0.so.0")
(on-darwin "Gtk.framework/Gtk")
(on-windows "libpango-1.0-0.dll")
(else
(assertion-violation #f "can not locate Pango library, unknown operating system"))))
(define lib (load-shared-object lib-name))
(define-syntax define-function
(syntax-rules ()
((_ ret name args)
(define name (c-function lib lib-name ret name args)))))
void pango_shape ( const gchar * text , , const PangoAnalysis * analysis , PangoGlyphString * glyphs )
(define-function void pango_shape (char* int void* void*))
) ;[end]
(define lib (load-shared-object lib-name))
(define-syntax define-function
(syntax-rules ()
((_ ret name args)
(define name (c-function lib lib-name ret name args)))))
void pango_shape ( const gchar * text , , const PangoAnalysis * analysis , PangoGlyphString * glyphs )
(define-function void pango_shape (char* int void* void*))
|
bd19eb9347680b71313d2c9475a7963039cdd5d43d183908b09a7eff032dfb8c | ocaml-flambda/flambda-backend | test_iarray.ml | (* TEST
flags = "-extension immutable_arrays_experimental"
* expect
*)
module Iarray = Stdlib__Iarray;;
external ( .:() ) : 'a iarray -> int -> 'a = "%array_safe_get";;
(** Create some immutable and mutable arrays *)
let iarray : int iarray = [:1;2;3;4;5:];;
let ifarray : float iarray = [:1.5;2.5;3.5;4.5;5.5:];;
let marray : int array = [|1;2;3;4;5|];;
let mfarray : float array = [|1.5;2.5;3.5;4.5;5.5|];;
[%%expect{|
module Iarray = Stdlib__Iarray
external ( .:() ) : 'a iarray -> int -> 'a = "%array_safe_get"
val iarray : int iarray = [:1; 2; 3; 4; 5:]
val ifarray : float iarray = [:1.5; 2.5; 3.5; 4.5; 5.5:]
val marray : int array = [|1; 2; 3; 4; 5|]
val mfarray : float array = [|1.5; 2.5; 3.5; 4.5; 5.5|]
|}];;
(** Pattern-match on some immutable arrays, and check the typing of array
patterns, both mutable and immutable *)
match iarray with
| [::] -> "empty"
| [:1;2;3;4;5:] -> "1--5"
| _ -> "who knows?"
;;
[%%expect{|
- : string = "1--5"
|}];;
match ifarray with
| [::] -> "empty"
| [:1.5;2.5;3.5;4.5;5.5:] -> "1.5--5.5"
| _ -> "who knows?"
;;
[%%expect{|
- : string = "1.5--5.5"
|}];;
match iarray with
| [::] -> "empty"
| [:1;2;3;4;6:] -> "1--5"
| _ -> "who knows?"
;;
[%%expect{|
- : string = "who knows?"
|}];;
match ifarray with
| [::] -> "empty"
| [:1;2;3;4;5:] -> "1--5"
| _ -> "who knows?"
;;
[%%expect{|
Line 3, characters 4-5:
3 | | [:1;2;3;4;5:] -> "1--5"
^
Error: This pattern matches values of type int
but a pattern was expected which matches values of type float
Hint: Did you mean `1.'?
|}];;
match marray with
| [::] -> "empty"
| [:1;2;3;4;5:] -> "1--5"
| _ -> "who knows?"
;;
[%%expect{|
Line 2, characters 2-6:
2 | | [::] -> "empty"
^^^^
Error: This pattern matches values of type 'a iarray
but a pattern was expected which matches values of type int array
|}];;
match iarray with
| [||] -> "empty"
| [|1;2;3;4;5|] -> "1--5"
| _ -> "who knows?"
;;
[%%expect{|
Line 2, characters 2-6:
2 | | [||] -> "empty"
^^^^
Error: This pattern matches values of type 'a array
but a pattern was expected which matches values of type int iarray
|}];;
(** Confirm that immutable and mutable arrays have the same representation, even
when they're different objects *)
Obj.repr iarray = Obj.repr marray;;
[%%expect{|
- : bool = true
|}];;
Obj.repr ifarray = Obj.repr mfarray;;
[%%expect{|
- : bool = true
|}];;
iarray == Obj.magic marray;;
[%%expect{|
- : bool = false
|}];;
ifarray == Obj.magic mfarray;;
[%%expect{|
- : bool = false
|}];;
(** Confirm that immutable and mutable arrays don't collide *)
Obj.repr iarray <> Obj.repr ifarray;;
[%%expect{|
- : bool = true
|}];;
Obj.repr marray <> Obj.repr mfarray;;
[%%expect{|
- : bool = true
|}];;
* Test basic functionality : One or a few tests for every function in [ ] .
We test both success and error cases , and in general try to have coverage of
edge cases . Comments are attached everywhere something subtle is being
checked .
We test both success and error cases, and in general try to have coverage of
edge cases. Comments are attached everywhere something subtle is being
checked. *)
Iarray.length iarray, Iarray.length ifarray;;
[%%expect{|
- : int * int = (5, 5)
|}];;
iarray.:(0), Iarray.get iarray 1, ifarray.:(2), Iarray.get ifarray 3;;
[%%expect{|
- : int * int * float * float = (1, 2, 3.5, 4.5)
|}];;
iarray.:(10)
[%%expect{|
Exception: Invalid_argument "index out of bounds".
|}];;
Iarray.get iarray (-1);;
[%%expect{|
Exception: Invalid_argument "index out of bounds".
|}];;
ifarray.:(-10);;
[%%expect{|
Exception: Invalid_argument "index out of bounds".
|}];;
Iarray.get ifarray 5;;
[%%expect{|
Exception: Invalid_argument "index out of bounds".
|}];;
Iarray.init 10 (fun x -> x * 2);;
[%%expect{|
- : int iarray = [:0; 2; 4; 6; 8; 10; 12; 14; 16; 18:]
|}];;
Iarray.append iarray iarray;;
[%%expect{|
- : int iarray = [:1; 2; 3; 4; 5; 1; 2; 3; 4; 5:]
|}];;
Iarray.concat [];;
[%%expect{|
- : 'a iarray = [::]
|}];;
Iarray.concat [ Iarray.init 1 (fun x -> 1 + x)
; Iarray.init 2 (fun x -> 20 + x)
; Iarray.init 3 (fun x -> 300 + x) ];;
[%%expect{|
- : int iarray = [:1; 20; 21; 300; 301; 302:]
|}];;
Iarray.sub iarray 0 2, Iarray.sub iarray 2 3;;
[%%expect{|
- : int iarray * int iarray = ([:1; 2:], [:3; 4; 5:])
|}];;
Iarray.sub iarray (-1) 3;;
[%%expect{|
Exception: Invalid_argument "Array.sub".
|}];;
Iarray.sub iarray 1 (-3);;
[%%expect{|
Exception: Invalid_argument "Array.sub".
|}];;
Iarray.sub iarray 3 10;;
[%%expect{|
Exception: Invalid_argument "Array.sub".
|}];;
Iarray.to_list iarray;;
[%%expect{|
- : int list = [1; 2; 3; 4; 5]
|}];;
Iarray.of_list [10;20;30];;
[%%expect{|
- : int iarray = [:10; 20; 30:]
|}];;
Iarray.to_array iarray;;
[%%expect{|
- : int array = [|1; 2; 3; 4; 5|]
|}];;
Iarray.of_array mfarray;;
[%%expect{|
- : float iarray = [:1.5; 2.5; 3.5; 4.5; 5.5:]
|}];;
(* [Array] has an analog to [Iarray.to_array] *)
Array.of_iarray ifarray;;
[%%expect{|
- : float array = [|1.5; 2.5; 3.5; 4.5; 5.5|]
|}];;
(* [Array] has an analog to [Iarray.of_array] *)
Array.to_iarray marray;;
[%%expect{|
- : int iarray = [:1; 2; 3; 4; 5:]
|}];;
(* [Iarray.to_array] creates a fresh mutable array every time *)
Iarray.to_array iarray == marray;;
[%%expect{|
- : bool = false
|}];;
(* [Iarray.to_array] creates a fresh mutable array every time *)
Iarray.to_array ifarray == Iarray.to_array ifarray;;
[%%expect{|
- : bool = false
|}];;
(* Round-tripping from and to an [iarray] creates a fresh copy every time *)
Iarray.of_array (Iarray.to_array iarray) == iarray;;
[%%expect{|
- : bool = false
|}];;
let sum = ref 0. in
Iarray.iter (fun x -> sum := !sum +. x) ifarray;
!sum;;
[%%expect{|
- : float = 17.5
|}];;
let total = ref 0 in
Iarray.iteri (fun i x -> total := !total + i*x) iarray;
!total;;
[%%expect{|
- : int = 40
|}];;
Iarray.map Int.neg iarray;;
[%%expect{|
- : int iarray = [:-1; -2; -3; -4; -5:]
|}];;
Iarray.mapi (fun i x -> i, 10.*.x) ifarray;;
[%%expect{|
- : (int * float) iarray =
[:(0, 15.); (1, 25.); (2, 35.); (3, 45.); (4, 55.):]
|}];;
Iarray.fold_left (fun acc x -> -x :: acc) [] iarray;;
[%%expect{|
- : int list = [-5; -4; -3; -2; -1]
|}];;
Iarray.fold_left_map (fun acc x -> acc + x, string_of_int x) 0 iarray;;
[%%expect{|
- : int * string iarray = (15, [:"1"; "2"; "3"; "4"; "5":])
|}];;
(* Confirm the function isn't called on the empty immutable array *)
Iarray.fold_left_map (fun _ _ -> assert false) 0 [::];;
[%%expect{|
- : int * 'a iarray = (0, [::])
|}];;
Iarray.fold_right (fun x acc -> -.x :: acc) ifarray [];;
[%%expect{|
- : float list = [-1.5; -2.5; -3.5; -4.5; -5.5]
|}];;
let ints = ref 0 in
let floats = ref 0. in
Iarray.iter2
(fun i f ->
ints := i + !ints;
floats := f +. !floats)
iarray
ifarray;
!ints, !floats;;
[%%expect{|
- : int * float = (15, 17.5)
|}];;
Iarray.map2 (fun i f -> f, i) iarray ifarray;;
[%%expect{|
- : (float * int) iarray =
[:(1.5, 1); (2.5, 2); (3.5, 3); (4.5, 4); (5.5, 5):]
|}];;
Iarray.for_all (fun i -> i > 0) iarray;;
[%%expect{|
- : bool = true
|}];;
Iarray.for_all (fun f -> f < 5.) ifarray;;
[%%expect{|
- : bool = false
|}];;
Iarray.exists (fun f -> f < 5.) ifarray;;
[%%expect{|
- : bool = true
|}];;
Iarray.exists (fun i -> i > 10) iarray;;
[%%expect{|
- : bool = false
|}];;
Iarray.for_all2 (fun i f -> Float.of_int i < f) iarray ifarray;;
[%%expect{|
- : bool = true
|}];;
Iarray.for_all2 (fun f i -> i = 1 && f = 1.5) ifarray iarray;;
[%%expect{|
- : bool = false
|}];;
Iarray.exists2 (fun f i -> Float.of_int i +. f = 8.5) ifarray iarray;;
[%%expect{|
- : bool = true
|}];;
Iarray.exists2 (fun i f -> Float.of_int i > f) iarray ifarray;;
[%%expect{|
- : bool = false
|}];;
Iarray.mem 3 iarray, Iarray.mem 3.5 ifarray;;
[%%expect{|
- : bool * bool = (true, true)
|}];;
Iarray.mem 30 iarray, Iarray.mem 35. ifarray;;
[%%expect{|
- : bool * bool = (false, false)
|}];;
let x = ref 0 in
Iarray.memq x (Iarray.init 3 (Fun.const x));;
[%%expect{|
- : bool = true
|}];;
Iarray.memq (ref 0) (Iarray.init 3 (Fun.const (ref 0)))
[%%expect{|
- : bool = false
|}];;
Iarray.find_opt (fun x -> x*x > 5) iarray,
Iarray.find_opt (fun x -> x*.x > 5.) ifarray;;
[%%expect{|
- : int option * float option = (Some 3, Some 2.5)
|}];;
Iarray.find_opt (fun x -> x*x > 50) iarray,
Iarray.find_opt (fun x -> x*.x > 50.) ifarray;;
[%%expect{|
- : int option * float option = (None, None)
|}];;
Iarray.find_map (fun x -> if x mod 2 = 0
then Some (x / 2)
else None)
iarray,
Iarray.find_map (fun x -> if Float.rem x 2. = 0.5
then Some ((x -. 0.5) /. 2.)
else None)
ifarray;;
[%%expect{|
- : int option * float option = (Some 1, Some 1.)
|}];;
Iarray.find_map (fun x -> if x mod 7 = 0
then Some (x / 7)
else None)
iarray,
Iarray.find_map (fun x -> if Float.rem x 7. = 0.5
then Some ((x -. 0.5) /. 7.)
else None)
ifarray;;
[%%expect{|
- : int option * float option = (None, None)
|}];;
Iarray.split [: 1, "a"; 2, "b"; 3, "c" :];;
[%%expect{|
- : int iarray * string iarray = ([:1; 2; 3:], [:"a"; "b"; "c":])
|}];;
Iarray.split [::];;
[%%expect{|
- : 'a iarray * 'b iarray = ([::], [::])
|}];;
Iarray.combine iarray ifarray;;
[%%expect{|
- : (int * float) iarray =
[:(1, 1.5); (2, 2.5); (3, 3.5); (4, 4.5); (5, 5.5):]
|}];;
Iarray.combine [::] [::];;
[%%expect{|
- : ('a * 'b) iarray = [::]
|}];;
Iarray.combine iarray [: "wrong length" :];;
[%%expect{|
Exception: Invalid_argument "Array.combine".
|}];;
Iarray.sort (Fun.flip Int.compare) iarray,
Iarray.sort (Fun.flip Float.compare) ifarray;;
[%%expect{|
- : int iarray * Float.t iarray =
([:5; 4; 3; 2; 1:], [:5.5; 4.5; 3.5; 2.5; 1.5:])
|}];;
Iarray.stable_sort (Fun.flip Int.compare) iarray,
Iarray.stable_sort (Fun.flip Float.compare) ifarray;;
[%%expect{|
- : int iarray * Float.t iarray =
([:5; 4; 3; 2; 1:], [:5.5; 4.5; 3.5; 2.5; 1.5:])
|}];;
(* Check stability *)
Iarray.stable_sort
(fun s1 s2 -> Int.compare (String.length s1) (String.length s2))
[: "zero"; "one"; "two"; "three"; "four";
"five"; "six"; "seven"; "eight"; "nine";
"ten" :];;
[%%expect{|
- : string iarray =
[:"one"; "two"; "six"; "ten"; "zero"; "four"; "five"; "nine"; "three";
"seven"; "eight":]
|}];;
Iarray.fast_sort (Fun.flip Int.compare) iarray,
Iarray.fast_sort (Fun.flip Float.compare) ifarray;;
[%%expect{|
- : int iarray * Float.t iarray =
([:5; 4; 3; 2; 1:], [:5.5; 4.5; 3.5; 2.5; 1.5:])
|}];;
Iarray.to_seq iarray |> List.of_seq;;
[%%expect{|
- : int list = [1; 2; 3; 4; 5]
|}];;
Iarray.to_seqi ifarray |> List.of_seq;;
[%%expect{|
- : (int * float) list = [(0, 1.5); (1, 2.5); (2, 3.5); (3, 4.5); (4, 5.5)]
|}];;
["hello"; "world"] |> List.to_seq |> Iarray.of_seq;;
[%%expect{|
- : string iarray = [:"hello"; "world":]
|}];;
(** Confirm that we haven't edited the immutable arrays, and that editing
mutable siblings or copies does nothing *)
Array.fill marray 0 3 0;
marray;;
[%%expect{|
- : int array = [|0; 0; 0; 4; 5|]
|}];;
Array.fill (Iarray.to_array iarray) 3 2 10;
iarray;;
[%%expect{|
- : int iarray = [:1; 2; 3; 4; 5:]
|}];;
Array.fill mfarray 3 2 0.;
mfarray;;
[%%expect{|
- : float array = [|1.5; 2.5; 3.5; 0.; 0.|]
|}];;
Array.fill (Iarray.to_array ifarray) 0 3 10.;
ifarray;;
[%%expect{|
- : float iarray = [:1.5; 2.5; 3.5; 4.5; 5.5:]
|}];;
(* Confirm that nothing has changed *)
iarray;;
[%%expect{|
- : int iarray = [:1; 2; 3; 4; 5:]
|}];;
ifarray;;
[%%expect{|
- : float iarray = [:1.5; 2.5; 3.5; 4.5; 5.5:]
|}];;
| null | https://raw.githubusercontent.com/ocaml-flambda/flambda-backend/1d905beda2707698d36835ef3acd77a5992fc389/ocaml/testsuite/tests/lib-array/test_iarray.ml | ocaml | TEST
flags = "-extension immutable_arrays_experimental"
* expect
* Create some immutable and mutable arrays
* Pattern-match on some immutable arrays, and check the typing of array
patterns, both mutable and immutable
* Confirm that immutable and mutable arrays have the same representation, even
when they're different objects
* Confirm that immutable and mutable arrays don't collide
[Array] has an analog to [Iarray.to_array]
[Array] has an analog to [Iarray.of_array]
[Iarray.to_array] creates a fresh mutable array every time
[Iarray.to_array] creates a fresh mutable array every time
Round-tripping from and to an [iarray] creates a fresh copy every time
Confirm the function isn't called on the empty immutable array
Check stability
* Confirm that we haven't edited the immutable arrays, and that editing
mutable siblings or copies does nothing
Confirm that nothing has changed |
module Iarray = Stdlib__Iarray;;
external ( .:() ) : 'a iarray -> int -> 'a = "%array_safe_get";;
let iarray : int iarray = [:1;2;3;4;5:];;
let ifarray : float iarray = [:1.5;2.5;3.5;4.5;5.5:];;
let marray : int array = [|1;2;3;4;5|];;
let mfarray : float array = [|1.5;2.5;3.5;4.5;5.5|];;
[%%expect{|
module Iarray = Stdlib__Iarray
external ( .:() ) : 'a iarray -> int -> 'a = "%array_safe_get"
val iarray : int iarray = [:1; 2; 3; 4; 5:]
val ifarray : float iarray = [:1.5; 2.5; 3.5; 4.5; 5.5:]
val marray : int array = [|1; 2; 3; 4; 5|]
val mfarray : float array = [|1.5; 2.5; 3.5; 4.5; 5.5|]
|}];;
match iarray with
| [::] -> "empty"
| [:1;2;3;4;5:] -> "1--5"
| _ -> "who knows?"
;;
[%%expect{|
- : string = "1--5"
|}];;
match ifarray with
| [::] -> "empty"
| [:1.5;2.5;3.5;4.5;5.5:] -> "1.5--5.5"
| _ -> "who knows?"
;;
[%%expect{|
- : string = "1.5--5.5"
|}];;
match iarray with
| [::] -> "empty"
| [:1;2;3;4;6:] -> "1--5"
| _ -> "who knows?"
;;
[%%expect{|
- : string = "who knows?"
|}];;
match ifarray with
| [::] -> "empty"
| [:1;2;3;4;5:] -> "1--5"
| _ -> "who knows?"
;;
[%%expect{|
Line 3, characters 4-5:
3 | | [:1;2;3;4;5:] -> "1--5"
^
Error: This pattern matches values of type int
but a pattern was expected which matches values of type float
Hint: Did you mean `1.'?
|}];;
match marray with
| [::] -> "empty"
| [:1;2;3;4;5:] -> "1--5"
| _ -> "who knows?"
;;
[%%expect{|
Line 2, characters 2-6:
2 | | [::] -> "empty"
^^^^
Error: This pattern matches values of type 'a iarray
but a pattern was expected which matches values of type int array
|}];;
match iarray with
| [||] -> "empty"
| [|1;2;3;4;5|] -> "1--5"
| _ -> "who knows?"
;;
[%%expect{|
Line 2, characters 2-6:
2 | | [||] -> "empty"
^^^^
Error: This pattern matches values of type 'a array
but a pattern was expected which matches values of type int iarray
|}];;
Obj.repr iarray = Obj.repr marray;;
[%%expect{|
- : bool = true
|}];;
Obj.repr ifarray = Obj.repr mfarray;;
[%%expect{|
- : bool = true
|}];;
iarray == Obj.magic marray;;
[%%expect{|
- : bool = false
|}];;
ifarray == Obj.magic mfarray;;
[%%expect{|
- : bool = false
|}];;
Obj.repr iarray <> Obj.repr ifarray;;
[%%expect{|
- : bool = true
|}];;
Obj.repr marray <> Obj.repr mfarray;;
[%%expect{|
- : bool = true
|}];;
* Test basic functionality : One or a few tests for every function in [ ] .
We test both success and error cases , and in general try to have coverage of
edge cases . Comments are attached everywhere something subtle is being
checked .
We test both success and error cases, and in general try to have coverage of
edge cases. Comments are attached everywhere something subtle is being
checked. *)
Iarray.length iarray, Iarray.length ifarray;;
[%%expect{|
- : int * int = (5, 5)
|}];;
iarray.:(0), Iarray.get iarray 1, ifarray.:(2), Iarray.get ifarray 3;;
[%%expect{|
- : int * int * float * float = (1, 2, 3.5, 4.5)
|}];;
iarray.:(10)
[%%expect{|
Exception: Invalid_argument "index out of bounds".
|}];;
Iarray.get iarray (-1);;
[%%expect{|
Exception: Invalid_argument "index out of bounds".
|}];;
ifarray.:(-10);;
[%%expect{|
Exception: Invalid_argument "index out of bounds".
|}];;
Iarray.get ifarray 5;;
[%%expect{|
Exception: Invalid_argument "index out of bounds".
|}];;
Iarray.init 10 (fun x -> x * 2);;
[%%expect{|
- : int iarray = [:0; 2; 4; 6; 8; 10; 12; 14; 16; 18:]
|}];;
Iarray.append iarray iarray;;
[%%expect{|
- : int iarray = [:1; 2; 3; 4; 5; 1; 2; 3; 4; 5:]
|}];;
Iarray.concat [];;
[%%expect{|
- : 'a iarray = [::]
|}];;
Iarray.concat [ Iarray.init 1 (fun x -> 1 + x)
; Iarray.init 2 (fun x -> 20 + x)
; Iarray.init 3 (fun x -> 300 + x) ];;
[%%expect{|
- : int iarray = [:1; 20; 21; 300; 301; 302:]
|}];;
Iarray.sub iarray 0 2, Iarray.sub iarray 2 3;;
[%%expect{|
- : int iarray * int iarray = ([:1; 2:], [:3; 4; 5:])
|}];;
Iarray.sub iarray (-1) 3;;
[%%expect{|
Exception: Invalid_argument "Array.sub".
|}];;
Iarray.sub iarray 1 (-3);;
[%%expect{|
Exception: Invalid_argument "Array.sub".
|}];;
Iarray.sub iarray 3 10;;
[%%expect{|
Exception: Invalid_argument "Array.sub".
|}];;
Iarray.to_list iarray;;
[%%expect{|
- : int list = [1; 2; 3; 4; 5]
|}];;
Iarray.of_list [10;20;30];;
[%%expect{|
- : int iarray = [:10; 20; 30:]
|}];;
Iarray.to_array iarray;;
[%%expect{|
- : int array = [|1; 2; 3; 4; 5|]
|}];;
Iarray.of_array mfarray;;
[%%expect{|
- : float iarray = [:1.5; 2.5; 3.5; 4.5; 5.5:]
|}];;
Array.of_iarray ifarray;;
[%%expect{|
- : float array = [|1.5; 2.5; 3.5; 4.5; 5.5|]
|}];;
Array.to_iarray marray;;
[%%expect{|
- : int iarray = [:1; 2; 3; 4; 5:]
|}];;
Iarray.to_array iarray == marray;;
[%%expect{|
- : bool = false
|}];;
Iarray.to_array ifarray == Iarray.to_array ifarray;;
[%%expect{|
- : bool = false
|}];;
Iarray.of_array (Iarray.to_array iarray) == iarray;;
[%%expect{|
- : bool = false
|}];;
let sum = ref 0. in
Iarray.iter (fun x -> sum := !sum +. x) ifarray;
!sum;;
[%%expect{|
- : float = 17.5
|}];;
let total = ref 0 in
Iarray.iteri (fun i x -> total := !total + i*x) iarray;
!total;;
[%%expect{|
- : int = 40
|}];;
Iarray.map Int.neg iarray;;
[%%expect{|
- : int iarray = [:-1; -2; -3; -4; -5:]
|}];;
Iarray.mapi (fun i x -> i, 10.*.x) ifarray;;
[%%expect{|
- : (int * float) iarray =
[:(0, 15.); (1, 25.); (2, 35.); (3, 45.); (4, 55.):]
|}];;
Iarray.fold_left (fun acc x -> -x :: acc) [] iarray;;
[%%expect{|
- : int list = [-5; -4; -3; -2; -1]
|}];;
Iarray.fold_left_map (fun acc x -> acc + x, string_of_int x) 0 iarray;;
[%%expect{|
- : int * string iarray = (15, [:"1"; "2"; "3"; "4"; "5":])
|}];;
Iarray.fold_left_map (fun _ _ -> assert false) 0 [::];;
[%%expect{|
- : int * 'a iarray = (0, [::])
|}];;
Iarray.fold_right (fun x acc -> -.x :: acc) ifarray [];;
[%%expect{|
- : float list = [-1.5; -2.5; -3.5; -4.5; -5.5]
|}];;
let ints = ref 0 in
let floats = ref 0. in
Iarray.iter2
(fun i f ->
ints := i + !ints;
floats := f +. !floats)
iarray
ifarray;
!ints, !floats;;
[%%expect{|
- : int * float = (15, 17.5)
|}];;
Iarray.map2 (fun i f -> f, i) iarray ifarray;;
[%%expect{|
- : (float * int) iarray =
[:(1.5, 1); (2.5, 2); (3.5, 3); (4.5, 4); (5.5, 5):]
|}];;
Iarray.for_all (fun i -> i > 0) iarray;;
[%%expect{|
- : bool = true
|}];;
Iarray.for_all (fun f -> f < 5.) ifarray;;
[%%expect{|
- : bool = false
|}];;
Iarray.exists (fun f -> f < 5.) ifarray;;
[%%expect{|
- : bool = true
|}];;
Iarray.exists (fun i -> i > 10) iarray;;
[%%expect{|
- : bool = false
|}];;
Iarray.for_all2 (fun i f -> Float.of_int i < f) iarray ifarray;;
[%%expect{|
- : bool = true
|}];;
Iarray.for_all2 (fun f i -> i = 1 && f = 1.5) ifarray iarray;;
[%%expect{|
- : bool = false
|}];;
Iarray.exists2 (fun f i -> Float.of_int i +. f = 8.5) ifarray iarray;;
[%%expect{|
- : bool = true
|}];;
Iarray.exists2 (fun i f -> Float.of_int i > f) iarray ifarray;;
[%%expect{|
- : bool = false
|}];;
Iarray.mem 3 iarray, Iarray.mem 3.5 ifarray;;
[%%expect{|
- : bool * bool = (true, true)
|}];;
Iarray.mem 30 iarray, Iarray.mem 35. ifarray;;
[%%expect{|
- : bool * bool = (false, false)
|}];;
let x = ref 0 in
Iarray.memq x (Iarray.init 3 (Fun.const x));;
[%%expect{|
- : bool = true
|}];;
Iarray.memq (ref 0) (Iarray.init 3 (Fun.const (ref 0)))
[%%expect{|
- : bool = false
|}];;
Iarray.find_opt (fun x -> x*x > 5) iarray,
Iarray.find_opt (fun x -> x*.x > 5.) ifarray;;
[%%expect{|
- : int option * float option = (Some 3, Some 2.5)
|}];;
Iarray.find_opt (fun x -> x*x > 50) iarray,
Iarray.find_opt (fun x -> x*.x > 50.) ifarray;;
[%%expect{|
- : int option * float option = (None, None)
|}];;
Iarray.find_map (fun x -> if x mod 2 = 0
then Some (x / 2)
else None)
iarray,
Iarray.find_map (fun x -> if Float.rem x 2. = 0.5
then Some ((x -. 0.5) /. 2.)
else None)
ifarray;;
[%%expect{|
- : int option * float option = (Some 1, Some 1.)
|}];;
Iarray.find_map (fun x -> if x mod 7 = 0
then Some (x / 7)
else None)
iarray,
Iarray.find_map (fun x -> if Float.rem x 7. = 0.5
then Some ((x -. 0.5) /. 7.)
else None)
ifarray;;
[%%expect{|
- : int option * float option = (None, None)
|}];;
Iarray.split [: 1, "a"; 2, "b"; 3, "c" :];;
[%%expect{|
- : int iarray * string iarray = ([:1; 2; 3:], [:"a"; "b"; "c":])
|}];;
Iarray.split [::];;
[%%expect{|
- : 'a iarray * 'b iarray = ([::], [::])
|}];;
Iarray.combine iarray ifarray;;
[%%expect{|
- : (int * float) iarray =
[:(1, 1.5); (2, 2.5); (3, 3.5); (4, 4.5); (5, 5.5):]
|}];;
Iarray.combine [::] [::];;
[%%expect{|
- : ('a * 'b) iarray = [::]
|}];;
Iarray.combine iarray [: "wrong length" :];;
[%%expect{|
Exception: Invalid_argument "Array.combine".
|}];;
Iarray.sort (Fun.flip Int.compare) iarray,
Iarray.sort (Fun.flip Float.compare) ifarray;;
[%%expect{|
- : int iarray * Float.t iarray =
([:5; 4; 3; 2; 1:], [:5.5; 4.5; 3.5; 2.5; 1.5:])
|}];;
Iarray.stable_sort (Fun.flip Int.compare) iarray,
Iarray.stable_sort (Fun.flip Float.compare) ifarray;;
[%%expect{|
- : int iarray * Float.t iarray =
([:5; 4; 3; 2; 1:], [:5.5; 4.5; 3.5; 2.5; 1.5:])
|}];;
Iarray.stable_sort
(fun s1 s2 -> Int.compare (String.length s1) (String.length s2))
[: "zero"; "one"; "two"; "three"; "four";
"five"; "six"; "seven"; "eight"; "nine";
"ten" :];;
[%%expect{|
- : string iarray =
[:"one"; "two"; "six"; "ten"; "zero"; "four"; "five"; "nine"; "three";
"seven"; "eight":]
|}];;
Iarray.fast_sort (Fun.flip Int.compare) iarray,
Iarray.fast_sort (Fun.flip Float.compare) ifarray;;
[%%expect{|
- : int iarray * Float.t iarray =
([:5; 4; 3; 2; 1:], [:5.5; 4.5; 3.5; 2.5; 1.5:])
|}];;
Iarray.to_seq iarray |> List.of_seq;;
[%%expect{|
- : int list = [1; 2; 3; 4; 5]
|}];;
Iarray.to_seqi ifarray |> List.of_seq;;
[%%expect{|
- : (int * float) list = [(0, 1.5); (1, 2.5); (2, 3.5); (3, 4.5); (4, 5.5)]
|}];;
["hello"; "world"] |> List.to_seq |> Iarray.of_seq;;
[%%expect{|
- : string iarray = [:"hello"; "world":]
|}];;
Array.fill marray 0 3 0;
marray;;
[%%expect{|
- : int array = [|0; 0; 0; 4; 5|]
|}];;
Array.fill (Iarray.to_array iarray) 3 2 10;
iarray;;
[%%expect{|
- : int iarray = [:1; 2; 3; 4; 5:]
|}];;
Array.fill mfarray 3 2 0.;
mfarray;;
[%%expect{|
- : float array = [|1.5; 2.5; 3.5; 0.; 0.|]
|}];;
Array.fill (Iarray.to_array ifarray) 0 3 10.;
ifarray;;
[%%expect{|
- : float iarray = [:1.5; 2.5; 3.5; 4.5; 5.5:]
|}];;
iarray;;
[%%expect{|
- : int iarray = [:1; 2; 3; 4; 5:]
|}];;
ifarray;;
[%%expect{|
- : float iarray = [:1.5; 2.5; 3.5; 4.5; 5.5:]
|}];;
|
da124b8ffaa3d3507317b2166f3a344cd4f08d8619e748577a03ecc0ef05806d | chrovis/cljam | sam.clj | (ns cljam.io.sam
"Functions to read and write the SAM (Sequence Alignment/Map) format and BAM
(its binary equivalent). See -specs/ for the
detail SAM/BAM specifications."
(:refer-clojure :exclude [indexed?])
(:require [cljam.io.sam.reader :as sam-reader]
[cljam.io.sam.writer :as sam-writer]
[cljam.io.bam.core :as bam-core]
[cljam.io.protocols :as protocols]
[cljam.io.util :as io-util])
(:import java.io.Closeable
cljam.io.sam.reader.SAMReader
cljam.io.sam.writer.SAMWriter
cljam.io.bam.reader.BAMReader
cljam.io.bam.writer.BAMWriter))
;; Reading
;; -------
(defn ^SAMReader sam-reader
"Returns an open cljam.io.sam.reader.SAMReader of f. Should be used inside
with-open to ensure the reader is properly closed."
[f]
(sam-reader/reader f))
(defn ^BAMReader bam-reader
"Returns an open cljam.io.bam.reader.BAMReader of f. Should be used inside
with-open to ensure the reader is properly closed."
[f]
(bam-core/reader f))
(defn ^BAMReader clone-bam-reader
"Clones bam reader sharing persistent objects."
[r]
(bam-core/clone-reader r))
(defn ^Closeable reader
"Selects suitable reader from f's extension, returning the reader. Opens a new
reader if the arg represents a file such as String path, java.io.File, or
java.net.URL. If a reader is given, clones the reader. This function supports
SAM and BAM formats."
[f]
(if (io-util/bam-reader? f)
(clone-bam-reader f)
(case (try
(io-util/file-type f)
(catch IllegalArgumentException _
(io-util/file-type-from-contents f)))
:sam (sam-reader f)
:bam (bam-reader f)
(throw (IllegalArgumentException. "Invalid source type")))))
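;; Illustrative use of the reader API above (the file name is hypothetical):
;; (with-open [r (reader "example.bam")]
;;   (doall (take 5 (read-alignments r))))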
(defn read-header
"Returns header of the SAM/BAM file."
[rdr]
(protocols/read-header rdr))
(defn read-refs
"Returns references of the SAM/BAM file."
[rdr]
(protocols/read-refs rdr))
(defn read-alignments
"Reads alignments of the SAM/BAM file, returning the alignments as an eduction."
([rdr] (protocols/read-alignments rdr))
([rdr region] (protocols/read-alignments rdr region)))
(defn read-blocks
"Reads alignment blocks of the SAM/BAM file, returning the blocks as an eduction."
([rdr] (protocols/read-blocks rdr))
([rdr region] (protocols/read-blocks rdr region))
([rdr region option] (protocols/read-blocks rdr region option)))
(defn indexed?
"Returns true if the reader can be randomly accessed, false if not. Note this
function immediately realizes a delayed index."
[rdr]
(protocols/indexed? rdr))
;; Writing
;; -------
(defn ^SAMWriter sam-writer
"Returns an open cljam.io.sam.writer.SAMWriter of f. Should be used inside
with-open to ensure the writer is properly closed."
[f]
(sam-writer/writer f))
(defn ^BAMWriter bam-writer
"Returns an open cljam.io.bam.writer.BAMWriter of f. Should be used inside
with-open to ensure the writer is properly closed."
([f]
(bam-writer f false))
([f create-index?]
(bam-core/writer f create-index?)))
(defn ^Closeable writer
"Selects suitable writer from f's extension, returning the writer. This
function supports SAM and BAM format."
([f]
(writer f false))
([f create-index?]
(case (io-util/file-type f)
:sam (if create-index?
(throw (ex-info "SAM file indexing is not implemented." {}))
(sam-writer f))
:bam (bam-writer f create-index?)
(throw (IllegalArgumentException. "Invalid file type")))))
(defn write-header
"Writes header to the SAM/BAM file."
[wtr header]
(protocols/write-header wtr header))
(defn write-refs
"Writes references to the SAM/BAM file."
[wtr header]
(protocols/write-refs wtr header))
(defn write-alignments
"Writes alignments to the SAM/BAM file."
[wtr alignments header]
(protocols/write-alignments wtr alignments header))
(defn write-blocks
"Writes alignment blocks of the SAM/BAM file."
[wtr blocks]
(protocols/write-blocks wtr blocks))
| null | https://raw.githubusercontent.com/chrovis/cljam/2b8e7386765be8efdbbbb4f18dbc52447f4a08af/src/cljam/io/sam.clj | clojure | Reading
-------
Writing
------- | (ns cljam.io.sam
"Functions to read and write the SAM (Sequence Alignment/Map) format and BAM
(its binary equivalent). See -specs/ for the
detail SAM/BAM specifications."
(:refer-clojure :exclude [indexed?])
(:require [cljam.io.sam.reader :as sam-reader]
[cljam.io.sam.writer :as sam-writer]
[cljam.io.bam.core :as bam-core]
[cljam.io.protocols :as protocols]
[cljam.io.util :as io-util])
(:import java.io.Closeable
cljam.io.sam.reader.SAMReader
cljam.io.sam.writer.SAMWriter
cljam.io.bam.reader.BAMReader
cljam.io.bam.writer.BAMWriter))
(defn ^SAMReader sam-reader
"Returns an open cljam.io.sam.reader.SAMReader of f. Should be used inside
with-open to ensure the reader is properly closed."
[f]
(sam-reader/reader f))
(defn ^BAMReader bam-reader
"Returns an open cljam.io.bam.reader.BAMReader of f. Should be used inside
with-open to ensure the reader is properly closed."
[f]
(bam-core/reader f))
(defn ^BAMReader clone-bam-reader
"Clones bam reader sharing persistent objects."
[r]
(bam-core/clone-reader r))
(defn ^Closeable reader
"Selects suitable reader from f's extension, returning the reader. Opens a new
reader if the arg represents a file such as String path, java.io.File, or
java.net.URL. If a reader is given, clones the reader. This function supports
SAM and BAM formats."
[f]
(if (io-util/bam-reader? f)
(clone-bam-reader f)
(case (try
(io-util/file-type f)
(catch IllegalArgumentException _
(io-util/file-type-from-contents f)))
:sam (sam-reader f)
:bam (bam-reader f)
(throw (IllegalArgumentException. "Invalid source type")))))
(defn read-header
"Returns header of the SAM/BAM file."
[rdr]
(protocols/read-header rdr))
(defn read-refs
"Returns references of the SAM/BAM file."
[rdr]
(protocols/read-refs rdr))
(defn read-alignments
"Reads alignments of the SAM/BAM file, returning the alignments as an eduction."
([rdr] (protocols/read-alignments rdr))
([rdr region] (protocols/read-alignments rdr region)))
(defn read-blocks
"Reads alignment blocks of the SAM/BAM file, returning the blocks as an eduction."
([rdr] (protocols/read-blocks rdr))
([rdr region] (protocols/read-blocks rdr region))
([rdr region option] (protocols/read-blocks rdr region option)))
(defn indexed?
"Returns true if the reader can be randomly accessed, false if not. Note this
function immediately realizes a delayed index."
[rdr]
(protocols/indexed? rdr))
(defn ^SAMWriter sam-writer
"Returns an open cljam.io.sam.writer.SAMWriter of f. Should be used inside
with-open to ensure the writer is properly closed."
[f]
(sam-writer/writer f))
(defn ^BAMWriter bam-writer
"Returns an open cljam.io.bam.writer.BAMWriter of f. Should be used inside
with-open to ensure the writer is properly closed."
([f]
(bam-writer f false))
([f create-index?]
(bam-core/writer f create-index?)))
(defn ^Closeable writer
"Selects suitable writer from f's extension, returning the writer. This
function supports SAM and BAM format."
([f]
(writer f false))
([f create-index?]
(case (io-util/file-type f)
:sam (if create-index?
(throw (ex-info "SAM file indexing is not implemented." {}))
(sam-writer f))
:bam (bam-writer f create-index?)
(throw (IllegalArgumentException. "Invalid file type")))))
(defn write-header
"Writes header to the SAM/BAM file."
[wtr header]
(protocols/write-header wtr header))
(defn write-refs
"Writes references to the SAM/BAM file."
[wtr header]
(protocols/write-refs wtr header))
(defn write-alignments
"Writes alignments to the SAM/BAM file."
[wtr alignments header]
(protocols/write-alignments wtr alignments header))
(defn write-blocks
"Writes alignment blocks of the SAM/BAM file."
[wtr blocks]
(protocols/write-blocks wtr blocks))
|
a6fb09140b34da1c8d390105c41071df71afee8ef6d437f985e54a50946a6380 | haskell-tools/haskell-tools | MixedInstance.hs | # LANGUAGE TypeFamilies , MultiParamTypeClasses #
module Decl.MixedInstance where
data Canvas = Canvas
data V2 = V2
class Backend c v e where
data Options c v e :: *
instance Backend Canvas V2 Double where
data Options Canvas V2 Double = CanvasOptions
| null | https://raw.githubusercontent.com/haskell-tools/haskell-tools/b1189ab4f63b29bbf1aa14af4557850064931e32/src/refactor/examples/Decl/MixedInstance.hs | haskell | # LANGUAGE TypeFamilies , MultiParamTypeClasses #
module Decl.MixedInstance where
data Canvas = Canvas
data V2 = V2
class Backend c v e where
data Options c v e :: *
instance Backend Canvas V2 Double where
data Options Canvas V2 Double = CanvasOptions
|
|
4ab63b0c6d0dba6da749ac2a2862b7f85e69f1aeb56d20e52a2759777649e8f7 | bmeurer/ocaml-arm | typedtree.mli | (***********************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
(* *)
(***********************************************************************)
$ Id$
(* Abstract syntax tree after typing *)
open Asttypes
open Types
(* Value expressions for the core language *)
type partial = Partial | Total
type optional = Required | Optional
type pattern =
{ pat_desc: pattern_desc;
pat_loc: Location.t;
pat_extra : (pat_extra * Location.t) list;
pat_type: type_expr;
mutable pat_env: Env.t }
and pat_extra =
| Tpat_constraint of core_type
| Tpat_type of Path.t * Longident.t loc
| Tpat_unpack
and pattern_desc =
Tpat_any
| Tpat_var of Ident.t * string loc
| Tpat_alias of pattern * Ident.t * string loc
| Tpat_constant of constant
| Tpat_tuple of pattern list
| Tpat_construct of
Path.t * Longident.t loc * constructor_description * pattern list * bool
| Tpat_variant of label * pattern option * row_desc ref
| Tpat_record of
(Path.t * Longident.t loc * label_description * pattern) list *
closed_flag
| Tpat_array of pattern list
| Tpat_or of pattern * pattern * row_desc option
| Tpat_lazy of pattern
and expression =
{ exp_desc: expression_desc;
exp_loc: Location.t;
exp_extra : (exp_extra * Location.t) list;
exp_type: type_expr;
exp_env: Env.t }
and exp_extra =
| Texp_constraint of core_type option * core_type option
| Texp_open of Path.t * Longident.t loc * Env.t
| Texp_poly of core_type option
| Texp_newtype of string
and expression_desc =
Texp_ident of Path.t * Longident.t loc * Types.value_description
| Texp_constant of constant
| Texp_let of rec_flag * (pattern * expression) list * expression
| Texp_function of label * (pattern * expression) list * partial
| Texp_apply of expression * (label * expression option * optional) list
| Texp_match of expression * (pattern * expression) list * partial
| Texp_try of expression * (pattern * expression) list
| Texp_tuple of expression list
| Texp_construct of
Path.t * Longident.t loc * constructor_description * expression list *
bool
| Texp_variant of label * expression option
| Texp_record of
(Path.t * Longident.t loc * label_description * expression) list *
expression option
| Texp_field of expression * Path.t * Longident.t loc * label_description
| Texp_setfield of
expression * Path.t * Longident.t loc * label_description * expression
| Texp_array of expression list
| Texp_ifthenelse of expression * expression * expression option
| Texp_sequence of expression * expression
| Texp_while of expression * expression
| Texp_for of
Ident.t * string loc * expression * expression * direction_flag *
expression
| Texp_when of expression * expression
| Texp_send of expression * meth * expression option
| Texp_new of Path.t * Longident.t loc * Types.class_declaration
| Texp_instvar of Path.t * Path.t * string loc
| Texp_setinstvar of Path.t * Path.t * string loc * expression
| Texp_override of Path.t * (Path.t * string loc * expression) list
| Texp_letmodule of Ident.t * string loc * module_expr * expression
| Texp_assert of expression
| Texp_assertfalse
| Texp_lazy of expression
| Texp_object of class_structure * string list
| Texp_pack of module_expr
and meth =
Tmeth_name of string
| Tmeth_val of Ident.t
(* Value expressions for the class language *)
and class_expr =
{ cl_desc: class_expr_desc;
cl_loc: Location.t;
cl_type: Types.class_type;
cl_env: Env.t }
and class_expr_desc =
Tcl_ident of Path.t * Longident.t loc * core_type list
| Tcl_structure of class_structure
| Tcl_fun of
label * pattern * (Ident.t * string loc * expression) list * class_expr *
partial
| Tcl_apply of class_expr * (label * expression option * optional) list
| Tcl_let of rec_flag * (pattern * expression) list *
(Ident.t * string loc * expression) list * class_expr
| Tcl_constraint of
class_expr * class_type option * string list * string list * Concr.t
(* Visible instance variables, methods and concretes methods *)
and class_structure =
{ cstr_pat : pattern;
cstr_fields: class_field list;
cstr_type : Types.class_signature;
cstr_meths: Ident.t Meths.t }
and class_field =
{
cf_desc : class_field_desc;
cf_loc : Location.t;
}
and class_field_kind =
Tcfk_virtual of core_type
| Tcfk_concrete of expression
and class_field_desc =
Tcf_inher of
override_flag * class_expr * string option * (string * Ident.t) list *
(string * Ident.t) list
(* Inherited instance variables and concrete methods *)
| Tcf_val of
string * string loc * mutable_flag * Ident.t * class_field_kind * bool
(* None = virtual, true = override *)
| Tcf_meth of string * string loc * private_flag * class_field_kind * bool
| Tcf_constr of core_type * core_type
| Tcf_let of rec_flag * ( pattern * expression ) list *
( Ident.t * string loc * expression ) list
(Ident.t * string loc * expression) list *)
| Tcf_init of expression
(* Value expressions for the module language *)
and module_expr =
{ mod_desc: module_expr_desc;
mod_loc: Location.t;
mod_type: Types.module_type;
mod_env: Env.t }
and module_type_constraint =
Tmodtype_implicit
| Tmodtype_explicit of module_type
and module_expr_desc =
Tmod_ident of Path.t * Longident.t loc
| Tmod_structure of structure
| Tmod_functor of Ident.t * string loc * module_type * module_expr
| Tmod_apply of module_expr * module_expr * module_coercion
| Tmod_constraint of
module_expr * Types.module_type * module_type_constraint * module_coercion
| Tmod_unpack of expression * Types.module_type
and structure = {
str_items : structure_item list;
str_type : Types.signature;
str_final_env : Env.t;
}
and structure_item =
{ str_desc : structure_item_desc;
str_loc : Location.t;
str_env : Env.t
}
and structure_item_desc =
Tstr_eval of expression
| Tstr_value of rec_flag * (pattern * expression) list
| Tstr_primitive of Ident.t * string loc * value_description
| Tstr_type of (Ident.t * string loc * type_declaration) list
| Tstr_exception of Ident.t * string loc * exception_declaration
| Tstr_exn_rebind of Ident.t * string loc * Path.t * Longident.t loc
| Tstr_module of Ident.t * string loc * module_expr
| Tstr_recmodule of (Ident.t * string loc * module_type * module_expr) list
| Tstr_modtype of Ident.t * string loc * module_type
| Tstr_open of Path.t * Longident.t loc
| Tstr_class of (class_declaration * string list * virtual_flag) list
| Tstr_class_type of (Ident.t * string loc * class_type_declaration) list
| Tstr_include of module_expr * Ident.t list
and module_coercion =
Tcoerce_none
| Tcoerce_structure of (int * module_coercion) list
| Tcoerce_functor of module_coercion * module_coercion
| Tcoerce_primitive of Primitive.description
and module_type =
{ mty_desc: module_type_desc;
mty_type : Types.module_type;
mty_env : Env.t;
mty_loc: Location.t }
and module_type_desc =
Tmty_ident of Path.t * Longident.t loc
| Tmty_signature of signature
| Tmty_functor of Ident.t * string loc * module_type * module_type
| Tmty_with of module_type * (Path.t * Longident.t loc * with_constraint) list
| Tmty_typeof of module_expr
and signature = {
sig_items : signature_item list;
sig_type : Types.signature;
sig_final_env : Env.t;
}
and signature_item =
{ sig_desc: signature_item_desc;
sig_env : Env.t; (* BINANNOT ADDED *)
sig_loc: Location.t }
and signature_item_desc =
Tsig_value of Ident.t * string loc * value_description
| Tsig_type of (Ident.t * string loc * type_declaration) list
| Tsig_exception of Ident.t * string loc * exception_declaration
| Tsig_module of Ident.t * string loc * module_type
| Tsig_recmodule of (Ident.t * string loc * module_type) list
| Tsig_modtype of Ident.t * string loc * modtype_declaration
| Tsig_open of Path.t * Longident.t loc
| Tsig_include of module_type * Types.signature
| Tsig_class of class_description list
| Tsig_class_type of class_type_declaration list
and modtype_declaration =
Tmodtype_abstract
| Tmodtype_manifest of module_type
and with_constraint =
Twith_type of type_declaration
| Twith_module of Path.t * Longident.t loc
| Twith_typesubst of type_declaration
| Twith_modsubst of Path.t * Longident.t loc
and core_type =
(* mutable because of [Typeclass.declare_method] *)
{ mutable ctyp_desc : core_type_desc;
mutable ctyp_type : type_expr;
ctyp_env : Env.t; (* BINANNOT ADDED *)
ctyp_loc : Location.t }
and core_type_desc =
Ttyp_any
| Ttyp_var of string
| Ttyp_arrow of label * core_type * core_type
| Ttyp_tuple of core_type list
| Ttyp_constr of Path.t * Longident.t loc * core_type list
| Ttyp_object of core_field_type list
| Ttyp_class of Path.t * Longident.t loc * core_type list * label list
| Ttyp_alias of core_type * string
| Ttyp_variant of row_field list * bool * label list option
| Ttyp_poly of string list * core_type
| Ttyp_package of package_type
and package_type = {
pack_name : Path.t;
pack_fields : (Longident.t loc * core_type) list;
pack_type : Types.module_type;
pack_txt : Longident.t loc;
}
and core_field_type =
{ field_desc: core_field_desc;
field_loc: Location.t }
and core_field_desc =
Tcfield of string * core_type
| Tcfield_var
and row_field =
Ttag of label * bool * core_type list
| Tinherit of core_type
and value_description =
{ val_desc : core_type;
val_val : Types.value_description;
val_prim : string list;
val_loc : Location.t;
}
and type_declaration =
{ typ_params: string loc option list;
typ_type : Types.type_declaration;
typ_cstrs: (core_type * core_type * Location.t) list;
typ_kind: type_kind;
typ_private: private_flag;
typ_manifest: core_type option;
typ_variance: (bool * bool) list;
typ_loc: Location.t }
and type_kind =
Ttype_abstract
| Ttype_variant of (Ident.t * string loc * core_type list * Location.t) list
| Ttype_record of
(Ident.t * string loc * mutable_flag * core_type * Location.t) list
and exception_declaration =
{ exn_params : core_type list;
exn_exn : Types.exception_declaration;
exn_loc : Location.t }
and class_type =
{ cltyp_desc: class_type_desc;
cltyp_type : Types.class_type;
cltyp_env : Env.t; (* BINANNOT ADDED *)
cltyp_loc: Location.t }
and class_type_desc =
Tcty_constr of Path.t * Longident.t loc * core_type list
| Tcty_signature of class_signature
| Tcty_fun of label * core_type * class_type
and class_signature = {
csig_self : core_type;
csig_fields : class_type_field list;
csig_type : Types.class_signature;
csig_loc : Location.t;
}
and class_type_field = {
ctf_desc : class_type_field_desc;
ctf_loc : Location.t;
}
and class_type_field_desc =
Tctf_inher of class_type
| Tctf_val of (string * mutable_flag * virtual_flag * core_type)
| Tctf_virt of (string * private_flag * core_type)
| Tctf_meth of (string * private_flag * core_type)
| Tctf_cstr of (core_type * core_type)
and class_declaration =
class_expr class_infos
and class_description =
class_type class_infos
and class_type_declaration =
class_type class_infos
and 'a class_infos =
{ ci_virt: virtual_flag;
ci_params: string loc list * Location.t;
ci_id_name : string loc;
ci_id_class: Ident.t;
ci_id_class_type : Ident.t;
ci_id_object : Ident.t;
ci_id_typesharp : Ident.t;
ci_expr: 'a;
ci_decl: Types.class_declaration;
ci_type_decl : Types.class_type_declaration;
ci_variance: (bool * bool) list;
ci_loc: Location.t }
(* Auxiliary functions over the a.s.t. *)
val iter_pattern_desc: (pattern -> unit) -> pattern_desc -> unit
val map_pattern_desc: (pattern -> pattern) -> pattern_desc -> pattern_desc
val let_bound_idents: (pattern * expression) list -> Ident.t list
val rev_let_bound_idents: (pattern * expression) list -> Ident.t list
val pat_bound_idents: pattern -> Ident.t list
val let_bound_idents_with_loc:
(pattern * expression) list -> (Ident.t * string loc) list
val rev_let_bound_idents_with_loc:
(pattern * expression) list -> (Ident.t * string loc) list
(* Alpha conversion of patterns *)
val alpha_pat: (Ident.t * Ident.t) list -> pattern -> pattern
val mknoloc: 'a -> 'a Asttypes.loc
val mkloc: 'a -> Location.t -> 'a Asttypes.loc
val pat_bound_idents: pattern -> (Ident.t * string Asttypes.loc) list
| null | https://raw.githubusercontent.com/bmeurer/ocaml-arm/43f7689c76a349febe3d06ae7a4fc1d52984fd8b/typing/typedtree.mli | ocaml | *********************************************************************
OCaml
*********************************************************************
Abstract syntax tree after typing
Value expressions for the core language
Value expressions for the class language
Visible instance variables, methods and concretes methods
Inherited instance variables and concrete methods
None = virtual, true = override
Value expressions for the module language
BINANNOT ADDED
mutable because of [Typeclass.declare_method]
BINANNOT ADDED
BINANNOT ADDED
Auxiliary functions over the a.s.t.
Alpha conversion of patterns | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
$ Id$
open Asttypes
open Types
type partial = Partial | Total
type optional = Required | Optional
type pattern =
{ pat_desc: pattern_desc;
pat_loc: Location.t;
pat_extra : (pat_extra * Location.t) list;
pat_type: type_expr;
mutable pat_env: Env.t }
and pat_extra =
| Tpat_constraint of core_type
| Tpat_type of Path.t * Longident.t loc
| Tpat_unpack
and pattern_desc =
Tpat_any
| Tpat_var of Ident.t * string loc
| Tpat_alias of pattern * Ident.t * string loc
| Tpat_constant of constant
| Tpat_tuple of pattern list
| Tpat_construct of
Path.t * Longident.t loc * constructor_description * pattern list * bool
| Tpat_variant of label * pattern option * row_desc ref
| Tpat_record of
(Path.t * Longident.t loc * label_description * pattern) list *
closed_flag
| Tpat_array of pattern list
| Tpat_or of pattern * pattern * row_desc option
| Tpat_lazy of pattern
and expression =
{ exp_desc: expression_desc;
exp_loc: Location.t;
exp_extra : (exp_extra * Location.t) list;
exp_type: type_expr;
exp_env: Env.t }
and exp_extra =
| Texp_constraint of core_type option * core_type option
| Texp_open of Path.t * Longident.t loc * Env.t
| Texp_poly of core_type option
| Texp_newtype of string
and expression_desc =
Texp_ident of Path.t * Longident.t loc * Types.value_description
| Texp_constant of constant
| Texp_let of rec_flag * (pattern * expression) list * expression
| Texp_function of label * (pattern * expression) list * partial
| Texp_apply of expression * (label * expression option * optional) list
| Texp_match of expression * (pattern * expression) list * partial
| Texp_try of expression * (pattern * expression) list
| Texp_tuple of expression list
| Texp_construct of
Path.t * Longident.t loc * constructor_description * expression list *
bool
| Texp_variant of label * expression option
| Texp_record of
(Path.t * Longident.t loc * label_description * expression) list *
expression option
| Texp_field of expression * Path.t * Longident.t loc * label_description
| Texp_setfield of
expression * Path.t * Longident.t loc * label_description * expression
| Texp_array of expression list
| Texp_ifthenelse of expression * expression * expression option
| Texp_sequence of expression * expression
| Texp_while of expression * expression
| Texp_for of
Ident.t * string loc * expression * expression * direction_flag *
expression
| Texp_when of expression * expression
| Texp_send of expression * meth * expression option
| Texp_new of Path.t * Longident.t loc * Types.class_declaration
| Texp_instvar of Path.t * Path.t * string loc
| Texp_setinstvar of Path.t * Path.t * string loc * expression
| Texp_override of Path.t * (Path.t * string loc * expression) list
| Texp_letmodule of Ident.t * string loc * module_expr * expression
| Texp_assert of expression
| Texp_assertfalse
| Texp_lazy of expression
| Texp_object of class_structure * string list
| Texp_pack of module_expr
and meth =
Tmeth_name of string
| Tmeth_val of Ident.t
and class_expr =
{ cl_desc: class_expr_desc;
cl_loc: Location.t;
cl_type: Types.class_type;
cl_env: Env.t }
and class_expr_desc =
Tcl_ident of Path.t * Longident.t loc * core_type list
| Tcl_structure of class_structure
| Tcl_fun of
label * pattern * (Ident.t * string loc * expression) list * class_expr *
partial
| Tcl_apply of class_expr * (label * expression option * optional) list
| Tcl_let of rec_flag * (pattern * expression) list *
(Ident.t * string loc * expression) list * class_expr
| Tcl_constraint of
class_expr * class_type option * string list * string list * Concr.t
and class_structure =
{ cstr_pat : pattern;
cstr_fields: class_field list;
cstr_type : Types.class_signature;
cstr_meths: Ident.t Meths.t }
and class_field =
{
cf_desc : class_field_desc;
cf_loc : Location.t;
}
and class_field_kind =
Tcfk_virtual of core_type
| Tcfk_concrete of expression
and class_field_desc =
Tcf_inher of
override_flag * class_expr * string option * (string * Ident.t) list *
(string * Ident.t) list
| Tcf_val of
string * string loc * mutable_flag * Ident.t * class_field_kind * bool
| Tcf_meth of string * string loc * private_flag * class_field_kind * bool
| Tcf_constr of core_type * core_type
| Tcf_let of rec_flag * ( pattern * expression ) list *
( Ident.t * string loc * expression ) list
(Ident.t * string loc * expression) list *)
| Tcf_init of expression
and module_expr =
{ mod_desc: module_expr_desc;
mod_loc: Location.t;
mod_type: Types.module_type;
mod_env: Env.t }
and module_type_constraint =
Tmodtype_implicit
| Tmodtype_explicit of module_type
and module_expr_desc =
Tmod_ident of Path.t * Longident.t loc
| Tmod_structure of structure
| Tmod_functor of Ident.t * string loc * module_type * module_expr
| Tmod_apply of module_expr * module_expr * module_coercion
| Tmod_constraint of
module_expr * Types.module_type * module_type_constraint * module_coercion
| Tmod_unpack of expression * Types.module_type
and structure = {
str_items : structure_item list;
str_type : Types.signature;
str_final_env : Env.t;
}
and structure_item =
{ str_desc : structure_item_desc;
str_loc : Location.t;
str_env : Env.t
}
and structure_item_desc =
Tstr_eval of expression
| Tstr_value of rec_flag * (pattern * expression) list
| Tstr_primitive of Ident.t * string loc * value_description
| Tstr_type of (Ident.t * string loc * type_declaration) list
| Tstr_exception of Ident.t * string loc * exception_declaration
| Tstr_exn_rebind of Ident.t * string loc * Path.t * Longident.t loc
| Tstr_module of Ident.t * string loc * module_expr
| Tstr_recmodule of (Ident.t * string loc * module_type * module_expr) list
| Tstr_modtype of Ident.t * string loc * module_type
| Tstr_open of Path.t * Longident.t loc
| Tstr_class of (class_declaration * string list * virtual_flag) list
| Tstr_class_type of (Ident.t * string loc * class_type_declaration) list
| Tstr_include of module_expr * Ident.t list
and module_coercion =
Tcoerce_none
| Tcoerce_structure of (int * module_coercion) list
| Tcoerce_functor of module_coercion * module_coercion
| Tcoerce_primitive of Primitive.description
and module_type =
{ mty_desc: module_type_desc;
mty_type : Types.module_type;
mty_env : Env.t;
mty_loc: Location.t }
and module_type_desc =
Tmty_ident of Path.t * Longident.t loc
| Tmty_signature of signature
| Tmty_functor of Ident.t * string loc * module_type * module_type
| Tmty_with of module_type * (Path.t * Longident.t loc * with_constraint) list
| Tmty_typeof of module_expr
and signature = {
sig_items : signature_item list;
sig_type : Types.signature;
sig_final_env : Env.t;
}
and signature_item =
{ sig_desc: signature_item_desc;
sig_loc: Location.t }
and signature_item_desc =
Tsig_value of Ident.t * string loc * value_description
| Tsig_type of (Ident.t * string loc * type_declaration) list
| Tsig_exception of Ident.t * string loc * exception_declaration
| Tsig_module of Ident.t * string loc * module_type
| Tsig_recmodule of (Ident.t * string loc * module_type) list
| Tsig_modtype of Ident.t * string loc * modtype_declaration
| Tsig_open of Path.t * Longident.t loc
| Tsig_include of module_type * Types.signature
| Tsig_class of class_description list
| Tsig_class_type of class_type_declaration list
and modtype_declaration =
Tmodtype_abstract
| Tmodtype_manifest of module_type
and with_constraint =
Twith_type of type_declaration
| Twith_module of Path.t * Longident.t loc
| Twith_typesubst of type_declaration
| Twith_modsubst of Path.t * Longident.t loc
and core_type =
{ mutable ctyp_desc : core_type_desc;
mutable ctyp_type : type_expr;
ctyp_loc : Location.t }
and core_type_desc =
Ttyp_any
| Ttyp_var of string
| Ttyp_arrow of label * core_type * core_type
| Ttyp_tuple of core_type list
| Ttyp_constr of Path.t * Longident.t loc * core_type list
| Ttyp_object of core_field_type list
| Ttyp_class of Path.t * Longident.t loc * core_type list * label list
| Ttyp_alias of core_type * string
| Ttyp_variant of row_field list * bool * label list option
| Ttyp_poly of string list * core_type
| Ttyp_package of package_type
and package_type = {
pack_name : Path.t;
pack_fields : (Longident.t loc * core_type) list;
pack_type : Types.module_type;
pack_txt : Longident.t loc;
}
and core_field_type =
{ field_desc: core_field_desc;
field_loc: Location.t }
and core_field_desc =
Tcfield of string * core_type
| Tcfield_var
and row_field =
Ttag of label * bool * core_type list
| Tinherit of core_type
and value_description =
{ val_desc : core_type;
val_val : Types.value_description;
val_prim : string list;
val_loc : Location.t;
}
and type_declaration =
{ typ_params: string loc option list;
typ_type : Types.type_declaration;
typ_cstrs: (core_type * core_type * Location.t) list;
typ_kind: type_kind;
typ_private: private_flag;
typ_manifest: core_type option;
typ_variance: (bool * bool) list;
typ_loc: Location.t }
and type_kind =
Ttype_abstract
| Ttype_variant of (Ident.t * string loc * core_type list * Location.t) list
| Ttype_record of
(Ident.t * string loc * mutable_flag * core_type * Location.t) list
and exception_declaration =
{ exn_params : core_type list;
exn_exn : Types.exception_declaration;
exn_loc : Location.t }
and class_type =
{ cltyp_desc: class_type_desc;
cltyp_type : Types.class_type;
cltyp_loc: Location.t }
and class_type_desc =
Tcty_constr of Path.t * Longident.t loc * core_type list
| Tcty_signature of class_signature
| Tcty_fun of label * core_type * class_type
and class_signature = {
csig_self : core_type;
csig_fields : class_type_field list;
csig_type : Types.class_signature;
csig_loc : Location.t;
}
and class_type_field = {
ctf_desc : class_type_field_desc;
ctf_loc : Location.t;
}
and class_type_field_desc =
Tctf_inher of class_type
| Tctf_val of (string * mutable_flag * virtual_flag * core_type)
| Tctf_virt of (string * private_flag * core_type)
| Tctf_meth of (string * private_flag * core_type)
| Tctf_cstr of (core_type * core_type)
and class_declaration =
class_expr class_infos
and class_description =
class_type class_infos
and class_type_declaration =
class_type class_infos
and 'a class_infos =
{ ci_virt: virtual_flag;
ci_params: string loc list * Location.t;
ci_id_name : string loc;
ci_id_class: Ident.t;
ci_id_class_type : Ident.t;
ci_id_object : Ident.t;
ci_id_typesharp : Ident.t;
ci_expr: 'a;
ci_decl: Types.class_declaration;
ci_type_decl : Types.class_type_declaration;
ci_variance: (bool * bool) list;
ci_loc: Location.t }
val iter_pattern_desc: (pattern -> unit) -> pattern_desc -> unit
val map_pattern_desc: (pattern -> pattern) -> pattern_desc -> pattern_desc
val let_bound_idents: (pattern * expression) list -> Ident.t list
val rev_let_bound_idents: (pattern * expression) list -> Ident.t list
val pat_bound_idents: pattern -> Ident.t list
val let_bound_idents_with_loc:
(pattern * expression) list -> (Ident.t * string loc) list
val rev_let_bound_idents_with_loc:
(pattern * expression) list -> (Ident.t * string loc) list
val alpha_pat: (Ident.t * Ident.t) list -> pattern -> pattern
val mknoloc: 'a -> 'a Asttypes.loc
val mkloc: 'a -> Location.t -> 'a Asttypes.loc
val pat_bound_idents: pattern -> (Ident.t * string Asttypes.loc) list
|
1866b5e719e1b0d5f3a55e91205273c95fe32224057a1ced5e163a22ee5ea057 | michalkonecny/aern2 | Integration.hs | module AERN2.Frac.Integration where
import AERN2.MP.Ball
import AERN2.Frac.Type
import AERN2.Frac.Conversion
import qualified AERN2.PPoly.Integration as PPoly
import AERN2.RealFun.Operations
import AERN2.Interval
integral :: Frac MPBall -> MPBall -> MPBall -> MPBall
integral f =
PPoly.integral (toPPoly f)
instance CanIntegrateOverDom (Frac MPBall) DyadicInterval where
type IntegralOverDomType (Frac MPBall) DyadicInterval = MPBall
integrateOverDom f (Interval l r) =
integral f (mpBall l) (mpBall r)
| null | https://raw.githubusercontent.com/michalkonecny/aern2/1c8f12dfcb287bd8e3353802a94865d7c2c121ec/aern2-fun-univariate/src/AERN2/Frac/Integration.hs | haskell | module AERN2.Frac.Integration where
|
|
f91538ba3cfafb95d4c518689260226429a94740852066b93f33b481899ea95e | rundis/acme-buddy | views.clj | (ns acme-webstore.views
(:require [hiccup.page :as page]
[acme-webstore.roles :refer [any-granted?]]))
(defn include-page-styles [sources]
(map #(page/include-css %) sources))
(defn- render-menu [req]
(let [user (:auth-user req)]
[:nav.menu
[:div {:class "collapse navbar-collapse bs-navbar-collapse navbar-inverse"}
[:ul.nav.navbar-nav
[:li [:a {:href (if user "/dashboard" "/")} "Home"]]
(when user
(list [:li [:a {:href (str "/accounts/" (:id user))} "My account"]]
[:li [:a {:href "/products"} "Products"]]))
(when (any-granted? req [:store-admin])
[:li [:a {:href "/accounts"} "Account listing"]])]
[:ul.nav.navbar-nav.navbar-right
(if user
[:li [:a {:href "/logout"} "Logout"]]
[:li [:a {:href "/login"} "Login"]])]]]))
(defn show-errors [errors]
[:div {:class "alert alert-danger"}
[:ul
(map #(vec [:li %]) errors)]])
(defn layout [{title :title content :content req :request :as props}]
(page/html5
[:head
(include-page-styles
(concat [ "/css/bootstrap.min.css"
"/css/main.css"]))
[:title title]]
[:div {:class "container"}
(render-menu req)]
[:body
[:div.container
[:h1 title]
(when-let [errors (:errors props)] (show-errors errors))
content]]))
(defn index [req]
[:div
[:p "Welcome to the Acme Corp Webstore"]])
(defn dashboard [req]
[:div
[:h3 (str "Greetings " (-> req :auth-user :username) " !")]
[:p "Welcome to your personalized Acme Corp Webstore"]])
(defn input [attrs]
[:div
[:label {:for (:field attrs) :class "control-label"} (:label attrs)]
[:input {:type (or (:type attrs) "text")
:class "form-control"
:id (:field attrs)
:name (:field attrs)
:placeholder (:label attrs)}]])
(defn login [req]
[:div {:class "row"}
[:div {:class "col-sm-9 col-lg-10"} [:p {} "Login to acme store to get all the benefits..."]]
[:div {:class "col-sm-3 col-lg-2"}
[:form {:role "form" :method "POST"}
[:div {:class "form-group"} (input {:field "username" :label "Username"})]
[:div {:class "form-group"} (input {:field "password" :label "Password" :type "password"})]
[:div {:class "form-group"} [:button {:type "submit" :class "btn btn-default"} "Login"]]]]])
(defn account [req]
[:div "Showing account info for logged in user here..."])
(defn accounts [req]
[:div "Showing listing of accounts, only visible to store admins"])
| null | https://raw.githubusercontent.com/rundis/acme-buddy/4134ee33391c858c1c6c517eb22f65aaff34a40e/acme-webstore/src/acme_webstore/views.clj | clojure | (ns acme-webstore.views
(:require [hiccup.page :as page]
[acme-webstore.roles :refer [any-granted?]]))
(defn include-page-styles [sources]
(map #(page/include-css %) sources))
(defn- render-menu [req]
(let [user (:auth-user req)]
[:nav.menu
[:div {:class "collapse navbar-collapse bs-navbar-collapse navbar-inverse"}
[:ul.nav.navbar-nav
[:li [:a {:href (if user "/dashboard" "/")} "Home"]]
(when user
(list [:li [:a {:href (str "/accounts/" (:id user))} "My account"]]
[:li [:a {:href "/products"} "Products"]]))
(when (any-granted? req [:store-admin])
[:li [:a {:href "/accounts"} "Account listing"]])]
[:ul.nav.navbar-nav.navbar-right
(if user
[:li [:a {:href "/logout"} "Logout"]]
[:li [:a {:href "/login"} "Login"]])]]]))
(defn show-errors [errors]
[:div {:class "alert alert-danger"}
[:ul
(map #(vec [:li %]) errors)]])
(defn layout [{title :title content :content req :request :as props}]
(page/html5
[:head
(include-page-styles
(concat [ "/css/bootstrap.min.css"
"/css/main.css"]))
[:title title]]
[:div {:class "container"}
(render-menu req)]
[:body
[:div.container
[:h1 title]
(when-let [errors (:errors props)] (show-errors errors))
content]]))
(defn index [req]
[:div
[:p "Welcome to the Acme Corp Webstore"]])
(defn dashboard [req]
[:div
[:h3 (str "Greetings " (-> req :auth-user :username) " !")]
[:p "Welcome to your personalized Acme Corp Webstore"]])
(defn input [attrs]
[:div
[:label {:for (:field attrs) :class "control-label"} (:label attrs)]
[:input {:type (or (:type attrs) "text")
:class "form-control"
:id (:field attrs)
:name (:field attrs)
:placeholder (:label attrs)}]])
(defn login [req]
[:div {:class "row"}
[:div {:class "col-sm-9 col-lg-10"} [:p {} "Login to acme store to get all the benefits..."]]
[:div {:class "col-sm-3 col-lg-2"}
[:form {:role "form" :method "POST"}
[:div {:class "form-group"} (input {:field "username" :label "Username"})]
[:div {:class "form-group"} (input {:field "password" :label "Password" :type "password"})]
[:div {:class "form-group"} [:button {:type "submit" :class "btn btn-default"} "Login"]]]]])
(defn account [req]
[:div "Showing account info for logged in user here..."])
(defn accounts [req]
[:div "Showing listing of accounts, only visible to store admins"])
|
|
807dc9e182c8d5aa0b973ac29281e0f321197489867eb35201b4910d2f6ac013 | basho/riak_core | riak_core_handoff_sender_sup.erl | %% -------------------------------------------------------------------
%%
%% Copyright (c) 2011-2012 Basho Technologies, Inc.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License.  You may obtain
%% a copy of the License at
%%
%%   -2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied.  See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(riak_core_handoff_sender_sup).
-behaviour(supervisor).
%% callbacks
-export([start_link/0,
init/1
]).
%% API
-export([start_sender/5]).
-include("riak_core_handoff.hrl").
-define(CHILD(I,Type), {I,{I,start_link,[]},temporary,brutal_kill,Type,[I]}).
%%%===================================================================
%%% API
%%%===================================================================
start_link() ->
supervisor:start_link({local,?MODULE},?MODULE,[]).
%% @doc Start the handoff process for the module (`Module'), partition
%%      (`Partition'), and vnode (`VNode') from the local node to the
%%      target node (`TargetNode') with options `Opts'.
%%
%%      Options:
%%        * src_partition - required. the integer index of the source vnode
%%        * target_partition - required. the integer index of the target vnode
%%        * filter - optional. an arity one function that takes the key and returns
%%                   a boolean. If false, the key is not sent
%%        * unsent_fun - optional. an arity 2 function that takes a key that was not sent
%%                       (based on filter) and an accumulator. This function is called
%%                       for each unsent key.
%%        * unsent_acc0 - optional. The initial accumulator value passed to unsent_fun
%%                        for the first unsent key
-spec start_sender(ho_type(), atom(), term(), pid(), [{atom(), term()}]) -> {ok, pid()}.
start_sender(Type, Module, TargetNode, VNode, Opts) ->
supervisor:start_child(?MODULE, [TargetNode, Module, {Type, Opts}, VNode]).
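%% Illustrative call sketch (not from the riak_core sources): the handoff type
%% atom, node name and partition indexes below are made-up values, and the
%% first argument must be one of the ho_type() values defined in
%% riak_core_handoff.hrl.
%%
%%   {ok, _Pid} = riak_core_handoff_sender_sup:start_sender(
%%                  hinted_handoff, riak_kv_vnode, 'riak@10.0.0.2', VNodePid,
%%                  [{src_partition, 0}, {target_partition, 0}]).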
%%%===================================================================
%%% Callbacks
%%%===================================================================
%% @private
init([]) ->
{ok,{{simple_one_for_one,10,10},
[?CHILD(riak_core_handoff_sender,worker)
]}}.
| null | https://raw.githubusercontent.com/basho/riak_core/762ec81ae9af9a278e853f1feca418b9dcf748a3/src/riak_core_handoff_sender_sup.erl | erlang | -------------------------------------------------------------------
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-------------------------------------------------------------------
callbacks
API
===================================================================
API
===================================================================
@doc Start the handoff process for the module (`Module'), partition
(`Partition'), and vnode (`VNode') from the local node to the
Options:
* src_partition - required. the integer index of the source vnode
* target_partition - required. the integer index of the target vnode
a boolean. If false, the key is not sent
(based on filter) and an accumulator. This function is called
for each unsent key.
* unsent_acc0 - optional. The intial accumulator value passed to unsent_fun
===================================================================
Callbacks
=================================================================== | Copyright ( c ) 2011 - 2012 Basho Technologies , Inc.
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
-module(riak_core_handoff_sender_sup).
-behaviour(supervisor).
-export([start_link/0,
init/1
]).
-export([start_sender/5]).
-include("riak_core_handoff.hrl").
-define(CHILD(I,Type), {I,{I,start_link,[]},temporary,brutal_kill,Type,[I]}).
start_link() ->
supervisor:start_link({local,?MODULE},?MODULE,[]).
target node ( ` TargetNode ' ) with options ` Opts ' .
* filter - optional . an arity one function that takes the key and returns
* unsent_fun - optional . an arity 2 function that takes a key that was not sent
for the first unsent key
-spec start_sender(ho_type(), atom(), term(), pid(), [{atom(), term()}]) -> {ok, pid()}.
start_sender(Type, Module, TargetNode, VNode, Opts) ->
supervisor:start_child(?MODULE, [TargetNode, Module, {Type, Opts}, VNode]).
@private
init ([]) ->
{ok,{{simple_one_for_one,10,10},
[?CHILD(riak_core_handoff_sender,worker)
]}}.
|
1aa517064f6e3191771e67695484dd210837ed9b42255af1394ce41883a2978a | marcoheisig/Typo | test-objects.lisp | (in-package #:typo.test-suite)
(defparameter *test-integers*
(let ((integers '()))
(loop for integer in (list -1338 -1337 -19 -3 0 3 19 1337 1338) do
(push integer integers))
(loop for bits = 1 then (* bits 2) until (>= bits 64) do
(push (1+ (+ (expt 2 bits))) integers)
(push (1- (+ (expt 2 bits))) integers)
(push (+ (expt 2 bits)) integers)
(push (- (expt 2 bits)) integers)
(push (1- (- (expt 2 bits))) integers)
(push (1+ (- (expt 2 bits))) integers))
(remove-duplicates integers)))
(defparameter *test-floats*
(let ((floats '()))
(push most-positive-long-float floats)
(push most-positive-double-float floats)
(push most-positive-single-float floats)
(push most-positive-short-float floats)
(push most-negative-short-float floats)
(push most-negative-single-float floats)
(push most-negative-double-float floats)
(push most-negative-long-float floats)
(loop for base in (list -0.7L0 -0.1L0 -0.0L0 +0.0L0 +0.1L0 +0.7L0) do
(loop for fp-type in '(short-float single-float double-float long-float) do
(loop for exponent in '(1 2 3 5) do
(push (scale-float (coerce base fp-type) exponent) floats))))
(remove-duplicates floats)))
(defparameter *test-reals*
(append *test-integers* *test-floats*))
(defparameter *test-complex-numbers*
(let ((complex-numbers '()))
(loop for float in *test-floats* do
(push (complex float float) complex-numbers)
(push (complex float (- float)) complex-numbers))
(remove-duplicates complex-numbers)))
(defparameter *test-numbers*
(append *test-integers* *test-floats* *test-complex-numbers*))
(defparameter *test-arrays*
(loop for dimensions in '(() (1) (10) (2 2) (2 0 3) (2 3 4) (4 3 2 1))
append
(loop for (element-type initial-element)
in '(((unsigned-byte 1) 1)
((unsigned-byte 2) 1)
((unsigned-byte 4) 1)
((unsigned-byte 8) 42)
((unsigned-byte 16) 42)
((unsigned-byte 32) 42)
((unsigned-byte 64) 42)
((signed-byte 8) -42)
((signed-byte 16) -42)
((signed-byte 32) -42)
((signed-byte 64) -42)
(short-float 42s0)
(single-float 42f0)
(double-float 42d0)
(long-float 42l0)
(character #\X))
append
(loop for adjustable in '(nil t)
collect (make-array dimensions
:element-type element-type
:initial-element initial-element
:adjustable adjustable)))))
(defparameter *test-objects*
(append *test-numbers* *test-arrays*))
| null | https://raw.githubusercontent.com/marcoheisig/Typo/a06eecdcee6994fb5731409fe75fad6b7c59d3a9/code/test-suite/test-objects.lisp | lisp | (in-package #:typo.test-suite)
(defparameter *test-integers*
(let ((integers '()))
(loop for integer in (list -1338 -1337 -19 -3 0 3 19 1337 1338) do
(push integer integers))
(loop for bits = 1 then (* bits 2) until (>= bits 64) do
(push (1+ (+ (expt 2 bits))) integers)
(push (1- (+ (expt 2 bits))) integers)
(push (+ (expt 2 bits)) integers)
(push (- (expt 2 bits)) integers)
(push (1- (- (expt 2 bits))) integers)
(push (1+ (- (expt 2 bits))) integers))
(remove-duplicates integers)))
(defparameter *test-floats*
(let ((floats '()))
(push most-positive-long-float floats)
(push most-positive-double-float floats)
(push most-positive-single-float floats)
(push most-positive-short-float floats)
(push most-negative-short-float floats)
(push most-negative-single-float floats)
(push most-negative-double-float floats)
(push most-negative-long-float floats)
(loop for base in (list -0.7L0 -0.1L0 -0.0L0 +0.0L0 +0.1L0 +0.7L0) do
(loop for fp-type in '(short-float single-float double-float long-float) do
(loop for exponent in '(1 2 3 5) do
(push (scale-float (coerce base fp-type) exponent) floats))))
(remove-duplicates floats)))
(defparameter *test-reals*
(append *test-integers* *test-floats*))
(defparameter *test-complex-numbers*
(let ((complex-numbers '()))
(loop for float in *test-floats* do
(push (complex float float) complex-numbers)
(push (complex float (- float)) complex-numbers))
(remove-duplicates complex-numbers)))
(defparameter *test-numbers*
(append *test-integers* *test-floats* *test-complex-numbers*))
(defparameter *test-arrays*
(loop for dimensions in '(() (1) (10) (2 2) (2 0 3) (2 3 4) (4 3 2 1))
append
(loop for (element-type initial-element)
in '(((unsigned-byte 1) 1)
((unsigned-byte 2) 1)
((unsigned-byte 4) 1)
((unsigned-byte 8) 42)
((unsigned-byte 16) 42)
((unsigned-byte 32) 42)
((unsigned-byte 64) 42)
((signed-byte 8) -42)
((signed-byte 16) -42)
((signed-byte 32) -42)
((signed-byte 64) -42)
(short-float 42s0)
(single-float 42f0)
(double-float 42d0)
(long-float 42l0)
(character #\X))
append
(loop for adjustable in '(nil t)
collect (make-array dimensions
:element-type element-type
:initial-element initial-element
:adjustable adjustable)))))
(defparameter *test-objects*
(append *test-numbers* *test-arrays*))
|
|
67c2e6aa83bd86d84763c32258bafbf5185a20800ee2c2e8cc928e50c14ff74c | lspector/Clojush | penn.clj | ;; penn.clj
;;
;; 20170607
;;
;; Adapted from bioavailibility.clj.
(ns clojush.problems.classification.penn
(:use [clojush.pushgp.pushgp]
[clojush random util pushstate interpreter]
clojush.instructions.tag
[local-file]
[clojure.math.numeric-tower])
(:require [clojure.string :as string]
[clojure-csv.core :as csv]))
;; This problem file allows for experimentation with Penn Machine Learning Benchmarks problems,
;; or other problems with data in the Penn ML format.
;;
;; A random 50/50 split of the data will be created each time this is run, with one half of the
;; split used for training and the other half reserved for testing, which is done only to
;; report on the best (lowest total training error) individual at the end of each generation.
;;
;; Although 50% of the data in the file is *available* for use in training, this implementation
;; allows one to specify that only some proportion of that data is actually used. In addition,
;; it allows for the specific subset of training that is used to be resampled each generation.
;;
;; Unlike most Clojush problems, this one contains parameters that cannot be set from the command
;; line. This is because they are required for reading in the data, which currently happens when
;; this file is loaded, before the command-line parameters are interpreted. This means that to
;; change these aspects of the problem you must run it from the source code (not just including
;; Clojush as a dependency in another project) and edit/save this file prior to running the
;; system. Other parameters can still be set from the command line, overriding the parameters
;; specified in the argmap definition below.
;;
;; Access to the input variables could, in principle, be handled in various ways. What is done
;; here is to provide a boolean input for whether each input variable is each of the values
;; that appear in the data. For example, for some of the target data files the variables in
;; the data are all 0, 1, or 2. In this case, evolving programs will have access to three
;; boolean inputs for each input variable. If a particular variable is 0 then these inputs
;; will be true, false, false, while if it is 1 they will be false, true, false, and if it
;; is 2 they will be false, false, true.
;;
;; Similarly, specification of the program output could, in principle, be handled in various
;; ways. What is done here is to provide string literals for each of the values in the "CLASS"
;; (output) column in the data, and to use the :string stack only for the purpose of accumulating
;; instances of these literals, which act as votes. At the end of program execution, whichever
;; literal occurs most frequently on the :string stack is taken to be the output of the program.
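;; Illustrative sketch of the two conventions described above (not part of the
;; original problem file): encoding one raw input value against the vocabulary
;; (0 1 2), and picking the most frequent string literal as the program's answer.
(comment
  (let [vocabulary [0 1 2]]
    (mapv #(= 1 %) vocabulary))          ;; => [false true false]
  (->> ["1" "0" "1"]                     ;; hypothetical :string stack contents
       frequencies
       (sort-by val)
       reverse
       ffirst))                          ;; => "1", the majority vote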
;;
;; Some of the values for standard pushgp parameters specified here, in the argmap definition
;; below, are intended to help lineages weather the disruptions caused by generational resampling
;; of the training data. Specifically, we specify the use of leaky lexicase selection with
;; a high leakage value, and the production of some offspring with very low variation rates.
;; Here are the three data-related parameters that can only be set here, in the source code:
;; The data file, which must be in src/clojush/problems/classification/data/:
(def data-file
;"GAMETES_Epistasis_2-Way_20atts_0.4H_EDM-1_1.txt"
;"xor_2_a_20s_1600_EDM-1_01.txt"
"xor_3_a_20s_1600_EDM-1_01.txt"
" xor_4_a_20s_1600_EDM-1_01.txt "
)
;; The proportion of the training data that will be used to evaluate individuals each generation:
(def training-proportion 0.2)
;; A flag indicating whether the subset of the training data used for evaluation should be
;; resampled each generation:
(def resampling true)
;; Here we define functions to read the data and to split it into training and testing sets.
;; We also extract a subset of the full training set to use for evaluation if training-proportion
;; is less than 1, and we store this in an atom so that we can update the training data
;; if resampling is true.
(defn read-data []
"Reads data into a sequence of sequences."
(let [f (slurp* (str "src/clojush/problems/classification/data/" data-file))
lines (csv/parse-csv f :delimiter \tab)]
(println "Total number of data lines:" (count lines))
(mapv #(mapv read-string %) lines)))
(defn define-fitness-cases
"Returns a map with two keys: train and test. Train maps to a
random 50% of the fitness cases and test maps to the remainder.
These sets are different each time this is called."
[]
(let [raw-data (read-data)
target-column (.indexOf (mapv clojure.string/upper-case (mapv name (first raw-data)))
"CLASS")
vocabulary (sort (distinct (flatten (rest raw-data))))
inputs (fn [row]
(let [raw-inputs (concat (take target-column row)
(drop (inc target-column) row))]
(vec (flatten (for [i raw-inputs]
(map #(= i %) vocabulary)))))) ;; inputs are boolean
target (fn [row] (str (nth row target-column))) ;; target classes are strings
fitness-cases-shuffled (lshuffle (mapv (fn [row]
{:inputs (inputs row)
:target (target row)})
(rest raw-data)))
train-num (int (* 0.5 (count fitness-cases-shuffled)))
all-training-cases (subvec fitness-cases-shuffled 0 train-num)
all-testing-cases (subvec fitness-cases-shuffled train-num)]
{:all-train all-training-cases
:train (vec (take (int (* training-proportion train-num)) (lshuffle all-training-cases)))
:test all-testing-cases}))
(def penn-fitness-cases (atom (define-fitness-cases)))
;; The error function runs the program on the specified subset of the data, returning
;; errors of 0 whenever the resulting string stack has the correct answer as its most
;; frequent value, and 1 otherwise.
(defn penn-error-function
"Error function for the penn problem."
[fitness-set individual]
(assoc individual
:errors
(doall
(for [fitness-case (get @penn-fitness-cases fitness-set)]
(let [inputs (:inputs fitness-case)
target (:target fitness-case)
push-state (run-push (:program individual) (assoc (make-push-state) :input inputs))
[most second-most] (take 2 (reverse (sort-by val (frequencies (:string push-state)))))
answer (if (or (not most)
(= (second most) (second second-most)))
nil
(first most))]
(if (= answer target) 0 1))))))
(defn rmse
"Returns the root of the mean square error for use in error reporting."
[errors]
(sqrt (/ (apply + (map #(* % %) errors))
(count errors))))
(defn penn-report
"Customized generational report for the penn problem, which also resamples (and prints)
the training cases if appropriate."
[best population generation error-function report-simplifications]
(let [best-program (not-lazy (:program best))
best-test-errors (:errors (penn-error-function :test {:program best-program}))]
(printf ";; -*- Penn problem report generation %s" generation)(flush)
(printf "\nTest mean: %.4f"
(float (/ (apply + best-test-errors)
(count best-test-errors))))(flush)
(printf "\nTest RMSE: %.4f" (float (rmse best-test-errors)))(flush)
(printf "\n\n;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;\n\n")(flush)
(when (and resampling (< training-proportion 1))
(println "Resampling training cases...")
(swap! penn-fitness-cases
#(assoc % :train (vec (take (count (:train %))
(lshuffle (:all-train %))))))
(println "New training cases:")
(println (:train @penn-fitness-cases)))))
(defn penn-initial-report
"Initial report function for the penn problem, which prints the training and testing cases."
[argmap]
(println "Train and test cases:")
(println @penn-fitness-cases)
(println ";;******************************"))
(defn cycle-to-longest
"A utility for producing a collection with equal representation from multiple sequences.
The longest sequence will be included in its entirety, and shorter ones will be cycled
as necessary to produce the same number of elements."
[& sequences]
(let [max-count (apply max (map count sequences))]
(vec (apply concat (map #(take max-count (cycle %)) sequences)))))
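;; For example (illustrative usage, not part of the original file):
(comment
  (cycle-to-longest [:a :b] [1 2 3 4]))  ;; => [:a :b :a :b 1 2 3 4]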
(def penn-atom-generators
(cycle-to-longest
;; input instructions
(for [n (map inc (range (count (:inputs (first (:train @penn-fitness-cases))))))]
(symbol (str "in" n)))
;; output class strings
(distinct (mapv :target (:all-train @penn-fitness-cases)))
;; other instructions and ephemeral random constants
(concat (registered-for-stacks [:exec :integer :boolean :parentheses])
;; for strings, which are used only for output class literals, just stack instructions
'[string_pop string_dup string_dup_times string_dup_items string_swap string_rot
string_flush string_eq string_stackdepth string_yank string_yankdup string_shove
string_empty]
[(tag-instruction-erc [:exec :integer :boolean :string] 1000)
(tagged-instruction-erc 1000)
(fn [] (lrand-int 1000))])
;; more of anything that uses booleans
(map first
(filter (fn [[instr instr-fn]]
(some #{boolean} (:stack-types (meta instr-fn))))
@instruction-table))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Main call
(def argmap
{:error-function (partial penn-error-function :train)
:atom-generators penn-atom-generators
:max-points 3200
:max-genome-size-in-initial-program 400
:evalpush-limit 1600
:population-size 1000
:max-generations 300
:parent-selection :leaky-lexicase ;:lexicase
:lexicase-leakage 0.5
:genetic-operator-probabilities {:uniform-addition-and-deletion 0.5
:alternation 0.5}
:uniform-addition-and-deletion-rate [0.001 0.01 0.1]
:alternation-rate [0.001 0.01 0.1]
:alignment-deviation [0 1 10 100]
:problem-specific-report penn-report
:problem-specific-initial-report penn-initial-report
;:print-behavioral-diversity true ;; requires maintaining @population-behaviors
:report-simplifications 0
:final-report-simplifications 1000
;; === parameters for experimenting with age-mediated parent selection (AMPS)
;:age-mediated-parent-selection [0.05 0.5]
;:age-combining-function :
;:age-combining-function :proportionate
;:age-combining-function :first-reuse
;; === parameters for experimenting with autoconstruction
;:autoconstructive true
;:autoconstructive-genome-instructions :uniform
;:autoconstructive-diversification-test :three-gens-size-and-instruction
;:autoconstructive-si-children 2
;:autoconstructive-decay 0.1
})
| null | https://raw.githubusercontent.com/lspector/Clojush/685b991535607cf942ae1500557171a0739982c3/src/clojush/problems/classification/penn.clj | clojure | penn.clj
Adapted from bioavailibility.clj.
split used for training and the other half reserved for testing, which is done only for to
report on the best (lowest total training error) individual at the end of each generation.
allows one to specify that only some proportion of that data is actually used. In addition,
it allows for the specific subset of training that is used to be resampled each generation.
line. This is because they are required for reading in the data, which currently happens when
this file is loaded, before the command-line parameters are interpreted. This means that to
change these aspects of the problem you must run it from the source code (not just including
system. Other parameters can still be set from the command line, overriding the parameters
specified in the argmap definition below.
Access to the input variables could, in principle, be handled in various ways. What is done
here is to provide a boolean input for whether each input variable is each of the values
that appear in the data. For example, for some of the target data files the variables in
boolean inputs for each input variable. If a particular variable is 0 then these inputs
Similarly, specification of the program output could, in principle, be handled in various
ways. What is done here is to provide string literals for each of the values in the "CLASS"
(output) column in the data, and to use the :string stack only for the purpose of accumulating
instances of these literals, which act as votes. At the end of program execution, whichever
literal occurs most frequently on the :string stack is taken to be the output of the program.
Some of the values for standard pushgp parameters specified here, in the argmap definition
below, are intended to help lineages weather the disruptions caused by generational resampling
a high leakage value, and the production of some offspring with very low variation rates.
The data file, which must be in src/clojush/problems/classification/data/:
"GAMETES_Epistasis_2-Way_20atts_0.4H_EDM-1_1.txt"
"xor_2_a_20s_1600_EDM-1_01.txt"
The proportion of the training data that will be used to evaluate individuals each generation:
A flag indicating whether the subset of the training data used for evaluation should be
resampled each generation:
Here we define functions to read the data and to split it into training and testing sets.
We also extract a subset of the full training set to use for evaluation if training-proportion
if resampling is true.
inputs are boolean
target classes are strings
The error function runs the program on the specified subset of the data, returning
errors of 0 whenever the resulting string stack has the correct answer as its most
input instructions
output class strings
other instructions and ephemeral random constants
for strings, which are used only for output class literals, just stack instructions
more of anything that uses booleans
Main call
:lexicase
; requires maintaining @population - behaviors
=== parameters for experimenting with age-mediated parent selection (AMPS)
:age-combining-function :proportionate
=== parameters for experimenting with autoconstruction
:autoconstructive true
:autoconstructive-genome-instructions :uniform | , , 20170607
(ns clojush.problems.classification.penn
(:use [clojush.pushgp.pushgp]
[clojush random util pushstate interpreter]
clojush.instructions.tag
[local-file]
[clojure.math.numeric-tower])
(:require [clojure.string :as string]
[clojure-csv.core :as csv]))
This problem file allows for experimentation with Penn Machine Learning Benchmarks problems ,
or other other problems with data in the Penn ML format .
A random 50/50 split of the data will be created each time this is run , with one half of the
Although 50 % of the data in the file is * available * for use in training , this implementation
Unlike most Clojush problems , this one contains parameters that can not be set from the command
Clojush as a dependency in another project ) and edit / save this file prior to running the
the data are all 0 , 1 , or 2 . In this case , evolving programs will have access to three
will be true , false , false , while if it is 1 they will be false , true , false , and if it
is 2 they will be false , false , true .
of the training data . Specifically , we specify the use of leaky lexicase selection with
Here are the three data - related parameters that can only be set here , in the source code :
(def data-file
"xor_3_a_20s_1600_EDM-1_01.txt"
" xor_4_a_20s_1600_EDM-1_01.txt "
)
(def training-proportion 0.2)
(def resampling true)
is less than 1 , and we store this in an atom so that we can update the training data
(defn read-data []
"Reads data into a sequence of sequences."
(let [f (slurp* (str "src/clojush/problems/classification/data/" data-file))
lines (csv/parse-csv f :delimiter \tab)]
(println "Total number of data lines:" (count lines))
(mapv #(mapv read-string %) lines)))
(defn define-fitness-cases
"Returns a map with two keys: train and test. Train maps to a
random 50% of the fitness cases and test maps to the remainder.
These sets are different each time this is called."
[]
(let [raw-data (read-data)
target-column (.indexOf (mapv clojure.string/upper-case (mapv name (first raw-data)))
"CLASS")
vocabulary (sort (distinct (flatten (rest raw-data))))
inputs (fn [row]
(let [raw-inputs (concat (take target-column row)
(drop (inc target-column) row))]
(vec (flatten (for [i raw-inputs]
fitness-cases-shuffled (lshuffle (mapv (fn [row]
{:inputs (inputs row)
:target (target row)})
(rest raw-data)))
train-num (int (* 0.5 (count fitness-cases-shuffled)))
all-training-cases (subvec fitness-cases-shuffled 0 train-num)
all-testing-cases (subvec fitness-cases-shuffled train-num)]
{:all-train all-training-cases
:train (vec (take (int (* training-proportion train-num)) (lshuffle all-training-cases)))
:test all-testing-cases}))
(def penn-fitness-cases (atom (define-fitness-cases)))
frequent value , and 1 otherwise .
(defn penn-error-function
"Error function for the penn problem."
[fitness-set individual]
(assoc individual
:errors
(doall
(for [fitness-case (get @penn-fitness-cases fitness-set)]
(let [inputs (:inputs fitness-case)
target (:target fitness-case)
push-state (run-push (:program individual) (assoc (make-push-state) :input inputs))
[most second-most] (take 2 (reverse (sort-by val (frequencies (:string push-state)))))
answer (if (or (not most)
(= (second most) (second second-most)))
nil
(first most))]
(if (= answer target) 0 1))))))
(defn rmse
"Returns the root of the mean square error for use in error reporting."
[errors]
(sqrt (/ (apply + (map #(* % %) errors))
(count errors))))
(defn penn-report
"Customized generational report for the penn problem, which also resamples (and prints)
the training cases if appropriate."
[best population generation error-function report-simplifications]
(let [best-program (not-lazy (:program best))
best-test-errors (:errors (penn-error-function :test {:program best-program}))]
(printf ";; -*- Penn problem report generation %s" generation)(flush)
(printf "\nTest mean: %.4f"
(float (/ (apply + best-test-errors)
(count best-test-errors))))(flush)
(printf "\nTest RMSE: %.4f" (float (rmse best-test-errors)))(flush)
(printf "\n\n;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;\n\n")(flush)
(when (and resampling (< training-proportion 1))
(println "Resampling training cases...")
(swap! penn-fitness-cases
#(assoc % :train (vec (take (count (:train %))
(lshuffle (:all-train %))))))
(println "New training cases:")
(println (:train @penn-fitness-cases)))))
(defn penn-initial-report
"Initial report function for the penn problem, which prints the training and testing cases."
[argmap]
(println "Train and test cases:")
(println @penn-fitness-cases)
(println ";;******************************"))
(defn cycle-to-longest
"A utility for producing a collection with equal representation from multiple sequences.
The longest sequence will be included in its entirety, and shorter ones will be cycled
as necessary to produce the same number of elements."
[& sequences]
(let [max-count (apply max (map count sequences))]
(vec (apply concat (map #(take max-count (cycle %)) sequences)))))
(def penn-atom-generators
(cycle-to-longest
(for [n (map inc (range (count (:inputs (first (:train @penn-fitness-cases))))))]
(symbol (str "in" n)))
(distinct (mapv :target (:all-train @penn-fitness-cases)))
(concat (registered-for-stacks [:exec :integer :boolean :parentheses])
'[string_pop string_dup string_dup_times string_dup_items string_swap string_rot
string_flush string_eq string_stackdepth string_yank string_yankdup string_shove
string_empty]
[(tag-instruction-erc [:exec :integer :boolean :string] 1000)
(tagged-instruction-erc 1000)
(fn [] (lrand-int 1000))])
(map first
(filter (fn [[instr instr-fn]]
(some #{boolean} (:stack-types (meta instr-fn))))
@instruction-table))))
(def argmap
{:error-function (partial penn-error-function :train)
:atom-generators penn-atom-generators
:max-points 3200
:max-genome-size-in-initial-program 400
:evalpush-limit 1600
:population-size 1000
:max-generations 300
:lexicase-leakage 0.5
:genetic-operator-probabilities {:uniform-addition-and-deletion 0.5
:alternation 0.5}
:uniform-addition-and-deletion-rate [0.001 0.01 0.1]
:alternation-rate [0.001 0.01 0.1]
:alignment-deviation [0 1 10 100]
:problem-specific-report penn-report
:problem-specific-initial-report penn-initial-report
:report-simplifications 0
:final-report-simplifications 1000
: age - mediated - parent - selection [ 0.05 0.5 ]
: age - combining - function :
: age - combining - function : first - reuse
: autoconstructive - diversification - test : three - gens - size - and - instruction
: autoconstructive - si - children 2
: autoconstructive - decay 0.1
})
|
7496e7c39bcff80f622c45aeb99e85c0f6ed7a8f186ee5ce60e168e1c6a3f756 | kitnil/dotfiles | mail.scm | (define-module (packages mail)
#:use-module (gnu packages mail)
#:use-module (gnu packages)
#:use-module (guix gexp)
#:use-module (guix packages)
#:use-module (guix utils))
(define-public exim-lmtp
(package
(inherit exim)
(name "exim-lmtp")
(arguments
(substitute-keyword-arguments (package-arguments exim)
((#:phases phases)
#~(modify-phases #$phases
(add-after 'configure 'enable-lmtp
(lambda _
(substitute* "Local/Makefile"
(("# (TRANSPORT_LMTP=yes)" all line) line))))))))))
| null | https://raw.githubusercontent.com/kitnil/dotfiles/bdae9b6ae3261d75dff1657a3f3ab2ac88cf200a/dotfiles/guixsd/modules/packages/mail.scm | scheme | (define-module (packages mail)
|
|
ea90af444105abd03b42d6749caff1a1b3abc97d06cf6ecccd1c7a6815c5b089 | IBM/probzelus | run.ml |
* Copyright 2018-2020 IBM Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* -2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*)
open Benchlib
open Zelus_owl
module M = struct
let name = "Robot with GPS delay"
let algo = "Semi-Symbolic"
type input = unit
type output = Mat.mat * Mat.mat
let iters = ref 0
let read_input () =
begin
if !iters >= 500 then
raise End_of_file
else
iters := !iters + 1
end;
()
let main = Trackerdelay_semi_symb.main
let string_of_output (xt, cmd) =
let string_of_vec v =
Printf.sprintf "(%f, %f, %f)" (Mat.get v 0 0) (Mat.get v 1 0) (Mat.get v 2 0)
in
Printf.sprintf "%s, %s" (string_of_vec xt) (string_of_vec cmd)
end
module H = Harness.Make(M)
let () =
H.run ()
| null | https://raw.githubusercontent.com/IBM/probzelus/c56573201b43780b9c103e5616bb193ababa3399/benchmarks/trackerdelay/semi_symb/run.ml | ocaml |
|
|
9936960faa0168e8a2ff8efa201b9c0f3d4e8bf2da3d981a25f3b3d2f6c51502 | marick/fp-oo | build-zipper-3.clj | Exercise 3
(def seq-zip
(fn [tree]
{:here tree
:parents '()
:lefts '()
:rights '()}))
(def zdown
(fn [zipper]
(if (empty? (:here zipper))
nil
(assoc zipper
:here (first (:here zipper))
:lefts '()
:rights (rest (:here zipper))
:parents (cons zipper (:parents zipper))))))
(def zright
(fn [zipper]
(if (empty? (:rights zipper))
nil
(assoc zipper
:here (first (:rights zipper))
:lefts (concat (:lefts zipper) (list (:here zipper)))
:rights (rest (:rights zipper))))))
(def zleft
(fn [zipper]
(if (empty? (:lefts zipper))
nil
(assoc zipper
:here (last (:lefts zipper))
:lefts (butlast (:lefts zipper))
:rights (cons (:here zipper) (:rights zipper))))))
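;; Usage sketch (not part of the original exercise file): descend into the
;; sequence, step right once, and read the focused node.
(comment
  (-> (seq-zip '(1 2 3)) zdown zright :here))  ;; => 2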
| null | https://raw.githubusercontent.com/marick/fp-oo/434937826d794d6fe02b3e9a62cf5b4fbc314412/solutions/pieces/build-zipper-3.clj | clojure | Exercise 3
|
|
9423fe4b7e53bbebe5b7e242d14177f3093822825ab87190be8719bfcbe088e1 | Beluga-lang/Beluga | hashtbl.mli | include module type of Stdlib.Hashtbl
(** [map f m] maps the values in [m] by [f].
This involves converting [m] to a {!Seq}, mapping the values by [m], then
constructing the new hashtable from the resulting sequence.
*)
val map : ('a -> 'b) -> ('k, 'a) t -> ('k, 'b) t
(** [group_by k l] groups the values in [l] by the keys assigned by [k] into a
    hashtable. *)
val group_by : ('a -> 'k) -> 'a list -> ('k, 'a list) t
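(* Usage sketch (illustrative, not part of the original interface): grouping
   strings by length,
     group_by String.length ["a"; "bb"; "cc"]
   yields a table mapping 1 -> ["a"] and 2 -> ["bb"; "cc"] (the order of the
   values within each list is not specified here). *)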
| null | https://raw.githubusercontent.com/Beluga-lang/Beluga/a8027a9052559411ba5da7939ed4cfd73993c957/src/support/hashtbl.mli | ocaml | * [map f m] maps the values in [m] by [f].
This involves converting [m] to a {!Seq}, mapping the values by [m], then
constructing the new hashtable from the resulting sequence.
| include module type of Stdlib.Hashtbl
val map : ('a -> 'b) -> ('k, 'a) t -> ('k, 'b) t
* [ k l ] groups the values in [ l ] by the keys assigned by [ k ] into a
hashtable .
hashtable.
*)
val group_by : ('a -> 'k) -> 'a list -> ('k, 'a list) t
|
0583b4f47934c2bfb7aa14cc313cc3543823468f20836f79b470fb1a9bb332a5 | pveber/bistro | bed.ml | open Core
open Bistro
open Bistro.Shell_dsl
let keep ~n bed =
if n < 1 then raise (Invalid_argument "Bed.keep") ;
Workflow.shell ~descr:"bed.keep" [
cmd "cut" ~stdout:dest [
string (sprintf "-f 1-%d" n) ;
dep bed ;
]
]
let keep3 x = keep ~n:3 x
let keep4 x = keep ~n:4 x
let keep5 x = keep ~n:5 x
let keep6 x = keep ~n:6 x
| null | https://raw.githubusercontent.com/pveber/bistro/d363bd2d8257babbcb6db15bd83fd6465df7c268/lib/bio/bed.ml | ocaml | open Core
open Bistro
open Bistro.Shell_dsl
let keep ~n bed =
if n < 1 then raise (Invalid_argument "Bed.keep") ;
Workflow.shell ~descr:"bed.keep" [
cmd "cut" ~stdout:dest [
string (sprintf "-f 1-%d" n) ;
dep bed ;
]
]
let keep3 x = keep ~n:3 x
let keep4 x = keep ~n:4 x
let keep5 x = keep ~n:5 x
let keep6 x = keep ~n:6 x
|
|
6fe24bb9364590e4dc67ab389f0b2fb884f71a670482da77c3f9e85a7b63fbbf | dyzsr/ocaml-selectml | x86_ast.mli | (**************************************************************************)
(*                                                                        *)
(*                                 OCaml                                  *)
(*                                                                        *)
(*                 , projet Gallium, INRIA Rocquencourt                   *)
(*                                                                        *)
(*   Copyright 2014 Institut National de Recherche en Informatique et     *)
(*     en Automatique.                                                    *)
(*                                                                        *)
(*   All rights reserved.  This file is distributed under the terms of    *)
(*   the GNU Lesser General Public License version 2.1, with the          *)
(*   special exception on linking described in the file LICENSE.          *)
(*                                                                        *)
(**************************************************************************)
(** Structured representation of Intel assembly language (32 and 64 bit). *)
type condition =
| L | GE (* signed comparisons: less/greater *)
| LE | G
| B | AE (* unsigned comparisons: below/above *)
| BE | A
| E | NE (* equal *)
| O | NO (* overflow *)
| S | NS (* sign *)
| P | NP (* parity *)
type rounding =
| RoundUp
| RoundDown
| RoundNearest
| RoundTruncate
type constant =
| Const of int64
| ConstThis
| ConstLabel of string
| ConstAdd of constant * constant
| ConstSub of constant * constant
(* data_type is used mainly on memory addressing to specify
   the size of the addressed memory chunk.  It is directly
   used by the MASM emitter and indirectly by the GAS emitter
   to infer the instruction suffix. *)
type data_type =
| NONE
| REAL4 | REAL8 (* floating point values *)
| BYTE | WORD | DWORD | QWORD | OWORD (* integer values *)
| NEAR | PROC
type reg64 =
| RAX | RBX | RCX | RDX | RSP | RBP | RSI | RDI
| R8 | R9 | R10 | R11 | R12 | R13 | R14 | R15
type reg8h =
| AH | BH | CH | DH
type registerf = XMM of int | TOS | ST of int
type arch = X64 | X86
type addr =
{
arch: arch;
typ: data_type;
idx: reg64;
scale: int;
base: reg64 option;
sym: string option;
displ: int;
}
(** Addressing modes:
    displ + sym + base + idx * scale
    (if scale = 0, idx is ignored and base must be None)
*)
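(* Illustrative example (not part of the original interface): the x86-64
   operand 16(%rax,%rbx,4), i.e. displacement 16, base RAX, index RBX and
   scale 4, corresponds to a value along the lines of
     Mem { arch = X64; typ = QWORD; idx = RBX; scale = 4;
           base = Some RAX; sym = None; displ = 16 } *)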
type arg =
| Imm of int64
(** Operand is an immediate constant integer *)
| Sym of string
(** Address of a symbol (absolute address except for call/jmp target
where it is interpreted as a relative displacement *)
| Reg8L of reg64
| Reg8H of reg8h
| Reg16 of reg64
| Reg32 of reg64
| Reg64 of reg64
| Regf of registerf
| Mem of addr
| Mem64_RIP of data_type * string * int
type instruction =
| ADD of arg * arg
| ADDSD of arg * arg
| AND of arg * arg
| ANDPD of arg * arg
| BSWAP of arg
| CALL of arg
| CDQ
| CMOV of condition * arg * arg
| CMP of arg * arg
| COMISD of arg * arg
| CQO
| CVTSD2SI of arg * arg
| CVTSD2SS of arg * arg
| CVTSI2SD of arg * arg
| CVTSS2SD of arg * arg
| CVTTSD2SI of arg * arg
| DEC of arg
| DIVSD of arg * arg
| FABS
| FADD of arg
| FADDP of arg * arg
| FCHS
| FCOMP of arg
| FCOMPP
| FCOS
| FDIV of arg
| FDIVP of arg * arg
| FDIVR of arg
| FDIVRP of arg * arg
| FILD of arg
| FISTP of arg
| FLD of arg
| FLD1
| FLDCW of arg
| FLDLG2
| FLDLN2
| FLDZ
| FMUL of arg
| FMULP of arg * arg
| FNSTCW of arg
| FNSTSW of arg
| FPATAN
| FPTAN
| FSIN
| FSQRT
| FSTP of arg
| FSUB of arg
| FSUBP of arg * arg
| FSUBR of arg
| FSUBRP of arg * arg
| FXCH of arg
| FYL2X
| HLT
| IDIV of arg
| IMUL of arg * arg option
| INC of arg
| J of condition * arg
| JMP of arg
| LEA of arg * arg
| LEAVE
| MOV of arg * arg
| MOVAPD of arg * arg
| MOVLPD of arg * arg
| MOVSD of arg * arg
| MOVSS of arg * arg
| MOVSX of arg * arg
| MOVSXD of arg * arg
| MOVZX of arg * arg
| MULSD of arg * arg
| NEG of arg
| NOP
| OR of arg * arg
| POP of arg
| PUSH of arg
| RET
| ROUNDSD of rounding * arg * arg
| SAL of arg * arg
| SAR of arg * arg
| SET of condition * arg
| SHR of arg * arg
| SQRTSD of arg * arg
| SUB of arg * arg
| SUBSD of arg * arg
| TEST of arg * arg
| UCOMISD of arg * arg
| XCHG of arg * arg
| XOR of arg * arg
| XORPD of arg * arg
type asm_line =
| Ins of instruction
| Align of bool * int
| Byte of constant
| Bytes of string
| Comment of string
| Global of string
| Long of constant
| NewLabel of string * data_type
| Quad of constant
| Section of string list * string option * string list
| Space of int
| Word of constant
(* masm only (the gas emitter will fail on them) *)
| External of string * data_type
| Mode386
| Model of string
(* gas only (the masm emitter will fail on them) *)
| Cfi_adjust_cfa_offset of int
| Cfi_endproc
| Cfi_startproc
  (* (file_num, file_name) *)
| Indirect_symbol of string
  (* (file_num, line, col) *)
| Private_extern of string
| Set of string * constant
| Size of string * constant
| Type of string * string
type asm_program = asm_line list
| null | https://raw.githubusercontent.com/dyzsr/ocaml-selectml/875544110abb3350e9fb5ec9bbadffa332c270d2/asmcomp/x86_ast.mli | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
signed comparisons: less/greater
unsigned comparisons: below/above
equal
overflow
sign
parity
floating point values
integer values
* Operand is an immediate constant integer
* Address of a symbol (absolute address except for call/jmp target
where it is interpreted as a relative displacement
masm only (the gas emitter will fail on them)
gas only (the masm emitter will fail on them) | , projet Gallium , INRIA Rocquencourt
Copyright 2014 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
* Structured representation of Intel assembly language ( 32 and 64 bit ) .
type condition =
| LE | G
| BE | A
type rounding =
| RoundUp
| RoundDown
| RoundNearest
| RoundTruncate
type constant =
| Const of int64
| ConstThis
| ConstLabel of string
| ConstAdd of constant * constant
| ConstSub of constant * constant
(* data_type is used mainly on memory addressing to specify
   the size of the addressed memory chunk. It is directly
   used by the MASM emitter and indirectly by the GAS emitter
   to infer the instruction suffix. *)
type data_type =
| NONE
| NEAR | PROC
type reg64 =
| RAX | RBX | RCX | RDX | RSP | RBP | RSI | RDI
| R8 | R9 | R10 | R11 | R12 | R13 | R14 | R15
type reg8h =
| AH | BH | CH | DH
type registerf = XMM of int | TOS | ST of int
type arch = X64 | X86
type addr =
{
arch: arch;
typ: data_type;
idx: reg64;
scale: int;
base: reg64 option;
sym: string option;
displ: int;
}
(* Addressing modes:
     displ + sym + base + idx * scale
   (if scale = 0, idx is ignored and base must be None) *)
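(* Illustrative sketch: the operand [8(%rsp,%rdi,4)] in GAS syntax expressed
   with the record above; NONE is used for [typ] since no width hint is
   needed in this hypothetical example.

   let indexed_slot : addr =
     { arch = X64; typ = NONE; idx = RDI; scale = 4;
       base = Some RSP; sym = None; displ = 8 }
*)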
type arg =
| Imm of int64
| Sym of string
| Reg8L of reg64
| Reg8H of reg8h
| Reg16 of reg64
| Reg32 of reg64
| Reg64 of reg64
| Regf of registerf
| Mem of addr
| Mem64_RIP of data_type * string * int
type instruction =
| ADD of arg * arg
| ADDSD of arg * arg
| AND of arg * arg
| ANDPD of arg * arg
| BSWAP of arg
| CALL of arg
| CDQ
| CMOV of condition * arg * arg
| CMP of arg * arg
| COMISD of arg * arg
| CQO
| CVTSD2SI of arg * arg
| CVTSD2SS of arg * arg
| CVTSI2SD of arg * arg
| CVTSS2SD of arg * arg
| CVTTSD2SI of arg * arg
| DEC of arg
| DIVSD of arg * arg
| FABS
| FADD of arg
| FADDP of arg * arg
| FCHS
| FCOMP of arg
| FCOMPP
| FCOS
| FDIV of arg
| FDIVP of arg * arg
| FDIVR of arg
| FDIVRP of arg * arg
| FILD of arg
| FISTP of arg
| FLD of arg
| FLD1
| FLDCW of arg
| FLDLG2
| FLDLN2
| FLDZ
| FMUL of arg
| FMULP of arg * arg
| FNSTCW of arg
| FNSTSW of arg
| FPATAN
| FPTAN
| FSIN
| FSQRT
| FSTP of arg
| FSUB of arg
| FSUBP of arg * arg
| FSUBR of arg
| FSUBRP of arg * arg
| FXCH of arg
| FYL2X
| HLT
| IDIV of arg
| IMUL of arg * arg option
| INC of arg
| J of condition * arg
| JMP of arg
| LEA of arg * arg
| LEAVE
| MOV of arg * arg
| MOVAPD of arg * arg
| MOVLPD of arg * arg
| MOVSD of arg * arg
| MOVSS of arg * arg
| MOVSX of arg * arg
| MOVSXD of arg * arg
| MOVZX of arg * arg
| MULSD of arg * arg
| NEG of arg
| NOP
| OR of arg * arg
| POP of arg
| PUSH of arg
| RET
| ROUNDSD of rounding * arg * arg
| SAL of arg * arg
| SAR of arg * arg
| SET of condition * arg
| SHR of arg * arg
| SQRTSD of arg * arg
| SUB of arg * arg
| SUBSD of arg * arg
| TEST of arg * arg
| UCOMISD of arg * arg
| XCHG of arg * arg
| XOR of arg * arg
| XORPD of arg * arg
type asm_line =
| Ins of instruction
| Align of bool * int
| Byte of constant
| Bytes of string
| Comment of string
| Global of string
| Long of constant
| NewLabel of string * data_type
| Quad of constant
| Section of string list * string option * string list
| Space of int
| Word of constant
| External of string * data_type
| Mode386
| Model of string
| Cfi_adjust_cfa_offset of int
| Cfi_endproc
| Cfi_startproc
| File of int * string (* (file_num, file_name) *)
| Indirect_symbol of string
| Loc of int * int * int (* (file_num, line, col) *)
| Private_extern of string
| Set of string * constant
| Size of string * constant
| Type of string * string
type asm_program = asm_line list
|
c80093893dd26ef2e9ad794a176dda861852d60f5284040ca175acc39afcc03f | ciderpunx/57-exercises-for-programmers | P13CompoundInterest.hs | module P13CompoundInterest where
import Library
main :: IO ()
main = do
principal <- promptNonNegFloat "Principal: "
rate <- promptNonNegFloat "Rate of interest: "
years <- promptNonNegFloat "Num years: "
numComp <- promptNonNegFloat "Num times compounded/year: "
let interest = computeCompoundInterest principal rate years numComp
total = principal + interest
putStrLn $ "After " ++ show years ++ " years at "
++ showD rate ++ "%, the investment will be worth Β£"
++ showD total ++ ", a growth of Β£" ++ showD interest
computeCompoundInterest :: Float -> Float -> Float -> Float -> Float
computeCompoundInterest p r y n =
read . showD $ p * (1+(r/100.0)/n) ^ (round (n*y)::Int) - p
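-- Worked example (assuming showD rounds to two decimal places): with a
-- principal of 1500, a 4.3% rate, 6 years and quarterly compounding (n = 4),
-- the formula gives 1500 * (1 + 0.043/4)^24 - 1500, roughly 438.84 of
-- interest, for a total of about 1938.84.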
-- TODO: reverse program i.e. given a total and a rate show the required principal
-- Make a GUI for it.
| null | https://raw.githubusercontent.com/ciderpunx/57-exercises-for-programmers/25958ab80cc3edc29756d3bddd2d89815fd390bf/src/P13CompoundInterest.hs | haskell | TODO: reverse program i.e. given a total and a rate show the required principal
Make a GUI for it. | module P13CompoundInterest where
import Library
main :: IO ()
main = do
principal <- promptNonNegFloat "Principal: "
rate <- promptNonNegFloat "Rate of interest: "
years <- promptNonNegFloat "Num years: "
numComp <- promptNonNegFloat "Num times compounded/year: "
let interest = computeCompoundInterest principal rate years numComp
total = principal + interest
putStrLn $ "After " ++ show years ++ " years at "
++ showD rate ++ "%, the investment will be worth Β£"
++ showD total ++ ", a growth of Β£" ++ showD interest
computeCompoundInterest :: Float -> Float -> Float -> Float -> Float
computeCompoundInterest p r y n =
read . showD $ p * (1+(r/100.0)/n) ^ (round (n*y)::Int) - p
|
c9113626af7f14a6f99e2b2db58df1c0ce307c0b2398fb7b7ebfd16c996bd0a6 | caradoc-org/caradoc | TestEntry.ml | (*****************************************************************************)
(* Caradoc: a PDF parser and validator *)
Copyright ( C ) 2015 ANSSI
Copyright ( C ) 2015 - 2017
(* *)
(* This program is free software; you can redistribute it and/or modify *)
it under the terms of the GNU General Public License version 2 as
published by the Free Software Foundation .
(* *)
(* This program is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU General Public License for more details. *)
(* *)
You should have received a copy of the GNU General Public License along
with this program ; if not , write to the Free Software Foundation , Inc. ,
51 Franklin Street , Fifth Floor , Boston , USA .
(*****************************************************************************)
open OUnit
open Entry
let tests =
"Errors" >:::
[
"append_entry" >:::
[
"(1)" >:: (fun _ -> assert_equal
(Entry.append_entry Entry.empty (Entry.make_index 0))
(Entry.make_index 0)) ;
"(2)" >:: (fun _ -> assert_equal
(Entry.append_entry (Entry.make_name "bar") Entry.empty)
(Entry.make_name "bar")) ;
"(3)" >:: (fun _ -> assert_equal
(Entry.append_entry (Entry.make_name "bar") (Entry.make_index 0))
(Entry.append_index (Entry.make_name "bar") 0)) ;
] ;
"is_empty" >:::
[
"(1)" >:: (fun _ -> assert_equal
(Entry.is_empty Entry.empty)
true) ;
"(2)" >:: (fun _ -> assert_equal
(Entry.is_empty (Entry.make_index 0))
false) ;
"(3)" >:: (fun _ -> assert_equal
(Entry.is_empty (Entry.make_name ""))
false) ;
"(4)" >:: (fun _ -> assert_equal
(Entry.is_empty (Entry.make_name "foo"))
false) ;
"(5)" >:: (fun _ -> assert_equal
(Entry.is_empty (Entry.make_name_key ""))
false) ;
"(6)" >:: (fun _ -> assert_equal
(Entry.is_empty (Entry.make_name_key "bar"))
false) ;
] ;
"to_string" >:::
[
"(1)" >:: (fun _ -> assert_equal
(Entry.to_string Entry.empty)
"") ;
"(2)" >:: (fun _ -> assert_equal
(Entry.to_string (Entry.make_name "foo"))
"/foo") ;
"(3)" >:: (fun _ -> assert_equal
(Entry.to_string (Entry.make_name_key "bar"))
"\\bar") ;
"(4)" >:: (fun _ -> assert_equal
(Entry.to_string (Entry.make_index 123))
"[123]") ;
"(5)" >:: (fun _ -> assert_equal
(Entry.to_string (Entry.append_name_key (Entry.append_name (Entry.make_index 123) "foo") "bar"))
"[123]/foo\\bar") ;
] ;
"make_selector" >:::
[
"(1)" >:: (fun _ -> assert_equal
(Entry.make_selector [])
Entry.no_selector) ;
] ;
"move_to_index" >:::
[
"(1)" >:: (fun _ -> assert_equal
(Entry.move_to_index Entry.no_selector 123)
Entry.no_selector) ;
"(2)" >:: (fun _ -> assert_equal
(Entry.move_to_index (Entry.make_selector [Entry.make_index 123]) 123)
(Entry.make_selector [Entry.empty])) ;
"(3)" >:: (fun _ -> assert_equal
(Entry.move_to_index (Entry.make_selector [Entry.make_name "foo"]) 123)
Entry.no_selector) ;
"(4)" >:: (fun _ -> assert_equal
(Entry.move_to_index (Entry.make_selector [Entry.make_index 456]) 123)
Entry.no_selector) ;
"(5)" >:: (fun _ -> assert_equal
(Entry.move_to_index (Entry.make_selector [Entry.append_name (Entry.make_index 123) "foo" ; Entry.make_name "bar" ; Entry.append_index (Entry.make_index 123) 456]) 123)
(Entry.make_selector [Entry.make_name "foo" ; Entry.make_index 456])) ;
] ;
"move_to_name" >:::
[
"(1)" >:: (fun _ -> assert_equal
(Entry.move_to_name Entry.no_selector "foo")
Entry.no_selector) ;
"(2)" >:: (fun _ -> assert_equal
(Entry.move_to_name (Entry.make_selector [Entry.make_index 123]) "foo")
Entry.no_selector) ;
"(3)" >:: (fun _ -> assert_equal
(Entry.move_to_name (Entry.make_selector [Entry.make_name "foo"]) "foo")
(Entry.make_selector [Entry.empty])) ;
"(4)" >:: (fun _ -> assert_equal
(Entry.move_to_name (Entry.make_selector [Entry.make_name "bar"]) "foo")
Entry.no_selector) ;
"(5)" >:: (fun _ -> assert_equal
(Entry.move_to_name (Entry.make_selector [Entry.append_name (Entry.make_name "bar") "foo" ; Entry.make_name "bar" ; Entry.append_index (Entry.make_index 123) 456]) "bar")
(Entry.make_selector [Entry.make_name "foo" ; Entry.empty])) ;
] ;
"move_to_name_key" >:::
[
"(1)" >:: (fun _ -> assert_equal
(Entry.move_to_name_key Entry.no_selector "foo")
Entry.no_selector) ;
"(2)" >:: (fun _ -> assert_equal
(Entry.move_to_name_key (Entry.make_selector [Entry.make_index 123]) "foo")
Entry.no_selector) ;
"(3)" >:: (fun _ -> assert_equal
(Entry.move_to_name_key (Entry.make_selector [Entry.make_name "foo"]) "foo")
Entry.no_selector) ;
"(4)" >:: (fun _ -> assert_equal
(Entry.move_to_name_key (Entry.make_selector [Entry.make_name_key "bar"]) "bar")
(Entry.make_selector [Entry.empty])) ;
] ;
"is_selected" >:::
[
"(1)" >:: (fun _ -> assert_equal
(Entry.is_selected Entry.no_selector)
false) ;
"(2)" >:: (fun _ -> assert_equal
(Entry.is_selected (Entry.make_selector [Entry.empty]))
true) ;
] ;
]
| null | https://raw.githubusercontent.com/caradoc-org/caradoc/100f53bc55ef682049e10fabf24869bc019dc6ce/test/TestEntry.ml | ocaml | ***************************************************************************
Caradoc: a PDF parser and validator
This program is free software; you can redistribute it and/or modify
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
*************************************************************************** | Copyright ( C ) 2015 ANSSI
Copyright ( C ) 2015 - 2017
it under the terms of the GNU General Public License version 2 as
published by the Free Software Foundation .
You should have received a copy of the GNU General Public License along
with this program ; if not , write to the Free Software Foundation , Inc. ,
51 Franklin Street , Fifth Floor , Boston , USA .
open OUnit
open Entry
let tests =
"Errors" >:::
[
"append_entry" >:::
[
"(1)" >:: (fun _ -> assert_equal
(Entry.append_entry Entry.empty (Entry.make_index 0))
(Entry.make_index 0)) ;
"(2)" >:: (fun _ -> assert_equal
(Entry.append_entry (Entry.make_name "bar") Entry.empty)
(Entry.make_name "bar")) ;
"(3)" >:: (fun _ -> assert_equal
(Entry.append_entry (Entry.make_name "bar") (Entry.make_index 0))
(Entry.append_index (Entry.make_name "bar") 0)) ;
] ;
"is_empty" >:::
[
"(1)" >:: (fun _ -> assert_equal
(Entry.is_empty Entry.empty)
true) ;
"(2)" >:: (fun _ -> assert_equal
(Entry.is_empty (Entry.make_index 0))
false) ;
"(3)" >:: (fun _ -> assert_equal
(Entry.is_empty (Entry.make_name ""))
false) ;
"(4)" >:: (fun _ -> assert_equal
(Entry.is_empty (Entry.make_name "foo"))
false) ;
"(5)" >:: (fun _ -> assert_equal
(Entry.is_empty (Entry.make_name_key ""))
false) ;
"(6)" >:: (fun _ -> assert_equal
(Entry.is_empty (Entry.make_name_key "bar"))
false) ;
] ;
"to_string" >:::
[
"(1)" >:: (fun _ -> assert_equal
(Entry.to_string Entry.empty)
"") ;
"(2)" >:: (fun _ -> assert_equal
(Entry.to_string (Entry.make_name "foo"))
"/foo") ;
"(3)" >:: (fun _ -> assert_equal
(Entry.to_string (Entry.make_name_key "bar"))
"\\bar") ;
"(4)" >:: (fun _ -> assert_equal
(Entry.to_string (Entry.make_index 123))
"[123]") ;
"(5)" >:: (fun _ -> assert_equal
(Entry.to_string (Entry.append_name_key (Entry.append_name (Entry.make_index 123) "foo") "bar"))
"[123]/foo\\bar") ;
] ;
"make_selector" >:::
[
"(1)" >:: (fun _ -> assert_equal
(Entry.make_selector [])
Entry.no_selector) ;
] ;
"move_to_index" >:::
[
"(1)" >:: (fun _ -> assert_equal
(Entry.move_to_index Entry.no_selector 123)
Entry.no_selector) ;
"(2)" >:: (fun _ -> assert_equal
(Entry.move_to_index (Entry.make_selector [Entry.make_index 123]) 123)
(Entry.make_selector [Entry.empty])) ;
"(3)" >:: (fun _ -> assert_equal
(Entry.move_to_index (Entry.make_selector [Entry.make_name "foo"]) 123)
Entry.no_selector) ;
"(4)" >:: (fun _ -> assert_equal
(Entry.move_to_index (Entry.make_selector [Entry.make_index 456]) 123)
Entry.no_selector) ;
"(5)" >:: (fun _ -> assert_equal
(Entry.move_to_index (Entry.make_selector [Entry.append_name (Entry.make_index 123) "foo" ; Entry.make_name "bar" ; Entry.append_index (Entry.make_index 123) 456]) 123)
(Entry.make_selector [Entry.make_name "foo" ; Entry.make_index 456])) ;
] ;
"move_to_name" >:::
[
"(1)" >:: (fun _ -> assert_equal
(Entry.move_to_name Entry.no_selector "foo")
Entry.no_selector) ;
"(2)" >:: (fun _ -> assert_equal
(Entry.move_to_name (Entry.make_selector [Entry.make_index 123]) "foo")
Entry.no_selector) ;
"(3)" >:: (fun _ -> assert_equal
(Entry.move_to_name (Entry.make_selector [Entry.make_name "foo"]) "foo")
(Entry.make_selector [Entry.empty])) ;
"(4)" >:: (fun _ -> assert_equal
(Entry.move_to_name (Entry.make_selector [Entry.make_name "bar"]) "foo")
Entry.no_selector) ;
"(5)" >:: (fun _ -> assert_equal
(Entry.move_to_name (Entry.make_selector [Entry.append_name (Entry.make_name "bar") "foo" ; Entry.make_name "bar" ; Entry.append_index (Entry.make_index 123) 456]) "bar")
(Entry.make_selector [Entry.make_name "foo" ; Entry.empty])) ;
] ;
"move_to_name_key" >:::
[
"(1)" >:: (fun _ -> assert_equal
(Entry.move_to_name_key Entry.no_selector "foo")
Entry.no_selector) ;
"(2)" >:: (fun _ -> assert_equal
(Entry.move_to_name_key (Entry.make_selector [Entry.make_index 123]) "foo")
Entry.no_selector) ;
"(3)" >:: (fun _ -> assert_equal
(Entry.move_to_name_key (Entry.make_selector [Entry.make_name "foo"]) "foo")
Entry.no_selector) ;
"(4)" >:: (fun _ -> assert_equal
(Entry.move_to_name_key (Entry.make_selector [Entry.make_name_key "bar"]) "bar")
(Entry.make_selector [Entry.empty])) ;
] ;
"is_selected" >:::
[
"(1)" >:: (fun _ -> assert_equal
(Entry.is_selected Entry.no_selector)
false) ;
"(2)" >:: (fun _ -> assert_equal
(Entry.is_selected (Entry.make_selector [Entry.empty]))
true) ;
] ;
]
|
3a3a602d25f1f3381cda2a6d5c25be5145efee91d6661c6df4e48d4b53604408 | TheLortex/mirage-monorepo | angstrom.mli | ----------------------------------------------------------------------------
Copyright (c) 2016 Inhabited Type LLC.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the author nor the names of his contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE CONTRIBUTORS ``AS IS'' AND ANY EXPRESS
OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
----------------------------------------------------------------------------*)
(** Parser combinators built for speed and memory-efficiency.

    Angstrom is a parser-combinator library that provides monadic and
    applicative interfaces for constructing parsers with unbounded lookahead.
    Its parsers can consume input incrementally, whether in a blocking or
    non-blocking environment. To achieve efficient incremental parsing,
    Angstrom offers both a buffered and unbuffered interface to input streams,
    with the {!module:Unbuffered} interface enabling zero-copy IO. With these
    features and low-level iteration parser primitives like {!take_while} and
    {!skip_while}, Angstrom makes it easy to write efficient, expressive, and
    reusable parsers suitable for high-performance applications. *)
type +'a t
(** A parser for values of type ['a]. *)
type bigstring =
(char, Bigarray.int8_unsigned_elt, Bigarray.c_layout) Bigarray.Array1.t
(** {2 Basic parsers} *)
val peek_char : char option t
(** [peek_char] accepts any char and returns it, or returns [None] if the end
    of input has been reached.

    This parser does not advance the input. Use it for lookahead. *)
val peek_char_fail : char t
(** [peek_char_fail] accepts any char and returns it. If end of input has been
reached, it will fail.
This parser does not advance the input. Use it for lookahead. *)
val peek_string : int -> string t
(** [peek_string n] accepts exactly [n] characters and returns them as a
string. If there is not enough input, it will fail.
This parser does not advance the input. Use it for lookahead. *)
val char : char -> char t
(** [char c] accepts [c] and returns it. *)
val not_char : char -> char t
(** [not_char] accepts any character that is not [c] and returns the matched
character. *)
val any_char : char t
(** [any_char] accepts any character and returns it. *)
val satisfy : (char -> bool) -> char t
(** [satisfy f] accepts any character for which [f] returns [true] and
returns the accepted character. In the case that none of the parser
succeeds, then the parser will fail indicating the offending
character. *)
val string : string -> string t
(** [string s] accepts [s] exactly and returns it. *)
val string_ci : string -> string t
(** [string_ci s] accepts [s], ignoring case, and returns the matched string,
    preserving the case of the original input. *)
val skip : (char -> bool) -> unit t
(** [skip f] accepts any character for which [f] returns [true] and discards
the accepted character. [skip f] is equivalent to [satisfy f] but discards
the accepted character. *)
val skip_while : (char -> bool) -> unit t
(** [skip_while f] accepts input as long as [f] returns [true] and discards
the accepted characters. *)
val take : int -> string t
(** [take n] accepts exactly [n] characters of input and returns them as a
string. *)
val take_while : (char -> bool) -> string t
(** [take_while f] accepts input as long as [f] returns [true] and returns the
    accepted characters as a string.

    This parser does not fail. If [f] returns [false] on the first character,
    it will return the empty string. *)
val take_while1 : (char -> bool) -> string t
(** [take_while1 f] accepts input as long as [f] returns [true] and returns the
    accepted characters as a string.

    This parser requires that [f] return [true] for at least one character of
    input, and will fail otherwise. *)
val take_till : (char -> bool) -> string t
(** [take_till f] accepts input as long as [f] returns [false] and returns the
    accepted characters as a string.

    This parser does not fail. If [f] returns [true] on the first character, it
    will return the empty string. *)
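(* Illustrative client-code sketch (not part of the interface): a
   whitespace-delimited token built from the iteration primitives above.

   let is_space = function ' ' | '\t' | '\n' | '\r' -> true | _ -> false
   let token = take_till is_space <* skip_while is_space
*)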
val consumed : _ t -> string t
(** [consumed p] runs [p] and returns the contents that were consumed during the
parsing as a string *)
val take_bigstring : int -> bigstring t
* [ take_bigstring n ] accepts exactly [ n ] characters of input and returns them
as a newly allocated bigstring .
as a newly allocated bigstring. *)
val take_bigstring_while : (char -> bool) -> bigstring t
* [ take_bigstring_while f ] accepts input as long as [ f ] returns [ true ] and
returns the accepted characters as a newly allocated .
This parser does not fail . If [ f ] returns [ false ] on the first character ,
it will return the empty bigstring .
returns the accepted characters as a newly allocated bigstring.
This parser does not fail. If [f] returns [false] on the first character,
it will return the empty bigstring. *)
val take_bigstring_while1 : (char -> bool) -> bigstring t
* [ take_bigstring_while1 f ] accepts input as long as [ f ] returns [ true ] and
returns the accepted characters as a newly allocated .
This parser requires that [ f ] return [ true ] for at least one character of
input , and will fail otherwise .
returns the accepted characters as a newly allocated bigstring.
This parser requires that [f] return [true] for at least one character of
input, and will fail otherwise. *)
val take_bigstring_till : (char -> bool) -> bigstring t
* [ take_bigstring_till f ] accepts input as long as [ f ] returns [ false ] and
returns the accepted characters as a newly allocated .
This parser does not fail . If [ f ] returns [ true ] on the first character , it
will return the empty bigstring .
returns the accepted characters as a newly allocated bigstring.
This parser does not fail. If [f] returns [true] on the first character, it
will return the empty bigstring. *)
val consumed_bigstring : _ t -> bigstring t
* [ consumed p ] runs [ p ] and returns the contents that were consumed during the
parsing as a bigstring
parsing as a bigstring *)
val advance : int -> unit t
(** [advance n] advances the input [n] characters, failing if the remaining
input is less than [n]. *)
val end_of_line : unit t
(** [end_of_line] accepts either a line feed [\n], or a carriage return
followed by a line feed [\r\n] and returns unit. *)
val at_end_of_input : bool t
* [ at_end_of_input ] returns whether the end of the end of input has been
reached . This parser always succeeds .
reached. This parser always succeeds. *)
val end_of_input : unit t
(** [end_of_input] succeeds if all the input has been consumed, and fails
otherwise. *)
val scan : 'state -> ('state -> char -> 'state option) -> (string * 'state) t
(** [scan init f] consumes until [f] returns [None]. Returns the final state
before [None] and the accumulated string *)
val scan_state : 'state -> ('state -> char -> 'state option) -> 'state t
(** [scan_state init f] is like {!scan} but only returns the final state before
[None]. Much more efficient than {!scan}. *)
val scan_string : 'state -> ('state -> char -> 'state option) -> string t
(** [scan_string init f] is like {!scan} but discards the final state and returns
the accumulated string. *)
val int8 : int -> int t
* [ int8 i ] accepts one byte that matches the lower - order byte of [ i ] and
returns unit .
returns unit. *)
val any_uint8 : int t
(** [any_uint8] accepts any byte and returns it as an unsigned int8. *)
val any_int8 : int t
(** [any_int8] accepts any byte and returns it as a signed int8. *)
(** Big endian parsers *)
module BE : sig
val int16 : int -> unit t
* [ int16 i ] accept two bytes that match the two lower order bytes of [ i ]
and returns unit .
and returns unit. *)
val int32 : int32 -> unit t
* [ int32 i ] accept four bytes that match the four bytes of [ i ]
and returns unit .
and returns unit. *)
val int64 : int64 -> unit t
* [ int64 i ] accept eight bytes that match the eight bytes of [ i ] and
returns unit .
returns unit. *)
val any_int16 : int t
val any_int32 : int32 t
val any_int64 : int64 t
(** [any_intN] reads [N] bits and interprets them as big endian signed integers. *)
val any_uint16 : int t
* [ any_uint16 ] reads [ 16 ] bits and interprets them as a big endian unsigned
integer .
integer. *)
val any_float : float t
* [ any_float ] reads 32 bits and interprets them as a big endian floating
point value .
point value. *)
val any_double : float t
* [ any_double ] reads 64 bits and interprets them as a big endian floating
point value .
point value. *)
end
(** Little endian parsers *)
module LE : sig
val int16 : int -> unit t
* [ int16 i ] accept two bytes that match the two lower order bytes of [ i ]
and returns unit .
and returns unit. *)
val int32 : int32 -> unit t
* [ int32 i ] accept four bytes that match the four bytes of [ i ]
and returns unit .
and returns unit. *)
val int64 : int64 -> unit t
(** [int64 i] accepts eight bytes that match the eight bytes of [i] and
    returns unit. *)
val any_int16 : int t
val any_int32 : int32 t
val any_int64 : int64 t
(** [any_intN] reads [N] bits and interprets them as little endian signed
integers. *)
val any_uint16 : int t
* [ uint16 ] reads [ 16 ] bits and interprets them as a little endian unsigned
integer .
integer. *)
val any_float : float t
* [ any_float ] reads 32 bits and interprets them as a little endian floating
point value .
point value. *)
val any_double : float t
* [ any_double ] reads 64 bits and interprets them as a little endian floating
point value .
point value. *)
end
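(* Illustrative sketch: decoding a hypothetical fixed binary header, a 2-byte
   tag followed by a 4-byte length, both big-endian, with the parsers above.

   let header = lift2 (fun tag len -> (tag, len)) BE.any_uint16 BE.any_int32
*)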
(** {2 Combinators} *)
val option : 'a -> 'a t -> 'a t
(** [option v p] runs [p], returning the result of [p] if it succeeds and [v]
if it fails. *)
val both : 'a t -> 'b t -> ('a * 'b) t
(** [both p q] runs [p] followed by [q] and returns both results in a tuple *)
val list : 'a t list -> 'a list t
(** [list ps] runs each [p] in [ps] in sequence, returning a list of results of
each [p]. *)
val count : int -> 'a t -> 'a list t
(** [count n p] runs [p] [n] times, returning a list of the results. *)
val many : 'a t -> 'a list t
(** [many p] runs [p] {i zero} or more times and returns a list of results from
    the runs of [p]. *)
val many1 : 'a t -> 'a list t
(** [many1 p] runs [p] {i one} or more times and returns a list of results from
the runs of [p]. *)
val many_till : 'a t -> _ t -> 'a list t
(** [many_till p e] runs parser [p] {i zero} or more times until action [e]
    succeeds and returns the list of result from the runs of [p]. *)
val sep_by : _ t -> 'a t -> 'a list t
(** [sep_by s p] runs [p] {i zero} or more times, interspersing runs of [s] in between. *)
val sep_by1 : _ t -> 'a t -> 'a list t
(** [sep_by1 s p] runs [p] {i one} or more times, interspersing runs of [s] in between. *)
val skip_many : _ t -> unit t
(** [skip_many p] runs [p] {i zero} or more times, discarding the results. *)
val skip_many1 : _ t -> unit t
(** [skip_many1 p] runs [p] {i one} or more times, discarding the results. *)
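(* Illustrative sketch: a comma-separated list of unsigned integers built
   from the combinators above.

   let digits = take_while1 (function '0' .. '9' -> true | _ -> false)
   let int_list = sep_by (char ',') (digits >>| int_of_string)
*)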
val fix : ('a t -> 'a t) -> 'a t
(** [fix f] computes the fixpoint of [f] and runs the resultant parser. The
    argument that [f] receives is the result of [fix f], which [f] must use,
    paradoxically, to define [fix f].

    [fix] is useful when constructing parsers for inductively-defined types
    such as sequences, trees, etc. Consider for example the implementation of
    the {!many} combinator defined in this library:

    {[let many p =
        fix (fun m ->
          (cons <$> p <*> m) <|> return [])]}

    [many p] is a parser that will run [p] zero or more times, accumulating the
    result of every run into a list, returning the result. It's defined by
    passing [fix] a function. This function assumes its argument [m] is a
    parser that behaves exactly like [many p]. You can see this in the
    expression comprising the left hand side of the alternative operator
    [<|>]. This expression runs the parser [p] followed by the parser [m], and
    after which the result of [p] is cons'd onto the list that [m] produces.
    The right-hand side of the alternative operator provides a base case for
    the combinator: if [p] fails and the parse cannot proceed, return an empty
    list.

    Another way to illustrate the uses of [fix] is to construct a JSON parser.
    Assuming that parsers exist for the basic types such as [false], [true],
    [null], strings, and numbers, the question then becomes how to define a
    parser for objects and arrays? Both contain values that are themselves JSON
    values, so it seems as though it's impossible to write a parser that will
    accept JSON objects and arrays before writing a parser for JSON values as a
    whole.

    This is the exact situation that [fix] was made for. By defining the
    parsers for arrays and objects within the function that you pass to [fix],
    you will gain access to a parser that you can use to parse JSON values, the
    very parser you are defining!

    {[let json =
        fix (fun json ->
          let arr = char '[' *> sep_by (char ',') json <* char ']' in
          let obj = char '{' *> ... json ... <* char '}' in
          choice [str; num; arr json, ...])]} *)
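(* Illustrative sketch in the same spirit: a parser for nested parentheses
   that returns the nesting depth, written with [fix].

   let depth : int t =
     fix (fun depth ->
       (char '(' *> depth <* char ')' >>| (fun d -> d + 1)) <|> return 0)
*)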
(** {2 Alternatives} *)
val (<|>) : 'a t -> 'a t -> 'a t
(** [p <|> q] runs [p] and returns the result if succeeds. If [p] fails, then
the input will be reset and [q] will run instead. *)
val choice : ?failure_msg:string -> 'a t list -> 'a t
(** [choice ?failure_msg ts] runs each parser in [ts] in order until one
    succeeds and returns that result. In the case that none of the parser
    succeeds, then the parser will fail with the message [failure_msg], if
    provided, or a much less informative message otherwise. *)
val (<?>) : 'a t -> string -> 'a t
(** [p <?> name] associates [name] with the parser [p], which will be reported
in the case of failure. *)
val commit : unit t
(** [commit] prevents backtracking beyond the current position of the input,
allowing the manager of the input buffer to reuse the preceding bytes for
other purposes.
The {!module:Unbuffered} parsing interface will report directly to the
caller the number of bytes committed to the when returning a
{!Unbuffered.state.Partial} state, allowing the caller to reuse those bytes
for any purpose. The {!module:Buffered} will keep track of the region of
committed bytes in its internal buffer and reuse that region to store
additional input when necessary. *)
(** {2 Monadic/Applicative interface} *)
val return : 'a -> 'a t
(** [return v] creates a parser that will always succeed and return [v] *)
val fail : string -> _ t
(** [fail msg] creates a parser that will always fail with the message [msg] *)
val (>>=) : 'a t -> ('a -> 'b t) -> 'b t
(** [p >>= f] creates a parser that will run [p], pass its result to [f], run
the parser that [f] produces, and return its result. *)
val bind : 'a t -> f:('a -> 'b t) -> 'b t
(** [bind] is a prefix version of [>>=] *)
val (>>|) : 'a t -> ('a -> 'b) -> 'b t
(** [p >>| f] creates a parser that will run [p], and if it succeeds with
result [v], will return [f v] *)
val (<*>) : ('a -> 'b) t -> 'a t -> 'b t
(** [f <*> p] is equivalent to [f >>= fun f -> p >>| f]. *)
val (<$>) : ('a -> 'b) -> 'a t -> 'b t
(** [f <$> p] is equivalent to [p >>| f] *)
val ( *>) : _ t -> 'a t -> 'a t
(** [p *> q] runs [p], discards its result and then runs [q], and returns its
result. *)
val (<* ) : 'a t -> _ t -> 'a t
(** [p <* q] runs [p], then runs [q], discards its result, and returns the
result of [p]. *)
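(* Illustrative sketch: a [key=value] line parsed with the sequencing
   operators above; the key alphabet used here is an arbitrary assumption.

   let key = take_while1 (function 'a' .. 'z' | '_' -> true | _ -> false)
   let kv = both (key <* char '=') (take_till (fun c -> c = '\n'))
*)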
val lift : ('a -> 'b) -> 'a t -> 'b t
val lift2 : ('a -> 'b -> 'c) -> 'a t -> 'b t -> 'c t
val lift3 : ('a -> 'b -> 'c -> 'd) -> 'a t -> 'b t -> 'c t -> 'd t
val lift4 : ('a -> 'b -> 'c -> 'd -> 'e) -> 'a t -> 'b t -> 'c t -> 'd t -> 'e t
(** The [liftn] family of functions promote functions to the parser monad.
    For any of these functions, the following equivalence holds:

    {[liftn f p1 ... pn = f <$> p1 <*> ... <*> pn]}

    These functions are more efficient than using the applicative interface
    directly, mostly in terms of memory allocation but also in terms of speed.
    Prefer them over the applicative interface, even when the arity of the
    function to be lifted exceeds the maximum [n] for which there is an
    implementation for [liftn]. In other words, if [f] has an arity of [5] but
    only [lift4] is provided, do the following:

    {[lift4 f m1 m2 m3 m4 <*> m5]}

    Even with the partial application, it will be more efficient than the
    applicative implementation. *)
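(* Illustrative sketch: an ISO-style date assembled with [lift3] rather than
   the applicative operators, as recommended above.

   let digits n = take n >>| int_of_string
   let date = lift3 (fun y m d -> (y, m, d))
       (digits 4 <* char '-') (digits 2 <* char '-') (digits 2)
*)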
val map : 'a t -> f:('a -> 'b) -> 'b t
val map2 : 'a t -> 'b t -> f:('a -> 'b -> 'c) -> 'c t
val map3 : 'a t -> 'b t -> 'c t -> f:('a -> 'b -> 'c -> 'd) -> 'd t
val map4 : 'a t -> 'b t -> 'c t -> 'd t -> f:('a -> 'b -> 'c -> 'd -> 'e) -> 'e t
(** The [mapn] family of functions are just like [liftn], with a slightly
different interface. *)
(** The [Let_syntax] module is intended to be used with the [ppx_let]
pre-processor, and just contains copies of functions described elsewhere. *)
module Let_syntax : sig
val return : 'a -> 'a t
val ( >>| ) : 'a t -> ('a -> 'b) -> 'b t
val ( >>= ) : 'a t -> ('a -> 'b t) -> 'b t
module Let_syntax : sig
val return : 'a -> 'a t
val map : 'a t -> f:('a -> 'b) -> 'b t
val bind : 'a t -> f:('a -> 'b t) -> 'b t
val both : 'a t -> 'b t -> ('a * 'b) t
val map2 : 'a t -> 'b t -> f:('a -> 'b -> 'c) -> 'c t
val map3 : 'a t -> 'b t -> 'c t -> f:('a -> 'b -> 'c -> 'd) -> 'd t
val map4 : 'a t -> 'b t -> 'c t -> 'd t -> f:('a -> 'b -> 'c -> 'd -> 'e) -> 'e t
end
end
val ( let+ ) : 'a t -> ('a -> 'b) -> 'b t
val ( let* ) : 'a t -> ('a -> 'b t) -> 'b t
val ( and+ ) : 'a t -> 'b t -> ('a * 'b) t
(** Unsafe Operations on Angstrom's Internal Buffer

    These functions are considered {b unsafe} as they expose the input buffer
    to client code without any protections against modification, or leaking
    references. They are exposed to support performance-sensitive parsers that
    want to avoid allocation at all costs. Client code should take care to
    write the input buffer callback functions such that they:

    {ul
    {- do not modify the input buffer {i outside} of the range
       [\[off, off + len)];}
    {- do not modify the input buffer {i inside} of the range
       [\[off, off + len)] if the parser might backtrack; and}
    {- do not return any direct or indirect references to the input buffer.}}

    If the input buffer callback functions do not do any of these things, then
    the client may consider their use safe. *)
module Unsafe : sig
val take : int -> (bigstring -> off:int -> len:int -> 'a) -> 'a t
* [ take n f ] accepts exactly [ n ] characters of input into the parser 's
internal buffer then calls [ f buffer ~len ] . [ buffer ] is the
parser 's internal buffer . [ off ] is the offset from the start of [ buffer ]
containing the requested content . [ len ] is the length of the requested
content . [ len ] is guaranteed to be equal to [ n ] .
internal buffer then calls [f buffer ~off ~len]. [buffer] is the
parser's internal buffer. [off] is the offset from the start of [buffer]
containing the requested content. [len] is the length of the requested
content. [len] is guaranteed to be equal to [n]. *)
val take_while : (char -> bool) -> (bigstring -> off:int -> len:int -> 'a) -> 'a t
* [ take_while check f ] accepts input into the parser 's interal buffer as
long as [ check ] returns [ true ] then calls [ f buffer ~len ] . [ buffer ]
is the parser 's internal buffer . [ off ] is the offset from the start of
[ buffer ] containing the requested content . [ len ] is the length of the
content matched by [ check ] .
This parser does not fail . If [ check ] returns [ false ] on the first
character , [ len ] will be [ 0 ] .
long as [check] returns [true] then calls [f buffer ~off ~len]. [buffer]
is the parser's internal buffer. [off] is the offset from the start of
[buffer] containing the requested content. [len] is the length of the
content matched by [check].
This parser does not fail. If [check] returns [false] on the first
character, [len] will be [0]. *)
val take_while1 : (char -> bool) -> (bigstring -> off:int -> len:int -> 'a) -> 'a t
* [ take_while1 check f ] accepts input into the parser 's interal buffer as
long as [ check ] returns [ true ] then calls [ f buffer ~len ] . [ buffer ]
is the parser 's internal buffer . [ off ] is the offset from the start of
[ buffer ] containing the requested content . [ len ] is the length of the
content matched by [ check ] .
This parser requires that [ f ] return [ true ] for at least one character of
input , and will fail otherwise .
long as [check] returns [true] then calls [f buffer ~off ~len]. [buffer]
is the parser's internal buffer. [off] is the offset from the start of
[buffer] containing the requested content. [len] is the length of the
content matched by [check].
This parser requires that [f] return [true] for at least one character of
input, and will fail otherwise. *)
val take_till : (char -> bool) -> (bigstring -> off:int -> len:int -> 'a) -> 'a t
* [ take_till check f ] accepts input into the parser 's interal buffer as
long as [ check ] returns [ false ] then calls [ f buffer ~len ] . [ buffer ]
is the parser 's internal buffer . [ off ] is the offset from the start of
[ buffer ] containing the requested content . [ len ] is the length of the
content matched by [ check ] .
This parser does not fail . If [ check ] returns [ true ] on the first
character , [ len ] will be [ 0 ] .
long as [check] returns [false] then calls [f buffer ~off ~len]. [buffer]
is the parser's internal buffer. [off] is the offset from the start of
[buffer] containing the requested content. [len] is the length of the
content matched by [check].
This parser does not fail. If [check] returns [true] on the first
character, [len] will be [0]. *)
val peek : int -> (bigstring -> off:int -> len:int -> 'a) -> 'a t
* [ peek n ~f ] accepts exactly [ n ] characters and calls [ f buffer ~len ]
with ] . If there is not enough input , it will fail .
This parser does not advance the input . Use it for lookahead .
with [len = n]. If there is not enough input, it will fail.
This parser does not advance the input. Use it for lookahead. *)
end
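(* Illustrative sketch: a zero-copy read of a 4-byte tag using the callback
   style described above. Bigstringaf is assumed to be available (angstrom is
   built on it); any function that reads from a bigstring slice would do.

   let tag = Unsafe.take 4 (fun buf ~off ~len -> Bigstringaf.substring buf ~off ~len)
*)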
(** {2 Running} *)
module Consume : sig
type t =
| Prefix (** Ignore remaining data after parsing. *)
| All (** Require parser to reach eof. *)
end
val parse_bigstring : consume:Consume.t -> 'a t -> bigstring -> ('a, string) result
(** [parse_bigstring ~consume t bs] runs [t] on [bs]. The parser will receive
an [`Eof] after all of [bs] has been consumed. Passing {!Prefix} in the
[consume] argument allows the parse to successfully complete without
reaching eof. To require the parser to reach eof, pass {!All} in the
[consume] argument.
For use-cases requiring that the parser be fed input incrementally, see the
{!parse_reader} and {!module:Unbuffered} APIs below. *)
val parse_string : consume:Consume.t -> 'a t -> string -> ('a, string) result
(** [parse_string ~consume t bs] runs [t] on [bs]. The parser will receive an
[`Eof] after all of [bs] has been consumed. Passing {!Prefix} in the
[consume] argument allows the parse to successfully complete without
reaching eof. To require the parser to reach eof, pass {!All} in the
[consume] argument.
For use-cases requiring that the parser be fed input incrementally, see the
{!parse_reader} and {!module:Unbuffered} APIs below. *)
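(* Illustrative sketch: running a parser over an in-memory string.

   let comma_separated =
     parse_string ~consume:Consume.All
       (sep_by (char ',') (take_while1 (fun c -> c <> ',')))
       "a,b,c"
   (* evaluates to Ok ["a"; "b"; "c"] *)
*)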
val parse_reader :
?initial_buffer_size:int ->
consume:Consume.t ->
'a t ->
(Cstruct.t -> int) ->
('a, string) result
(** [parse_reader ~consume t read_into] parses a stream using parser [t].
    When it needs more data, it calls [read_into buf] to collect some.
    [read_into] should return the number of bytes written, or raise
    [End_of_file] if no more data is coming.

    @param initial_buffer_size The initial size for the buffer (defaulting
    to 4k bytes). It will automatically grow the buffer as needed.
    @param consume See {!Consume}.
    @return The parsed result, or a suitable error message. *)
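(* Illustrative sketch: a reader that feeds the parser from an in-memory
   string in chunks; a real reader would typically wrap a flow or a file.
   Cstruct.length and Cstruct.blit_from_string are assumed from the cstruct
   library that this signature already depends on.

   let reader_of_string s =
     let pos = ref 0 in
     fun cs ->
       if !pos >= String.length s then raise End_of_file;
       let n = min (Cstruct.length cs) (String.length s - !pos) in
       Cstruct.blit_from_string s !pos cs 0 n;
       pos := !pos + n;
       n
*)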
(** Old buffered parsing interface. Use {!parse_reader} instead in new code.

    Parsers run through this module perform internal buffering of input. The
    parser state will keep track of unconsumed input and attempt to minimize
    memory allocation and copying. The {!Buffered.state.Partial} parser state
    will accept newly-read, incremental input and copy it into the internal
    buffer. Users can feed parser states using the {!feed} function. As a
    result, the interface is much easier to use than the one exposed by the
    {!Unbuffered} module.

    On success or failure, any unconsumed input will be returned to the user
    for additional processing. The buffer that the unconsumed input is returned
    in can also be reused. *)
module Buffered : sig
type unconsumed =
{ buf : bigstring
; off : int
; len : int }
type input =
[ `Bigstring of bigstring
| `String of string ]
type 'a state =
| Partial of ([ input | `Eof ] -> 'a state) (** The parser requires more input. *)
| Done of unconsumed * 'a (** The parser succeeded. *)
| Fail of unconsumed * string list * string (** The parser failed. *)
val parse : ?initial_buffer_size:int -> 'a t -> 'a state
(** [parse ?initial_buffer_size t] runs [t] and awaits input if needed.
    [parse] will allocate a buffer of size [initial_buffer_size] (defaulting
    to 4k bytes) to do input buffering and automatically grows the buffer as
    needed. *)
val feed : 'a state -> [ input | `Eof ] -> 'a state
(** [feed state input] supplies the parser state with more input. If [state] is
[Partial], then parsing will continue where it left off. Otherwise, the
parser is in a [Fail] or [Done] state, in which case the [input] will be
copied into the state's buffer for later use by the caller. *)
val state_to_option : 'a state -> 'a option
(** [state_to_option state] returns [Some v] if the parser is in the
[Done (bs, v)] state and [None] otherwise. This function has no effect on
the current state of the parser. *)
val state_to_result : 'a state -> ('a, string) result
(** [state_to_result state] returns [Ok v] if the parser is in the [Done (bs, v)]
    state and [Error msg] if it is in the [Fail] or [Partial] state.

    This function has no effect on the current state of the parser. *)
val state_to_unconsumed : _ state -> unconsumed option
(** [state_to_unconsumed state] returns [Some bs] if [state = Done(bs, _)] or
    [state = Fail(bs, _, _)] and [None] otherwise. *)
end
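(* Illustrative sketch: driving the buffered interface by hand over a list
   of string chunks.

   let run_buffered p chunks =
     let state =
       List.fold_left Buffered.feed (Buffered.parse p)
         (List.map (fun s -> `String s) chunks)
     in
     Buffered.state_to_result (Buffered.feed state `Eof)
*)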
val parse :
buffer:Buffered.unconsumed ->
'a t ->
(Cstruct.t -> int) ->
Buffered.unconsumed * ('a, string) result
(** [parse ~buffer t read_into] parses a stream using [t]. When it needs more data,
    it calls [read_into cs] to collect some. [read_into] should return the
    number of bytes written, or raise [End_of_file] if no more data is coming.

    Note: {!parse_reader} provides a simpler interface if you don't need the ability
    to resume parsing afterwards.

    @param buffer The buffer to use for parsing. This can be a fresh buffer (with off=len=0)
    or a buffer returned from a previous call to [parse].
    @return A pair of the unconsumed input and the parsed result. *)
(** Unbuffered parsing interface.

    Use this module for total control over memory allocation and copying.
    Parsers run through this module perform no internal buffering. Instead, the
    user is responsible for managing a buffer containing the entirety of the
    input that has yet to be consumed by the parser. The
    {!Unbuffered.state.Partial} parser state reports to the user how much input
    the parser consumed during its last run, via the
    {!Unbuffered.partial.committed} field. This area of input must be discarded
    before parsing can resume. Once additional input has been collected, the
    unconsumed input as well as new input must be passed to the parser state
    via the {!Unbuffered.partial.continue} function, together with an
    indication of whether there is {!Unbuffered.more} input to come.

    The logic that must be implemented in order to make proper use of this
    module is intricate and tied to your OS environment. It's advisable to use
    the {!Buffered} module when initially developing and testing your parsers.
    For production use-cases, consider the Async and Lwt support that this
    library includes before attempting to use this module directly. *)
module Unbuffered : sig
type more =
| Complete
| Incomplete
type 'a parse_result =
| Done of int * 'a (** The parser succeeded, consuming specified bytes. *)
| Fail of int * string list * string (** The parser failed, consuming specified bytes. *)
type reader = int -> (bigstring * int * int * more)
(** The user provides a reader function to be called by the parser when it
requires more input data.
The function is passed the number of bytes committed during the last
input feeding. It must drop this number of bytes from the beginning
of the input before continuing. See {!commit} for additional details.
The function should return with some additional input. The input
should include all uncommitted input in addition to any new input that has
become available, as well as an indication of whether there is {!more}
input to come. *)
val parse : read:reader -> 'a t -> 'a parse_result
(** [parse t] runs [t] and returns the result.
    It calls [read] whenever more input is needed. *)
val state_to_option : 'a parse_result -> 'a option
(** [state_to_option state] returns [Some v] if the parser is in the
[Done (bs, v)] state and [None] otherwise. This function has no effect on the
current state of the parser. *)
val state_to_result : 'a parse_result -> ('a, string) result
(** [state_to_result state] returns [Ok v] if the parser is in the
    [Done (bs, v)] state and [Error msg] if it is in the [Fail] state.

    This function has no effect on the current state of the parser. *)
end
(** {2 Expert Parsers}

    For people that know what they're doing. If you want to use them, read the
    code. No further documentation will be provided. *)
val pos : int t
val available : int t
| null | https://raw.githubusercontent.com/TheLortex/mirage-monorepo/754836818dc5c861bf3b0a78029315a065a543cc/duniverse/angstrom/lib/angstrom.mli | ocaml | * A parser for values of type ['a].
* [peek_char_fail] accepts any char and returns it. If end of input has been
reached, it will fail.
This parser does not advance the input. Use it for lookahead.
* [peek_string n] accepts exactly [n] characters and returns them as a
string. If there is not enough input, it will fail.
This parser does not advance the input. Use it for lookahead.
* [char c] accepts [c] and returns it.
* [not_char] accepts any character that is not [c] and returns the matched
character.
* [any_char] accepts any character and returns it.
* [satisfy f] accepts any character for which [f] returns [true] and
returns the accepted character. In the case that none of the parser
succeeds, then the parser will fail indicating the offending
character.
* [string s] accepts [s] exactly and returns it.
* [skip f] accepts any character for which [f] returns [true] and discards
the accepted character. [skip f] is equivalent to [satisfy f] but discards
the accepted character.
* [skip_while f] accepts input as long as [f] returns [true] and discards
the accepted characters.
* [take n] accepts exactly [n] characters of input and returns them as a
string.
* [consumed p] runs [p] and returns the contents that were consumed during the
parsing as a string
* [advance n] advances the input [n] characters, failing if the remaining
input is less than [n].
* [end_of_line] accepts either a line feed [\n], or a carriage return
followed by a line feed [\r\n] and returns unit.
* [end_of_input] succeeds if all the input has been consumed, and fails
otherwise.
* [scan init f] consumes until [f] returns [None]. Returns the final state
before [None] and the accumulated string
* [scan_state init f] is like {!scan} but only returns the final state before
[None]. Much more efficient than {!scan}.
* [scan_string init f] is like {!scan} but discards the final state and returns
the accumulated string.
* [any_uint8] accepts any byte and returns it as an unsigned int8.
* [any_int8] accepts any byte and returns it as a signed int8.
* Big endian parsers
* [any_intN] reads [N] bits and interprets them as big endian signed integers.
* Little endian parsers
* [any_intN] reads [N] bits and interprets them as little endian signed
integers.
* {2 Combinators}
* [option v p] runs [p], returning the result of [p] if it succeeds and [v]
if it fails.
* [both p q] runs [p] followed by [q] and returns both results in a tuple
* [list ps] runs each [p] in [ps] in sequence, returning a list of results of
each [p].
* [count n p] runs [p] [n] times, returning a list of the results.
* [many1 p] runs [p] {i one} or more times and returns a list of results from
the runs of [p].
* [sep_by1 s p] runs [p] {i one} or more times, interspersing runs of [s] in between.
* [skip_many1 p] runs [p] {i one} or more times, discarding the results.
* [p <|> q] runs [p] and returns the result if succeeds. If [p] fails, then
the input will be reset and [q] will run instead.
* [p <?> name] associates [name] with the parser [p], which will be reported
in the case of failure.
* [commit] prevents backtracking beyond the current position of the input,
allowing the manager of the input buffer to reuse the preceding bytes for
other purposes.
The {!module:Unbuffered} parsing interface will report directly to the
caller the number of bytes committed to the when returning a
{!Unbuffered.state.Partial} state, allowing the caller to reuse those bytes
for any purpose. The {!module:Buffered} will keep track of the region of
committed bytes in its internal buffer and reuse that region to store
additional input when necessary.
* [return v] creates a parser that will always succeed and return [v]
* [fail msg] creates a parser that will always fail with the message [msg]
* [p >>= f] creates a parser that will run [p], pass its result to [f], run
the parser that [f] produces, and return its result.
* [bind] is a prefix version of [>>=]
* [p >>| f] creates a parser that will run [p], and if it succeeds with
result [v], will return [f v]
* [f <*> p] is equivalent to [f >>= fun f -> p >>| f].
* [f <$> p] is equivalent to [p >>| f]
* [p *> q] runs [p], discards its result and then runs [q], and returns its
result.
* [p <* q] runs [p], then runs [q], discards its result, and returns the
result of [p].
* The [mapn] family of functions are just like [liftn], with a slightly
different interface.
* The [Let_syntax] module is intended to be used with the [ppx_let]
pre-processor, and just contains copies of functions described elsewhere.
* {2 Running}
* Ignore remaining data after parsing.
* [parse_bigstring ~consume t bs] runs [t] on [bs]. The parser will receive
an [`Eof] after all of [bs] has been consumed. Passing {!Prefix} in the
[consume] argument allows the parse to successfully complete without
reaching eof. To require the parser to reach eof, pass {!All} in the
[consume] argument.
For use-cases requiring that the parser be fed input incrementally, see the
{!parse_reader} and {!module:Unbuffered} APIs below.
* [parse_string ~consume t bs] runs [t] on [bs]. The parser will receive an
[`Eof] after all of [bs] has been consumed. Passing {!Prefix} in the
[consume] argument allows the parse to successfully complete without
reaching eof. To require the parser to reach eof, pass {!All} in the
[consume] argument.
For use-cases requiring that the parser be fed input incrementally, see the
{!parse_reader} and {!module:Unbuffered} APIs below.
* The parser requires more input.
* The parser succeeded.
* The parser failed.
* [feed state input] supplies the parser state with more input. If [state] is
[Partial], then parsing will continue where it left off. Otherwise, the
parser is in a [Fail] or [Done] state, in which case the [input] will be
copied into the state's buffer for later use by the caller.
* [state_to_option state] returns [Some v] if the parser is in the
[Done (bs, v)] state and [None] otherwise. This function has no effect on
the current state of the parser.
* The parser succeeded, consuming specified bytes.
* The parser failed, consuming specified bytes.
* The user provides a reader function to be called by the parser when it
requires more input data.
The function is passed the number of bytes committed during the last
input feeding. It must drop this number of bytes from the beginning
of the input before continuing. See {!commit} for additional details.
The function should return with some additional input. The input
should include all uncommitted input in addition to any new input that has
become available, as well as an indication of whether there is {!more}
input to come.
* [state_to_option state] returns [Some v] if the parser is in the
[Done (bs, v)] state and [None] otherwise. This function has no effect on the
current state of the parser. | ----------------------------------------------------------------------------
Copyright ( c ) 2016 Inhabited Type LLC .
All rights reserved .
Redistribution and use in source and binary forms , with or without
modification , are permitted provided that the following conditions
are met :
1 . Redistributions of source code must retain the above copyright
notice , this list of conditions and the following disclaimer .
2 . Redistributions in binary form must reproduce the above copyright
notice , this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution .
3 . Neither the name of the author nor the names of his contributors
may be used to endorse or promote products derived from this software
without specific prior written permission .
THIS SOFTWARE IS PROVIDED BY THE CONTRIBUTORS ` ` AS IS '' AND ANY EXPRESS
OR IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED TO , THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED . IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL
DAMAGES ( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS
OR SERVICES ; LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS INTERRUPTION )
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT ,
STRICT LIABILITY , OR TORT ( INCLUDING NEGLIGENCE OR OTHERWISE ) ARISING IN
ANY WAY OUT OF THE USE OF THIS SOFTWARE , EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE .
----------------------------------------------------------------------------
Copyright (c) 2016 Inhabited Type LLC.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the author nor the names of his contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE CONTRIBUTORS ``AS IS'' AND ANY EXPRESS
OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
----------------------------------------------------------------------------*)
(** Parser combinators built for speed and memory-efficiency.

    Angstrom is a parser-combinator library that provides monadic and
    applicative interfaces for constructing parsers with unbounded lookahead.
    Its parsers can consume input incrementally, whether in a blocking or
    non-blocking environment. To achieve efficient incremental parsing,
    Angstrom offers both a buffered and unbuffered interface to input streams,
    with the {!module:Unbuffered} interface enabling zero-copy IO. With these
    features and low-level iteration parser primitives like {!take_while} and
    {!skip_while}, Angstrom makes it easy to write efficient, expressive, and
    reusable parsers suitable for high-performance applications. *)
type +'a t
type bigstring =
(char, Bigarray.int8_unsigned_elt, Bigarray.c_layout) Bigarray.Array1.t
(** {2 Basic parsers} *)
val peek_char : char option t
(** [peek_char] accepts any char and returns it, or returns [None] if the end
    of input has been reached.

    This parser does not advance the input. Use it for lookahead. *)
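(* Illustrative sketch (not part of the original interface): using [peek_char]
   to branch on the next character without consuming it unless needed. The
   name [sign] is an assumption introduced for this example.

     let sign =
       peek_char >>= function
       | Some '-' -> advance 1 >>| fun () -> -1
       | _        -> return 1
*)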
val peek_char_fail : char t
val peek_string : int -> string t
val char : char -> char t
val not_char : char -> char t
val any_char : char t
val satisfy : (char -> bool) -> char t
val string : string -> string t
val string_ci : string -> string t
(** [string_ci s] accepts [s], ignoring case, and returns the matched string,
    preserving the case of the original input. *)
val skip : (char -> bool) -> unit t
val skip_while : (char -> bool) -> unit t
val take : int -> string t
val take_while : (char -> bool) -> string t
(** [take_while f] accepts input as long as [f] returns [true] and returns the
    accepted characters as a string.

    This parser does not fail. If [f] returns [false] on the first character,
    it will return the empty string. *)
val take_while1 : (char -> bool) -> string t
(** [take_while1 f] accepts input as long as [f] returns [true] and returns the
    accepted characters as a string.

    This parser requires that [f] return [true] for at least one character of
    input, and will fail otherwise. *)
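(* Illustrative sketch (not part of the original interface): a decimal integer
   parser built from [take_while1] and [>>|]. The names [is_digit] and
   [integer] are assumptions introduced here for the example.

     let is_digit = function '0' .. '9' -> true | _ -> false
     let integer = take_while1 is_digit >>| int_of_string
*)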
val take_till : (char -> bool) -> string t
(** [take_till f] accepts input as long as [f] returns [false] and returns the
    accepted characters as a string.

    This parser does not fail. If [f] returns [true] on the first character, it
    will return the empty string. *)
val consumed : _ t -> string t
val take_bigstring : int -> bigstring t
(** [take_bigstring n] accepts exactly [n] characters of input and returns them
    as a newly allocated bigstring. *)
val take_bigstring_while : (char -> bool) -> bigstring t
(** [take_bigstring_while f] accepts input as long as [f] returns [true] and
    returns the accepted characters as a newly allocated bigstring.

    This parser does not fail. If [f] returns [false] on the first character,
    it will return the empty bigstring. *)
val take_bigstring_while1 : (char -> bool) -> bigstring t
(** [take_bigstring_while1 f] accepts input as long as [f] returns [true] and
    returns the accepted characters as a newly allocated bigstring.

    This parser requires that [f] return [true] for at least one character of
    input, and will fail otherwise. *)
val take_bigstring_till : (char -> bool) -> bigstring t
(** [take_bigstring_till f] accepts input as long as [f] returns [false] and
    returns the accepted characters as a newly allocated bigstring.

    This parser does not fail. If [f] returns [true] on the first character, it
    will return the empty bigstring. *)
val consumed_bigstring : _ t -> bigstring t
(** [consumed p] runs [p] and returns the contents that were consumed during
    the parsing as a bigstring. *)
val advance : int -> unit t
val end_of_line : unit t
val at_end_of_input : bool t
(** [at_end_of_input] returns whether the end of input has been reached.
    This parser always succeeds. *)
val end_of_input : unit t
val scan : 'state -> ('state -> char -> 'state option) -> (string * 'state) t
val scan_state : 'state -> ('state -> char -> 'state option) -> 'state t
val scan_string : 'state -> ('state -> char -> 'state option) -> string t
val int8 : int -> int t
(** [int8 i] accepts one byte that matches the lower-order byte of [i] and
    returns unit. *)
val any_uint8 : int t
val any_int8 : int t
module BE : sig
val int16 : int -> unit t
(** [int16 i] accepts two bytes that match the two lower order bytes of [i]
    and returns unit. *)

val int32 : int32 -> unit t
(** [int32 i] accepts four bytes that match the four bytes of [i]
    and returns unit. *)

val int64 : int64 -> unit t
(** [int64 i] accepts eight bytes that match the eight bytes of [i] and
    returns unit. *)

val any_int16 : int t
val any_int32 : int32 t
val any_int64 : int64 t

val any_uint16 : int t
(** [any_uint16] reads [16] bits and interprets them as a big endian unsigned
    integer. *)

val any_float : float t
(** [any_float] reads 32 bits and interprets them as a big endian floating
    point value. *)

val any_double : float t
(** [any_double] reads 64 bits and interprets them as a big endian floating
    point value. *)
end
module LE : sig
val int16 : int -> unit t
(** [int16 i] accepts two bytes that match the two lower order bytes of [i]
    and returns unit. *)

val int32 : int32 -> unit t
(** [int32 i] accepts four bytes that match the four bytes of [i]
    and returns unit. *)

val int64 : int64 -> unit t
(** [int64 i] accepts eight bytes that match the eight bytes of [i] and
    returns unit. *)

val any_int16 : int t
val any_int32 : int32 t
val any_int64 : int64 t

val any_uint16 : int t
(** [any_uint16] reads [16] bits and interprets them as a little endian
    unsigned integer. *)

val any_float : float t
(** [any_float] reads 32 bits and interprets them as a little endian floating
    point value. *)

val any_double : float t
(** [any_double] reads 64 bits and interprets them as a little endian floating
    point value. *)
end
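(* Illustrative sketch (not part of the original interface): reading a 16-bit
   big-endian length prefix followed by a payload of that many bytes. The
   name [frame] is an assumption introduced for this example.

     let frame = BE.any_uint16 >>= fun len -> take len
*)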
val option : 'a -> 'a t -> 'a t
val both : 'a t -> 'b t -> ('a * 'b) t
val list : 'a t list -> 'a list t
val count : int -> 'a t -> 'a list t
val many : 'a t -> 'a list t
(** [many p] runs [p] {i zero} or more times and returns a list of results from
    the runs of [p]. *)
val many1 : 'a t -> 'a list t
val many_till : 'a t -> _ t -> 'a list t
(** [many_till p e] runs parser [p] {i zero} or more times until action [e]
    succeeds and returns the list of results from the runs of [p]. *)
val sep_by : _ t -> 'a t -> 'a list t
(** [sep_by s p] runs [p] {i zero} or more times, interspersing runs of [s] in between. *)
val sep_by1 : _ t -> 'a t -> 'a list t
val skip_many : _ t -> unit t
(** [skip_many p] runs [p] {i zero} or more times, discarding the results. *)
val skip_many1 : _ t -> unit t
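(* Illustrative sketch (not part of the original interface): a parser for
   comma-separated integers, assuming the hypothetical [integer] parser
   sketched earlier.

     let csv_row = sep_by (char ',') integer
*)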
val fix : ('a t -> 'a t) -> 'a t
(** [fix f] computes the fixpoint of [f] and runs the resultant parser. The
    argument that [f] receives is the result of [fix f], which [f] must use,
    paradoxically, to define [fix f].

    [fix] is useful when constructing parsers for inductively-defined types
    such as sequences, trees, etc. Consider for example the implementation of
    the {!many} combinator defined in this library:

    {[let many p =
        fix (fun m ->
          (cons <$> p <*> m) <|> return [])]}

    [many p] is a parser that will run [p] zero or more times, accumulating the
    result of every run into a list, returning the result. It's defined by
    passing [fix] a function. This function assumes its argument [m] is a
    parser that behaves exactly like [many p]. You can see this in the
    expression comprising the left hand side of the alternative operator
    [<|>]. This expression runs the parser [p] followed by the parser [m], and
    after which the result of [p] is cons'd onto the list that [m] produces.
    The right-hand side of the alternative operator provides a base case for
    the combinator: if [p] fails and the parse cannot proceed, return an empty
    list.

    Another way to illustrate the uses of [fix] is to construct a JSON parser.
    Assuming that parsers exist for the basic types such as [false], [true],
    [null], strings, and numbers, the question then becomes how to define a
    parser for objects and arrays? Both contain values that are themselves JSON
    values, so it seems as though it's impossible to write a parser that will
    accept JSON objects and arrays before writing a parser for JSON values as a
    whole.

    This is the exact situation that [fix] was made for. By defining the
    parsers for arrays and objects within the function that you pass to [fix],
    you will gain access to a parser that you can use to parse JSON values, the
    very parser you are defining!

    {[let json =
        fix (fun json ->
          let arr = char '[' *> sep_by (char ',') json <* char ']' in
          let obj = char '{' *> ... json ... <* char '}' in
          choice [str; num; arr json, ...])]} *)
(** {2 Alternatives} *)
val (<|>) : 'a t -> 'a t -> 'a t
val choice : ?failure_msg:string -> 'a t list -> 'a t
(** [choice ?failure_msg ts] runs each parser in [ts] in order until one
    succeeds and returns that result. In the case that none of the parsers
    succeeds, then the parser will fail with the message [failure_msg], if
    provided, or a much less informative message otherwise. *)
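(* Illustrative sketch (not part of the original interface): a boolean parser
   built from [choice], [string] and [return]. The name [bool_p] is an
   assumption introduced for this example.

     let bool_p =
       choice ~failure_msg:"expected a boolean"
         [ string "true"  *> return true
         ; string "false" *> return false ]
*)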
val (<?>) : 'a t -> string -> 'a t
val commit : unit t
(** {2 Monadic/Applicative interface} *)
val return : 'a -> 'a t
val fail : string -> _ t
val (>>=) : 'a t -> ('a -> 'b t) -> 'b t
val bind : 'a t -> f:('a -> 'b t) -> 'b t
val (>>|) : 'a t -> ('a -> 'b) -> 'b t
val (<*>) : ('a -> 'b) t -> 'a t -> 'b t
val (<$>) : ('a -> 'b) -> 'a t -> 'b t
val ( *>) : _ t -> 'a t -> 'a t
val (<* ) : 'a t -> _ t -> 'a t
val lift : ('a -> 'b) -> 'a t -> 'b t
val lift2 : ('a -> 'b -> 'c) -> 'a t -> 'b t -> 'c t
val lift3 : ('a -> 'b -> 'c -> 'd) -> 'a t -> 'b t -> 'c t -> 'd t
val lift4 : ('a -> 'b -> 'c -> 'd -> 'e) -> 'a t -> 'b t -> 'c t -> 'd t -> 'e t
(** The [liftn] family of functions promote functions to the parser monad.
    For any of these functions, the following equivalence holds:

    {[liftn f p1 ... pn = f <$> p1 <*> ... <*> pn]}

    These functions are more efficient than using the applicative interface
    directly, mostly in terms of memory allocation but also in terms of speed.
    Prefer them over the applicative interface, even when the arity of the
    function to be lifted exceeds the maximum [n] for which there is an
    implementation for [liftn]. In other words, if [f] has an arity of [5] but
    only [lift4] is provided, do the following:

    {[lift4 f m1 m2 m3 m4 <*> m5]}

    Even with the partial application, it will be more efficient than the
    applicative implementation. *)
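(* Illustrative sketch (not part of the original interface): using [lift3] to
   combine sub-parsers into a key/value pair while discarding the separator.
   The names [key], [value] and [kv] are assumptions introduced here.

     let key   = take_while1 (fun c -> c <> '=')
     let value = take_while (fun c -> c <> '\n')
     let kv    = lift3 (fun k _ v -> (k, v)) key (char '=') value
*)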
val map : 'a t -> f:('a -> 'b) -> 'b t
val map2 : 'a t -> 'b t -> f:('a -> 'b -> 'c) -> 'c t
val map3 : 'a t -> 'b t -> 'c t -> f:('a -> 'b -> 'c -> 'd) -> 'd t
val map4 : 'a t -> 'b t -> 'c t -> 'd t -> f:('a -> 'b -> 'c -> 'd -> 'e) -> 'e t
module Let_syntax : sig
val return : 'a -> 'a t
val ( >>| ) : 'a t -> ('a -> 'b) -> 'b t
val ( >>= ) : 'a t -> ('a -> 'b t) -> 'b t
module Let_syntax : sig
val return : 'a -> 'a t
val map : 'a t -> f:('a -> 'b) -> 'b t
val bind : 'a t -> f:('a -> 'b t) -> 'b t
val both : 'a t -> 'b t -> ('a * 'b) t
val map2 : 'a t -> 'b t -> f:('a -> 'b -> 'c) -> 'c t
val map3 : 'a t -> 'b t -> 'c t -> f:('a -> 'b -> 'c -> 'd) -> 'd t
val map4 : 'a t -> 'b t -> 'c t -> 'd t -> f:('a -> 'b -> 'c -> 'd -> 'e) -> 'e t
end
end
val ( let+ ) : 'a t -> ('a -> 'b) -> 'b t
val ( let* ) : 'a t -> ('a -> 'b t) -> 'b t
val ( and+ ) : 'a t -> 'b t -> ('a * 'b) t
(** Unsafe Operations on Angstrom's Internal Buffer

    These functions are considered {b unsafe} as they expose the input buffer
    to client code without any protections against modification, or leaking
    references. They are exposed to support performance-sensitive parsers that
    want to avoid allocation at all costs. Client code should take care to
    write the input buffer callback functions such that they:

    {ul
    {- do not modify the input buffer {i outside} of the range
       [\[off, off + len)];}
    {- do not modify the input buffer {i inside} of the range
       [\[off, off + len)] if the parser might backtrack; and}
    {- do not return any direct or indirect references to the input buffer.}}

    If the input buffer callback functions do not do any of these things, then
    the client may consider their use safe. *)
module Unsafe : sig
val take : int -> (bigstring -> off:int -> len:int -> 'a) -> 'a t
  (** [take n f] accepts exactly [n] characters of input into the parser's
      internal buffer then calls [f buffer ~off ~len]. [buffer] is the
      parser's internal buffer. [off] is the offset from the start of [buffer]
      containing the requested content. [len] is the length of the requested
      content. [len] is guaranteed to be equal to [n]. *)
val take_while : (char -> bool) -> (bigstring -> off:int -> len:int -> 'a) -> 'a t
  (** [take_while check f] accepts input into the parser's internal buffer as
      long as [check] returns [true] then calls [f buffer ~off ~len]. [buffer]
      is the parser's internal buffer. [off] is the offset from the start of
      [buffer] containing the requested content. [len] is the length of the
      content matched by [check].

      This parser does not fail. If [check] returns [false] on the first
      character, [len] will be [0]. *)
val take_while1 : (char -> bool) -> (bigstring -> off:int -> len:int -> 'a) -> 'a t
  (** [take_while1 check f] accepts input into the parser's internal buffer as
      long as [check] returns [true] then calls [f buffer ~off ~len]. [buffer]
      is the parser's internal buffer. [off] is the offset from the start of
      [buffer] containing the requested content. [len] is the length of the
      content matched by [check].

      This parser requires that [f] return [true] for at least one character of
      input, and will fail otherwise. *)
val take_till : (char -> bool) -> (bigstring -> off:int -> len:int -> 'a) -> 'a t
  (** [take_till check f] accepts input into the parser's internal buffer as
      long as [check] returns [false] then calls [f buffer ~off ~len]. [buffer]
      is the parser's internal buffer. [off] is the offset from the start of
      [buffer] containing the requested content. [len] is the length of the
      content matched by [check].

      This parser does not fail. If [check] returns [true] on the first
      character, [len] will be [0]. *)
val peek : int -> (bigstring -> off:int -> len:int -> 'a) -> 'a t
  (** [peek n ~f] accepts exactly [n] characters and calls [f buffer ~off ~len]
      with [len = n]. If there is not enough input, it will fail.

      This parser does not advance the input. Use it for lookahead. *)
end
module Consume : sig
  type t =
    | Prefix (** Ignore remaining data after parsing. *)
    | All    (** Require parser to reach eof. *)
end
val parse_bigstring : consume:Consume.t -> 'a t -> bigstring -> ('a, string) result
val parse_string : consume:Consume.t -> 'a t -> string -> ('a, string) result
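(* Illustrative sketch (not part of the original interface): running a small
   parser over a complete string, assuming the hypothetical [integer] parser
   sketched earlier.

     let () = assert (parse_string ~consume:Consume.All integer "42" = Ok 42)
*)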
val parse_reader :
?initial_buffer_size:int ->
consume:Consume.t ->
'a t ->
(Cstruct.t -> int) ->
('a, string) result
(** [parse_reader ~consume t read_into] parses a stream using parser [t].
    When it needs more data, it calls [read_into buf] to collect some.
    [read_into] should return the number of bytes written, or raise
    [End_of_file] if no more data is coming.

    @param initial_buffer_size The initial size for the buffer (defaulting
      to 4k bytes). It will automatically grow the buffer as needed.
    @param consume See {!Consume}.
    @return The parsed result, or a suitable error message. *)
(** Old buffered parsing interface. Use {!parse_reader} instead in new code.

    Parsers run through this module perform internal buffering of input. The
    parser state will keep track of unconsumed input and attempt to minimize
    memory allocation and copying. The {!Buffered.state.Partial} parser state
    will accept newly-read, incremental input and copy it into the internal
    buffer. Users can feed parser states using the {!feed} function. As a
    result, the interface is much easier to use than the one exposed by the
    {!Unbuffered} module.

    On success or failure, any unconsumed input will be returned to the user
    for additional processing. The buffer that the unconsumed input is returned
    in can also be reused. *)
module Buffered : sig
type unconsumed =
{ buf : bigstring
; off : int
; len : int }
type input =
[ `Bigstring of bigstring
| `String of string ]
  type 'a state =
    | Partial of ([ input | `Eof ] -> 'a state) (** The parser requires more input. *)
    | Done    of unconsumed * 'a (** The parser succeeded. *)
    | Fail    of unconsumed * string list * string (** The parser failed. *)
val parse : ?initial_buffer_size:int -> 'a t -> 'a state
  (** [parse ?initial_buffer_size t] runs [t] and awaits input if needed.
      [parse] will allocate a buffer of size [initial_buffer_size] (defaulting
      to 4k bytes) to do input buffering and automatically grows the buffer as
      needed. *)
val feed : 'a state -> [ input | `Eof ] -> 'a state
val state_to_option : 'a state -> 'a option
val state_to_result : 'a state -> ('a, string) result
  (** [state_to_result state] returns [Ok v] if the parser is in the [Done (bs, v)]
      state and [Error msg] if it is in the [Fail] or [Partial] state.
      This function has no effect on the current state of the parser. *)
val state_to_unconsumed : _ state -> unconsumed option
  (** [state_to_unconsumed state] returns [Some bs] if [state = Done(bs, _)] or
      [state = Fail(bs, _, _)] and [None] otherwise. *)
end
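(* Illustrative sketch (not part of the original interface): driving a parser
   incrementally through the [Buffered] interface, assuming the hypothetical
   [integer] parser sketched earlier.

     let state = Buffered.parse integer in
     let state = Buffered.feed state (`String "4") in
     let state = Buffered.feed state (`String "2") in
     let state = Buffered.feed state `Eof in
     Buffered.state_to_result state   (* = Ok 42 *)
*)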
val parse :
buffer:Buffered.unconsumed ->
'a t ->
(Cstruct.t -> int) ->
Buffered.unconsumed * ('a, string) result
(** [parse ~buffer t read_into] parses a stream using [t]. When it needs more
    data, it calls [read_into cs] to collect some. [read_into] should return
    the number of bytes written, or raise [End_of_file] if no more data is
    coming.

    Note: {!parse_reader} provides a simpler interface if you don't need the
    ability to resume parsing afterwards.

    @param buffer The buffer to use for parsing. This can be a fresh buffer
      (with off=len=0) or a buffer returned from a previous call to [parse].
    @return A pair of the unconsumed input and the parsed result. *)
(** Unbuffered parsing interface.

    Use this module for total control over memory allocation and copying.
    Parsers run through this module perform no internal buffering. Instead, the
    user is responsible for managing a buffer containing the entirety of the
    input that has yet to be consumed by the parser. The
    {!Unbuffered.state.Partial} parser state reports to the user how much input
    the parser consumed during its last run, via the
    {!Unbuffered.partial.committed} field. This area of input must be discarded
    before parsing can resume. Once additional input has been collected, the
    unconsumed input as well as new input must be passed to the parser state
    via the {!Unbuffered.partial.continue} function, together with an
    indication of whether there is {!Unbuffered.more} input to come.

    The logic that must be implemented in order to make proper use of this
    module is intricate and tied to your OS environment. It's advisable to use
    the {!Buffered} module when initially developing and testing your parsers.
    For production use-cases, consider the Async and Lwt support that this
    library includes before attempting to use this module directly. *)
module Unbuffered : sig
type more =
| Complete
| Incomplete
  type 'a parse_result =
    | Done of int * 'a (** The parser succeeded, consuming specified bytes. *)
    | Fail of int * string list * string (** The parser failed, consuming specified bytes. *)
type reader = int -> (bigstring * int * int * more)
val parse : read:reader -> 'a t -> 'a parse_result
  (** [parse t] runs [t] and returns the result.
      It calls [read] whenever more input is needed. *)
val state_to_option : 'a parse_result -> 'a option
val state_to_result : 'a parse_result -> ('a, string) result
  (** [state_to_result state] returns [Ok v] if the parser is in the
      [Done (bs, v)] state and [Error msg] if it is in the [Fail] state.
      This function has no effect on the current state of the parser. *)
end
(** {2 Expert Parsers}

    For people that know what they're doing. If you want to use them, read the
    code. No further documentation will be provided. *)
val pos : int t
val available : int t
|
0661d7fe365bc5f54ab65edbd4bd782faabd1742f6864925e64bf9ce928fdc4f | ocaml-omake/omake | omake_builtin_rule.ml |
* Some builtin functions .
*
* \begin{doc }
* functions and utilities }
* \label{chapter : build }
* \cutname{omake - build.html }
* \end{doc }
*
* Some builtin functions.
*
* \begin{doc}
* \chapter{Build functions and utilities}
* \label{chapter:build}
* \cutname{omake-build.html}
* \end{doc}
*
*)
include Omake_pos.Make (struct let name = "Omake_builtin_rule" end)
* These targets are decribed in doc / src / omake - rules.tex
*
* \begin{doc }
* \section{Builtin .PHONY targets }
*
* The complete set of builtin \verb+.PHONY+ targets include the following .
*
* \begin{description }
* \item[.PHONY ] Declares new phony targets ( Section~\ref{target:.PHONY } ) .
* \item[.DEFAULT ] Declare the default build targets ( Section~\ref{target:.DEFAULT } ) .
* \item[.SUBDIRS ] Include a directory as part of the project ( Section~\ref{target:.SUBDIRS } ) .
* \item[.SCANNER ] Define a dependency scanner ( } ) .
* \item[.INCLUDE ] Include a file ( Section~\ref{target:.INCLUDE } ) .
* \item[.ORDER ] Define a file - dependency ordering rule ( Section~\ref{target:.ORDER } ) .
* \item[.BUILD\_BEGIN ] Commands to be executed at the beginning of a build .
* \item[.BUILD\_SUCCESS ] Commands to be executed if the build is successful .
* \item[.BUILD\_FAILURE ] Commands to be executed if the build fails .
* \end{description }
*
* \targetlabelref{.BUILD_BEGIN}{.BUILD\_BEGIN }
* }
* \targetlabelref{.BUILD_FAILURE}{.BUILD\_FAILURE }
*
* The \verb+.BUILD+ targets can be used to specify commands to be executed at
* the beginning and end of the build . The \verb+.BUILD_BEGIN+ target is built
* at the beginning of a project build , and one of \verb+.BUILD_FAILURE+ or
* \verb+.BUILD_SUCCESS+ is executed when the build terminates .
*
* For example , the following set of rules simply print additional messages
* about the status of the build .
*
* }
* .BUILD_BEGIN :
* echo Build starting
*
* .BUILD_SUCCESS :
* echo The build was successful
*
* .BUILD_FAILURE :
* println($"The build failed : $ ( length $ ( find - build - targets Failed ) ) targets could not be built " )
* \end{verbatim }
*
* Another common use is to define notifications to be performed when
* the build completes . For example , the following rule will create
* a new X terminal displaying the summary of the build
* ( using the \hypervarx{BUILD_SUMMARY}{BUILD\_SUMMARY } ) .
*
* }
* .BUILD_FAILURE :
* xterm -e vi $ ( BUILD_SUMMARY )
* \end{verbatim }
*
* If you do not wish to add these rules directly to your project ( which
* is probably a good idea if you work with others ) , you can
* define them in your \verb+.omakerc+ ( see Section~\ref{section:.omakerc } ) .
*
* The \hyperfun{find - build - targets }
* is useful for obtaining a firther summary of the build . Note that
* when output diversions are in effect ( with the \verb+--output-*+ options --- see Chapter~\ref{chapter : options } ) ,
* any output produced by the commands is copied to a file . The name of the
* file is specified by the \verb+output - file+ field of the \hyperobj{Target } .
* You may find this useful in defining custom build summaries .
* \end{doc }
* These targets are decribed in doc/src/omake-rules.tex
*
* \begin{doc}
* \section{Builtin .PHONY targets}
*
* The complete set of builtin \verb+.PHONY+ targets include the following.
*
* \begin{description}
* \item[.PHONY] Declares new phony targets (Section~\ref{target:.PHONY}).
* \item[.DEFAULT] Declare the default build targets (Section~\ref{target:.DEFAULT}).
* \item[.SUBDIRS] Include a directory as part of the project (Section~\ref{target:.SUBDIRS}).
* \item[.SCANNER] Define a dependency scanner (Section~\ref{target:.SUBDIRS}).
* \item[.INCLUDE] Include a file (Section~\ref{target:.INCLUDE}).
* \item[.ORDER] Define a file-dependency ordering rule (Section~\ref{target:.ORDER}).
* \item[.BUILD\_BEGIN] Commands to be executed at the beginning of a build.
* \item[.BUILD\_SUCCESS] Commands to be executed if the build is successful.
* \item[.BUILD\_FAILURE] Commands to be executed if the build fails.
* \end{description}
*
* \targetlabelref{.BUILD_BEGIN}{.BUILD\_BEGIN}
* \targetlabelref{.BUILD_SUCCESS}{.BUILD\_SUCCESS}
* \targetlabelref{.BUILD_FAILURE}{.BUILD\_FAILURE}
*
* The \verb+.BUILD+ targets can be used to specify commands to be executed at
* the beginning and end of the build. The \verb+.BUILD_BEGIN+ target is built
* at the beginning of a project build, and one of \verb+.BUILD_FAILURE+ or
* \verb+.BUILD_SUCCESS+ is executed when the build terminates.
*
* For example, the following set of rules simply print additional messages
* about the status of the build.
*
* \begin{verbatim}
* .BUILD_BEGIN:
* echo Build starting
*
* .BUILD_SUCCESS:
* echo The build was successful
*
* .BUILD_FAILURE:
* println($"The build failed: $(length $(find-build-targets Failed)) targets could not be built")
* \end{verbatim}
*
* Another common use is to define notifications to be performed when
* the build completes. For example, the following rule will create
* a new X terminal displaying the summary of the build
* (using the \hypervarx{BUILD_SUMMARY}{BUILD\_SUMMARY}).
*
* \begin{verbatim}
* .BUILD_FAILURE:
* xterm -e vi $(BUILD_SUMMARY)
* \end{verbatim}
*
* If you do not wish to add these rules directly to your project (which
* is probably a good idea if you work with others), you can
* define them in your \verb+.omakerc+ (see Section~\ref{section:.omakerc}).
*
* The \hyperfun{find-build-targets}
* is useful for obtaining a firther summary of the build. Note that
* when output diversions are in effect (with the \verb+--output-*+ options --- see Chapter~\ref{chapter:options}),
* any output produced by the commands is copied to a file. The name of the
* file is specified by the \verb+output-file+ field of the \hyperobj{Target}.
* You may find this useful in defining custom build summaries.
* \end{doc}
*)
let phony_targets =
[".PHONY"; ".DEFAULT"; ".SUBDIRS"; ".SCANNER"; ".INCLUDE"; ".ORDER";
".BUILD_BEGIN"; ".BUILD_SUCCESS"; ".BUILD_FAILURE"]
(************************************************************************
 * Set options.
 *
 * \begin{doc}
 * \section{Options and versioning}
 * \fun{OMakeFlags}
 *
 * \begin{verbatim}
 *    OMakeFlags(options)
 *       options : String
 * \end{verbatim}
 *
 * The \verb+OMakeFlags+ function is used to set \verb+omake+ options from
 * within \File{OMakefile}s. The options have exactly the same format as
 * options on the command line.
 *
 * For example, the following code displays the progress bar unless
 * the \verb+VERBOSE+ environment variable is defined.
 *
 * \begin{verbatim}
 *    if $(not $(defined-env VERBOSE))
 *        OMakeFlags(-S --progress)
 *        export
 * \end{verbatim}
 * \end{doc}
 *)
let set_options venv pos loc args _ =
let pos = string_pos "OMakeFlags" pos in
match args with
[arg] ->
let argv = Omake_value.strings_of_value venv pos arg in
let venv = Omake_env.venv_set_options venv loc pos argv in
venv, Omake_value_type.ValNone
| _ ->
raise (Omake_value_type.OmakeException (loc_pos loc pos, ArityMismatch (ArityExact 1, List.length args)))
(*
 * Version checking.
 *
 * \begin{doc}
 * \fun{OMakeVersion}
 *
 * \begin{verbatim}
 *    OMakeVersion(version1)
 *    OMakeVersion(version1, version2)
 *       version1, version2 : String
 * \end{verbatim}
 *
 * The \verb+OMakeVersion+ function is used for version checking
 * in \File{OMakefile}s. It takes one or two arguments.
 *
 * In the one argument form, if the \Prog{omake} version number
 * is less than \verb+<version1>+,
 * then an exception is raised. In the two argument form,
 * the version must lie between \verb+version1+ and \verb+version2+.
 *
 * \fun{cmp-versions}
 * \begin{verbatim}
 *    $(cmp-versions version1, version2)
 *       version1, version2 : String
 * \end{verbatim}
 *
 * The \verb+cmp-versions+ function can be used to compare arbitrary version strings.
 * It returns 0 when the two version strings are equal, a negative number when the first
 * string represents an earlier version, and a positive number otherwise.
 * \end{doc}
 *)
let split_int =
let rec split_int_aux i s =
match String.length s with
0 -> i, s
| l ->
begin
match s.[0] with
'0'..'9' as c ->
split_int_aux (i * 10 + (Char.code c - 48)) (String.sub s 1 (l - 1))
| _ ->
i, s
end
in
split_int_aux 0
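(* Illustrative examples (not part of the original source): [split_int] peels
   a leading decimal integer off a string, returning the integer and the
   remaining suffix.

     split_int "10.1" = (10, ".1")
     split_int "rc2"  = (0, "rc2")
*)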
let rec compare_versions v1 v2 =
match String.length v1, String.length v2 with
0, 0 -> 0
| 0, _ -> -1
| _, 0 -> 1
| l1, l2 ->
begin
match v1.[0],v2.[0] with
'0'..'9', '0'..'9' ->
let i1, s1 = split_int v1 in
let i2, s2 = split_int v2 in
begin
match i1 - i2 with
0 -> compare_versions s1 s2
| i -> i
end
| c1, c2 when c1 = c2 ->
compare_versions (String.sub v1 1 (l1 - 1)) (String.sub v2 1 (l2 - 1))
| c1, c2 ->
Char.code c1 - Char.code c2
end
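(* Illustrative examples (not part of the original source) of how the version
   comparison above behaves on typical version strings:

     compare_versions "0.9.8.5" "0.9.8.5" = 0
     compare_versions "0.9.8.5" "0.10.1"  < 0    (* numeric components compare as integers *)
     compare_versions "1.0rc2"  "1.0"     > 0    (* trailing text sorts after the shorter string *)
*)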
let check_version venv pos loc args =
let pos = string_pos "check_version" pos in
let version = Omake_magic.version in
let check lowest highest =
if compare_versions version lowest < 0 then
raise (Omake_value_type.OmakeFatalErr (loc_pos loc pos, LazyError (fun out ->
Format.fprintf out "@[<0>This version of OMake is too old,@ you need to upgrade to at least version@ %s;@ current OMake version is@ %s.@ You should be able to download the latest version of OMake from @]" lowest version)));
match highest with
Some highest ->
if compare_versions version highest > 0 then
raise (Omake_value_type.OmakeFatalErr (loc_pos loc pos, LazyError (fun out ->
Format.fprintf out "@[<0>This version of OMake is too new or the given file is too old.@ This file accepts versions@ %s-%s;@ current OMake version is@ %s@]" lowest highest version)))
| None ->
()
in
match args with
[lowest] ->
let lowest = Lm_string_util.trim (Omake_value.string_of_value venv pos lowest) in
check lowest None;
Omake_value_type.ValString version
| [lowest; highest] ->
let lowest = Lm_string_util.trim (Omake_value.string_of_value venv pos lowest) in
let highest = Lm_string_util.trim (Omake_value.string_of_value venv pos highest) in
check lowest (Some highest);
ValString version
| _ ->
raise (Omake_value_type.OmakeException (loc_pos loc pos, ArityMismatch (ArityRange (1,2), List.length args)))
let cmp_version venv pos loc args =
let pos = string_pos "cmp_version" pos in
match args with
[v1; v2] ->
let v1 = Lm_string_util.trim (Omake_value.string_of_value venv pos v1) in
let v2 = Lm_string_util.trim (Omake_value.string_of_value venv pos v2) in
Omake_value_type.ValInt (compare_versions v1 v2)
| _ ->
raise (Omake_value_type.OmakeException (loc_pos loc pos, ArityMismatch (ArityExact 2, List.length args)))
(*
 * Add the command-line vars.
 *
 * \begin{doc}
 * \fun{DefineCommandVars}
 *
 * \begin{verbatim}
 *    DefineCommandVars()
 * \end{verbatim}
 *
 * The \verb+DefineCommandVars+ function redefines the variables passed on
 * the command line. Variable definitions are passed on the command line
 * in the form \verb+name=value+. This function is primarily for internal
 * use by \Prog{omake} to define these variables for the first time.
 * \end{doc}
 *)
let define_command_vars venv pos loc args kargs =
let pos = string_pos "DefineCommandVars" pos in
match args, kargs with
[], []
| [_], [] ->
Omake_builtin.venv_add_command_defs venv, Omake_value_type.ValNone
| _ ->
raise (Omake_value_type.OmakeException (loc_pos loc pos, ArityMismatch (ArityRange (0, 1), List.length args)))
(*
* Table of built-in functions.
*)
let () =
let builtin_funs =
[true, "OMakeVersion", check_version, Omake_ir.ArityRange (1, 2);
true, "cmp-versions", cmp_version, ArityExact 2;
]
in
let builtin_kfuns =
[true, "OMakeFlags", set_options, Omake_ir.ArityExact 1;
true, "DefineCommandVars", define_command_vars, ArityRange (0, 1);
]
in
let builtin_rules =
[true, [".PHONY"], phony_targets]
in
let builtin_info =
{ Omake_builtin_type.builtin_empty with builtin_funs = builtin_funs;
builtin_kfuns = builtin_kfuns;
builtin_rules = builtin_rules;
phony_targets = phony_targets
}
in
Omake_builtin.register_builtin builtin_info
| null | https://raw.githubusercontent.com/ocaml-omake/omake/08b2a83fb558f6eb6847566cbe1a562230da2b14/src/builtin/omake_builtin_rule.ml | ocaml |
* Table of built-in functions.
|
* Some builtin functions .
*
* \begin{doc }
* functions and utilities }
* \label{chapter : build }
* \cutname{omake - build.html }
* \end{doc }
*
* Some builtin functions.
*
* \begin{doc}
* \chapter{Build functions and utilities}
* \label{chapter:build}
* \cutname{omake-build.html}
* \end{doc}
*
*)
include Omake_pos.Make (struct let name = "Omake_builtin_rule" end)
* These targets are decribed in doc / src / omake - rules.tex
*
* \begin{doc }
* \section{Builtin .PHONY targets }
*
* The complete set of builtin \verb+.PHONY+ targets include the following .
*
* \begin{description }
* \item[.PHONY ] Declares new phony targets ( Section~\ref{target:.PHONY } ) .
* \item[.DEFAULT ] Declare the default build targets ( Section~\ref{target:.DEFAULT } ) .
* \item[.SUBDIRS ] Include a directory as part of the project ( Section~\ref{target:.SUBDIRS } ) .
* \item[.SCANNER ] Define a dependency scanner ( } ) .
* \item[.INCLUDE ] Include a file ( Section~\ref{target:.INCLUDE } ) .
* \item[.ORDER ] Define a file - dependency ordering rule ( Section~\ref{target:.ORDER } ) .
* \item[.BUILD\_BEGIN ] Commands to be executed at the beginning of a build .
* \item[.BUILD\_SUCCESS ] Commands to be executed if the build is successful .
* \item[.BUILD\_FAILURE ] Commands to be executed if the build fails .
* \end{description }
*
* \targetlabelref{.BUILD_BEGIN}{.BUILD\_BEGIN }
* }
* \targetlabelref{.BUILD_FAILURE}{.BUILD\_FAILURE }
*
* The \verb+.BUILD+ targets can be used to specify commands to be executed at
* the beginning and end of the build . The \verb+.BUILD_BEGIN+ target is built
* at the beginning of a project build , and one of \verb+.BUILD_FAILURE+ or
* \verb+.BUILD_SUCCESS+ is executed when the build terminates .
*
* For example , the following set of rules simply print additional messages
* about the status of the build .
*
* }
* .BUILD_BEGIN :
* echo Build starting
*
* .BUILD_SUCCESS :
* echo The build was successful
*
* .BUILD_FAILURE :
* println($"The build failed : $ ( length $ ( find - build - targets Failed ) ) targets could not be built " )
* \end{verbatim }
*
* Another common use is to define notifications to be performed when
* the build completes . For example , the following rule will create
* a new X terminal displaying the summary of the build
* ( using the \hypervarx{BUILD_SUMMARY}{BUILD\_SUMMARY } ) .
*
* }
* .BUILD_FAILURE :
* xterm -e vi $ ( BUILD_SUMMARY )
* \end{verbatim }
*
* If you do not wish to add these rules directly to your project ( which
* is probably a good idea if you work with others ) , you can
* define them in your \verb+.omakerc+ ( see Section~\ref{section:.omakerc } ) .
*
* The \hyperfun{find - build - targets }
* is useful for obtaining a firther summary of the build . Note that
* when output diversions are in effect ( with the \verb+--output-*+ options --- see Chapter~\ref{chapter : options } ) ,
* any output produced by the commands is copied to a file . The name of the
* file is specified by the \verb+output - file+ field of the \hyperobj{Target } .
* You may find this useful in defining custom build summaries .
* \end{doc }
* These targets are decribed in doc/src/omake-rules.tex
*
* \begin{doc}
* \section{Builtin .PHONY targets}
*
* The complete set of builtin \verb+.PHONY+ targets include the following.
*
* \begin{description}
* \item[.PHONY] Declares new phony targets (Section~\ref{target:.PHONY}).
* \item[.DEFAULT] Declare the default build targets (Section~\ref{target:.DEFAULT}).
* \item[.SUBDIRS] Include a directory as part of the project (Section~\ref{target:.SUBDIRS}).
* \item[.SCANNER] Define a dependency scanner (Section~\ref{target:.SUBDIRS}).
* \item[.INCLUDE] Include a file (Section~\ref{target:.INCLUDE}).
* \item[.ORDER] Define a file-dependency ordering rule (Section~\ref{target:.ORDER}).
* \item[.BUILD\_BEGIN] Commands to be executed at the beginning of a build.
* \item[.BUILD\_SUCCESS] Commands to be executed if the build is successful.
* \item[.BUILD\_FAILURE] Commands to be executed if the build fails.
* \end{description}
*
* \targetlabelref{.BUILD_BEGIN}{.BUILD\_BEGIN}
* \targetlabelref{.BUILD_SUCCESS}{.BUILD\_SUCCESS}
* \targetlabelref{.BUILD_FAILURE}{.BUILD\_FAILURE}
*
* The \verb+.BUILD+ targets can be used to specify commands to be executed at
* the beginning and end of the build. The \verb+.BUILD_BEGIN+ target is built
* at the beginning of a project build, and one of \verb+.BUILD_FAILURE+ or
* \verb+.BUILD_SUCCESS+ is executed when the build terminates.
*
* For example, the following set of rules simply print additional messages
* about the status of the build.
*
* \begin{verbatim}
* .BUILD_BEGIN:
* echo Build starting
*
* .BUILD_SUCCESS:
* echo The build was successful
*
* .BUILD_FAILURE:
* println($"The build failed: $(length $(find-build-targets Failed)) targets could not be built")
* \end{verbatim}
*
* Another common use is to define notifications to be performed when
* the build completes. For example, the following rule will create
* a new X terminal displaying the summary of the build
* (using the \hypervarx{BUILD_SUMMARY}{BUILD\_SUMMARY}).
*
* \begin{verbatim}
* .BUILD_FAILURE:
* xterm -e vi $(BUILD_SUMMARY)
* \end{verbatim}
*
* If you do not wish to add these rules directly to your project (which
* is probably a good idea if you work with others), you can
* define them in your \verb+.omakerc+ (see Section~\ref{section:.omakerc}).
*
* The \hyperfun{find-build-targets}
 * is useful for obtaining a further summary of the build. Note that
* when output diversions are in effect (with the \verb+--output-*+ options --- see Chapter~\ref{chapter:options}),
* any output produced by the commands is copied to a file. The name of the
* file is specified by the \verb+output-file+ field of the \hyperobj{Target}.
* You may find this useful in defining custom build summaries.
* \end{doc}
*)
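(* A further illustrative OMakefile fragment (not part of the original
 * sources): it reuses only the names documented above, namely
 * find-build-targets with the Failed argument and the BUILD_SUMMARY
 * variable, to report failures at the end of a build.
 *
 *     .BUILD_FAILURE:
 *         println($"$(length $(find-build-targets Failed)) targets failed, see $(BUILD_SUMMARY)")
 *)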
let phony_targets =
[".PHONY"; ".DEFAULT"; ".SUBDIRS"; ".SCANNER"; ".INCLUDE"; ".ORDER";
".BUILD_BEGIN"; ".BUILD_SUCCESS"; ".BUILD_FAILURE"]
(************************************************************************
* Set options.
*
* \begin{doc}
* \section{Options and versioning}
* \fun{OMakeFlags}
*
* \begin{verbatim}
* OMakeFlags(options)
* options : String
* \end{verbatim}
*
* The \verb+OMakeFlags+ function is used to set \verb+omake+ options from
* within \File{OMakefile}s. The options have exactly the same format as
* options on the command line.
*
* For example, the following code displays the progress bar unless
* the \verb+VERBOSE+ environment variable is defined.
*
* \begin{verbatim}
* if $(not $(defined-env VERBOSE))
* OMakeFlags(-S --progress)
* export
* \end{verbatim}
* \end{doc}
*)
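(* Implementation note: set_options simply flattens its single argument into
 * a list of strings (strings_of_value) and hands them to venv_set_options,
 * so anything accepted on the omake command line should be accepted here
 * as well. *)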
let set_options venv pos loc args _ =
let pos = string_pos "OMakeFlags" pos in
match args with
[arg] ->
let argv = Omake_value.strings_of_value venv pos arg in
let venv = Omake_env.venv_set_options venv loc pos argv in
venv, Omake_value_type.ValNone
| _ ->
raise (Omake_value_type.OmakeException (loc_pos loc pos, ArityMismatch (ArityExact 1, List.length args)))
(*
* Version checking.
*
* \begin{doc}
* \fun{OMakeVersion}
*
* \begin{verbatim}
* OMakeVersion(version1)
* OMakeVersion(version1, version2)
* version1, version2 : String
* \end{verbatim}
*
* The \verb+OMakeVersion+ function is used for version checking
* in \File{OMakefile}s. It takes one or two arguments.
*
* In the one argument form, if the \Prog{omake} version number
* is less than \verb+<version1>+,
* then an exception is raised. In the two argument form,
* the version must lie between \verb+version1+ and \verb+version2+.
*
* \fun{cmp-versions}
* \begin{verbatim}
* $(cmp-versions version1, version2)
* version1, version2 : String
* \end{verbatim}
*
 * The \verb+cmp-versions+ function can be used to compare arbitrary version strings.
* It returns 0 when the two version strings are equal, a negative number when the first
* string represents an earlier version, and a positive number otherwise.
* \end{doc}
*)
let split_int =
let rec split_int_aux i s =
match String.length s with
0 -> i, s
| l ->
begin
match s.[0] with
'0'..'9' as c ->
split_int_aux (i * 10 + (Char.code c - 48)) (String.sub s 1 (l - 1))
| _ ->
i, s
end
in
split_int_aux 0
let rec compare_versions v1 v2 =
match String.length v1, String.length v2 with
0, 0 -> 0
| 0, _ -> -1
| _, 0 -> 1
| l1, l2 ->
begin
match v1.[0],v2.[0] with
'0'..'9', '0'..'9' ->
let i1, s1 = split_int v1 in
let i2, s2 = split_int v2 in
begin
match i1 - i2 with
0 -> compare_versions s1 s2
| i -> i
end
| c1, c2 when c1 = c2 ->
compare_versions (String.sub v1 1 (l1 - 1)) (String.sub v2 1 (l2 - 1))
| c1, c2 ->
Char.code c1 - Char.code c2
end
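(* Worked examples (illustrative only): numeric runs are compared as
 * integers, everything else character by character, so
 *
 *   split_int "12abc"                    evaluates to (12, "abc")
 *   compare_versions "0.9.8"   "0.9.8"   = 0
 *   compare_versions "0.9.8"   "0.10.1"  < 0   (9 < 10 numerically)
 *   compare_versions "0.9.8.1" "0.9.8"   > 0   (the longer version wins)
 *)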
let check_version venv pos loc args =
let pos = string_pos "check_version" pos in
let version = Omake_magic.version in
let check lowest highest =
if compare_versions version lowest < 0 then
raise (Omake_value_type.OmakeFatalErr (loc_pos loc pos, LazyError (fun out ->
Format.fprintf out "@[<0>This version of OMake is too old,@ you need to upgrade to at least version@ %s;@ current OMake version is@ %s.@ You should be able to download the latest version of OMake from @]" lowest version)));
match highest with
Some highest ->
if compare_versions version highest > 0 then
raise (Omake_value_type.OmakeFatalErr (loc_pos loc pos, LazyError (fun out ->
Format.fprintf out "@[<0>This version of OMake is too new or the given file is too old.@ This file accepts versions@ %s-%s;@ current OMake version is@ %s@]" lowest highest version)))
| None ->
()
in
match args with
[lowest] ->
let lowest = Lm_string_util.trim (Omake_value.string_of_value venv pos lowest) in
check lowest None;
Omake_value_type.ValString version
| [lowest; highest] ->
let lowest = Lm_string_util.trim (Omake_value.string_of_value venv pos lowest) in
let highest = Lm_string_util.trim (Omake_value.string_of_value venv pos highest) in
check lowest (Some highest);
ValString version
| _ ->
raise (Omake_value_type.OmakeException (loc_pos loc pos, ArityMismatch (ArityRange (1,2), List.length args)))
let cmp_version venv pos loc args =
let pos = string_pos "cmp_version" pos in
match args with
[v1; v2] ->
let v1 = Lm_string_util.trim (Omake_value.string_of_value venv pos v1) in
let v2 = Lm_string_util.trim (Omake_value.string_of_value venv pos v2) in
Omake_value_type.ValInt (compare_versions v1 v2)
| _ ->
raise (Omake_value_type.OmakeException (loc_pos loc pos, ArityMismatch (ArityExact 2, List.length args)))
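(* Example use from an OMakefile (the version numbers are arbitrary):
 *
 *     OMakeVersion(0.9.8.5, 0.9.9)
 *     x = $(cmp-versions 0.9.8.6, 0.9.8.10)
 *
 * Here cmp-versions yields a negative number, because the numeric segments
 * are compared as integers by compare_versions above. *)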
(*
* Add the command-line vars.
*
* \begin{doc}
* \fun{DefineCommandVars}
*
* \begin{verbatim}
* DefineCommandVars()
* \end{verbatim}
*
* The \verb+DefineCommandVars+ function redefines the variables passed on
 * the command line. Variable definitions are passed on the command line
* in the form \verb+name=value+. This function is primarily for internal
* use by \Prog{omake} to define these variables for the first time.
* \end{doc}
*)
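(* For example (hypothetical variable names), invoking
 *     omake CC=gcc DEBUG=1 all
 * passes CC and DEBUG as name=value definitions; the function below merely
 * re-adds those definitions to the environment. *)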
let define_command_vars venv pos loc args kargs =
let pos = string_pos "DefineCommandVars" pos in
match args, kargs with
[], []
| [_], [] ->
Omake_builtin.venv_add_command_defs venv, Omake_value_type.ValNone
| _ ->
raise (Omake_value_type.OmakeException (loc_pos loc pos, ArityMismatch (ArityRange (0, 1), List.length args)))
let () =
let builtin_funs =
[true, "OMakeVersion", check_version, Omake_ir.ArityRange (1, 2);
true, "cmp-versions", cmp_version, ArityExact 2;
]
in
let builtin_kfuns =
[true, "OMakeFlags", set_options, Omake_ir.ArityExact 1;
true, "DefineCommandVars", define_command_vars, ArityRange (0, 1);
]
in
let builtin_rules =
[true, [".PHONY"], phony_targets]
in
let builtin_info =
{ Omake_builtin_type.builtin_empty with builtin_funs = builtin_funs;
builtin_kfuns = builtin_kfuns;
builtin_rules = builtin_rules;
phony_targets = phony_targets
}
in
Omake_builtin.register_builtin builtin_info
|
d1b96728183839034960ffa40d99a0853148f0639cde843a9f130947b1b4c51a | bnoordhuis/chicken-core | port-tests.scm | (require-extension srfi-1 ports utils srfi-4 extras tcp posix)
(define-syntax assert-error
(syntax-rules ()
((_ expr)
(assert (handle-exceptions _ #t expr #f)))))
(define *text* #<<EOF
this is a test
<foof> #;33> (let ((in (open-input-string ""))) (close-input-port in)
(read-char in)) [09:40]
<foof> Error: (read-char) port already closed: #<input port "(string)">
<foof> #;33> (let ((in (open-input-string ""))) (close-input-port in)
(read-line in))
<foof> Error: call of non-procedure: #t
<foof> ... that's a little odd
<Bunny351> yuck. [09:44]
<Bunny351> double yuck. [10:00]
<sjamaan> yuck squared! [10:01]
<Bunny351> yuck powered by yuck
<Bunny351> (to the power of yuck, of course) [10:02]
<pbusser3> My yuck is bigger than yours!!!
<foof> yuck!
<foof> (that's a factorial)
<sjamaan> heh
<sjamaan> I think you outyucked us all [10:03]
<foof> well, for large enough values of yuck, yuck! ~= yuck^yuck [10:04]
ERC>
EOF
)
(define p (open-input-string *text*))
(assert (string=? "this is a test" (read-line p)))
(assert
(string=?
"<foof> #;33> (let ((in (open-input-string \"\"))) (close-input-port in)"
(read-line p)))
(assert (= 20 (length (read-lines (open-input-string *text*)))))
;;; copy-port
(assert
(string=?
*text*
(with-output-to-string
(lambda ()
(copy-port (open-input-string *text*) (current-output-port)))))) ; read-char -> write-char
(assert
(equal?
'(3 2 1)
(let ((out '()))
(copy-port ; read -> custom
(open-input-string "1 2 3")
#f
read
(lambda (x port) (set! out (cons x out))))
out)))
(assert
(equal?
"abc"
(let ((out (open-output-string)))
(copy-port ; read-char -> custom
(open-input-string "abc")
out
read-char
(lambda (x out) (write-char x out)))
(get-output-string out))))
(assert
(equal?
"abc"
(let ((in (open-input-string "abc") )
(out (open-output-string)))
(copy-port ; custom -> write-char
in out
(lambda (in) (read-char in)))
(get-output-string out))))
;; fill buffers
(read-all "compiler.scm")
(print "slow...")
(time
(with-input-from-file "compiler.scm"
(lambda ()
(with-output-to-file "compiler.scm.2"
(lambda ()
(copy-port
(current-input-port) (current-output-port)
(lambda (port) (read-char port))
(lambda (x port) (write-char x port))))))))
(print "fast...")
(time
(with-input-from-file "compiler.scm"
(lambda ()
(with-output-to-file "compiler.scm.2"
(lambda ()
(copy-port (current-input-port) (current-output-port)))))))
(delete-file "compiler.scm.2")
(define-syntax check
(syntax-rules ()
((_ (expr-head expr-rest ...))
(check 'expr-head (expr-head expr-rest ...)))
((_ name expr)
(let ((okay (list 'okay)))
(assert
(eq? okay
(condition-case
(begin (print* name "...")
(flush-output)
(let ((output expr))
(printf "FAIL [ ~S ]\n" output)))
((exn i/o file) (printf "OK\n") okay))))))))
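;; Illustrative use of `check` (mirroring the calls further below): the macro
;; prints the expression's name, then "OK" when evaluation raises an
;; (exn i/o file) condition -- the expected outcome on a closed port -- and
;; "FAIL [...]" with the offending value otherwise, which trips the assert.
;;
;; (call-with-output-file "empty-file"
;;   (lambda (out)
;;     (close-output-port out)
;;     (check (newline out)))) ; prints "newline...OK"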
(cond-expand
((not mingw32)
(define proc (process-fork (lambda () (tcp-accept (tcp-listen 8080)))))
(on-exit (lambda () (handle-exceptions exn #f (process-signal proc))))
(print "\n\nProcedures check on TCP ports being closed\n")
(receive (in out)
(let lp ()
(condition-case (tcp-connect "localhost" 8080)
((exn i/o net) (lp))))
(close-output-port out)
(close-input-port in)
(check (tcp-addresses in))
(check (tcp-port-numbers in))
(check (tcp-abandon-port in))) ; Not sure about abandon-port
;; This tests for two bugs which occurred on some platforms, possibly due
;; to multiprocessing:
;; read-line with EINTR would loop endlessly and process-wait would
;; signal a condition when interrupted rather than retrying.
;; Should be a noop but triggers the bugs described above when they are present.
(receive (in out)
(create-pipe)
(receive (pid ok? status)
(process-wait
(process-fork
(lambda ()
(file-close in) ; close receiving end
(with-output-to-port (open-output-file* out)
(lambda ()
(display "hello, world\n")
;; exit prevents buffers from being discarded by implicit _exit
(exit 0))))))
(file-close out) ; close sending end
(assert (equal? '(#t 0 ("hello, world"))
(list ok? status (read-lines (open-input-file* in)))))))
)
(else))
(print "\n\nProcedures check on output ports being closed\n")
(with-output-to-file "empty-file" void)
(call-with-output-file "empty-file"
(lambda (out)
(close-output-port out)
(check (write '(foo) out))
(check (fprintf out "blabla"))
(check "print-call-chain" (begin (print-call-chain out) (void)))
(check (print-error-message (make-property-condition 'exn 'message "foo") out))
(check "print" (with-output-to-port out
(lambda () (print "foo"))))
(check "print*" (with-output-to-port out
(lambda () (print* "foo"))))
(check (display "foo" out))
(check (terminal-port? out)) ; Calls isatty() on C_SCHEME_FALSE?
(check (newline out))
(check (write-char #\x out))
(check (write-line "foo" out))
(check (write-u8vector '#u8(1 2 3) out))
;;(check (port->fileno in))
(check (flush-output out))
#+(not mingw32)
(begin
(check (file-test-lock out))
(check (file-lock out))
(check (file-lock/blocking out)))
(check (write-byte 120 out))
(check (write-string "foo" #f out))))
(print "\n\nProcedures check on input ports being closed\n")
(call-with-input-file "empty-file"
(lambda (in)
(close-input-port in)
(check (read in))
(check (read-char in))
(check (char-ready? in))
(check (peek-char in))
;;(check (port->fileno in))
(check (terminal-port? in)) ; Calls isatty() on C_SCHEME_FALSE?
(check (read-line in 5))
(check (read-u8vector 5 in))
(check "read-u8vector!" (let ((dest (make-u8vector 5)))
(read-u8vector! 5 dest in)))
#+(not mingw32)
(begin
(check (file-test-lock in))
(check (file-lock in))
(check (file-lock/blocking in)))
(check (read-byte in))
(check (read-token (constantly #t) in))
(check (read-string 10 in))
(check "read-string!" (let ((buf (make-string 10)))
(read-string! 10 buf in) buf))))
(print "\nEmbedded NUL bytes in filenames are rejected\n")
(assert-error (with-output-to-file "embedded\x00null-byte" void)) | null | https://raw.githubusercontent.com/bnoordhuis/chicken-core/56d30e3be095b6abe1bddcfe10505fa726a43bb5/tests/port-tests.scm | scheme | copy-port
read-char -> write-char
read -> custom
read-char -> custom
custom -> write-char
fill buffers
Not sure about abandon-port
other platforms, possibly due to multiprocessing:
read-line with EINTR would loop endlessly and process-wait would
signal a condition when interrupted rather than retrying.
close receiving end
exit prevents buffers from being discarded by implicit _exit
close sending end
Calls isatty() on C_SCHEME_FALSE?
(check (port->fileno in))
(check (port->fileno in))
Calls isatty() on C_SCHEME_FALSE? | (require-extension srfi-1 ports utils srfi-4 extras tcp posix)
(define-syntax assert-error
(syntax-rules ()
((_ expr)
(assert (handle-exceptions _ #t expr #f)))))
(define *text* #<<EOF
this is a test
33 > ( let ( ( in ( open - input - string " " ) ) ) ( close - input - port in )
(read-char in)) [09:40]
<foof> Error: (read-char) port already closed: #<input port "(string)">
33 > ( let ( ( in ( open - input - string " " ) ) ) ( close - input - port in )
(read-line in))
<foof> Error: call of non-procedure: #t
<foof> ... that's a little odd
<Bunny351> yuck. [09:44]
<Bunny351> double yuck. [10:00]
<sjamaan> yuck squared! [10:01]
<Bunny351> yuck powered by yuck
<Bunny351> (to the power of yuck, of course) [10:02]
<pbusser3> My yuck is bigger than yours!!!
<foof> yuck!
<foof> (that's a factorial)
<sjamaan> heh
<sjamaan> I think you outyucked us all [10:03]
<foof> well, for large enough values of yuck, yuck! ~= yuck^yuck [10:04]
ERC>
EOF
)
(define p (open-input-string *text*))
(assert (string=? "this is a test" (read-line p)))
(assert
(string=?
"<foof> #;33> (let ((in (open-input-string \"\"))) (close-input-port in)"
(read-line p)))
(assert (= 20 (length (read-lines (open-input-string *text*)))))
(assert
(string=?
*text*
(with-output-to-string
(lambda ()
(assert
(equal?
'(3 2 1)
(let ((out '()))
(open-input-string "1 2 3")
#f
read
(lambda (x port) (set! out (cons x out))))
out)))
(assert
(equal?
"abc"
(let ((out (open-output-string)))
(open-input-string "abc")
out
read-char
(lambda (x out) (write-char x out)))
(get-output-string out))))
(assert
(equal?
"abc"
(let ((in (open-input-string "abc") )
(out (open-output-string)))
in out
(lambda (in) (read-char in)))
(get-output-string out))))
(read-all "compiler.scm")
(print "slow...")
(time
(with-input-from-file "compiler.scm"
(lambda ()
(with-output-to-file "compiler.scm.2"
(lambda ()
(copy-port
(current-input-port) (current-output-port)
(lambda (port) (read-char port))
(lambda (x port) (write-char x port))))))))
(print "fast...")
(time
(with-input-from-file "compiler.scm"
(lambda ()
(with-output-to-file "compiler.scm.2"
(lambda ()
(copy-port (current-input-port) (current-output-port)))))))
(delete-file "compiler.scm.2")
(define-syntax check
(syntax-rules ()
((_ (expr-head expr-rest ...))
(check 'expr-head (expr-head expr-rest ...)))
((_ name expr)
(let ((okay (list 'okay)))
(assert
(eq? okay
(condition-case
(begin (print* name "...")
(flush-output)
(let ((output expr))
(printf "FAIL [ ~S ]\n" output)))
((exn i/o file) (printf "OK\n") okay))))))))
(cond-expand
((not mingw32)
(define proc (process-fork (lambda () (tcp-accept (tcp-listen 8080)))))
(on-exit (lambda () (handle-exceptions exn #f (process-signal proc))))
(print "\n\nProcedures check on TCP ports being closed\n")
(receive (in out)
(let lp ()
(condition-case (tcp-connect "localhost" 8080)
((exn i/o net) (lp))))
(close-output-port out)
(close-input-port in)
(check (tcp-addresses in))
(check (tcp-port-numbers in))
This tests for two bugs which occurred on and possibly
Should be a noop but triggers
(receive (in out)
(create-pipe)
(receive (pid ok? status)
(process-wait
(process-fork
(lambda ()
(with-output-to-port (open-output-file* out)
(lambda ()
(display "hello, world\n")
(exit 0))))))
(assert (equal? '(#t 0 ("hello, world"))
(list ok? status (read-lines (open-input-file* in)))))))
)
(else))
(print "\n\nProcedures check on output ports being closed\n")
(with-output-to-file "empty-file" void)
(call-with-output-file "empty-file"
(lambda (out)
(close-output-port out)
(check (write '(foo) out))
(check (fprintf out "blabla"))
(check "print-call-chain" (begin (print-call-chain out) (void)))
(check (print-error-message (make-property-condition 'exn 'message "foo") out))
(check "print" (with-output-to-port out
(lambda () (print "foo"))))
(check "print*" (with-output-to-port out
(lambda () (print* "foo"))))
(check (display "foo" out))
(check (newline out))
(check (write-char #\x out))
(check (write-line "foo" out))
(check (write-u8vector '#u8(1 2 3) out))
(check (flush-output out))
#+(not mingw32)
(begin
(check (file-test-lock out))
(check (file-lock out))
(check (file-lock/blocking out)))
(check (write-byte 120 out))
(check (write-string "foo" #f out))))
(print "\n\nProcedures check on input ports being closed\n")
(call-with-input-file "empty-file"
(lambda (in)
(close-input-port in)
(check (read in))
(check (read-char in))
(check (char-ready? in))
(check (peek-char in))
(check (read-line in 5))
(check (read-u8vector 5 in))
(check "read-u8vector!" (let ((dest (make-u8vector 5)))
(read-u8vector! 5 dest in)))
#+(not mingw32)
(begin
(check (file-test-lock in))
(check (file-lock in))
(check (file-lock/blocking in)))
(check (read-byte in))
(check (read-token (constantly #t) in))
(check (read-string 10 in))
(check "read-string!" (let ((buf (make-string 10)))
(read-string! 10 buf in) buf))))
(print "\nEmbedded NUL bytes in filenames are rejected\n")
(assert-error (with-output-to-file "embedded\x00null-byte" void)) |
488f1130628433d37b284d862242d84c32acf6111989c7d4d53425ebcba8fd2e | active-group/active-clojure | validation_test.cljc | (ns active.clojure.validation-test
(:require #?(:clj [active.clojure.record :refer [define-record-type]]
:cljs [active.clojure.cljs.record :refer-macros [define-record-type]])
#?(:clj [clojure.test :as t]
:cljs [cljs.test :as t :include-macros true])
[active.clojure.validation :as v]))
(t/deftest fmap-success-test
(let [failure (v/make-validation-failure
[(v/make-validation-error "candidate" "message" nil)])
success (v/make-validation-success "candidate")]
(t/is (= failure (v/fmap-success clojure.string/upper-case failure)))
(t/is (= (v/make-validation-success "CANDIDATE")
(v/fmap-success clojure.string/upper-case success)))))
(t/deftest fmap-result-test
(letfn [(f [validation-result]
(v/fmap-result str (fn [error] (v/validation-error-label error :new-label)) validation-result))]
(t/is (= (v/make-validation-success "42") (f (v/validate-pos-int 42))))
(t/is (= (v/make-validation-failure [(v/make-validation-error -23 ::v/pos-int :new-label)])
(f (v/validate-pos-int -23))))))
(t/deftest seq-validation-test
(let [failure (v/make-validation-failure
[(v/make-validation-error "candidate" "message" nil)])
failure-2 (v/make-validation-failure
[(v/make-validation-error "candidate-2" "message-2" nil)])
flat-success (v/make-validation-success "candidate")]
(t/testing "failures are concatenated"
(t/is (= (v/make-validation-failure
[(v/make-validation-error "candidate" "message" nil)
(v/make-validation-error "candidate-2" "message-2" nil)])
(v/seq-validation failure failure-2))))
(t/testing "one failure leads to failure"
(t/is (= failure
(v/seq-validation (v/pure-validation identity) failure))))
(t/testing "two successes lead to success"
(t/is (= flat-success
(v/seq-validation (v/pure-validation identity) flat-success))))))
(t/deftest curry-n-test
(t/is (= 42 ((v/curry-n (fn [] 42) 0))))
(t/is (= 42 (((v/curry-n (fn [a b] (+ a b)) 2) 1) 41)))
(t/is (= 42 ((((v/curry-n (fn [a b c] (+ a b c)) 3) 1) 40) 1))))
(define-record-type Person {:rtd-record? true}
make-person person?
[name person-name
age person-age])
(defn- validate-person
[[name age]]
(v/validation make-person
(v/validate-string name)
(v/validate-pos-int age)))
(defn- validate-person-with-labels
[[name age]]
(v/validation make-person
(v/validate-string name :name)
(v/validate-pos-int age :age)))
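;; Quick sketch of the applicative behaviour exercised by the tests below:
;; `v/validation` applies `make-person` only when every field validates and
;; otherwise returns a single failure carrying all collected errors.
(comment
  (validate-person ["Mimi" 1]) ;; => success wrapping (make-person "Mimi" 1)
  (validate-person [1 "Mimi"]) ;; => failure with ::v/string and ::v/pos-int errors
  )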
(t/deftest validation-test
(t/is (= (v/make-validation-success (make-person "Mimi" 1))
(validate-person ["Mimi" 1])))
(t/is (= (v/make-validation-success (make-person "Mimi" 1))
(validate-person-with-labels ["Mimi" 1])))
(t/testing "every failure is collected in the result"
(t/is (= (v/make-validation-failure
[(v/make-validation-error 1 ::v/string nil)
(v/make-validation-error "Mimi" ::v/pos-int nil)])
(validate-person [1 "Mimi"])))
(t/is (= (v/make-validation-failure
[(v/make-validation-error 1 ::v/string :name)
(v/make-validation-error "Mimi" ::v/pos-int :age)])
(validate-person-with-labels [1 "Mimi"])))))
(t/deftest validate-string-test
(t/is (= (v/make-validation-failure
[(v/make-validation-error 42 ::v/string nil)])
(v/validate-string 42)))
(t/is (= (v/make-validation-success "string")
(v/validate-string "string"))))
(t/deftest validate-non-empty-string-test
(t/is (= (v/make-validation-failure
[(v/make-validation-error 42 ::v/non-empty-string nil)])
(v/validate-non-empty-string 42)))
(t/is (= (v/make-validation-failure
[(v/make-validation-error "" ::v/non-empty-string nil)])
(v/validate-non-empty-string "")))
(t/is (= (v/make-validation-success "string")
(v/validate-non-empty-string "string"))))
(t/deftest validate-int-test
(t/is (= (v/make-validation-failure
[(v/make-validation-error "string" ::v/int nil)])
(v/validate-int "string")))
(t/is (= (v/make-validation-success 42)
(v/validate-int 42))))
(t/deftest pos-int-validation-test
(t/is (= (v/make-validation-failure
[(v/make-validation-error "string" ::v/pos-int nil)])
(v/validate-pos-int "string")))
(t/is (= (v/make-validation-failure
[(v/make-validation-error -23 ::v/pos-int nil)])
(v/validate-pos-int -23)))
(t/is (= (v/make-validation-success 42)
(v/validate-pos-int 42))))
(t/deftest validate-boolean-test
(t/is (= (v/make-validation-failure
[(v/make-validation-error "string" ::v/boolean nil)])
(v/validate-boolean "string")))
(t/is (= (v/make-validation-success true)
(v/validate-boolean true)))
(t/is (= (v/make-validation-success false)
(v/validate-boolean false))))
(t/deftest validate-keyword-test
(t/is (= (v/make-validation-failure
[(v/make-validation-error 42 ::v/keyword nil)])
(v/validate-keyword 42)))
(t/is (= (v/make-validation-success :keyword)
(v/validate-keyword :keyword))))
(t/deftest validate-one-of-test
(t/is (= (v/make-validation-failure
[(v/make-validation-error 42 [::v/one-of #{:a :b :c}] nil)])
(v/validate-one-of [:a :b :c] 42)))
(t/is (= (v/make-validation-success :a)
(v/validate-one-of [:a :b :c] :a)))
(t/is (= (v/make-validation-success :c)
(v/validate-one-of [:a :b :c] :c))))
(t/deftest validate-list-test
(t/is (= (v/make-validation-failure
[(v/make-validation-error 42 ::v/list nil)])
(v/validate-list 42)))
(t/testing "vectors are not lists"
(t/is (= (v/make-validation-failure
[(v/make-validation-error [1 2 3] ::v/list nil)])
(v/validate-list [1 2 3]))))
(t/is (= (v/make-validation-success (list 1 2 3))
(v/validate-list (list 1 2 3)))))
(t/deftest validate-vector-test
(t/is (= (v/make-validation-failure
[(v/make-validation-error 42 ::v/vector nil)])
(v/validate-vector 42)))
(t/testing "lists are not vectors"
(t/is (= (v/make-validation-failure
[(v/make-validation-error (list 1 2 3) ::v/vector nil)])
(v/validate-vector (list 1 2 3)))))
(t/is (= (v/make-validation-success [1 2 3])
(v/validate-vector [1 2 3]))))
(t/deftest validate-map-test
(t/is (= (v/make-validation-failure
[(v/make-validation-error 42 ::v/map nil)])
(v/validate-map 42)))
(t/is (= (v/make-validation-success {:a "b"})
(v/validate-map {:a "b"}))))
(t/deftest validate-set-test
(t/is (= (v/make-validation-failure
[(v/make-validation-error 42 ::v/set nil)])
(v/validate-set 42)))
(t/is (= (v/make-validation-success #{:a :b :c})
(v/validate-set #{:a :b :c}))))
(t/deftest validate-sequential-test
(t/is (= (v/make-validation-failure
[(v/make-validation-error 42 ::v/sequential nil)])
(v/validate-sequential 42)))
(t/is (= (v/make-validation-success (list 1 2 3))
(v/validate-sequential (list 1 2 3))))
(t/is (= (v/make-validation-success [1 2 3])
(v/validate-sequential [1 2 3]))))
(t/deftest optional-test
(let [validate-optional-string (v/optional v/validate-string)]
(t/is (= (v/make-validation-success "string")
(validate-optional-string "string")))
(t/is (= (v/make-validation-success nil)
(validate-optional-string nil)))
(t/is (= (v/make-validation-failure
[(v/make-validation-error 42 [::v/optional ::v/string] nil)])
(validate-optional-string 42)))))
(define-record-type Node {:rtd-record? true}
make-node node?
[label node-label
neighbors node-neighbors])
(defn- validate-node
[[label neighbors]]
(v/validation make-node
(v/validate-non-empty-string label :label)
(v/sequence-of validate-node neighbors :neighbors)))
(t/deftest sequence-of-test
(t/testing "an empty collection is always a valid `sequence-of`"
(t/is (= (v/make-validation-success [])
(v/sequence-of v/validate-non-empty-string []))))
(t/is (= (v/make-validation-success ["a" "b" "c"])
(v/sequence-of v/validate-non-empty-string ["a" "b" "c"])))
(t/is (= (v/make-validation-failure
[(v/make-validation-error nil ::v/non-empty-string [::v/seq 0])
(v/make-validation-error 32 ::v/non-empty-string [::v/seq 2])])
(v/sequence-of v/validate-non-empty-string [nil "b" 32])))
(t/testing "labels are used correctly"
(t/is (= (v/make-validation-failure
[(v/make-validation-error nil ::v/non-empty-string [::some-name 0])
(v/make-validation-error 32 ::v/non-empty-string [::some-name 2])])
(v/sequence-of v/validate-non-empty-string [nil "b" 32] ::some-name))))
(t/testing "sequential validations can be nested"
(t/testing "one level deep"
(t/is (= (v/make-validation-success
(make-node "a" []))
(validate-node ["a" []])))
(t/is (= (v/make-validation-success
(make-node "a" [(make-node "b" []) (make-node "c" [])]))
(validate-node ["a" [["b" []] ["c" []]]])))
(t/is (= (v/make-validation-failure
[(v/make-validation-error "" ::v/non-empty-string [[:neighbors 1] :label])])
(validate-node ["a" [["b" []] ["" []]]]))))
(t/testing "multiple levels deep"
(t/is (= (v/make-validation-success
(make-node "a" [(make-node "b" [(make-node "c" [])])
(make-node "d" [(make-node "e" [(make-node "f" [])])])]))
(validate-node ["a" [["b" [["c" []]]]
["d" [["e" [["f" []]]]]]]])))
(t/is (= (v/make-validation-failure
[(v/make-validation-error :e ::v/non-empty-string [[:neighbors 1] [[:neighbors 0] :label]])
(v/make-validation-error :f ::v/non-empty-string [[:neighbors 1] [[:neighbors 0] [[:neighbors 0] :label]]])])
(validate-node ["a" [["b" [["c" []]]]
["d" [[:e [[:f []]]]]]]]))))))
(t/deftest validate-choice-test
(let [validate-string-or-int (fn [candidate & [label]]
(v/validate-choice [v/validate-string
v/validate-int]
candidate
label))]
(t/testing "an empty choice can never have a valid result"
(t/is (= (v/make-validation-failure
[(v/make-validation-error 42 [::v/choice ::v/no-validators] :label)])
(v/validate-choice [] 42 :label))))
(t/testing "exactly one success leads to success"
(t/is (= (v/make-validation-success "string")
(validate-string-or-int "string")))
(t/is (= (v/make-validation-success 42)
(validate-string-or-int 42))))
(t/testing "every error is returned"
(t/is (= (v/make-validation-failure
[(v/make-validation-error :key ::v/string nil)
(v/make-validation-error :key ::v/int nil)])
(validate-string-or-int :key)))))
(t/testing "more than one success is a failure, too"
(let [validate-even
(fn [candidate & [label]]
(v/make-validator candidate even? ::even label))
validate-either-even-or-positive-number
(fn [candidate & [label]]
(v/validate-choice [v/validate-pos-int
validate-even]
candidate
label))]
(t/is (= (v/make-validation-failure
[(v/make-validation-error 2 [::v/choice ::v/more-than-one-success] nil)])
(validate-either-even-or-positive-number 2)))
(t/testing "more than once success is a failure combined with other failures"
(let [v (fn [candidate & [label]]
(v/validate-choice [v/validate-pos-int
validate-even
v/validate-boolean
v/validate-keyword]
candidate
label))]
(t/is (= (v/make-validation-failure
[(v/make-validation-error 2 [::v/choice ::v/more-than-one-success] nil)
(v/make-validation-error 2 ::v/boolean nil)
(v/make-validation-error 2 ::v/keyword nil)])
(v 2))))))))
(t/deftest validate-all-test
(let [v (fn [c]
(v/validate-all [v/validate-non-empty-string
(fn [candidate & [label]]
(if (= candidate "clojure")
(v/make-validation-success candidate)
(v/make-validation-failure [(v/make-validation-error candidate ::not-clojure label)])))]
c
:non-empty-and-clojure))]
(t/testing "validating with an empty seq of validators is always successful"
(t/is (= (v/make-validation-failure
[(v/make-validation-error 42 [::v/all ::v/no-validators] :label)])
(v/validate-all [] 42 :label))))
(t/is (= (v/make-validation-failure
[(v/make-validation-error "" ::v/non-empty-string :non-empty-and-clojure)
(v/make-validation-error "" ::not-clojure :non-empty-and-clojure)])
(v "")))
(t/is (= (v/make-validation-failure
[(v/make-validation-error "clj" ::not-clojure :non-empty-and-clojure)])
(v "clj")))
(t/is (= (v/make-validation-success "clojure") (v "clojure")))))
(t/deftest sequence-test
(t/testing "the empty sequence"
(t/is (= (v/make-validation-success [])
(v/sequence []))))
(t/testing "only successes"
(t/is (= (v/make-validation-success ['a 'b])
(v/sequence [(v/make-validation-success 'a)
(v/make-validation-success 'b)]))))
(t/testing "mixed success and failure"
(t/is (= (v/make-validation-failure
[(v/make-validation-error 'a :msg :label)])
(v/sequence [(v/make-validation-failure [(v/make-validation-error 'a :msg :label)])
(v/make-validation-success 'b)]))))
(t/testing "only failure"
(t/is (= (v/make-validation-failure
[(v/make-validation-error 'a :msg :label)
(v/make-validation-error 'b :msg :label)
(v/make-validation-error 'c :msg :label)])
(v/sequence [(v/make-validation-failure [(v/make-validation-error 'a :msg :label)])
(v/make-validation-failure [(v/make-validation-error 'b :msg :label)
(v/make-validation-error 'c :msg :label)])])))))
| null | https://raw.githubusercontent.com/active-group/active-clojure/44050a1292fa610dde732d5fbfc42c37ad976d3a/test/active/clojure/validation_test.cljc | clojure | (ns active.clojure.validation-test
(:require #?(:clj [active.clojure.record :refer [define-record-type]]
:cljs [active.clojure.cljs.record :refer-macros [define-record-type]])
#?(:clj [clojure.test :as t]
:cljs [cljs.test :as t :include-macros true])
[active.clojure.validation :as v]))
(t/deftest fmap-success-test
(let [failure (v/make-validation-failure
[(v/make-validation-error "candidate" "message" nil)])
success (v/make-validation-success "candidate")]
(t/is (= failure (v/fmap-success clojure.string/upper-case failure)))
(t/is (= (v/make-validation-success "CANDIDATE")
(v/fmap-success clojure.string/upper-case success)))))
(t/deftest fmap-result-test
(letfn [(f [validation-result]
(v/fmap-result str (fn [error] (v/validation-error-label error :new-label)) validation-result))]
(t/is (= (v/make-validation-success "42") (f (v/validate-pos-int 42))))
(t/is (= (v/make-validation-failure [(v/make-validation-error -23 ::v/pos-int :new-label)])
(f (v/validate-pos-int -23))))))
(t/deftest seq-validation-test
(let [failure (v/make-validation-failure
[(v/make-validation-error "candidate" "message" nil)])
failure-2 (v/make-validation-failure
[(v/make-validation-error "candidate-2" "message-2" nil)])
flat-success (v/make-validation-success "candidate")]
(t/testing "failures are concatenated"
(t/is (= (v/make-validation-failure
[(v/make-validation-error "candidate" "message" nil)
(v/make-validation-error "candidate-2" "message-2" nil)])
(v/seq-validation failure failure-2))))
(t/testing "one failure leads to failure"
(t/is (= failure
(v/seq-validation (v/pure-validation identity) failure))))
(t/testing "two successes lead to success"
(t/is (= flat-success
(v/seq-validation (v/pure-validation identity) flat-success))))))
(t/deftest curry-n-test
(t/is (= 42 ((v/curry-n (fn [] 42) 0))))
(t/is (= 42 (((v/curry-n (fn [a b] (+ a b)) 2) 1) 41)))
(t/is (= 42 ((((v/curry-n (fn [a b c] (+ a b c)) 3) 1) 40) 1))))
(define-record-type Person {:rtd-record? true}
make-person person?
[name person-name
age person-age])
(defn- validate-person
[[name age]]
(v/validation make-person
(v/validate-string name)
(v/validate-pos-int age)))
(defn- validate-person-with-labels
[[name age]]
(v/validation make-person
(v/validate-string name :name)
(v/validate-pos-int age :age)))
(t/deftest validation-test
(t/is (= (v/make-validation-success (make-person "Mimi" 1))
(validate-person ["Mimi" 1])))
(t/is (= (v/make-validation-success (make-person "Mimi" 1))
(validate-person-with-labels ["Mimi" 1])))
(t/testing "every failure is collected in the result"
(t/is (= (v/make-validation-failure
[(v/make-validation-error 1 ::v/string nil)
(v/make-validation-error "Mimi" ::v/pos-int nil)])
(validate-person [1 "Mimi"])))
(t/is (= (v/make-validation-failure
[(v/make-validation-error 1 ::v/string :name)
(v/make-validation-error "Mimi" ::v/pos-int :age)])
(validate-person-with-labels [1 "Mimi"])))))
(t/deftest validate-string-test
(t/is (= (v/make-validation-failure
[(v/make-validation-error 42 ::v/string nil)])
(v/validate-string 42)))
(t/is (= (v/make-validation-success "string")
(v/validate-string "string"))))
(t/deftest validate-non-empty-string-test
(t/is (= (v/make-validation-failure
[(v/make-validation-error 42 ::v/non-empty-string nil)])
(v/validate-non-empty-string 42)))
(t/is (= (v/make-validation-failure
[(v/make-validation-error "" ::v/non-empty-string nil)])
(v/validate-non-empty-string "")))
(t/is (= (v/make-validation-success "string")
(v/validate-non-empty-string "string"))))
(t/deftest validate-int-test
(t/is (= (v/make-validation-failure
[(v/make-validation-error "string" ::v/int nil)])
(v/validate-int "string")))
(t/is (= (v/make-validation-success 42)
(v/validate-int 42))))
(t/deftest pos-int-validation-test
(t/is (= (v/make-validation-failure
[(v/make-validation-error "string" ::v/pos-int nil)])
(v/validate-pos-int "string")))
(t/is (= (v/make-validation-failure
[(v/make-validation-error -23 ::v/pos-int nil)])
(v/validate-pos-int -23)))
(t/is (= (v/make-validation-success 42)
(v/validate-pos-int 42))))
(t/deftest validate-boolean-test
(t/is (= (v/make-validation-failure
[(v/make-validation-error "string" ::v/boolean nil)])
(v/validate-boolean "string")))
(t/is (= (v/make-validation-success true)
(v/validate-boolean true)))
(t/is (= (v/make-validation-success false)
(v/validate-boolean false))))
(t/deftest validate-keyword-test
(t/is (= (v/make-validation-failure
[(v/make-validation-error 42 ::v/keyword nil)])
(v/validate-keyword 42)))
(t/is (= (v/make-validation-success :keyword)
(v/validate-keyword :keyword))))
(t/deftest validate-one-of-test
(t/is (= (v/make-validation-failure
[(v/make-validation-error 42 [::v/one-of #{:a :b :c}] nil)])
(v/validate-one-of [:a :b :c] 42)))
(t/is (= (v/make-validation-success :a)
(v/validate-one-of [:a :b :c] :a)))
(t/is (= (v/make-validation-success :c)
(v/validate-one-of [:a :b :c] :c))))
(t/deftest validate-list-test
(t/is (= (v/make-validation-failure
[(v/make-validation-error 42 ::v/list nil)])
(v/validate-list 42)))
(t/testing "vectors are not lists"
(t/is (= (v/make-validation-failure
[(v/make-validation-error [1 2 3] ::v/list nil)])
(v/validate-list [1 2 3]))))
(t/is (= (v/make-validation-success (list 1 2 3))
(v/validate-list (list 1 2 3)))))
(t/deftest validate-vector-test
(t/is (= (v/make-validation-failure
[(v/make-validation-error 42 ::v/vector nil)])
(v/validate-vector 42)))
(t/testing "lists are not vectors"
(t/is (= (v/make-validation-failure
[(v/make-validation-error (list 1 2 3) ::v/vector nil)])
(v/validate-vector (list 1 2 3)))))
(t/is (= (v/make-validation-success [1 2 3])
(v/validate-vector [1 2 3]))))
(t/deftest validate-map-test
(t/is (= (v/make-validation-failure
[(v/make-validation-error 42 ::v/map nil)])
(v/validate-map 42)))
(t/is (= (v/make-validation-success {:a "b"})
(v/validate-map {:a "b"}))))
(t/deftest validate-set-test
(t/is (= (v/make-validation-failure
[(v/make-validation-error 42 ::v/set nil)])
(v/validate-set 42)))
(t/is (= (v/make-validation-success #{:a :b :c})
(v/validate-set #{:a :b :c}))))
(t/deftest validate-sequential-test
(t/is (= (v/make-validation-failure
[(v/make-validation-error 42 ::v/sequential nil)])
(v/validate-sequential 42)))
(t/is (= (v/make-validation-success (list 1 2 3))
(v/validate-sequential (list 1 2 3))))
(t/is (= (v/make-validation-success [1 2 3])
(v/validate-sequential [1 2 3]))))
(t/deftest optional-test
(let [validate-optional-string (v/optional v/validate-string)]
(t/is (= (v/make-validation-success "string")
(validate-optional-string "string")))
(t/is (= (v/make-validation-success nil)
(validate-optional-string nil)))
(t/is (= (v/make-validation-failure
[(v/make-validation-error 42 [::v/optional ::v/string] nil)])
(validate-optional-string 42)))))
(define-record-type Node {:rtd-record? true}
make-node node?
[label node-label
neighbors node-neighbors])
(defn- validate-node
[[label neighbors]]
(v/validation make-node
(v/validate-non-empty-string label :label)
(v/sequence-of validate-node neighbors :neighbors)))
(t/deftest sequence-of-test
(t/testing "an empty collection is always a valid `sequence-of`"
(t/is (= (v/make-validation-success [])
(v/sequence-of v/validate-non-empty-string []))))
(t/is (= (v/make-validation-success ["a" "b" "c"])
(v/sequence-of v/validate-non-empty-string ["a" "b" "c"])))
(t/is (= (v/make-validation-failure
[(v/make-validation-error nil ::v/non-empty-string [::v/seq 0])
(v/make-validation-error 32 ::v/non-empty-string [::v/seq 2])])
(v/sequence-of v/validate-non-empty-string [nil "b" 32])))
(t/testing "labels are used correctly"
(t/is (= (v/make-validation-failure
[(v/make-validation-error nil ::v/non-empty-string [::some-name 0])
(v/make-validation-error 32 ::v/non-empty-string [::some-name 2])])
(v/sequence-of v/validate-non-empty-string [nil "b" 32] ::some-name))))
(t/testing "sequential validations can be nested"
(t/testing "one level deep"
(t/is (= (v/make-validation-success
(make-node "a" []))
(validate-node ["a" []])))
(t/is (= (v/make-validation-success
(make-node "a" [(make-node "b" []) (make-node "c" [])]))
(validate-node ["a" [["b" []] ["c" []]]])))
(t/is (= (v/make-validation-failure
[(v/make-validation-error "" ::v/non-empty-string [[:neighbors 1] :label])])
(validate-node ["a" [["b" []] ["" []]]]))))
(t/testing "multiple levels deep"
(t/is (= (v/make-validation-success
(make-node "a" [(make-node "b" [(make-node "c" [])])
(make-node "d" [(make-node "e" [(make-node "f" [])])])]))
(validate-node ["a" [["b" [["c" []]]]
["d" [["e" [["f" []]]]]]]])))
(t/is (= (v/make-validation-failure
[(v/make-validation-error :e ::v/non-empty-string [[:neighbors 1] [[:neighbors 0] :label]])
(v/make-validation-error :f ::v/non-empty-string [[:neighbors 1] [[:neighbors 0] [[:neighbors 0] :label]]])])
(validate-node ["a" [["b" [["c" []]]]
["d" [[:e [[:f []]]]]]]]))))))
(t/deftest validate-choice-test
(let [validate-string-or-int (fn [candidate & [label]]
(v/validate-choice [v/validate-string
v/validate-int]
candidate
label))]
(t/testing "an empty choice can never have a valid result"
(t/is (= (v/make-validation-failure
[(v/make-validation-error 42 [::v/choice ::v/no-validators] :label)])
(v/validate-choice [] 42 :label))))
(t/testing "exactly one success leads to success"
(t/is (= (v/make-validation-success "string")
(validate-string-or-int "string")))
(t/is (= (v/make-validation-success 42)
(validate-string-or-int 42))))
(t/testing "every error is returned"
(t/is (= (v/make-validation-failure
[(v/make-validation-error :key ::v/string nil)
(v/make-validation-error :key ::v/int nil)])
(validate-string-or-int :key)))))
(t/testing "more than one success is a failure, too"
(let [validate-even
(fn [candidate & [label]]
(v/make-validator candidate even? ::even label))
validate-either-even-or-positive-number
(fn [candidate & [label]]
(v/validate-choice [v/validate-pos-int
validate-even]
candidate
label))]
(t/is (= (v/make-validation-failure
[(v/make-validation-error 2 [::v/choice ::v/more-than-one-success] nil)])
(validate-either-even-or-positive-number 2)))
(t/testing "more than once success is a failure combined with other failures"
(let [v (fn [candidate & [label]]
(v/validate-choice [v/validate-pos-int
validate-even
v/validate-boolean
v/validate-keyword]
candidate
label))]
(t/is (= (v/make-validation-failure
[(v/make-validation-error 2 [::v/choice ::v/more-than-one-success] nil)
(v/make-validation-error 2 ::v/boolean nil)
(v/make-validation-error 2 ::v/keyword nil)])
(v 2))))))))
(t/deftest validate-all-test
(let [v (fn [c]
(v/validate-all [v/validate-non-empty-string
(fn [candidate & [label]]
(if (= candidate "clojure")
(v/make-validation-success candidate)
(v/make-validation-failure [(v/make-validation-error candidate ::not-clojure label)])))]
c
:non-empty-and-clojure))]
(t/testing "validating with an empty seq of validators is always successful"
(t/is (= (v/make-validation-failure
[(v/make-validation-error 42 [::v/all ::v/no-validators] :label)])
(v/validate-all [] 42 :label))))
(t/is (= (v/make-validation-failure
[(v/make-validation-error "" ::v/non-empty-string :non-empty-and-clojure)
(v/make-validation-error "" ::not-clojure :non-empty-and-clojure)])
(v "")))
(t/is (= (v/make-validation-failure
[(v/make-validation-error "clj" ::not-clojure :non-empty-and-clojure)])
(v "clj")))
(t/is (= (v/make-validation-success "clojure") (v "clojure")))))
(t/deftest sequence-test
(t/testing "the empty sequence"
(t/is (= (v/make-validation-success [])
(v/sequence []))))
(t/testing "only successes"
(t/is (= (v/make-validation-success ['a 'b])
(v/sequence [(v/make-validation-success 'a)
(v/make-validation-success 'b)]))))
(t/testing "mixed success and failure"
(t/is (= (v/make-validation-failure
[(v/make-validation-error 'a :msg :label)])
(v/sequence [(v/make-validation-failure [(v/make-validation-error 'a :msg :label)])
(v/make-validation-success 'b)]))))
(t/testing "only failure"
(t/is (= (v/make-validation-failure
[(v/make-validation-error 'a :msg :label)
(v/make-validation-error 'b :msg :label)
(v/make-validation-error 'c :msg :label)])
(v/sequence [(v/make-validation-failure [(v/make-validation-error 'a :msg :label)])
(v/make-validation-failure [(v/make-validation-error 'b :msg :label)
(v/make-validation-error 'c :msg :label)])])))))
|
|
636844afedb502483a5b25818aa0352abd96ff2fd168b9a61b99ef2b17602272 | erlang-ls/erlang_ls | els_definition_SUITE.erl | %%==============================================================================
%% Unit Tests for Code Navigation
%%==============================================================================
-module(els_definition_SUITE).
%% CT Callbacks
-export([
all/0,
init_per_suite/1,
end_per_suite/1,
init_per_testcase/2,
end_per_testcase/2,
suite/0
]).
%% Test cases
-export([
application_local/1,
application_remote/1,
atom/1,
behaviour/1,
definition_after_closing/1,
duplicate_definition/1,
export_entry/1,
fun_local/1,
fun_remote/1,
import_entry/1,
module_import_entry/1,
include/1,
include_lib/1,
macro/1,
macro_lowercase/1,
macro_included/1,
macro_with_args/1,
macro_with_args_included/1,
macro_with_implicit_args/1,
multiple_atom_instances_same_mod/1,
multiple_atom_instances_diff_mod/1,
parse_transform/1,
record_access/1,
record_access_included/1,
record_access_macro_name/1,
record_expr/1,
record_expr_included/1,
record_expr_macro_name/1,
record_field/1,
record_field_included/1,
record_type_macro_name/1,
testcase/1,
type_application_remote/1,
type_application_undefined/1,
type_application_user/1,
type_export_entry/1,
variable/1,
opaque_application_remote/1,
opaque_application_user/1,
parse_incomplete/1
]).
%%==============================================================================
%% Includes
%%==============================================================================
-include_lib("common_test/include/ct.hrl").
-include_lib("stdlib/include/assert.hrl").
%%==============================================================================
%% Types
%%==============================================================================
-type config() :: [{atom(), any()}].
%%==============================================================================
%% CT Callbacks
%%==============================================================================
-spec all() -> [atom()].
all() ->
els_test_utils:all(?MODULE).
-spec init_per_suite(config()) -> config().
init_per_suite(Config) ->
els_test_utils:init_per_suite(Config).
-spec end_per_suite(config()) -> ok.
end_per_suite(Config) ->
els_test_utils:end_per_suite(Config).
-spec init_per_testcase(atom(), config()) -> config().
init_per_testcase(TestCase, Config) ->
els_test_utils:init_per_testcase(TestCase, Config).
-spec end_per_testcase(atom(), config()) -> ok.
end_per_testcase(TestCase, Config) ->
els_test_utils:end_per_testcase(TestCase, Config).
-spec suite() -> [tuple()].
suite() ->
[{timetrap, {seconds, 30}}].
%%==============================================================================
%% Testcases
%%==============================================================================
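%% Note: every test case below follows the same pattern -- it asks
%% els_client:definition/3 for the definition at a {Line, Column} position in
%% one of the fixture modules and compares the returned URI and range against
%% the expected definition site.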
-spec application_local(config()) -> ok.
application_local(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 22, 5),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(Uri, DefUri),
?assertEqual(
els_protocol:range(#{from => {25, 1}, to => {25, 11}}),
Range
),
ok.
-spec application_remote(config()) -> ok.
application_remote(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 32, 13),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(?config(code_navigation_extra_uri, Config), DefUri),
?assertEqual(
els_protocol:range(#{from => {5, 1}, to => {5, 3}}),
Range
),
ok.
-spec atom(config()) -> ok.
atom(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def0 = els_client:definition(Uri, 84, 20),
Def1 = els_client:definition(Uri, 85, 20),
Def2 = els_client:definition(Uri, 86, 20),
Def3 = els_client:definition(Uri, 85, 27),
#{result := [#{range := Range0, uri := DefUri0}]} = Def0,
#{result := [#{range := Range1, uri := DefUri1}, #{range := Range12, uri := DefUri12}]} =
Def1,
#{result := [#{range := Range2, uri := DefUri2}, #{range := Range22, uri := DefUri22}]} =
Def2,
#{result := [#{range := Range3, uri := DefUri3}]} = Def3,
?assertEqual(?config(code_navigation_types_uri, Config), DefUri0),
?assertEqual(
els_protocol:range(#{from => {1, 9}, to => {1, 30}}),
Range0
),
?assertEqual(?config(code_navigation_extra_uri, Config), DefUri12),
?assertEqual(
els_protocol:range(#{from => {1, 9}, to => {1, 30}}),
Range12
),
?assertEqual(Uri, DefUri1),
?assertEqual(
els_protocol:range(#{from => {132, 1}, to => {132, 22}}),
Range1
),
?assertEqual(?config(code_navigation_extra_uri, Config), DefUri22),
?assertEqual(
els_protocol:range(#{from => {1, 9}, to => {1, 30}}),
Range22
),
?assertEqual(Uri, DefUri2),
?assertEqual(
els_protocol:range(#{from => {132, 1}, to => {132, 22}}),
Range2
),
?assertEqual(?config('Code.Navigation.Elixirish_uri', Config), DefUri3),
?assertEqual(
els_protocol:range(#{from => {1, 9}, to => {1, 36}}),
Range3
),
ok.
-spec behaviour(config()) -> ok.
behaviour(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 3, 16),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(?config(behaviour_a_uri, Config), DefUri),
?assertEqual(
els_protocol:range(#{from => {1, 9}, to => {1, 20}}),
Range
),
ok.
-spec testcase(config()) -> ok.
testcase(Config) ->
Uri = ?config(sample_SUITE_uri, Config),
Def = els_client:definition(Uri, 35, 6),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(Uri, DefUri),
?assertEqual(
els_protocol:range(#{from => {58, 1}, to => {58, 4}}),
Range
),
ok.
-spec multiple_atom_instances_same_mod(config()) -> ok.
multiple_atom_instances_same_mod(Config) ->
Uri = ?config(code_navigation_uri, Config),
Defs = els_client:definition(Uri, 130, 36),
#{result := Results} = Defs,
?assertEqual(3, length(Results)),
ExpectedRanges = [
els_protocol:range(#{from => {1, 9}, to => {1, 24}}),
els_protocol:range(#{from => {126, 1}, to => {126, 16}}),
els_protocol:range(#{from => {128, 1}, to => {128, 16}})
],
lists:foreach(
fun(Def) ->
#{range := Range, uri := DefUri} = Def,
?assertEqual(Uri, DefUri),
?assert(lists:member(Range, ExpectedRanges))
end,
Results
),
ok.
-spec multiple_atom_instances_diff_mod(config()) -> ok.
multiple_atom_instances_diff_mod(Config) ->
Uri = ?config(code_navigation_uri, Config),
Defs = els_client:definition(Uri, 134, 35),
#{result := Results} = Defs,
?assertEqual(2, length(Results)),
RangeDef1 = els_protocol:range(#{from => {132, 1}, to => {132, 22}}),
RangeDef2 = els_protocol:range(#{from => {1, 9}, to => {1, 30}}),
Uri2 = ?config(code_navigation_extra_uri, Config),
?assertMatch(
[
#{
range := RangeDef1,
uri := Uri
},
#{
range := RangeDef2,
uri := Uri2
}
],
Results
),
ok.
%% Issue #191: Definition not found after document is closed
-spec definition_after_closing(config()) -> ok.
definition_after_closing(Config) ->
Uri = ?config(code_navigation_uri, Config),
ExtraUri = ?config(code_navigation_extra_uri, Config),
Def = els_client:definition(Uri, 32, 13),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(ExtraUri, DefUri),
?assertEqual(
els_protocol:range(#{from => {5, 1}, to => {5, 3}}),
Range
),
%% Close file, get definition
ok = els_client:did_close(ExtraUri),
Def1 = els_client:definition(Uri, 32, 13),
#{result := [#{range := Range, uri := DefUri}]} = Def1,
ok.
-spec duplicate_definition(config()) -> ok.
duplicate_definition(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 57, 5),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(Uri, DefUri),
?assertEqual(
els_protocol:range(#{from => {60, 1}, to => {60, 11}}),
Range
),
ok.
-spec export_entry(config()) -> ok.
export_entry(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 8, 15),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(Uri, DefUri),
?assertEqual(
els_protocol:range(#{from => {28, 1}, to => {28, 11}}),
Range
),
ok.
-spec fun_local(config()) -> ok.
fun_local(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 51, 16),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(Uri, DefUri),
?assertEqual(
els_protocol:range(#{from => {25, 1}, to => {25, 11}}),
Range
),
ok.
-spec fun_remote(config()) -> ok.
fun_remote(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 52, 14),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(?config(code_navigation_extra_uri, Config), DefUri),
?assertEqual(
els_protocol:range(#{from => {5, 1}, to => {5, 3}}),
Range
),
ok.
-spec import_entry(config()) -> ok.
import_entry(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 10, 34),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(?config(code_navigation_extra_uri, Config), DefUri),
?assertEqual(
els_protocol:range(#{from => {5, 1}, to => {5, 3}}),
Range
),
ok.
-spec module_import_entry(config()) -> ok.
module_import_entry(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 90, 3),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(?config(code_navigation_extra_uri, Config), DefUri),
?assertEqual(
els_protocol:range(#{from => {5, 1}, to => {5, 3}}),
Range
),
ok.
-spec include(config()) -> ok.
include(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 12, 20),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(?config(code_navigation_h_uri, Config), DefUri),
?assertEqual(
els_protocol:range(#{from => {1, 1}, to => {1, 1}}),
Range
),
ok.
-spec include_lib(config()) -> ok.
include_lib(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 13, 22),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(?config(code_navigation_h_uri, Config), DefUri),
?assertEqual(
els_protocol:range(#{from => {1, 1}, to => {1, 1}}),
Range
),
ok.
-spec macro(config()) -> ok.
macro(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 26, 5),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(Uri, DefUri),
?assertEqual(
els_protocol:range(#{from => {18, 9}, to => {18, 16}}),
Range
),
ok.
-spec macro_lowercase(config()) -> ok.
macro_lowercase(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 48, 3),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(Uri, DefUri),
?assertEqual(
els_protocol:range(#{from => {45, 9}, to => {45, 16}}),
Range
),
ok.
-spec macro_included(config()) -> ok.
macro_included(Config) ->
Uri = ?config(code_navigation_uri, Config),
UriHeader = ?config(code_navigation_h_uri, Config),
#{result := [#{range := Range1, uri := DefUri1}]} =
els_client:definition(Uri, 53, 19),
?assertEqual(UriHeader, DefUri1),
?assertEqual(
els_protocol:range(#{from => {3, 9}, to => {3, 25}}),
Range1
),
#{result := [#{range := RangeQuoted, uri := DefUri2}]} =
els_client:definition(Uri, 52, 75),
?assertEqual(UriHeader, DefUri2),
?assertEqual(
els_protocol:range(#{from => {7, 9}, to => {7, 27}}),
RangeQuoted
),
ok.
-spec macro_with_args(config()) -> ok.
macro_with_args(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 40, 9),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(Uri, DefUri),
?assertEqual(
els_protocol:range(#{from => {19, 9}, to => {19, 16}}),
Range
),
ok.
-spec macro_with_args_included(config()) -> ok.
macro_with_args_included(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 43, 9),
#{result := [#{uri := DefUri}]} = Def,
?assertEqual(
<<"assert.hrl">>,
filename:basename(els_uri:path(DefUri))
),
    %% Do not assert on line number to avoid binding to a specific OTP version
ok.
-spec macro_with_implicit_args(config()) -> ok.
macro_with_implicit_args(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 124, 5),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(Uri, DefUri),
?assertEqual(
els_protocol:range(#{from => {118, 9}, to => {118, 16}}),
Range
),
ok.
-spec parse_transform(config()) -> ok.
parse_transform(Config) ->
Uri = ?config(diagnostics_parse_transform_usage_uri, Config),
Def = els_client:definition(Uri, 5, 45),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(?config(diagnostics_parse_transform_uri, Config), DefUri),
?assertEqual(
els_protocol:range(#{from => {1, 9}, to => {1, 36}}),
Range
),
ok.
-spec record_access(config()) -> ok.
record_access(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 34, 13),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(Uri, DefUri),
?assertEqual(
els_protocol:range(#{from => {16, 9}, to => {16, 17}}),
Range
),
ok.
-spec record_access_included(config()) -> ok.
record_access_included(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 52, 43),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(?config(code_navigation_h_uri, Config), DefUri),
?assertEqual(
els_protocol:range(#{from => {1, 9}, to => {1, 26}}),
Range
),
ok.
-spec record_access_macro_name(config()) -> ok.
record_access_macro_name(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 116, 33),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(Uri, DefUri),
?assertEqual(
els_protocol:range(#{from => {111, 9}, to => {111, 16}}),
Range
),
ok.
%% TODO: Additional constructors for POI
%% TODO: Navigation should return POI, not range
-spec record_expr(config()) -> ok.
record_expr(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 33, 11),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(Uri, DefUri),
?assertEqual(
els_protocol:range(#{from => {16, 9}, to => {16, 17}}),
Range
),
ok.
-spec record_expr_included(config()) -> ok.
record_expr_included(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 53, 30),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(?config(code_navigation_h_uri, Config), DefUri),
?assertEqual(
els_protocol:range(#{from => {1, 9}, to => {1, 26}}),
Range
),
ok.
-spec record_expr_macro_name(config()) -> ok.
record_expr_macro_name(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 115, 11),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(Uri, DefUri),
?assertEqual(
els_protocol:range(#{from => {111, 9}, to => {111, 16}}),
Range
),
ok.
-spec record_field(config()) -> ok.
record_field(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 33, 20),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(Uri, DefUri),
?assertEqual(
els_protocol:range(#{from => {16, 20}, to => {16, 27}}),
Range
),
ok.
-spec record_field_included(config()) -> ok.
record_field_included(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 53, 45),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(?config(code_navigation_h_uri, Config), DefUri),
?assertEqual(
els_protocol:range(#{from => {1, 29}, to => {1, 45}}),
Range
),
ok.
-spec record_type_macro_name(config()) -> ok.
record_type_macro_name(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 113, 28),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(Uri, DefUri),
?assertEqual(
els_protocol:range(#{from => {111, 9}, to => {111, 16}}),
Range
),
ok.
-spec type_application_remote(config()) -> ok.
type_application_remote(Config) ->
ExtraUri = ?config(code_navigation_extra_uri, Config),
TypesUri = ?config(code_navigation_types_uri, Config),
Def = els_client:definition(ExtraUri, 11, 38),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(TypesUri, DefUri),
?assertEqual(
els_protocol:range(#{from => {3, 1}, to => {3, 26}}),
Range
),
ok.
-spec type_application_undefined(config()) -> ok.
type_application_undefined(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 55, 42),
#{result := Result} = Def,
Expected = [
#{
range => #{
'end' => #{character => 49, line => 54},
start => #{character => 33, line => 54}
},
uri => Uri
}
],
?assertEqual(Expected, Result),
ok.
-spec type_application_user(config()) -> ok.
type_application_user(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 55, 25),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(Uri, DefUri),
?assertEqual(
els_protocol:range(#{from => {37, 1}, to => {37, 25}}),
Range
),
ok.
-spec type_export_entry(config()) -> ok.
type_export_entry(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 9, 17),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(Uri, DefUri),
?assertEqual(
els_protocol:range(#{from => {37, 1}, to => {37, 25}}),
Range
),
ok.
-spec variable(config()) -> ok.
variable(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def0 = els_client:definition(Uri, 104, 9),
Def1 = els_client:definition(Uri, 105, 10),
Def2 = els_client:definition(Uri, 107, 10),
Def3 = els_client:definition(Uri, 108, 10),
Def4 = els_client:definition(Uri, 19, 36),
#{result := [#{range := Range0, uri := DefUri0}]} = Def0,
#{result := [#{range := Range1, uri := DefUri0}]} = Def1,
#{result := [#{range := Range2, uri := DefUri0}]} = Def2,
#{result := [#{range := Range3, uri := DefUri0}]} = Def3,
#{result := [#{range := Range4, uri := DefUri0}]} = Def4,
?assertEqual(?config(code_navigation_uri, Config), DefUri0),
?assertEqual(
els_protocol:range(#{from => {103, 12}, to => {103, 15}}),
Range0
),
?assertEqual(
els_protocol:range(#{from => {104, 3}, to => {104, 6}}),
Range1
),
?assertEqual(
els_protocol:range(#{from => {106, 12}, to => {106, 15}}),
Range2
),
?assertEqual(
els_protocol:range(#{from => {106, 12}, to => {106, 15}}),
Range3
),
%% Inside macro
?assertEqual(
els_protocol:range(#{from => {19, 17}, to => {19, 18}}),
Range4
),
ok.
-spec opaque_application_remote(config()) -> ok.
opaque_application_remote(Config) ->
ExtraUri = ?config(code_navigation_extra_uri, Config),
TypesUri = ?config(code_navigation_types_uri, Config),
Def = els_client:definition(ExtraUri, 16, 61),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(TypesUri, DefUri),
?assertEqual(
els_protocol:range(#{from => {7, 1}, to => {7, 35}}),
Range
),
ok.
-spec opaque_application_user(config()) -> ok.
opaque_application_user(Config) ->
ExtraUri = ?config(code_navigation_extra_uri, Config),
Def = els_client:definition(ExtraUri, 16, 24),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(ExtraUri, DefUri),
?assertEqual(
els_protocol:range(#{from => {20, 1}, to => {20, 34}}),
Range
),
ok.
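%% Every cursor position probed below sits in code_navigation_broken, a module
%% that does not fully parse; each request should still resolve to the same
%% definition range on line 3, i.e. indexing must survive syntax errors.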
-spec parse_incomplete(config()) -> ok.
parse_incomplete(Config) ->
Uri = ?config(code_navigation_broken_uri, Config),
Range = els_protocol:range(#{from => {3, 1}, to => {3, 11}}),
?assertMatch(
#{result := [#{range := Range, uri := Uri}]},
els_client:definition(Uri, 7, 3)
),
?assertMatch(
#{result := [#{range := Range, uri := Uri}]},
els_client:definition(Uri, 8, 3)
),
?assertMatch(
#{result := [#{range := Range, uri := Uri}]},
els_client:definition(Uri, 9, 8)
),
?assertMatch(
#{result := [#{range := Range, uri := Uri}]},
els_client:definition(Uri, 11, 7)
),
?assertMatch(
#{result := [#{range := Range, uri := Uri}]},
els_client:definition(Uri, 12, 12)
),
?assertMatch(
#{result := [#{range := Range, uri := Uri}]},
els_client:definition(Uri, 17, 3)
),
?assertMatch(
#{result := [#{range := Range, uri := Uri}]},
els_client:definition(Uri, 19, 3)
),
ok.
| null | https://raw.githubusercontent.com/erlang-ls/erlang_ls/4ad07492c2f577da4a1fbd79877036f820d9e2c3/apps/els_lsp/test/els_definition_SUITE.erl | erlang | ==============================================================================
Unit Tests for Code Navigation
==============================================================================
CT Callbacks
Test cases
==============================================================================
Includes
==============================================================================
==============================================================================
Types
==============================================================================
==============================================================================
CT Callbacks
==============================================================================
==============================================================================
Testcases
==============================================================================
Close file, get definition
Inside macro | -module(els_definition_SUITE).
-export([
all/0,
init_per_suite/1,
end_per_suite/1,
init_per_testcase/2,
end_per_testcase/2,
suite/0
]).
-export([
application_local/1,
application_remote/1,
atom/1,
behaviour/1,
definition_after_closing/1,
duplicate_definition/1,
export_entry/1,
fun_local/1,
fun_remote/1,
import_entry/1,
module_import_entry/1,
include/1,
include_lib/1,
macro/1,
macro_lowercase/1,
macro_included/1,
macro_with_args/1,
macro_with_args_included/1,
macro_with_implicit_args/1,
multiple_atom_instances_same_mod/1,
multiple_atom_instances_diff_mod/1,
parse_transform/1,
record_access/1,
record_access_included/1,
record_access_macro_name/1,
record_expr/1,
record_expr_included/1,
record_expr_macro_name/1,
record_field/1,
record_field_included/1,
record_type_macro_name/1,
testcase/1,
type_application_remote/1,
type_application_undefined/1,
type_application_user/1,
type_export_entry/1,
variable/1,
opaque_application_remote/1,
opaque_application_user/1,
parse_incomplete/1
]).
-include_lib("common_test/include/ct.hrl").
-include_lib("stdlib/include/assert.hrl").
-type config() :: [{atom(), any()}].
-spec all() -> [atom()].
all() ->
els_test_utils:all(?MODULE).
-spec init_per_suite(config()) -> config().
init_per_suite(Config) ->
els_test_utils:init_per_suite(Config).
-spec end_per_suite(config()) -> ok.
end_per_suite(Config) ->
els_test_utils:end_per_suite(Config).
-spec init_per_testcase(atom(), config()) -> config().
init_per_testcase(TestCase, Config) ->
els_test_utils:init_per_testcase(TestCase, Config).
-spec end_per_testcase(atom(), config()) -> ok.
end_per_testcase(TestCase, Config) ->
els_test_utils:end_per_testcase(TestCase, Config).
-spec suite() -> [tuple()].
suite() ->
[{timetrap, {seconds, 30}}].
-spec application_local(config()) -> ok.
application_local(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 22, 5),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(Uri, DefUri),
?assertEqual(
els_protocol:range(#{from => {25, 1}, to => {25, 11}}),
Range
),
ok.
-spec application_remote(config()) -> ok.
application_remote(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 32, 13),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(?config(code_navigation_extra_uri, Config), DefUri),
?assertEqual(
els_protocol:range(#{from => {5, 1}, to => {5, 3}}),
Range
),
ok.
-spec atom(config()) -> ok.
atom(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def0 = els_client:definition(Uri, 84, 20),
Def1 = els_client:definition(Uri, 85, 20),
Def2 = els_client:definition(Uri, 86, 20),
Def3 = els_client:definition(Uri, 85, 27),
#{result := [#{range := Range0, uri := DefUri0}]} = Def0,
#{result := [#{range := Range1, uri := DefUri1}, #{range := Range12, uri := DefUri12}]} =
Def1,
#{result := [#{range := Range2, uri := DefUri2}, #{range := Range22, uri := DefUri22}]} =
Def2,
#{result := [#{range := Range3, uri := DefUri3}]} = Def3,
?assertEqual(?config(code_navigation_types_uri, Config), DefUri0),
?assertEqual(
els_protocol:range(#{from => {1, 9}, to => {1, 30}}),
Range0
),
?assertEqual(?config(code_navigation_extra_uri, Config), DefUri12),
?assertEqual(
els_protocol:range(#{from => {1, 9}, to => {1, 30}}),
Range12
),
?assertEqual(Uri, DefUri1),
?assertEqual(
els_protocol:range(#{from => {132, 1}, to => {132, 22}}),
Range1
),
?assertEqual(?config(code_navigation_extra_uri, Config), DefUri22),
?assertEqual(
els_protocol:range(#{from => {1, 9}, to => {1, 30}}),
Range22
),
?assertEqual(Uri, DefUri2),
?assertEqual(
els_protocol:range(#{from => {132, 1}, to => {132, 22}}),
Range2
),
?assertEqual(?config('Code.Navigation.Elixirish_uri', Config), DefUri3),
?assertEqual(
els_protocol:range(#{from => {1, 9}, to => {1, 36}}),
Range3
),
ok.
-spec behaviour(config()) -> ok.
behaviour(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 3, 16),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(?config(behaviour_a_uri, Config), DefUri),
?assertEqual(
els_protocol:range(#{from => {1, 9}, to => {1, 20}}),
Range
),
ok.
-spec testcase(config()) -> ok.
testcase(Config) ->
Uri = ?config(sample_SUITE_uri, Config),
Def = els_client:definition(Uri, 35, 6),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(Uri, DefUri),
?assertEqual(
els_protocol:range(#{from => {58, 1}, to => {58, 4}}),
Range
),
ok.
-spec multiple_atom_instances_same_mod(config()) -> ok.
multiple_atom_instances_same_mod(Config) ->
Uri = ?config(code_navigation_uri, Config),
Defs = els_client:definition(Uri, 130, 36),
#{result := Results} = Defs,
?assertEqual(3, length(Results)),
ExpectedRanges = [
els_protocol:range(#{from => {1, 9}, to => {1, 24}}),
els_protocol:range(#{from => {126, 1}, to => {126, 16}}),
els_protocol:range(#{from => {128, 1}, to => {128, 16}})
],
lists:foreach(
fun(Def) ->
#{range := Range, uri := DefUri} = Def,
?assertEqual(Uri, DefUri),
?assert(lists:member(Range, ExpectedRanges))
end,
Results
),
ok.
-spec multiple_atom_instances_diff_mod(config()) -> ok.
multiple_atom_instances_diff_mod(Config) ->
Uri = ?config(code_navigation_uri, Config),
Defs = els_client:definition(Uri, 134, 35),
#{result := Results} = Defs,
?assertEqual(2, length(Results)),
RangeDef1 = els_protocol:range(#{from => {132, 1}, to => {132, 22}}),
RangeDef2 = els_protocol:range(#{from => {1, 9}, to => {1, 30}}),
Uri2 = ?config(code_navigation_extra_uri, Config),
?assertMatch(
[
#{
range := RangeDef1,
uri := Uri
},
#{
range := RangeDef2,
uri := Uri2
}
],
Results
),
ok.
%% Issue #191: Definition not found after document is closed
-spec definition_after_closing(config()) -> ok.
definition_after_closing(Config) ->
Uri = ?config(code_navigation_uri, Config),
ExtraUri = ?config(code_navigation_extra_uri, Config),
Def = els_client:definition(Uri, 32, 13),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(ExtraUri, DefUri),
?assertEqual(
els_protocol:range(#{from => {5, 1}, to => {5, 3}}),
Range
),
ok = els_client:did_close(ExtraUri),
Def1 = els_client:definition(Uri, 32, 13),
#{result := [#{range := Range, uri := DefUri}]} = Def1,
ok.
-spec duplicate_definition(config()) -> ok.
duplicate_definition(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 57, 5),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(Uri, DefUri),
?assertEqual(
els_protocol:range(#{from => {60, 1}, to => {60, 11}}),
Range
),
ok.
-spec export_entry(config()) -> ok.
export_entry(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 8, 15),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(Uri, DefUri),
?assertEqual(
els_protocol:range(#{from => {28, 1}, to => {28, 11}}),
Range
),
ok.
-spec fun_local(config()) -> ok.
fun_local(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 51, 16),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(Uri, DefUri),
?assertEqual(
els_protocol:range(#{from => {25, 1}, to => {25, 11}}),
Range
),
ok.
-spec fun_remote(config()) -> ok.
fun_remote(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 52, 14),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(?config(code_navigation_extra_uri, Config), DefUri),
?assertEqual(
els_protocol:range(#{from => {5, 1}, to => {5, 3}}),
Range
),
ok.
-spec import_entry(config()) -> ok.
import_entry(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 10, 34),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(?config(code_navigation_extra_uri, Config), DefUri),
?assertEqual(
els_protocol:range(#{from => {5, 1}, to => {5, 3}}),
Range
),
ok.
-spec module_import_entry(config()) -> ok.
module_import_entry(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 90, 3),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(?config(code_navigation_extra_uri, Config), DefUri),
?assertEqual(
els_protocol:range(#{from => {5, 1}, to => {5, 3}}),
Range
),
ok.
-spec include(config()) -> ok.
include(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 12, 20),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(?config(code_navigation_h_uri, Config), DefUri),
?assertEqual(
els_protocol:range(#{from => {1, 1}, to => {1, 1}}),
Range
),
ok.
-spec include_lib(config()) -> ok.
include_lib(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 13, 22),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(?config(code_navigation_h_uri, Config), DefUri),
?assertEqual(
els_protocol:range(#{from => {1, 1}, to => {1, 1}}),
Range
),
ok.
-spec macro(config()) -> ok.
macro(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 26, 5),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(Uri, DefUri),
?assertEqual(
els_protocol:range(#{from => {18, 9}, to => {18, 16}}),
Range
),
ok.
-spec macro_lowercase(config()) -> ok.
macro_lowercase(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 48, 3),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(Uri, DefUri),
?assertEqual(
els_protocol:range(#{from => {45, 9}, to => {45, 16}}),
Range
),
ok.
-spec macro_included(config()) -> ok.
macro_included(Config) ->
Uri = ?config(code_navigation_uri, Config),
UriHeader = ?config(code_navigation_h_uri, Config),
#{result := [#{range := Range1, uri := DefUri1}]} =
els_client:definition(Uri, 53, 19),
?assertEqual(UriHeader, DefUri1),
?assertEqual(
els_protocol:range(#{from => {3, 9}, to => {3, 25}}),
Range1
),
#{result := [#{range := RangeQuoted, uri := DefUri2}]} =
els_client:definition(Uri, 52, 75),
?assertEqual(UriHeader, DefUri2),
?assertEqual(
els_protocol:range(#{from => {7, 9}, to => {7, 27}}),
RangeQuoted
),
ok.
-spec macro_with_args(config()) -> ok.
macro_with_args(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 40, 9),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(Uri, DefUri),
?assertEqual(
els_protocol:range(#{from => {19, 9}, to => {19, 16}}),
Range
),
ok.
-spec macro_with_args_included(config()) -> ok.
macro_with_args_included(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 43, 9),
#{result := [#{uri := DefUri}]} = Def,
?assertEqual(
<<"assert.hrl">>,
filename:basename(els_uri:path(DefUri))
),
    %% Do not assert on line number to avoid binding to a specific OTP version
ok.
-spec macro_with_implicit_args(config()) -> ok.
macro_with_implicit_args(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 124, 5),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(Uri, DefUri),
?assertEqual(
els_protocol:range(#{from => {118, 9}, to => {118, 16}}),
Range
),
ok.
-spec parse_transform(config()) -> ok.
parse_transform(Config) ->
Uri = ?config(diagnostics_parse_transform_usage_uri, Config),
Def = els_client:definition(Uri, 5, 45),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(?config(diagnostics_parse_transform_uri, Config), DefUri),
?assertEqual(
els_protocol:range(#{from => {1, 9}, to => {1, 36}}),
Range
),
ok.
-spec record_access(config()) -> ok.
record_access(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 34, 13),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(Uri, DefUri),
?assertEqual(
els_protocol:range(#{from => {16, 9}, to => {16, 17}}),
Range
),
ok.
-spec record_access_included(config()) -> ok.
record_access_included(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 52, 43),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(?config(code_navigation_h_uri, Config), DefUri),
?assertEqual(
els_protocol:range(#{from => {1, 9}, to => {1, 26}}),
Range
),
ok.
-spec record_access_macro_name(config()) -> ok.
record_access_macro_name(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 116, 33),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(Uri, DefUri),
?assertEqual(
els_protocol:range(#{from => {111, 9}, to => {111, 16}}),
Range
),
ok.
%% TODO: Additional constructors for POI
%% TODO: Navigation should return POI, not range
-spec record_expr(config()) -> ok.
record_expr(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 33, 11),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(Uri, DefUri),
?assertEqual(
els_protocol:range(#{from => {16, 9}, to => {16, 17}}),
Range
),
ok.
-spec record_expr_included(config()) -> ok.
record_expr_included(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 53, 30),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(?config(code_navigation_h_uri, Config), DefUri),
?assertEqual(
els_protocol:range(#{from => {1, 9}, to => {1, 26}}),
Range
),
ok.
-spec record_expr_macro_name(config()) -> ok.
record_expr_macro_name(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 115, 11),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(Uri, DefUri),
?assertEqual(
els_protocol:range(#{from => {111, 9}, to => {111, 16}}),
Range
),
ok.
-spec record_field(config()) -> ok.
record_field(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 33, 20),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(Uri, DefUri),
?assertEqual(
els_protocol:range(#{from => {16, 20}, to => {16, 27}}),
Range
),
ok.
-spec record_field_included(config()) -> ok.
record_field_included(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 53, 45),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(?config(code_navigation_h_uri, Config), DefUri),
?assertEqual(
els_protocol:range(#{from => {1, 29}, to => {1, 45}}),
Range
),
ok.
-spec record_type_macro_name(config()) -> ok.
record_type_macro_name(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 113, 28),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(Uri, DefUri),
?assertEqual(
els_protocol:range(#{from => {111, 9}, to => {111, 16}}),
Range
),
ok.
-spec type_application_remote(config()) -> ok.
type_application_remote(Config) ->
ExtraUri = ?config(code_navigation_extra_uri, Config),
TypesUri = ?config(code_navigation_types_uri, Config),
Def = els_client:definition(ExtraUri, 11, 38),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(TypesUri, DefUri),
?assertEqual(
els_protocol:range(#{from => {3, 1}, to => {3, 26}}),
Range
),
ok.
-spec type_application_undefined(config()) -> ok.
type_application_undefined(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 55, 42),
#{result := Result} = Def,
Expected = [
#{
range => #{
'end' => #{character => 49, line => 54},
start => #{character => 33, line => 54}
},
uri => Uri
}
],
?assertEqual(Expected, Result),
ok.
-spec type_application_user(config()) -> ok.
type_application_user(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 55, 25),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(Uri, DefUri),
?assertEqual(
els_protocol:range(#{from => {37, 1}, to => {37, 25}}),
Range
),
ok.
-spec type_export_entry(config()) -> ok.
type_export_entry(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def = els_client:definition(Uri, 9, 17),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(Uri, DefUri),
?assertEqual(
els_protocol:range(#{from => {37, 1}, to => {37, 25}}),
Range
),
ok.
-spec variable(config()) -> ok.
variable(Config) ->
Uri = ?config(code_navigation_uri, Config),
Def0 = els_client:definition(Uri, 104, 9),
Def1 = els_client:definition(Uri, 105, 10),
Def2 = els_client:definition(Uri, 107, 10),
Def3 = els_client:definition(Uri, 108, 10),
Def4 = els_client:definition(Uri, 19, 36),
#{result := [#{range := Range0, uri := DefUri0}]} = Def0,
#{result := [#{range := Range1, uri := DefUri0}]} = Def1,
#{result := [#{range := Range2, uri := DefUri0}]} = Def2,
#{result := [#{range := Range3, uri := DefUri0}]} = Def3,
#{result := [#{range := Range4, uri := DefUri0}]} = Def4,
?assertEqual(?config(code_navigation_uri, Config), DefUri0),
?assertEqual(
els_protocol:range(#{from => {103, 12}, to => {103, 15}}),
Range0
),
?assertEqual(
els_protocol:range(#{from => {104, 3}, to => {104, 6}}),
Range1
),
?assertEqual(
els_protocol:range(#{from => {106, 12}, to => {106, 15}}),
Range2
),
?assertEqual(
els_protocol:range(#{from => {106, 12}, to => {106, 15}}),
Range3
),
?assertEqual(
els_protocol:range(#{from => {19, 17}, to => {19, 18}}),
Range4
),
ok.
-spec opaque_application_remote(config()) -> ok.
opaque_application_remote(Config) ->
ExtraUri = ?config(code_navigation_extra_uri, Config),
TypesUri = ?config(code_navigation_types_uri, Config),
Def = els_client:definition(ExtraUri, 16, 61),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(TypesUri, DefUri),
?assertEqual(
els_protocol:range(#{from => {7, 1}, to => {7, 35}}),
Range
),
ok.
-spec opaque_application_user(config()) -> ok.
opaque_application_user(Config) ->
ExtraUri = ?config(code_navigation_extra_uri, Config),
Def = els_client:definition(ExtraUri, 16, 24),
#{result := [#{range := Range, uri := DefUri}]} = Def,
?assertEqual(ExtraUri, DefUri),
?assertEqual(
els_protocol:range(#{from => {20, 1}, to => {20, 34}}),
Range
),
ok.
-spec parse_incomplete(config()) -> ok.
parse_incomplete(Config) ->
Uri = ?config(code_navigation_broken_uri, Config),
Range = els_protocol:range(#{from => {3, 1}, to => {3, 11}}),
?assertMatch(
#{result := [#{range := Range, uri := Uri}]},
els_client:definition(Uri, 7, 3)
),
?assertMatch(
#{result := [#{range := Range, uri := Uri}]},
els_client:definition(Uri, 8, 3)
),
?assertMatch(
#{result := [#{range := Range, uri := Uri}]},
els_client:definition(Uri, 9, 8)
),
?assertMatch(
#{result := [#{range := Range, uri := Uri}]},
els_client:definition(Uri, 11, 7)
),
?assertMatch(
#{result := [#{range := Range, uri := Uri}]},
els_client:definition(Uri, 12, 12)
),
?assertMatch(
#{result := [#{range := Range, uri := Uri}]},
els_client:definition(Uri, 17, 3)
),
?assertMatch(
#{result := [#{range := Range, uri := Uri}]},
els_client:definition(Uri, 19, 3)
),
ok.
|
7f0a6a00024ec8c072b207dd64b7b390625add980ae2ac13625e2167f4e0b2f1 | ocaml-multicore/tezos | views.ml | (*****************************************************************************)
(* *)
(* Open Source License *)
(* Copyright (c) 2021 Nomadic Labs < > *)
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
(* to deal in the Software without restriction, including without limitation *)
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
(* and/or sell copies of the Software, and to permit persons to whom the *)
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
(* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR *)
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
(* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *)
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
(* Testing
   -------
   Component:  Michelson
   Invocation: dune exec tezt/tests/main.exe -- --file views.ml
   Subject:    Call smart contract views to catch performance regressions.
*)
(* This contract registers all SOURCE addresses that ever call it. It has views
that return registered callers count and the last caller address respectively. *)
let register_callers_src =
{|
parameter unit;
storage (list address);
code {
CDR ;
SOURCE ;
CONS ;
NIL operation ;
PAIR ;
};
view "calls_count" unit nat { CDR ; SIZE };
view "last_caller" unit (option address) { CDR ; IF_CONS { DIP { DROP } ; SOME } { NONE address } };
|}
(* This script calls views on register_callers contract and verifies whether
its responses are consistent, i.e. if the view calls_count returned 0, then
last caller is None, otherwise it's Some address. *)
let check_caller_src =
{|
parameter address ;
storage (option address) ;
code {
CAR ;
DUP ;
UNIT ;
VIEW "calls_count" nat ;
IF_NONE { UNIT ; FAILWITH } {} ;
DIP {
UNIT ;
VIEW "last_caller" (option address) ;
} ;
PUSH nat 0 ;
/* Check if the caller address is consistent with given calls count. */
IFCMPEQ {
IF_NONE { UNIT ; FAILWITH } { IF_NONE {} { UNIT ; FAILWITH }} ;
NONE address ;
}
{
IF_NONE { UNIT ; FAILWITH } { IF_NONE { UNIT ; FAILWITH } {}} ;
SOME ;
} ;
NIL operation ;
PAIR ;
}
|}
(* Normally "--base-dir" would appear in regression logs. However, since
it is a different dir on every run, we need to mask it in regression
logs so that it doesn't cause false differences. *)
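(* For instance, a spawned command line such as
   ["--base-dir"; "/tmp/tezt-1234"; "rpc"; "get"; "/chains/main/blocks/head"]
   is recorded as ["--base-dir"; "<masked>"; "rpc"; "get"; "/chains/main/blocks/head"].
   The concrete arguments in this example are illustrative only; the masking
   below only rewrites the value following "--base-dir". *)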
let hooks =
let rec mask_temp_dir = function
| [] -> []
| "--base-dir" :: _ :: rest -> "--base-dir" :: "<masked>" :: rest
| arg :: args -> arg :: mask_temp_dir args
in
{
Regression.hooks with
on_spawn =
(fun cmd args -> mask_temp_dir args |> Regression.hooks.on_spawn cmd);
}
let register ~protocols () =
Protocol.register_regression_test
~__FILE__
~title:"Run views"
~tags:["client"; "michelson"]
~output_file:"run_views"
~protocols
@@ fun protocol ->
let* client = Client.init_mockup ~protocol () in
let* register_callers =
Client.originate_contract
~hooks
~burn_cap:Tez.one
~alias:"register_calls"
~amount:Tez.zero
~src:"bootstrap1"
~prg:register_callers_src
~init:"{}"
client
in
let arg = Format.sprintf "\"%s\"" register_callers in
let* check_caller =
Client.originate_contract
~hooks
~burn_cap:Tez.one
~alias:"check_caller"
~amount:Tez.zero
~src:"bootstrap1"
~prg:check_caller_src
~init:"None"
client
in
let* () =
Client.transfer
~hooks
~burn_cap:Tez.one
~amount:Tez.one
~giver:"bootstrap1"
~receiver:check_caller
~arg
client
in
let* () =
Client.transfer
~hooks
~burn_cap:Tez.one
~amount:Tez.one
~giver:"bootstrap1"
~receiver:register_callers
client
in
let* () =
Client.transfer
~hooks
~burn_cap:Tez.one
~amount:Tez.one
~giver:"bootstrap1"
~receiver:check_caller
~arg
client
in
return ()
| null | https://raw.githubusercontent.com/ocaml-multicore/tezos/e4fd21a1cb02d194b3162ab42d512b7c985ee8a9/tezt/tests/views.ml | ocaml | ***************************************************************************
Open Source License
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
the rights to use, copy, modify, merge, publish, distribute, sublicense,
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
***************************************************************************
This contract registers all SOURCE addresses that ever call it. It has views
that return registered callers count and the last caller address respectively.
This script calls views on register_callers contract and verifies whether
its responses are consistent, i.e. if the view calls_count returned 0, then
last caller is None, otherwise it's Some address.
Normally "--base-dir" would appear in regression logs. However, since
it is a different dir on every run, we need to mask it in regression
logs so that it doesn't cause false differences. | Copyright ( c ) 2021 Nomadic Labs < >
to deal in the Software without restriction , including without limitation
and/or sell copies of the Software , and to permit persons to whom the
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
Testing
-------
Component :
Invocation : dune exec / tests / main.exe -- --file views.ml
Subject : Call smart contract views to catch performance regressions .
-------
Component: Michelson
Invocation: dune exec tezt/tests/main.exe -- --file views.ml
Subject: Call smart contract views to catch performance regressions.
*)
let register_callers_src =
{|
parameter unit;
storage (list address);
code {
CDR ;
SOURCE ;
CONS ;
NIL operation ;
PAIR ;
};
view "calls_count" unit nat { CDR ; SIZE };
view "last_caller" unit (option address) { CDR ; IF_CONS { DIP { DROP } ; SOME } { NONE address } };
|}
let check_caller_src =
{|
parameter address ;
storage (option address) ;
code {
CAR ;
DUP ;
UNIT ;
VIEW "calls_count" nat ;
IF_NONE { UNIT ; FAILWITH } {} ;
DIP {
UNIT ;
VIEW "last_caller" (option address) ;
} ;
PUSH nat 0 ;
/* Check if the caller address is consistent with given calls count. */
IFCMPEQ {
IF_NONE { UNIT ; FAILWITH } { IF_NONE {} { UNIT ; FAILWITH }} ;
NONE address ;
}
{
IF_NONE { UNIT ; FAILWITH } { IF_NONE { UNIT ; FAILWITH } {}} ;
SOME ;
} ;
NIL operation ;
PAIR ;
}
|}
let hooks =
let rec mask_temp_dir = function
| [] -> []
| "--base-dir" :: _ :: rest -> "--base-dir" :: "<masked>" :: rest
| arg :: args -> arg :: mask_temp_dir args
in
{
Regression.hooks with
on_spawn =
(fun cmd args -> mask_temp_dir args |> Regression.hooks.on_spawn cmd);
}
let register ~protocols () =
Protocol.register_regression_test
~__FILE__
~title:"Run views"
~tags:["client"; "michelson"]
~output_file:"run_views"
~protocols
@@ fun protocol ->
let* client = Client.init_mockup ~protocol () in
let* register_callers =
Client.originate_contract
~hooks
~burn_cap:Tez.one
~alias:"register_calls"
~amount:Tez.zero
~src:"bootstrap1"
~prg:register_callers_src
~init:"{}"
client
in
let arg = Format.sprintf "\"%s\"" register_callers in
let* check_caller =
Client.originate_contract
~hooks
~burn_cap:Tez.one
~alias:"check_caller"
~amount:Tez.zero
~src:"bootstrap1"
~prg:check_caller_src
~init:"None"
client
in
let* () =
Client.transfer
~hooks
~burn_cap:Tez.one
~amount:Tez.one
~giver:"bootstrap1"
~receiver:check_caller
~arg
client
in
let* () =
Client.transfer
~hooks
~burn_cap:Tez.one
~amount:Tez.one
~giver:"bootstrap1"
~receiver:register_callers
client
in
let* () =
Client.transfer
~hooks
~burn_cap:Tez.one
~amount:Tez.one
~giver:"bootstrap1"
~receiver:check_caller
~arg
client
in
return ()
|
5eec4844b94407be16c84facbae4d850424017104e2e6f55e45a8eac048d6f1a | jaybosamiya/automatic-differentiation | auto_differentiate.ml | open Dual_number
module D = struct
type t = Dual.t
let variable x = Dual.from_floats x 1.
let constant x = Dual.from_floats x 0.
let value f = fun x ->
variable x |> f |> Dual.real
let derivative f = fun x ->
variable x |> f |> Dual.non_real
let val_deriv f =
value f, derivative f
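  (* Illustrative use from outside this module (assuming Dual.Infix supplies
     (+) and ( * ) on dual numbers):
       let f x = D.Operators.(x * x + ~$3.) in
       D.derivative f 2.   (* = 4., since d/dx (x^2 + 3) = 2x *)
     Forward-mode AD: [variable x] seeds the dual part with 1., so the dual
     part of [f (variable x)] carries f'(x). *)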
module Operators = struct
include Dual.Infix
let ( ~$ ) = constant
let ( < ) a b =
Dual.real a < Dual.real b
let ( = ) a b =
Dual.real a = Dual.real b
let ( > ) a b =
Dual.real a > Dual.real b
let ( <= ) a b =
a = b || a < b
let ( >= ) a b =
a = b || a > b
let ( <> ) a b =
Dual.real a <> Dual.real b
let sqrt a =
let arsqrt = sqrt (Dual.real a) in
let anr = Dual.non_real a in
Dual.from_floats arsqrt (0.5 *. anr /. arsqrt)
let ( ** ) a b =
Dual.from_floats
(Dual.real a ** b)
(b *. Dual.non_real a *. (Dual.real a ** (b -. 1.)))
end
end
| null | https://raw.githubusercontent.com/jaybosamiya/automatic-differentiation/b7935851afaaaca317d78f6cf187a94adfaa795f/auto_differentiate.ml | ocaml | open Dual_number
module D = struct
type t = Dual.t
let variable x = Dual.from_floats x 1.
let constant x = Dual.from_floats x 0.
let value f = fun x ->
variable x |> f |> Dual.real
let derivative f = fun x ->
variable x |> f |> Dual.non_real
let val_deriv f =
value f, derivative f
module Operators = struct
include Dual.Infix
let ( ~$ ) = constant
let ( < ) a b =
Dual.real a < Dual.real b
let ( = ) a b =
Dual.real a = Dual.real b
let ( > ) a b =
Dual.real a > Dual.real b
let ( <= ) a b =
a = b || a < b
let ( >= ) a b =
a = b || a > b
let ( <> ) a b =
Dual.real a <> Dual.real b
let sqrt a =
let arsqrt = sqrt (Dual.real a) in
let anr = Dual.non_real a in
Dual.from_floats arsqrt (0.5 *. anr /. arsqrt)
let ( ** ) a b =
Dual.from_floats
(Dual.real a ** b)
(b *. Dual.non_real a *. (Dual.real a ** (b -. 1.)))
end
end
|
|
9964adc7c4b1a9680307152ac063da1b64328dca27a047d288fd71c901e80ca4 | kevinlynx/dhtcrawler | start_dep_app.erl | %%
%% 06.15.2013
%%
-module(start_dep_app).
-export([startcouchdb/0, startmongo/0]).
%% Damn, couchbeam requires jiffy, ibrowse, public_key, sasl, crypto.
startcouchdb() ->
code:add_path("e:/prj/ibrowse/ebin"),
code:add_path("e:/prj/couchbeam/ebin"),
code:add_path("e:/prj/jiffy/ebin"),
Apps = [crypto, public_key, ssl, sasl, inets, jiffy, ibrowse, couchbeam],
[application:start(App) || App <- Apps].
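%% From an Erlang shell: c(start_dep_app), start_dep_app:startcouchdb().
%% Each application:start/1 call returns ok or {error, Reason}, so the result
%% is a list you can scan for startup failures. The code paths above are
%% machine-specific and will need adjusting.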
startmongo() ->
code:add_path("deps/bson/ebin"),
code:add_path("deps/mongodb/ebin"),
code:add_path("deps/kdht/ebin"),
Apps = [crypto, public_key, ssl, inets, bson, mongodb],
[application:start(App) || App <- Apps].
| null | https://raw.githubusercontent.com/kevinlynx/dhtcrawler/b8a486dbebb57f9672a773a6901d59ddae97a278/test/start_dep_app.erl | erlang |
06.15.2013
|
-module(start_dep_app).
-export([startcouchdb/0, startmongo/0]).
%% Damn, couchbeam requires jiffy, ibrowse, public_key, sasl, crypto.
startcouchdb() ->
code:add_path("e:/prj/ibrowse/ebin"),
code:add_path("e:/prj/couchbeam/ebin"),
code:add_path("e:/prj/jiffy/ebin"),
Apps = [crypto, public_key, ssl, sasl, inets, jiffy, ibrowse, couchbeam],
[application:start(App) || App <- Apps].
startmongo() ->
code:add_path("deps/bson/ebin"),
code:add_path("deps/mongodb/ebin"),
code:add_path("deps/kdht/ebin"),
Apps = [crypto, public_key, ssl, inets, bson, mongodb],
[application:start(App) || App <- Apps].
|
d89a0e996115db4caee0ba97cf77259b7586f2979898700452783340cac73cad | DeepSec-prover/deepsec | generic_equivalence.mli | (**************************************************************************)
(* *)
(* DeepSec *)
(* *)
(* , project PESTO, *)
(* , project PESTO, *)
(* , project PESTO, *)
(* *)
(* Copyright (C) INRIA 2017-2020 *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
(* the GNU General Public License version 3.0 as described in the *)
(* file LICENSE *)
(* *)
(**************************************************************************)
open Types
open Generic_process
type origin_process =
| Left
| Right
type configuration =
{
current_process : generic_process;
origin_process : origin_process;
trace : transition list
}
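(* A [configuration] couples a symbolic process with the side of the
   equivalence query it originates from and the trace executed so far;
   [equivalence_problem] below groups the current set of such configurations
   with the size of the frame built up to this point. *)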
type equivalence_problem =
{
csys_set : configuration Constraint_system.set;
size_frame : int
}
val export_equivalence_problem : equivalence_problem -> equivalence_problem * (recipe_variable * recipe) list
val import_equivalence_problem : (unit -> 'a) -> equivalence_problem -> (recipe_variable * recipe) list -> 'a
val initialise_equivalence_problem : configuration Constraint_system.set -> equivalence_problem
(*** Apply transition ***)
exception Not_Trace_Equivalent of (bool * transition list)
val apply_one_transition_and_rules_classic : equivalence_problem -> (equivalence_problem -> (unit -> unit) -> unit) -> (unit -> unit) -> unit
val apply_one_transition_and_rules_private : equivalence_problem -> (equivalence_problem -> (unit -> unit) -> unit) -> (unit -> unit) -> unit
val apply_one_transition_and_rules_eavesdrop : equivalence_problem -> (equivalence_problem -> (unit -> unit) -> unit) -> (unit -> unit) -> unit
| null | https://raw.githubusercontent.com/DeepSec-prover/deepsec/8ddc45ec79de5ec49810302ea7da32d3dc9f46e4/Source/query_solving/generic_equivalence.mli | ocaml | ************************************************************************
All rights reserved. This file is distributed under the terms of
file LICENSE
************************************************************************
** Apply transition ** | DeepSec
, project PESTO ,
, project PESTO ,
, project PESTO ,
Copyright ( C ) INRIA 2017 - 2020
the GNU General Public License version 3.0 as described in the
open Types
open Generic_process
type origin_process =
| Left
| Right
type configuration =
{
current_process : generic_process;
origin_process : origin_process;
trace : transition list
}
type equivalence_problem =
{
csys_set : configuration Constraint_system.set;
size_frame : int
}
val export_equivalence_problem : equivalence_problem -> equivalence_problem * (recipe_variable * recipe) list
val import_equivalence_problem : (unit -> 'a) -> equivalence_problem -> (recipe_variable * recipe) list -> 'a
val initialise_equivalence_problem : configuration Constraint_system.set -> equivalence_problem
exception Not_Trace_Equivalent of (bool * transition list)
val apply_one_transition_and_rules_classic : equivalence_problem -> (equivalence_problem -> (unit -> unit) -> unit) -> (unit -> unit) -> unit
val apply_one_transition_and_rules_private : equivalence_problem -> (equivalence_problem -> (unit -> unit) -> unit) -> (unit -> unit) -> unit
val apply_one_transition_and_rules_eavesdrop : equivalence_problem -> (equivalence_problem -> (unit -> unit) -> unit) -> (unit -> unit) -> unit
|
b326a89b3cd85d8971c06c08bd8934bfda93c25437e8592ea04b4519837a4f95 | matterhorn-chat/matterhorn | TabbedWindow.hs | {-# LANGUAGE MultiWayIf #-}
module Matterhorn.Types.TabbedWindow
( TabbedWindow(..)
, TabbedWindowEntry(..)
, TabbedWindowTemplate(..)
, tabbedWindow
, getCurrentTabbedWindowEntry
, tabbedWindowNextTab
, tabbedWindowPreviousTab
, runTabShowHandlerFor
)
where
import Prelude ()
import Matterhorn.Prelude
import Brick ( Widget )
import Data.List ( nub, elemIndex )
import qualified Data.Text as T
import qualified Graphics.Vty as Vty
-- | An entry in a tabbed window corresponding to a tab and its content.
-- Parameterized over an abstract handle type ('a') for the tabs so we
-- can give each a unique handle.
data TabbedWindowEntry s m n a =
TabbedWindowEntry { tweValue :: a
-- ^ The handle for this tab.
, tweRender :: a -> s -> Widget n
-- ^ The rendering function to use when this tab
-- is selected.
, tweHandleEvent :: a -> Vty.Event -> m ()
-- ^ The event-handling function to use when this
-- tab is selected.
, tweTitle :: a -> Bool -> T.Text
-- ^ Title function for this tab, with a boolean
-- indicating whether this is the current tab.
, tweShowHandler :: a -> m ()
-- ^ A handler to be invoked when this tab is
-- shown.
}
-- | The definition of a tabbed window. Note that this does not track
-- the *state* of the window; it merely provides a collection of tab
-- window entries (see above). To track the state of a tabbed window,
use a TabbedWindow .
--
-- Parameterized over an abstract handle type ('a') for the tabs so we
-- can give each a unique handle.
data TabbedWindowTemplate s m n a =
TabbedWindowTemplate { twtEntries :: [TabbedWindowEntry s m n a]
-- ^ The entries in tabbed windows with this
-- structure.
, twtTitle :: a -> Widget n
-- ^ The title-rendering function for this kind
-- of tabbed window.
}
-- | An instantiated tab window. This is based on a template and tracks
-- the state of the tabbed window (current tab).
--
-- Parameterized over an abstract handle type ('a') for the tabs so we
-- can give each a unique handle.
data TabbedWindow s m n a =
TabbedWindow { twValue :: a
-- ^ The handle of the currently-selected tab.
, twTemplate :: TabbedWindowTemplate s m n a
-- ^ The template to use as a basis for rendering the
-- window and handling user input.
, twWindowWidth :: Int
, twWindowHeight :: Int
-- ^ Window dimensions
}
-- | Construct a new tabbed window from a template. This will raise an
-- exception if the initially-selected tab does not exist in the window
-- template, or if the window template has any duplicated tab handles.
--
-- Note that the caller is responsible for determining whether to call
-- the initially-selected tab's on-show handler.
tabbedWindow :: (Show a, Eq a)
=> a
-- ^ The handle corresponding to the tab that should be
-- selected initially.
-> TabbedWindowTemplate s m n a
-- ^ The template for the window to construct.
-> (Int, Int)
-- ^ The window dimensions (width, height).
-> TabbedWindow s m n a
tabbedWindow initialVal t (width, height) =
let handles = tweValue <$> twtEntries t
in if | null handles ->
error "BUG: tabbed window template must provide at least one entry"
| length handles /= length (nub handles) ->
error "BUG: tabbed window should have one entry per handle"
| not (initialVal `elem` handles) ->
error $ "BUG: tabbed window handle " <>
show initialVal <> " not present in template"
| otherwise ->
TabbedWindow { twTemplate = t
, twValue = initialVal
, twWindowWidth = width
, twWindowHeight = height
}
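-- A rough usage sketch (the tab type and entries here are made up, not part
-- of this module):
--
-- > data InfoTab = DetailsTab | MembersTab deriving (Show, Eq)
-- > window = tabbedWindow DetailsTab template (80, 24)
--
-- where @template@ is a 'TabbedWindowTemplate' whose entries provide one
-- 'TabbedWindowEntry' per @InfoTab@ constructor.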
-- | Get the currently-selected tab entry for a tabbed window. Raise
-- an exception if the window's selected tab handle is not found in its
-- template (which is a bug in the tabbed window infrastructure).
getCurrentTabbedWindowEntry :: (Show a, Eq a)
=> TabbedWindow s m n a
-> TabbedWindowEntry s m n a
getCurrentTabbedWindowEntry w =
lookupTabbedWindowEntry (twValue w) w
-- | Run the on-show handler for the window tab entry with the specified
-- handle.
runTabShowHandlerFor :: (Eq a, Show a) => a -> TabbedWindow s m n a -> m ()
runTabShowHandlerFor handle w = do
let entry = lookupTabbedWindowEntry handle w
tweShowHandler entry handle
-- | Look up a tabbed window entry by handle. Raises an exception if no
-- such entry exists.
lookupTabbedWindowEntry :: (Eq a, Show a)
=> a
-> TabbedWindow s m n a
-> TabbedWindowEntry s m n a
lookupTabbedWindowEntry handle w =
let matchesVal e = tweValue e == handle
in case filter matchesVal (twtEntries $ twTemplate w) of
[e] -> e
_ -> error $ "BUG: tabbed window entry for " <> show (twValue w) <>
" should have matched a single entry"
-- | Switch a tabbed window's selected tab to its next tab, cycling back
-- to the first tab if the last tab is the selected tab. This also
-- invokes the on-show handler for the newly-selected tab.
--
-- Note that this does nothing if the window has only one tab.
tabbedWindowNextTab :: (Monad m, Show a, Eq a)
=> TabbedWindow s m n a
-> m (TabbedWindow s m n a)
tabbedWindowNextTab w | length (twtEntries $ twTemplate w) == 1 = return w
tabbedWindowNextTab w = do
let curIdx = case elemIndex (tweValue curEntry) allHandles of
Nothing ->
error $ "BUG: tabbedWindowNextTab: could not find " <>
"current handle in handle list"
Just i -> i
nextIdx = if curIdx == length allHandles - 1
then 0
else curIdx + 1
newHandle = allHandles !! nextIdx
allHandles = tweValue <$> twtEntries (twTemplate w)
curEntry = getCurrentTabbedWindowEntry w
newWin = w { twValue = newHandle }
runTabShowHandlerFor newHandle newWin
return newWin
-- | Switch a tabbed window's selected tab to its previous tab, cycling
-- to the last tab if the first tab is the selected tab. This also
-- invokes the on-show handler for the newly-selected tab.
--
-- Note that this does nothing if the window has only one tab.
tabbedWindowPreviousTab :: (Monad m, Show a, Eq a)
=> TabbedWindow s m n a
-> m (TabbedWindow s m n a)
tabbedWindowPreviousTab w | length (twtEntries $ twTemplate w) == 1 = return w
tabbedWindowPreviousTab w = do
let curIdx = case elemIndex (tweValue curEntry) allHandles of
Nothing ->
error $ "BUG: tabbedWindowPreviousTab: could not find " <>
"current handle in handle list"
Just i -> i
nextIdx = if curIdx == 0
then length allHandles - 1
else curIdx - 1
newHandle = allHandles !! nextIdx
allHandles = tweValue <$> twtEntries (twTemplate w)
curEntry = getCurrentTabbedWindowEntry w
newWin = w { twValue = newHandle }
runTabShowHandlerFor newHandle newWin
return newWin
| null | https://raw.githubusercontent.com/matterhorn-chat/matterhorn/19a73ce833a8a8de3616cf884c03e9f08a4db0a7/src/Matterhorn/Types/TabbedWindow.hs | haskell | # LANGUAGE MultiWayIf #
| An entry in a tabbed window corresponding to a tab and its content.
Parameterized over an abstract handle type ('a') for the tabs so we
can give each a unique handle.
^ The handle for this tab.
^ The rendering function to use when this tab
is selected.
^ The event-handling function to use when this
tab is selected.
^ Title function for this tab, with a boolean
indicating whether this is the current tab.
^ A handler to be invoked when this tab is
shown.
| The definition of a tabbed window. Note that this does not track
the *state* of the window; it merely provides a collection of tab
window entries (see above). To track the state of a tabbed window,
Parameterized over an abstract handle type ('a') for the tabs so we
can give each a unique handle.
^ The entries in tabbed windows with this
structure.
^ The title-rendering function for this kind
of tabbed window.
| An instantiated tab window. This is based on a template and tracks
the state of the tabbed window (current tab).
Parameterized over an abstract handle type ('a') for the tabs so we
can give each a unique handle.
^ The handle of the currently-selected tab.
^ The template to use as a basis for rendering the
window and handling user input.
^ Window dimensions
| Construct a new tabbed window from a template. This will raise an
exception if the initially-selected tab does not exist in the window
template, or if the window template has any duplicated tab handles.
Note that the caller is responsible for determining whether to call
the initially-selected tab's on-show handler.
^ The handle corresponding to the tab that should be
selected initially.
^ The template for the window to construct.
^ The window dimensions (width, height).
| Get the currently-selected tab entry for a tabbed window. Raise
an exception if the window's selected tab handle is not found in its
template (which is a bug in the tabbed window infrastructure).
| Run the on-show handler for the window tab entry with the specified
handle.
| Look up a tabbed window entry by handle. Raises an exception if no
such entry exists.
| Switch a tabbed window's selected tab to its next tab, cycling back
invokes the on-show handler for the newly-selected tab.
| Switch a tabbed window's selected tab to its previous tab, cycling
invokes the on-show handler for the newly-selected tab.
| module Matterhorn.Types.TabbedWindow
( TabbedWindow(..)
, TabbedWindowEntry(..)
, TabbedWindowTemplate(..)
, tabbedWindow
, getCurrentTabbedWindowEntry
, tabbedWindowNextTab
, tabbedWindowPreviousTab
, runTabShowHandlerFor
)
where
import Prelude ()
import Matterhorn.Prelude
import Brick ( Widget )
import Data.List ( nub, elemIndex )
import qualified Data.Text as T
import qualified Graphics.Vty as Vty
data TabbedWindowEntry s m n a =
TabbedWindowEntry { tweValue :: a
, tweRender :: a -> s -> Widget n
, tweHandleEvent :: a -> Vty.Event -> m ()
, tweTitle :: a -> Bool -> T.Text
, tweShowHandler :: a -> m ()
}
-- use a TabbedWindow.
data TabbedWindowTemplate s m n a =
TabbedWindowTemplate { twtEntries :: [TabbedWindowEntry s m n a]
, twtTitle :: a -> Widget n
}
data TabbedWindow s m n a =
TabbedWindow { twValue :: a
, twTemplate :: TabbedWindowTemplate s m n a
, twWindowWidth :: Int
, twWindowHeight :: Int
}
tabbedWindow :: (Show a, Eq a)
=> a
-> TabbedWindowTemplate s m n a
-> (Int, Int)
-> TabbedWindow s m n a
tabbedWindow initialVal t (width, height) =
let handles = tweValue <$> twtEntries t
in if | null handles ->
error "BUG: tabbed window template must provide at least one entry"
| length handles /= length (nub handles) ->
error "BUG: tabbed window should have one entry per handle"
| not (initialVal `elem` handles) ->
error $ "BUG: tabbed window handle " <>
show initialVal <> " not present in template"
| otherwise ->
TabbedWindow { twTemplate = t
, twValue = initialVal
, twWindowWidth = width
, twWindowHeight = height
}
getCurrentTabbedWindowEntry :: (Show a, Eq a)
=> TabbedWindow s m n a
-> TabbedWindowEntry s m n a
getCurrentTabbedWindowEntry w =
lookupTabbedWindowEntry (twValue w) w
runTabShowHandlerFor :: (Eq a, Show a) => a -> TabbedWindow s m n a -> m ()
runTabShowHandlerFor handle w = do
let entry = lookupTabbedWindowEntry handle w
tweShowHandler entry handle
lookupTabbedWindowEntry :: (Eq a, Show a)
=> a
-> TabbedWindow s m n a
-> TabbedWindowEntry s m n a
lookupTabbedWindowEntry handle w =
let matchesVal e = tweValue e == handle
in case filter matchesVal (twtEntries $ twTemplate w) of
[e] -> e
_ -> error $ "BUG: tabbed window entry for " <> show (twValue w) <>
" should have matched a single entry"
-- to the first tab if the last tab is the selected tab. This also
-- Note that this does nothing if the window has only one tab.
tabbedWindowNextTab :: (Monad m, Show a, Eq a)
=> TabbedWindow s m n a
-> m (TabbedWindow s m n a)
tabbedWindowNextTab w | length (twtEntries $ twTemplate w) == 1 = return w
tabbedWindowNextTab w = do
let curIdx = case elemIndex (tweValue curEntry) allHandles of
Nothing ->
error $ "BUG: tabbedWindowNextTab: could not find " <>
"current handle in handle list"
Just i -> i
nextIdx = if curIdx == length allHandles - 1
then 0
else curIdx + 1
newHandle = allHandles !! nextIdx
allHandles = tweValue <$> twtEntries (twTemplate w)
curEntry = getCurrentTabbedWindowEntry w
newWin = w { twValue = newHandle }
runTabShowHandlerFor newHandle newWin
return newWin
-- to the last tab if the first tab is the selected tab. This also
-- Note that this does nothing if the window has only one tab.
tabbedWindowPreviousTab :: (Monad m, Show a, Eq a)
=> TabbedWindow s m n a
-> m (TabbedWindow s m n a)
tabbedWindowPreviousTab w | length (twtEntries $ twTemplate w) == 1 = return w
tabbedWindowPreviousTab w = do
let curIdx = case elemIndex (tweValue curEntry) allHandles of
Nothing ->
error $ "BUG: tabbedWindowPreviousTab: could not find " <>
"current handle in handle list"
Just i -> i
nextIdx = if curIdx == 0
then length allHandles - 1
else curIdx - 1
newHandle = allHandles !! nextIdx
allHandles = tweValue <$> twtEntries (twTemplate w)
curEntry = getCurrentTabbedWindowEntry w
newWin = w { twValue = newHandle }
runTabShowHandlerFor newHandle newWin
return newWin
|
618f1174237df92e2d0fde9127b6eada800990a83e4292fe2b5a919766e71655 | jaspervdj/advent-of-code | 21.hs | module Main where
import Data.Either (partitionEithers)
import Data.Foldable.Extra (maximumOn, minimumOn)
import Data.List.Extra (selectN)
data Combatant = Combatant
{ cHitpoints :: !Int
, cDamage :: !Int
, cArmor :: !Int
} deriving (Show)
simulate :: Combatant -> Combatant -> (Bool, Combatant, Combatant)
simulate = go True
where
go _ cmb1 cmb2 | cHitpoints cmb1 <= 0 = (False, cmb1, cmb2)
go _ cmb1 cmb2 | cHitpoints cmb2 <= 0 = (True, cmb1, cmb2)
go True cmb1 cmb2 = go False cmb1 (attack cmb1 cmb2)
go False cmb1 cmb2 = go True (attack cmb2 cmb1) cmb2
attack attacker defender =
let dmg = max 1 $ cDamage attacker - cArmor defender in
defender {cHitpoints = cHitpoints defender - dmg}
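-- Worked example for 'simulate' above (illustrative numbers only): an
-- attacker with 8 damage hitting a defender with 5 armor removes
-- max 1 (8 - 5) = 3 hitpoints per attack; against 9 or more armor the
-- 'max 1' clamp still removes 1 hitpoint per attack.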
data Inventory = Inventory
{ iItems :: [String]
, iCost :: !Int
, iDamage :: !Int
, iArmor :: !Int
} deriving (Show)
instance Semigroup Inventory where
x <> y = Inventory
{ iItems = iItems x ++ iItems y
, iCost = iCost x + iCost y
, iDamage = iDamage x + iDamage y
, iArmor = iArmor x + iArmor y
}
instance Monoid Inventory where
mempty = Inventory [] 0 0 0
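-- Example for the Semigroup/Monoid instances above, using the Dagger and
-- Leather entries from the shop tables below:
--
-- > Inventory ["Dagger"] 8 4 0 <> Inventory ["Leather"] 13 0 1
--
-- yields Inventory ["Dagger", "Leather"] 21 4 1, and 'mempty' acts as the
-- "no equipment" element.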
inventoryToCombatant :: Int -> Inventory -> Combatant
inventoryToCombatant hitpoints inventory = Combatant
{ cHitpoints = hitpoints
, cDamage = iDamage inventory
, cArmor = iArmor inventory
}
weapons :: [Inventory]
weapons =
[ Inventory ["Dagger"] 8 4 0
, Inventory ["Shortsword"] 10 5 0
, Inventory ["Warhammer"] 25 6 0
, Inventory ["Longsword"] 40 7 0
, Inventory ["Greataxe"] 74 8 0
]
armor :: [Inventory]
armor =
[ Inventory ["Leather"] 13 0 1
, Inventory ["Chainmail"] 31 0 2
, Inventory ["Splintmail"] 53 0 3
, Inventory ["Bandedmail"] 75 0 4
, Inventory ["Platemail"] 102 0 5
]
rings :: [Inventory]
rings =
[ Inventory ["Damage +1"] 25 1 0
, Inventory ["Damage +2"] 50 2 0
, Inventory ["Damage +3"] 100 3 0
, Inventory ["Defense +1"] 20 0 1
, Inventory ["Defense +2"] 40 0 2
, Inventory ["Defense +3"] 80 0 3
]
shop :: [Inventory]
shop = do
weapon <- weapons
armor' <- mempty : armor
numRings <- [0 .. 2]
(rings', _) <- selectN numRings rings
pure $ weapon <> armor' <> mconcat rings'
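-- Size of the search space generated by 'shop' above: 5 weapons, 6 armor
-- choices (none or one of 5) and, assuming 'selectN' enumerates unordered
-- selections, 1 + 6 + 15 = 22 ways to pick 0, 1 or 2 of the 6 rings, for
-- 5 * 6 * 22 = 660 candidate loadouts in total.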
main :: IO ()
main = do
let (wins, losses) = partitionEithers $ do
inventory <- shop
let player = inventoryToCombatant 100 inventory
boss = Combatant 100 8 2
(won, _, _) = simulate player boss
pure $ if won then Left inventory else Right inventory
print . iCost $ minimumOn iCost wins
print . iCost $ maximumOn iCost losses
| null | https://raw.githubusercontent.com/jaspervdj/advent-of-code/75f426c4fc149cf6ab5fb6d95c96f3132097bdfd/2015/21.hs | haskell | module Main where
import Data.Either (partitionEithers)
import Data.Foldable.Extra (maximumOn, minimumOn)
import Data.List.Extra (selectN)
data Combatant = Combatant
{ cHitpoints :: !Int
, cDamage :: !Int
, cArmor :: !Int
} deriving (Show)
simulate :: Combatant -> Combatant -> (Bool, Combatant, Combatant)
simulate = go True
where
go _ cmb1 cmb2 | cHitpoints cmb1 <= 0 = (False, cmb1, cmb2)
go _ cmb1 cmb2 | cHitpoints cmb2 <= 0 = (True, cmb1, cmb2)
go True cmb1 cmb2 = go False cmb1 (attack cmb1 cmb2)
go False cmb1 cmb2 = go True (attack cmb2 cmb1) cmb2
attack attacker defender =
let dmg = max 1 $ cDamage attacker - cArmor defender in
defender {cHitpoints = cHitpoints defender - dmg}
data Inventory = Inventory
{ iItems :: [String]
, iCost :: !Int
, iDamage :: !Int
, iArmor :: !Int
} deriving (Show)
instance Semigroup Inventory where
x <> y = Inventory
{ iItems = iItems x ++ iItems y
, iCost = iCost x + iCost y
, iDamage = iDamage x + iDamage y
, iArmor = iArmor x + iArmor y
}
instance Monoid Inventory where
mempty = Inventory [] 0 0 0
inventoryToCombatant :: Int -> Inventory -> Combatant
inventoryToCombatant hitpoints inventory = Combatant
{ cHitpoints = hitpoints
, cDamage = iDamage inventory
, cArmor = iArmor inventory
}
weapons :: [Inventory]
weapons =
[ Inventory ["Dagger"] 8 4 0
, Inventory ["Shortsword"] 10 5 0
, Inventory ["Warhammer"] 25 6 0
, Inventory ["Longsword"] 40 7 0
, Inventory ["Greataxe"] 74 8 0
]
armor :: [Inventory]
armor =
[ Inventory ["Leather"] 13 0 1
, Inventory ["Chainmail"] 31 0 2
, Inventory ["Splintmail"] 53 0 3
, Inventory ["Bandedmail"] 75 0 4
, Inventory ["Platemail"] 102 0 5
]
rings :: [Inventory]
rings =
[ Inventory ["Damage +1"] 25 1 0
, Inventory ["Damage +2"] 50 2 0
, Inventory ["Damage +3"] 100 3 0
, Inventory ["Defense +1"] 20 0 1
, Inventory ["Defense +2"] 40 0 2
, Inventory ["Defense +3"] 80 0 3
]
shop :: [Inventory]
shop = do
weapon <- weapons
armor' <- mempty : armor
numRings <- [0 .. 2]
(rings', _) <- selectN numRings rings
pure $ weapon <> armor' <> mconcat rings'
main :: IO ()
main = do
let (wins, losses) = partitionEithers $ do
inventory <- shop
let player = inventoryToCombatant 100 inventory
boss = Combatant 100 8 2
(won, _, _) = simulate player boss
pure $ if won then Left inventory else Right inventory
print . iCost $ minimumOn iCost wins
print . iCost $ maximumOn iCost losses
|
|
cd23dcf89a392288a3e75e673f0f33eacc25cd32edbf647faaa5472f149ebba8 | jeanparpaillon/erlang-dbus | dbus_marshaller.erl | %%
%% @copyright 2006-2007, 2014-2016 Jean Parpaillon
%%
%% @author < >
%% @author < >
%% @doc D-Bus binary format (un)marshaling.
%%
%% See <a href="-specification.html#message-protocol-marshaling" >D-Bus Specification</a>.
%%
%% @end
-module(dbus_marshaller).
-include("dbus.hrl").
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
%% api
-export([
marshal_message/1,
marshal_signature/1,
marshal_list/2,
unmarshal_data/1,
unmarshal_signature/1
]).
-define(HEADER_SIGNATURE, [byte, byte, byte, byte, uint32, uint32, {array, {struct, [byte, variant]}}]).
-type errors() :: invalid_serial
| {marshaling, dbus_type(), binary()}
| {unmarshaling, dbus_type(), binary()}
| {dbus_parse_error, term()}
| {bad_type_code, integer()}
| dbus_parse_error
| body_parse_error
| bad_header
| term().
-export_type([errors/0]).
%%%
%%% API
%%%
%% @doc Encode a message
%% Encodes a dbus_message into an iolist, including any padding that may be required.
%% Such a marshalled message is ready to send through a socket onto D-Bus.
%% As defined in dbus.hrl, a message is a header record and a body.
%% The marshal_message/1 function marshals the header but passes through
%% the body portion unchanged. It follows that given the result of this function is
%% an iolist, and the result of this function is [Header,Body], then Body must
%% be a valid iolist.
%% Note that prior to marshalling the message serial must be set, and that
%% the message body is unaffected by marshalling and so should be in a final form
%% ready for transmission.
%% @end
-spec marshal_message(dbus_message()) -> iolist().
marshal_message(#dbus_message{header=#dbus_header{serial=0}}=_Msg) ->
throw(invalid_serial);
marshal_message(#dbus_message{header=#dbus_header{type=Type, flags=Flags, serial=S, fields=Fields},
body= <<>>}=_Msg) ->
marshal_header([$l, Type, Flags, ?DBUS_VERSION_MAJOR, 0, S, Fields]);
marshal_message(#dbus_message{header=#dbus_header{type=Type, flags=Flags, serial=S, fields=Fields},
body=Body}=_Msg) ->
[ marshal_header([$l, Type, Flags, ?DBUS_VERSION_MAJOR, iolist_size(Body), S, Fields]), Body ].
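%% Usage sketch for marshal_message/1 above (illustrative only; the record
%% fields shown are the ones this module uses and are assumed to be defined
%% in dbus.hrl):
%%
%%   Msg = #dbus_message{header = #dbus_header{type = Type, flags = 0,
%%                                             serial = 1, fields = Fields},
%%                       body = Body},
%%   Bin = iolist_to_binary(marshal_message(Msg)).
%%
%% A serial of 0 is rejected with `invalid_serial', and Body is passed
%% through untouched, so it must already be a valid iolist.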
%% @doc Encode a signature
%% @end
-spec marshal_signature(dbus_signature()) -> iolist().
marshal_signature(byte) -> "y";
marshal_signature(boolean) -> "b";
marshal_signature(int16) -> "n";
marshal_signature(uint16) -> "q";
marshal_signature(int32) -> "i";
marshal_signature(uint32) -> "u";
marshal_signature(int64) -> "x";
marshal_signature(uint64) -> "t";
marshal_signature(double) -> "d";
marshal_signature(string) -> "s";
marshal_signature(object_path) -> "o";
marshal_signature(signature) -> "g";
marshal_signature({array, Type}) ->
[$a, marshal_signature(Type)];
marshal_signature({struct, SubTypes}) ->
["(", marshal_struct_signature(SubTypes, []), ")"];
marshal_signature(variant) ->
"v";
marshal_signature({dict, KeyType, ValueType}) ->
KeySig = marshal_signature(KeyType),
ValueSig = marshal_signature(ValueType),
["a{", KeySig, ValueSig, "}"];
marshal_signature([]) ->
"";
marshal_signature([Type|R]) ->
[marshal_signature(Type), marshal_signature(R)].
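%% Example mappings for marshal_signature/1 above (results shown flattened;
%% the function actually returns an iolist):
%%
%%   marshal_signature(uint32)                            -> "u"
%%   marshal_signature({array, {struct, [byte, string]}}) -> "a(ys)"
%%   marshal_signature({dict, string, variant})           -> "a{sv}"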
%% @doc Encode objects, given a signature
%% @end
-spec marshal_list(dbus_signature(), term()) -> {iolist(), integer()}.
marshal_list(Types, Value) ->
marshal_list(Types, Value, 0, []).
%% @doc Decode messages
%%
%% Returns:
%% * `{ok, [dbus_message()], binary()}': if binary describe a complete list of messages, eventually with remaining binary.
%% * `more': if no complete message could be decoded.
%% @end
-spec unmarshal_data(binary()) -> {ok, Msgs :: [dbus_message()], Rest :: binary()}
| {error, errors()}
| more.
unmarshal_data(Data) ->
try unmarshal_data(Data, [])
catch throw:Err ->
{error, Err}
end.
%% @doc Decode a signature
%%
%% Returns `more' if no complete signature could be decoded.
%% @end
-spec unmarshal_signature(binary()) -> {ok, dbus_signature()} | more.
unmarshal_signature(<<>>) ->
{ok, []};
unmarshal_signature(Bin) when is_binary(Bin) ->
case unmarshal_signature(Bin, []) of
{ok, Signature, <<>>} -> {ok, Signature};
more -> more
end.
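%% Example for unmarshal_signature/1 above, the inverse of
%% marshal_signature/1:
%%
%%   unmarshal_signature(<<"a{sv}i">>) ->
%%       {ok, [{dict, string, variant}, int32]}
%%
%% An incomplete signature such as <<"a(">> yields `more'.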
%%%
%%% Priv marshalling
%%%
marshal_header(Header) when is_list(Header) ->
{Value, Pos} = marshal_list(?HEADER_SIGNATURE, Header),
case pad(8, Pos) of
0 -> Value;
Pad -> [Value, <<0:Pad>>]
end.
marshal_list([], [], Pos, Res) ->
{Res, Pos};
marshal_list([Type | T], [Value | V], Pos, Res) ->
{Res1, Pos1} = marshal(Type, Value, Pos),
marshal_list(T, V, Pos1, [Res, Res1]).
marshal(byte, Value, Pos) when is_integer(Value) andalso 255 >= Value ->
marshal_uint(1, Value, Pos);
marshal(boolean, Value, Pos) when true =:= Value orelse false =:= Value ->
Int =
case Value of
true -> 1;
false -> 0
end,
marshal(uint32, Int, Pos);
marshal(int16, Value, Pos) when Value > -32767 andalso Value =< 32767 ->
marshal_int(2, Value, Pos);
marshal(uint16, Value, Pos) when Value >= 0 andalso Value =< 65535 ->
marshal_uint(2, Value, Pos);
marshal(int32, Value, Pos) when Value >= -2147483647 andalso Value =< 2147483647->
marshal_int(4, Value, Pos);
marshal(uint32, Value, Pos) when Value >= 0 andalso Value =< 4294967295 ->
marshal_uint(4, Value, Pos);
marshal(int64, Value, Pos) ->
marshal_int(8, Value, Pos);
marshal(uint64, Value, Pos) when Value >= 0 ->
marshal_uint(8, Value, Pos);
marshal(double, Value, Pos) when is_integer(Value) ->
Pad = pad(8, Pos),
{<< 0:Pad, (float(Value)):64/little-float >>, Pos + Pad div 8+ 8};
marshal(double, Value, Pos) when is_float(Value) ->
Pad = pad(8, Pos),
{<< 0:Pad, Value:64/little-float >>, Pos + Pad div 8+ 8};
marshal(string, Value, Pos) when is_atom(Value) ->
marshal(string, atom_to_binary(Value, utf8), Pos);
marshal(string, Value, Pos) when is_binary(Value) ->
marshal_string(uint32, Value, Pos);
marshal(string, Value, Pos) when is_list(Value) ->
marshal(string, list_to_binary(Value), Pos);
marshal(object_path, Value, Pos) ->
marshal(string, Value, Pos);
marshal(signature, Value, Pos) ->
marshal_string(byte, Value, Pos);
marshal({array, {struct, [_KeyType, _ValueType]}=SubType}, Value, Pos) when is_map(Value) ->
marshal_array(SubType, maps:to_list(Value), Pos);
marshal({array, byte}=_Type, Value, Pos) when is_binary(Value) ->
marshal_byte_array(Value, Pos);
marshal({array, SubType}, Value, Pos) when is_list(Value) ->
marshal_array(SubType, Value, Pos);
marshal({struct, _SubTypes}=Type, Value, Pos) when is_tuple(Value) ->
marshal(Type, tuple_to_list(Value), Pos);
marshal({struct, SubTypes}, Value, Pos) when is_list(Value) ->
marshal_struct(SubTypes, Value, Pos);
marshal({dict, KeyType, ValueType}, Value, Pos) ->
marshal_dict(KeyType, ValueType, Value, Pos);
marshal(variant, Value, Pos) when is_binary(Value) ->
marshal_variant({array, byte}, Value, Pos);
marshal(variant, #dbus_variant{type=Type, value=Value}, Pos) ->
marshal_variant(Type, Value, Pos);
marshal(variant, true=Value, Pos) ->
marshal_variant(boolean, Value, Pos);
marshal(variant, false=Value, Pos) ->
marshal_variant(boolean, Value, Pos);
marshal(variant, Value, Pos) when is_float(Value) ->
marshal_variant(double, Value, Pos);
marshal(variant, Value, Pos) when is_integer(Value), Value < 0 ->
marshal_int_variant(Value, Pos);
marshal(variant, Value, Pos) when is_integer(Value), Value >= 0 ->
marshal_uint_variant(Value, Pos);
marshal(variant, Value, Pos) when is_list(Value) ->
marshal(variant, list_to_binary(Value), Pos);
marshal(variant, Value, Pos) when is_atom(Value) ->
marshal_variant(string, atom_to_binary(Value, utf8), Pos);
marshal(variant, Value, Pos) ->
Type = infer_type(Value),
marshal_variant(Type, Value, Pos);
marshal(Type, {dbus_variant, Type, Value}, Pos) ->
marshal(Type, Value, Pos);
marshal(Type, Value, _) ->
throw({marshaling, Type, Value}).
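%% Alignment example for marshal/3 above: marshaling a uint32 at position 2
%% first emits two padding bytes so the value starts on a 4-byte boundary
%% (little-endian; the returned position counts padding plus data):
%%
%%   marshal(uint32, 1, 2) -> {<<0, 0, 1, 0, 0, 0>>, 8}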
infer_type(Value) when is_binary(Value)->
{array, byte};
infer_type(true) ->
boolean;
infer_type(false) ->
boolean;
infer_type(Value) when is_integer(Value), Value < 0 ->
infer_int(Value);
infer_type(Value) when is_integer(Value), Value >= 0 ->
infer_uint(Value);
infer_type(Value) when is_tuple(Value) ->
infer_struct(tuple_to_list(Value));
infer_type(Value) when is_atom(Value)->
string;
infer_type(Value) when is_list(Value) ->
string;
infer_type(Value) when is_map(Value) ->
infer_dict(Value).
infer_struct(Values) ->
{struct, infer_struct(Values, [])}.
infer_struct([], Res) ->
lists:reverse(Res);
infer_struct([ Value | R ], Res) ->
infer_struct(R, [ infer_type(Value) | Res ]).
infer_int(Value) when Value >= -32767 ->
int16;
infer_int(Value) when Value >= -2147483647 ->
int32;
infer_int(_Value) ->
int64.
infer_uint(Value) when Value < 32768 ->
uint16;
infer_uint(Value) when Value < 4294967296 ->
uint32;
infer_uint(_Value) ->
uint64.
infer_dict(_Value) ->
%% Can do better without going through all keys ?...
{dict, variant, variant}.
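%% Examples for the infer_* helpers above (used when a bare Erlang term is
%% marshaled as a variant):
%%
%%   infer_type(<<1,2,3>>) -> {array, byte}
%%   infer_type(42)        -> uint16
%%   infer_type(-42)       -> int16
%%   infer_type({1, "x"})  -> {struct, [uint16, string]}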
marshal_int_variant(Value, Pos) when Value >= -32768 ->
marshal_variant(int16, Value, Pos);
marshal_int_variant(Value, Pos) when Value >= -4294967296 ->
marshal_variant(int32, Value, Pos);
marshal_int_variant(Value, Pos) ->
marshal_variant(int64, Value, Pos).
marshal_uint_variant(Value, Pos) when Value < 32768 ->
marshal_variant(uint16, Value, Pos);
marshal_uint_variant(Value, Pos) when Value < 4294967296 ->
marshal_variant(uint32, Value, Pos);
marshal_uint_variant(Value, Pos) ->
marshal_variant(uint64, Value, Pos).
marshal_variant(Type, Value, Pos) ->
{Value1, Pos1} = marshal(signature, marshal_signature(Type), Pos),
{Value2, Pos2} = marshal(Type, Value, Pos1),
{[Value1, Value2], Pos2}.
marshal_uint(Len, Value, Pos) when is_integer(Value) ->
Pad = pad(Len, Pos),
{<< 0:Pad, Value:(Len*8)/little-unsigned >>, Pos + Pad div 8 + Len}.
marshal_int(Len, Value, Pos) when is_integer(Value) ->
Pad = pad(Len, Pos),
{<< 0:Pad, Value:(Len*8)/little-signed >>, Pos + Pad div 8 + Len}.
marshal_string(LenType, Value, Pos) when is_list(Value) ->
marshal_string(LenType, list_to_binary(Value), Pos);
marshal_string(LenType, Value, Pos) when is_binary(Value) ->
Length = byte_size(Value),
{Value1, Pos1} = marshal(LenType, Length, Pos),
{[Value1, Value, 0], Pos1 + Length + 1}.
marshal_byte_array(Value, Pos) ->
Pad = pad(uint32, Pos),
Pos0 = Pos + Pad div 8,
Pos1 = Pos0 + 4,
Pad1 = pad(byte, Pos1),
Pos1b = Pos1 + Pad1 div 8,
Length = byte_size(Value),
Pos2 = Pos1b + Length,
{Value1, Pos1} = marshal(uint32, Length, Pos0),
{[<<0:Pad>>, Value1, <<0:Pad1>>, Value], Pos2}.
marshal_array(SubType, Value, Pos) ->
Pad = pad(uint32, Pos),
Pos0 = Pos + Pad div 8,
Pos1 = Pos0 + 4,
Pad1 = pad(SubType, Pos1),
Pos1b = Pos1 + Pad1 div 8,
{Value2, Pos2} = marshal_array_item(SubType, Value, Pos1b),
Length = Pos2 - Pos1b,
{Value1, Pos1} = marshal(uint32, Length, Pos0),
{[<<0:Pad>>, Value1, <<0:Pad1>>, Value2], Pos2}.
marshal_array_item(SubType, Array, Pos) ->
marshal_array_item(SubType, Array, Pos, []).
marshal_array_item(_SubType, [], Pos, Res) ->
{Res, Pos};
marshal_array_item(SubType, [ Value | R ], Pos, Res) ->
{Value1, Pos1} = marshal(SubType, Value, Pos),
marshal_array_item(SubType, R, Pos1, [Res, Value1]).
marshal_dict(KeyType, ValueType, Value, Pos) when is_map(Value) ->
marshal_array({struct, [KeyType, ValueType]}, maps:to_list(Value), Pos);
marshal_dict(KeyType, ValueType, Value, Pos) when is_list(Value) ->
marshal_array({struct, [KeyType, ValueType]}, Value, Pos);
marshal_dict(KeyType, ValueType, Value, Pos) when element(1, Value) == dict ->
marshal_array({struct, [KeyType, ValueType]}, dict:to_list(Value), Pos).
marshal_struct(SubTypes, Values, Pos) ->
Pad = pad(8, Pos),
{Values1, Pos1} = marshal_struct(SubTypes, Values, Pos + Pad div 8, []),
if
Pad == 0 ->
{Values1, Pos1};
Pad > 0 ->
{[<< 0:Pad >>, Values1], Pos1}
end.
marshal_struct([], [], Pos, Res) ->
{Res, Pos};
marshal_struct([SubType|R], [Value|V], Pos, Res) ->
{Value1, Pos1} = marshal(SubType, Value, Pos),
marshal_struct(R, V, Pos1, [Res, Value1]).
marshal_struct_signature([], Res) ->
Res;
marshal_struct_signature([SubType|R], Res) ->
marshal_struct_signature(R, [Res, marshal_signature(SubType)]).
%%%
%%% Private unmarshaling
%%%
unmarshal_data(<<>>, []) ->
more;
unmarshal_data(<<>>, Acc) ->
{ok, lists:reverse(Acc), <<>>};
unmarshal_data(Data, Acc) ->
try unmarshal_message(Data) of
{ok, #dbus_message{}=Msg, Rest} ->
unmarshal_data(Rest, [Msg | Acc]);
more when [] =:= Acc ->
more;
more ->
{ok, lists:reverse(Acc), Data};
_ ->
?error("Error parsing data~n", []),
throw(dbus_parse_error)
catch
{'EXIT', Err} ->
throw({dbus_parse_error, Err})
end.
unmarshal_message(<<>>) ->
more;
unmarshal_message(Data) when is_binary(Data) ->
case unmarshal_header(Data) of
more ->
more;
{ok, #dbus_header{endian=Endian, type=MsgType}=Header, BodyBin, Rest} ->
case dbus_message:find_field(?FIELD_SIGNATURE, Header) of
undefined ->
case BodyBin of
<<>> -> {ok, #dbus_message{header=Header, body=undefined}, Rest};
_ -> throw(body_parse_error)
end;
Signature ->
case unmarshal_body(MsgType, Signature, BodyBin, Endian) of
{ok, Body} -> {ok, #dbus_message{header=Header, body=Body}, Rest};
more -> more;
{error, Err} -> throw(Err)
end
end
end.
unmarshal_body(?TYPE_INVALID, _, _, _) ->
{ok, undefined};
unmarshal_body(_, SigBin, BodyBin, Endian) ->
case unmarshal_signature(SigBin) of
{ok, Sig} ->
case unmarshal_tuple(Sig, BodyBin, Endian) of
more -> more;
{ok, {}, <<>>, _Pos} ->
{ok, undefined};
{ok, {Body}, <<>>, _Pos} ->
{ok, Body};
{ok, Body, <<>>, _Pos} ->
{ok, Body};
{ok, _Body, _, _} -> {error, body_parse_error}
end;
more -> more
end.
unmarshal_header(Bin) when byte_size(Bin) < 16 ->
more;
unmarshal_header(<<Endian/integer, Type/integer, Flags/integer, ?DBUS_VERSION_MAJOR, Rest/bits>>) ->
unmarshal_header2(Rest, #dbus_header{endian=Endian, type=Type, flags=Flags});
unmarshal_header(_Data) ->
?debug("Bad message header: ~p~n", [_Data]),
throw(bad_header).
unmarshal_header2(<<Length:4/unsigned-little-integer-unit:8, Serial:4/unsigned-little-integer-unit:8, Bin/bits>>,
#dbus_header{endian=$l}=Header) ->
unmarshal_header_fields(Bin, Header#dbus_header{size=Length, serial=Serial});
unmarshal_header2(<<Length:4/unsigned-big-integer-unit:8, Serial:4/unsigned-big-integer-unit:8, Bin/bits>>,
#dbus_header{endian=$B}=Header) ->
unmarshal_header_fields(Bin, Header#dbus_header{size=Length, serial=Serial}).
unmarshal_header_fields(Bin, #dbus_header{endian=Endian, size=Size}=Header) ->
case unmarshal({array, {struct, [byte, variant]}}, Bin, 12, Endian) of
more ->
more;
{ok, [_, _, _, ?DBUS_VERSION_MAJOR, Size, _, _], Rest, _} when byte_size(Rest) < Size ->
more;
{ok, Fields, Rest, Pos} ->
Pad = pad(8, Pos),
if
byte_size(Rest) < Pad/8 + Size ->
more;
true ->
<<0:Pad, Body:Size/binary, Rest2/binary>> = Rest,
{ok, Header#dbus_header{fields=Fields}, Body, Rest2}
end
end.
unmarshal_single_type(<<>>) ->
empty;
unmarshal_single_type(Bin) when is_binary(Bin) ->
case unmarshal_signature(Bin, []) of
{ok, [Type], <<>>} -> {ok, Type};
{ok, _, _} -> throw({unmarshaling, signature, Bin});
more -> more
end.
unmarshal(_, <<>>, _, _) ->
more;
unmarshal(byte, Data, Pos, _) ->
<< Value:8, Data1/binary >> = Data,
{ok, Value, Data1, Pos + 1};
unmarshal(boolean, Data, Pos, Endian) ->
case unmarshal(uint32, Data, Pos, Endian) of
more -> more;
{ok, 1, Data1, Pos1} ->
{ok, true, Data1, Pos1};
{ok, 0, Data1, Pos1} ->
{ok, false, Data1, Pos1};
{ok, _, _, _} ->
throw({unmarshaling, boolean, Data})
end;
unmarshal(uint16, Data, Pos, Endian) ->
unmarshal_uint(2, Data, Pos, Endian);
unmarshal(uint32, Data, Pos, Endian) ->
unmarshal_uint(4, Data, Pos, Endian);
unmarshal(uint64, Data, Pos, Endian) ->
unmarshal_uint(8, Data, Pos, Endian);
unmarshal(int16, Data, Pos, Endian) ->
unmarshal_int(2, Data, Pos, Endian);
unmarshal(int32, Data, Pos, Endian) ->
unmarshal_int(4, Data, Pos, Endian);
unmarshal(int64, Data, Pos, Endian) ->
unmarshal_int(8, Data, Pos, Endian);
unmarshal(double, Data, _, _) when byte_size(Data) < 8 ->
more;
unmarshal(double, Data, Pos, Endian) ->
Pad = pad(8, Pos),
{Value, Data1} = case Endian of
$l ->
<< 0:Pad, V:64/little-float, D/binary >> = Data,
{V, D};
$B ->
<< 0:Pad, V:64/big-float, D/binary >> = Data,
{V, D}
end,
Pos1 = Pos + Pad div 8 + 8,
{ok, Value, Data1, Pos1};
unmarshal(signature, Data, Pos, Endian) ->
unmarshal_string(byte, Data, Pos, Endian);
unmarshal(string, Data, Pos, Endian) ->
unmarshal_string(uint32, Data, Pos, Endian);
unmarshal(object_path, Data, Pos, Endian) ->
unmarshal_string(uint32, Data, Pos, Endian);
unmarshal({array, SubType}, Data, Pos, Endian) ->
case unmarshal(uint32, Data, Pos, Endian) of
more ->
more;
{ok, Length, Rest, NewPos} ->
unmarshal_array(SubType, Length, Rest, NewPos, Endian)
end;
unmarshal({struct, _}, Data, _, _) when byte_size(Data) < 8 ->
more;
unmarshal({struct, SubTypes}, Data, Pos, Endian) ->
Pad = pad(8, Pos),
<< 0:Pad, Data1/binary >> = Data,
Pos1 = Pos + Pad div 8,
case unmarshal_struct(SubTypes, Data1, Pos1, Endian) of
more ->
more;
{ok, Res, Data2, Pos2} ->
{ok, list_to_tuple(Res), Data2, Pos2}
end;
unmarshal({dict, KeyType, ValueType}, Data, Pos, Endian) ->
case unmarshal(uint32, Data, Pos, Endian) of
more ->
more;
{ok, Length, Data1, Pos1} ->
case unmarshal_dict(KeyType, ValueType, Length, Data1, Pos1, Endian) of
more ->
more;
{ok, Res, Data2, Pos2} ->
{ok, Res, Data2, Pos2}
end
end;
unmarshal(variant, Data, Pos, Endian) ->
case unmarshal(signature, Data, Pos, Endian) of
more ->
more;
{ok, _, <<>>, _} ->
more;
{ok, Signature, Data1, Pos1} ->
case unmarshal_single_type(Signature) of
more -> more;
{ok, Type} ->
case unmarshal(Type, Data1, Pos1, Endian) of
more ->
more;
{ok, Value, Data2, Pos2} ->
{ok, Value, Data2, Pos2}
end
end
end.
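%% Examples for unmarshal/4 above (they match the eunit cases further down):
%%
%%   unmarshal(boolean, <<1,0,0,0>>, 0, $l) -> {ok, true, <<>>, 4}
%%   unmarshal(uint32, <<0,0,0,1>>, 0, $B)  -> {ok, 1, <<>>, 4}
%%
%% `more' is returned whenever the buffer is too short for the requested
%% type, so callers can wait for additional data.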
unmarshal_uint(Len, Data, _, _) when is_integer(Len) andalso byte_size(Data) < Len ->
more;
unmarshal_uint(Len, Data, Pos, Endian) when is_integer(Len) ->
Bitlen = Len * 8,
Pad = pad(Len, Pos),
{Value, Data1} = case Endian of
$l ->
<< 0:Pad, V:Bitlen/little-unsigned, D/binary >> = Data,
{V, D};
$B ->
<< 0:Pad, V:Bitlen/big-unsigned, D/binary >> = Data,
{V, D}
end,
Pos1 = Pos + Pad div 8 + Len,
{ok, Value, Data1, Pos1}.
unmarshal_int(Len, Data, _, _) when is_integer(Len) andalso byte_size(Data) < Len ->
more;
unmarshal_int(Len, Data, Pos, Endian) ->
Bitlen = Len * 8,
Pad = pad(Len, Pos),
{Value, Data1} = case Endian of
$l ->
<< 0:Pad, V:Bitlen/little-signed, D/binary >> = Data,
{V, D};
$B ->
<< 0:Pad, V:Bitlen/big-signed, D/binary >> = Data,
{V, D}
end,
Pos1 = Pos + Pad div 8 + Len,
{ok, Value, Data1, Pos1}.
unmarshal_signature(<<>>, Acc) ->
{ok, lists:reverse(Acc), <<>>};
unmarshal_signature(<<$a, ${, KeySig, Rest/bits>>, Acc) ->
KeyType = unmarshal_type_code(KeySig),
case unmarshal_signature(Rest, []) of
{ok, [], _} ->
more;
{ok, [ValueType], Rest2} ->
unmarshal_signature(Rest2, [ {dict, KeyType, ValueType} | Acc ]);
{ok, _, _} ->
throw({unmarshaling, dict, KeySig, Rest});
more ->
more
end;
unmarshal_signature(<<$a, Rest/bits>>, Acc) ->
case unmarshal_array_signature(Rest) of
{ok, Type, Rest2} ->
unmarshal_signature(Rest2, [ {array, Type} | Acc ]);
more -> more
end;
unmarshal_signature(<<$(, Rest/bits>>, Acc) ->
case unmarshal_signature(Rest, []) of
{ok, [], _} -> more;
{ok, Types, Rest2} ->
unmarshal_signature(Rest2, [ {struct, Types} | Acc ]);
more -> more
end;
unmarshal_signature(<<$), Rest/bits>>, Acc) ->
{ok, lists:reverse(Acc), Rest};
unmarshal_signature(<<$}, Rest/bits>>, Acc) ->
{ok, Acc, Rest};
unmarshal_signature(<<C, Rest/bits>>, Acc) ->
Code = unmarshal_type_code(C),
unmarshal_signature(Rest, [Code | Acc]).
unmarshal_array_signature(<<>>) ->
more;
unmarshal_array_signature(<< $a, Rest/bits >>) ->
unmarshal_signature(<< $a, Rest/bits >>, []);
unmarshal_array_signature(<< $(, Rest/bits >>) ->
case unmarshal_signature(Rest, []) of
{ok, [], _} ->
more;
{ok, Types, Rest2} ->
{ok, {struct, Types}, Rest2};
more ->
more
end;
unmarshal_array_signature(<< C, Rest/bits >>) ->
Code = unmarshal_type_code(C),
{ok, Code, Rest}.
unmarshal_type_code($y) -> byte;
unmarshal_type_code($b) -> boolean;
unmarshal_type_code($n) -> int16;
unmarshal_type_code($q) -> uint16;
unmarshal_type_code($i) -> int32;
unmarshal_type_code($u) -> uint32;
unmarshal_type_code($x) -> int64;
unmarshal_type_code($t) -> uint64;
unmarshal_type_code($d) -> double;
unmarshal_type_code($s) -> string;
unmarshal_type_code($o) -> object_path;
unmarshal_type_code($g) -> signature;
unmarshal_type_code($r) -> struct;
unmarshal_type_code($v) -> variant;
unmarshal_type_code($e) -> dict_entry;
unmarshal_type_code($a) -> array;
unmarshal_type_code(_C) -> throw({bad_type_code, _C}).
unmarshal_struct(SubTypes, Data, Pos, Endian) ->
unmarshal_struct(SubTypes, Data, [], Pos, Endian).
unmarshal_struct([], Data, Acc, Pos, _) ->
{ok, lists:reverse(Acc), Data, Pos};
unmarshal_struct([SubType | S], Data, Acc, Pos, Endian) ->
case unmarshal(SubType, Data, Pos, Endian) of
more -> more;
{ok, Value, Data1, Pos1} ->
unmarshal_struct(S, Data1, [Value | Acc], Pos1, Endian)
end.
unmarshal_dict(KeyType, ValueType, Length, Data, Pos, Endian) ->
SubType = {struct, [KeyType, ValueType]},
Pad = pad(padding(SubType), Pos),
if
byte_size(Data) < Pad / 8 ->
more;
true ->
<< 0:Pad, Rest/binary >> = Data,
NewPos = Pos + Pad div 8,
unmarshal_dict(KeyType, ValueType, Length, Rest, #{}, NewPos, Endian)
end.
unmarshal_dict(_KeyType, _ValueType, 0, Data, Acc, Pos, _) ->
{ok, Acc, Data, Pos};
unmarshal_dict(KeyType, ValueType, Length, Data, Acc, Pos, Endian) when is_integer(Length), Length > 0 ->
SubType = {struct, [KeyType, ValueType]},
case unmarshal(SubType, Data, Pos, Endian) of
more ->
more;
{ok, {Key, Value}, Data1, Pos1} ->
Size = Pos1 - Pos,
unmarshal_dict(KeyType, ValueType, Length - Size, Data1, Acc#{ Key => Value }, Pos1, Endian)
end.
unmarshal_array(SubType, Length, Data, Pos, Endian) ->
Pad = pad(padding(SubType), Pos),
if
byte_size(Data) < Pad / 8 ->
more;
true ->
<< 0:Pad, Rest/binary >> = Data,
NewPos = Pos + Pad div 8,
unmarshal_array(SubType, Length, Rest, [], NewPos, Endian)
end.
unmarshal_array(_SubType, 0, Data, Acc, Pos, _) ->
{ok, lists:reverse(Acc), Data, Pos};
unmarshal_array(SubType, Length, Data, Acc, Pos, Endian) when is_integer(Length), Length > 0 ->
case unmarshal(SubType, Data, Pos, Endian) of
more ->
more;
{ok, Value, Data1, Pos1} ->
Size = Pos1 - Pos,
unmarshal_array(SubType, Length - Size, Data1, [Value | Acc], Pos1, Endian)
end.
unmarshal_tuple(Type, Data, Endian) when is_atom(Type), is_binary(Data), byte_size(Data) > 0 ->
unmarshal(Type, Data, 0, Endian);
unmarshal_tuple(Types, Data, Endian) when is_list(Types), is_binary(Data) ->
unmarshal_tuple(Types, Data, [], 0, Endian).
unmarshal_tuple([], Rest, Acc, Pos, _) ->
{ok, list_to_tuple(lists:reverse(Acc)), Rest, Pos};
unmarshal_tuple([Type|T], Data, Acc, Pos, Endian) when byte_size(Data) > 0 ->
case unmarshal(Type, Data, Pos, Endian) of
more ->
more;
{ok, Value, Rest, Pos1} ->
unmarshal_tuple(T, Rest, [Value | Acc], Pos1, Endian)
end.
unmarshal_string(LenType, Data, Pos, Endian) ->
case unmarshal(LenType, Data, Pos, Endian) of
more ->
more;
{ok, Length, Data1, _} when byte_size(Data1) < Length ->
more;
{ok, Length, Data1, Pos1} ->
<< String:Length/binary, 0, Data2/binary >> = Data1,
Pos2 = Pos1 + Length + 1,
{ok, String, Data2, Pos2}
end.
%%%
%%% Priv common
%%%
padding(byte) -> 1;
padding(boolean) -> 4;
padding(int16) -> 2;
padding(uint16) -> 2;
padding(int32) -> 4;
padding(uint32) -> 4;
padding(int64) -> 8;
padding(uint64) -> 8;
padding(double) -> 8;
padding(string) -> 4;
padding(object_path) -> 4;
padding(signature) -> 1;
padding({array, _Type}) -> 4;
padding({struct, _Types}) -> 8;
padding(variant) -> 1;
padding(dict) -> 4.
-spec pad(Size :: atom()|integer(), MessagePos :: integer()) ->
PaddingBits :: integer().
% @param Size
% The size of the binary alignment in bytes
% @param Pos
% The length of the formatted message in bytes
%
% Pos rem Size gives how many bytes beyond padding boundary
% the current data sits.
% (Size - (Pos rem Size)) gives the number of bytes of
% padding, except in the case where Pos rem Size is 0,
% which would otherwise give Size instead of 0.
% There are several ways of dealing with this case, the
% method chosen here is to do another rem.
% Finally, the padding should be represented in bits (not
% bytes) so multiply by 8.
pad(Size, Pos) when is_integer(Size) ->
((Size - (Pos rem Size)) rem Size) * 8;
pad(Type, Pos) when is_atom(Type);
array =:= element(1, Type);
struct =:= element(1, Type)->
pad(padding(Type), Pos).
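%% Worked examples for pad/2 above (results are in bits):
%%
%%   pad(4, 6)  -> ((4 - (6 rem 4)) rem 4) * 8 = 16   (2 bytes of padding)
%%   pad(8, 12) -> ((8 - (12 rem 8)) rem 8) * 8 = 32  (4 bytes of padding)
%%   pad(8, 16) -> 0                                  (already aligned)
%%   pad(string, 6) is pad(4, 6), via padding(string) = 4.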
%%%
%%% eunit
%%%
-ifdef(TEST).
marshal_list_test() ->
{Bin, Pos} = marshal_list([string, {array, {struct, [byte, string, variant]}}, uint32, int32],
[<<"#compute">>, [], 1, -1]),
?assertMatch({<<
50:8/little-unsigned-unit:4, "#compute", 0,
0, %% string + padding
0:8/little-unsigned-unit:4, 0:8/little-unsigned-unit:4, %% array length + padding (struct)
                    1:8/little-unsigned-unit:4,                              %% 1 (uint32)
                    -1:8/little-signed-unit:4                                %% -1 (int32)
>>,
72},
{iolist_to_binary(Bin), Pos}),
{Bin2, Pos2} = marshal_list([string, {array, {struct, [byte, string, variant]}}, uint32, int32],
[<<"#compute">>,
[{1, <<"str">>, 24}], 1, -1]),
?assertMatch({<<
50:8/little-unsigned-unit:4, "#compute", 0,
0, %% string + padding
18:8/little-unsigned-unit:4, 0:8/little-unsigned-unit:4, %% array length + padding (struct)
1:8, 0:8/unit:3, %% struct<byte + padding, ...
3:8/little-unsigned-unit:4, "str", 0, %% ...string...
1:8/little-unsigned-unit:1, $q, 0, 0, %% ...variant...
24:8/little-unsigned-unit:2, 0:8/little-unsigned-unit:2, %% uint16> + padding
                    1:8/little-unsigned-unit:4,                              %% 1 (uint32)
                    -1:8/little-signed-unit:4                                %% -1 (int32)
>>,
92},
{iolist_to_binary(Bin2), Pos2}).
marshall_byte_test_() ->
[
?_assertMatch({<< 16#ff >>, 1}, marshal(byte, 16#ff, 0)),
?_assertThrow({marshaling, byte, 256}, marshal(byte, 256, 3))
].
marshall_boolean_test_() ->
[
?_assertMatch({<< 1:8/integer-little-unit:4 >>, 4}, marshal(boolean, true, 0)),
?_assertMatch({<< 0:8/integer-little-unit:4 >>, 4}, marshal(boolean, false, 0)),
?_assertThrow({marshaling, boolean, else}, marshal(boolean, else, 0))
].
marshall_int_test_() ->
[
?_assertMatch({<< 67:8/integer-little-signed-unit:2 >>, 2}, marshal(int16, 67, 0)),
?_assertMatch({<< -67:8/integer-little-signed-unit:2 >>, 2}, marshal(int16, -67, 0)),
?_assertThrow({marshaling, int16, 300000}, marshal(int16, 300000, 0)),
?_assertMatch({<< 67:8/integer-little-unsigned-unit:2 >>, 2}, marshal(uint16, 67, 0)),
?_assertThrow({marshaling, uint16, -67}, marshal(uint16, -67, 0)),
?_assertMatch({<< 2000000000:8/integer-little-signed-unit:4 >>, 4}, marshal(int32, 2000000000, 0)),
?_assertMatch({<< -2000000000:8/integer-little-signed-unit:4 >>, 4}, marshal(int32, -2000000000, 0)),
?_assertThrow({marshaling, int32, 3000000000}, marshal(int32, 3000000000, 0)),
?_assertMatch({<< 4000000:8/integer-little-unsigned-unit:4 >>, 4}, marshal(uint32, 4000000, 0)),
?_assertThrow({marshaling, uint32, -67}, marshal(uint32, -67, 0)),
?_assertMatch({<< 4000000000:8/integer-little-signed-unit:8 >>, 8}, marshal(int64, 4000000000, 0)),
?_assertMatch({<< 4000000000:8/integer-little-unsigned-unit:8 >>, 8}, marshal(uint64, 4000000000, 0)),
?_assertThrow({marshaling, uint64, -400000}, marshal(uint64, -400000, 0))
].
marshall_float_test_() ->
[
?_assertMatch({<< 67:64/float-little-signed-unit:1 >>, 8}, marshal(double, 67, 0)),
%% Tests alignement
?_assertMatch({<< 0:8/unit:6, 67:64/float-little-signed-unit:1 >>, 16}, marshal(double, 67, 2))
].
marshall_string_test_() ->
[
?_assertMatch({[<< 9:8/integer-little-unsigned-unit:4 >>, <<"my string">>, 0 ], 14},
marshal(string, "my string", 0)),
?_assertMatch({[<< 9:8/integer-little-unsigned-unit:4 >>, <<"my string">>, 0 ], 14},
marshal(string, <<"my string">>, 0)),
?_assertMatch({[<< 7:8/integer-little-unsigned-unit:4 >>, <<"an_atom">>, 0 ], 12},
marshal(string, 'an_atom', 0)),
?_assertMatch({[<< 0:8/unit:2, 9:8/integer-little-unsigned-unit:4 >>, <<"my string">>, 0 ], 18},
marshal(string, "my string", 2))
].
marshall_object_path_test_() ->
[
?_assertMatch({[<< 10:8/integer-little-unsigned-unit:4 >>, <<"/my/string">>, 0 ], 15},
marshal(object_path, <<"/my/string">>, 0))
].
marshall_signature_test_() ->
[
?_assertMatch({[<< 6:8/integer-little-unsigned-unit:1 >>, <<"yasgoy">>, 0 ], 8},
marshal(signature, <<"yasgoy">>, 0))
].
marshall_array_test() ->
{Io, Pad} = marshal({array, string}, ["un", "deux", "trois"], 0),
?assertMatch({<<
30:8/integer-little-unsigned-unit:4,
2:8/integer-little-unsigned-unit:4, "un", 0, 0:8/unit:1,
4:8/integer-little-unsigned-unit:4, "deux", 0, 0:8/unit:3,
5:8/integer-little-unsigned-unit:4, "trois", 0
>>, 34},
{iolist_to_binary(Io), Pad}),
{Io2, Pad2} = marshal({array, string}, ["un", "deux", "trois"], 1),
?assertMatch({<<
0:8/unit:3, 30:8/integer-little-unsigned-unit:4,
2:8/integer-little-unsigned-unit:4, "un", 0, 0:8/unit:1,
4:8/integer-little-unsigned-unit:4, "deux", 0, 0:8/unit:3,
5:8/integer-little-unsigned-unit:4, "trois", 0
>>, 38},
{iolist_to_binary(Io2), Pad2}),
{Io3, Pad3} = marshal({array, uint64}, [500, 245], 0),
?assertMatch({<<
16:8/integer-little-unsigned-unit:4, 0:8/unit:4,
500:64/integer-little-unsigned-unit:1,
245:64/integer-little-unsigned-unit:1
>>, 24},
{iolist_to_binary(Io3), Pad3}).
unmarshal_byte_test_() ->
[
?_assertEqual({ok, 4, <<>>, 1}, unmarshal(byte, <<4>>, 0, $l))
,?_assertEqual({ok, 4, <<"xyz">>, 1}, unmarshal(byte, <<4, "xyz">>, 0, $l))
].
unmarshal_boolean_test_() ->
[
?_assertEqual({ok, true, <<>>, 4}, unmarshal(boolean, <<1,0,0,0>>, 0, $l))
,?_assertEqual({ok, true, <<"xyz">>, 4}, unmarshal(boolean, <<1,0,0,0,"xyz">>, 0, $l))
,?_assertEqual({ok, false, <<>>, 4}, unmarshal(boolean, <<0,0,0,0>>, 0, $l))
,?_assertEqual(more, unmarshal(boolean, <<"x">>, 0, $l))
,?_assertThrow({unmarshaling, boolean, <<2, 0, 0, 0>>}, unmarshal(boolean, <<2,0,0,0>>, 0, $l))
].
unmarshal_endian_test_() ->
[
?_assertEqual({ok, 1, <<>>, 4}, unmarshal(uint32, <<1,0,0,0>>, 0, $l))
,?_assertEqual({ok, 1, <<>>, 4}, unmarshal(uint32, <<0,0,0,1>>, 0, $B))
,?_assertEqual({ok, 1, <<"xyz">>, 4}, unmarshal(uint32, <<1,0,0,0, "xyz">>, 0, $l))
,?_assertEqual({ok, 1, <<"xyz">>, 4}, unmarshal(uint32, <<0,0,0,1, "xyz">>, 0, $B))
].
unmarshal_dict_test() ->
Bin = <<
29:8/integer-little-unsigned-unit:4, 0:8/unit:4,
$a, 0:8/unit:3,
4:8/integer-little-unsigned-unit:4, "plop", 0, 0:8/unit:3,
$b, 0:8/unit:3,
4:8/integer-little-unsigned-unit:4, "truc", 0
>>,
?assertMatch({ok, #{ $a := <<"plop">>, $b := <<"truc">> }, <<>>, 37},
unmarshal({dict, byte, string}, Bin, 0, $l)),
?assertMatch({ok, [ {$a, <<"plop">>}, {$b, <<"truc">>} ], <<>>, 37},
unmarshal({array, {struct, [byte, string]}}, Bin, 0, $l)),
DictVariant = <<
5:8/integer-little-unsigned-unit:1, "a{ys}", 0, 0:8/unit:1,
Bin/binary
>>,
?assertMatch({ok, #{ $a := <<"plop">>, $b := <<"truc">> }, <<>>, 45},
unmarshal(variant, DictVariant, 0, $l)),
ArrayVariant = <<
5:8/integer-little-unsigned-unit:1, "a(ys)", 0, 0:8/unit:1,
Bin/binary
>>,
?assertMatch({ok, [ {$a, <<"plop">>}, {$b, <<"truc">>} ], <<>>, 45},
unmarshal(variant, ArrayVariant, 0, $l)).
unmarshal_string_test_() ->
Bin = <<
8:8/integer-little-unsigned-unit:4,
"a string", 0
>>,
Variant = <<
1, $s, 0, 0:8/unit:1,
Bin/binary
>>,
[
?_assertMatch({ok, <<"a string">>, <<>>, 13},
unmarshal(string, Bin, 0, $l)),
?_assertMatch({ok, <<"a string">>, <<>>, 17},
unmarshal(variant, Variant, 0, $l))
].
unmarshal_signature_test() ->
[
?_assertMatch([
{array, {array, {array, string}}}, byte
], unmarshal_signature(<<"aaasy">>)),
?_assertMatch([
byte,
{dict, boolean, variant},
string,
string
], unmarshal_signature(<<"ya{bv}ss">>)),
?_assertMatch([
{array, {struct, [string, string, {array, string}, {dict, string, variant}, string]}},
string
], unmarshal_signature(<<"a(ssasa{sv}s)s">>))
].
-endif.
| null | https://raw.githubusercontent.com/jeanparpaillon/erlang-dbus/48a095b40bc81973ea49e542fe1d8854abc83efc/src/dbus_marshaller.erl | erlang |
@doc D-Bus binary format (un)marshaling.
See <a href="-specification.html#message-protocol-marshaling" >D-Bus Specification</a>.
@end
api
API
@doc Encode a message
As defined in dbus.hrl, a message is a header record and a body.
The marshal_message/1 function marshals the header but passes through
the body portion unchanged. It follows that given the result of this function is
an iolist, and the result of this function is [Header,Body], then Body must
be a valid iolist.
Note that prior to marshalling the message serial must be set, and that
the message body is unaffected by marshalling and so should be in a final form
ready for transmission.
@end
@doc Encode a signature
@end
@doc Encode objects, given a signature
@end
@doc Decode messages
Returns:
* `{ok, [dbus_message()], binary()}': if binary describe a complete list of messages, eventually with remaining binary.
* `more': if no complete message could be decoded.
@end
@doc Decode a signature
Returns `more' if no complete signature could be decoded.
@end
Priv marshalling
Can do better without going through all keys ?...
Private unmarshaling
Priv common
The size of the binary alignment in bytes
@param Pos
The length of the formatted message in bytes
Pos rem Size gives how many bytes beyond padding boundary
the current data sits.
(Size - (Pos rem Size)) gives the number of bytes of
There are several ways of dealing with this case, the
method chosen here is to do another rem.
Finally, the padding should be represented in bits (not
eunit
string + padding
array length + padding (struct)
-1 (int32
string + padding
array length + padding (struct)
struct<byte + padding, ...
...string...
...variant...
uint16> + padding
-1 (int32
Tests alignement | 2006 - 2007 , 2014 - 2106 Jean Parpaillon
@author < >
@author < >
-module(dbus_marshaller).
-include("dbus.hrl").
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-export([
marshal_message/1,
marshal_signature/1,
marshal_list/2,
unmarshal_data/1,
unmarshal_signature/1
]).
-define(HEADER_SIGNATURE, [byte, byte, byte, byte, uint32, uint32, {array, {struct, [byte, variant]}}]).
-type errors() :: invalid_serial
| {marshaling, dbus_type(), binary()}
| {unmarshaling, dbus_type(), binary()}
| {dbus_parse_error, term()}
| {bad_type_code, integer()}
| dbus_parse_error
| body_parse_error
| bad_header
| term().
-export_type([errors/0]).
%% Encodes a dbus_message into an iolist, including any padding that may be required.
%% Such a marshalled message is ready to send through a socket onto D-Bus.
-spec marshal_message(dbus_message()) -> iolist().
marshal_message(#dbus_message{header=#dbus_header{serial=0}}=_Msg) ->
throw(invalid_serial);
marshal_message(#dbus_message{header=#dbus_header{type=Type, flags=Flags, serial=S, fields=Fields},
body= <<>>}=_Msg) ->
marshal_header([$l, Type, Flags, ?DBUS_VERSION_MAJOR, 0, S, Fields]);
marshal_message(#dbus_message{header=#dbus_header{type=Type, flags=Flags, serial=S, fields=Fields},
body=Body}=_Msg) ->
[ marshal_header([$l, Type, Flags, ?DBUS_VERSION_MAJOR, iolist_size(Body), S, Fields]), Body ].
-spec marshal_signature(dbus_signature()) -> iolist().
marshal_signature(byte) -> "y";
marshal_signature(boolean) -> "b";
marshal_signature(int16) -> "n";
marshal_signature(uint16) -> "q";
marshal_signature(int32) -> "i";
marshal_signature(uint32) -> "u";
marshal_signature(int64) -> "x";
marshal_signature(uint64) -> "t";
marshal_signature(double) -> "d";
marshal_signature(string) -> "s";
marshal_signature(object_path) -> "o";
marshal_signature(signature) -> "g";
marshal_signature({array, Type}) ->
[$a, marshal_signature(Type)];
marshal_signature({struct, SubTypes}) ->
["(", marshal_struct_signature(SubTypes, []), ")"];
marshal_signature(variant) ->
"v";
marshal_signature({dict, KeyType, ValueType}) ->
KeySig = marshal_signature(KeyType),
ValueSig = marshal_signature(ValueType),
["a{", KeySig, ValueSig, "}"];
marshal_signature([]) ->
"";
marshal_signature([Type|R]) ->
[marshal_signature(Type), marshal_signature(R)].
-spec marshal_list(dbus_signature(), term()) -> {iolist(), integer()}.
marshal_list(Types, Value) ->
marshal_list(Types, Value, 0, []).
-spec unmarshal_data(binary()) -> {ok, Msgs :: [dbus_message()], Rest :: binary()}
| {error, errors()}
| more.
unmarshal_data(Data) ->
try unmarshal_data(Data, [])
catch throw:Err ->
{error, Err}
end.
-spec unmarshal_signature(binary()) -> {ok, dbus_signature()} | more.
unmarshal_signature(<<>>) ->
{ok, []};
unmarshal_signature(Bin) when is_binary(Bin) ->
case unmarshal_signature(Bin, []) of
{ok, Signature, <<>>} -> {ok, Signature};
more -> more
end.
marshal_header(Header) when is_list(Header) ->
{Value, Pos} = marshal_list(?HEADER_SIGNATURE, Header),
case pad(8, Pos) of
0 -> Value;
Pad -> [Value, <<0:Pad>>]
end.
marshal_list([], [], Pos, Res) ->
{Res, Pos};
marshal_list([Type | T], [Value | V], Pos, Res) ->
{Res1, Pos1} = marshal(Type, Value, Pos),
marshal_list(T, V, Pos1, [Res, Res1]).
marshal(byte, Value, Pos) when is_integer(Value) andalso 255 >= Value ->
marshal_uint(1, Value, Pos);
marshal(boolean, Value, Pos) when true =:= Value orelse false =:= Value ->
Int =
case Value of
true -> 1;
false -> 0
end,
marshal(uint32, Int, Pos);
marshal(int16, Value, Pos) when Value > -32767 andalso Value =< 32767 ->
marshal_int(2, Value, Pos);
marshal(uint16, Value, Pos) when Value >= 0 andalso Value =< 65535 ->
marshal_uint(2, Value, Pos);
marshal(int32, Value, Pos) when Value >= -2147483647 andalso Value =< 2147483647->
marshal_int(4, Value, Pos);
marshal(uint32, Value, Pos) when Value >= 0 andalso Value =< 4294967295 ->
marshal_uint(4, Value, Pos);
marshal(int64, Value, Pos) ->
marshal_int(8, Value, Pos);
marshal(uint64, Value, Pos) when Value >= 0 ->
marshal_uint(8, Value, Pos);
marshal(double, Value, Pos) when is_integer(Value) ->
Pad = pad(8, Pos),
{<< 0:Pad, (float(Value)):64/little-float >>, Pos + Pad div 8+ 8};
marshal(double, Value, Pos) when is_float(Value) ->
Pad = pad(8, Pos),
{<< 0:Pad, Value:64/little-float >>, Pos + Pad div 8+ 8};
marshal(string, Value, Pos) when is_atom(Value) ->
marshal(string, atom_to_binary(Value, utf8), Pos);
marshal(string, Value, Pos) when is_binary(Value) ->
marshal_string(uint32, Value, Pos);
marshal(string, Value, Pos) when is_list(Value) ->
marshal(string, list_to_binary(Value), Pos);
marshal(object_path, Value, Pos) ->
marshal(string, Value, Pos);
marshal(signature, Value, Pos) ->
marshal_string(byte, Value, Pos);
marshal({array, {struct, [_KeyType, _ValueType]}=SubType}, Value, Pos) when is_map(Value) ->
marshal_array(SubType, maps:to_list(Value), Pos);
marshal({array, byte}=_Type, Value, Pos) when is_binary(Value) ->
marshal_byte_array(Value, Pos);
marshal({array, SubType}, Value, Pos) when is_list(Value) ->
marshal_array(SubType, Value, Pos);
marshal({struct, _SubTypes}=Type, Value, Pos) when is_tuple(Value) ->
marshal(Type, tuple_to_list(Value), Pos);
marshal({struct, SubTypes}, Value, Pos) when is_list(Value) ->
marshal_struct(SubTypes, Value, Pos);
marshal({dict, KeyType, ValueType}, Value, Pos) ->
marshal_dict(KeyType, ValueType, Value, Pos);
marshal(variant, Value, Pos) when is_binary(Value) ->
marshal_variant({array, byte}, Value, Pos);
marshal(variant, #dbus_variant{type=Type, value=Value}, Pos) ->
marshal_variant(Type, Value, Pos);
marshal(variant, true=Value, Pos) ->
marshal_variant(boolean, Value, Pos);
marshal(variant, false=Value, Pos) ->
marshal_variant(boolean, Value, Pos);
marshal(variant, Value, Pos) when is_float(Value) ->
marshal_variant(double, Value, Pos);
marshal(variant, Value, Pos) when is_integer(Value), Value < 0 ->
marshal_int_variant(Value, Pos);
marshal(variant, Value, Pos) when is_integer(Value), Value >= 0 ->
marshal_uint_variant(Value, Pos);
marshal(variant, Value, Pos) when is_list(Value) ->
marshal(variant, list_to_binary(Value), Pos);
marshal(variant, Value, Pos) when is_atom(Value) ->
marshal_variant(string, atom_to_binary(Value, utf8), Pos);
marshal(variant, Value, Pos) ->
Type = infer_type(Value),
marshal_variant(Type, Value, Pos);
marshal(Type, {dbus_variant, Type, Value}, Pos) ->
marshal(Type, Value, Pos);
marshal(Type, Value, _) ->
throw({marshaling, Type, Value}).
infer_type(Value) when is_binary(Value)->
{array, byte};
infer_type(true) ->
boolean;
infer_type(false) ->
boolean;
infer_type(Value) when is_integer(Value), Value < 0 ->
infer_int(Value);
infer_type(Value) when is_integer(Value), Value >= 0 ->
infer_uint(Value);
infer_type(Value) when is_tuple(Value) ->
infer_struct(tuple_to_list(Value));
infer_type(Value) when is_atom(Value)->
string;
infer_type(Value) when is_list(Value) ->
string;
infer_type(Value) when is_map(Value) ->
infer_dict(Value).
infer_struct(Values) ->
{struct, infer_struct(Values, [])}.
infer_struct([], Res) ->
lists:reverse(Res);
infer_struct([ Value | R ], Res) ->
infer_struct(R, [ infer_type(Value) | Res ]).
infer_int(Value) when Value >= -32767 ->
int16;
infer_int(Value) when Value >= -2147483647 ->
int32;
infer_int(_Value) ->
int64.
infer_uint(Value) when Value < 32768 ->
uint16;
infer_uint(Value) when Value < 4294967296 ->
uint32;
infer_uint(_Value) ->
uint64.
infer_dict(_Value) ->
{dict, variant, variant}.
marshal_int_variant(Value, Pos) when Value >= -32768 ->
marshal_variant(int16, Value, Pos);
marshal_int_variant(Value, Pos) when Value >= -4294967296 ->
marshal_variant(int32, Value, Pos);
marshal_int_variant(Value, Pos) ->
marshal_variant(int64, Value, Pos).
marshal_uint_variant(Value, Pos) when Value < 32768 ->
marshal_variant(uint16, Value, Pos);
marshal_uint_variant(Value, Pos) when Value < 4294967296 ->
marshal_variant(uint32, Value, Pos);
marshal_uint_variant(Value, Pos) ->
marshal_variant(uint64, Value, Pos).
marshal_variant(Type, Value, Pos) ->
{Value1, Pos1} = marshal(signature, marshal_signature(Type), Pos),
{Value2, Pos2} = marshal(Type, Value, Pos1),
{[Value1, Value2], Pos2}.
marshal_uint(Len, Value, Pos) when is_integer(Value) ->
Pad = pad(Len, Pos),
{<< 0:Pad, Value:(Len*8)/little-unsigned >>, Pos + Pad div 8 + Len}.
marshal_int(Len, Value, Pos) when is_integer(Value) ->
Pad = pad(Len, Pos),
{<< 0:Pad, Value:(Len*8)/little-signed >>, Pos + Pad div 8 + Len}.
marshal_string(LenType, Value, Pos) when is_list(Value) ->
marshal_string(LenType, list_to_binary(Value), Pos);
marshal_string(LenType, Value, Pos) when is_binary(Value) ->
Length = byte_size(Value),
{Value1, Pos1} = marshal(LenType, Length, Pos),
{[Value1, Value, 0], Pos1 + Length + 1}.
marshal_byte_array(Value, Pos) ->
Pad = pad(uint32, Pos),
Pos0 = Pos + Pad div 8,
Pos1 = Pos0 + 4,
Pad1 = pad(byte, Pos1),
Pos1b = Pos1 + Pad1 div 8,
Length = byte_size(Value),
Pos2 = Pos1b + Length,
{Value1, Pos1} = marshal(uint32, Length, Pos0),
{[<<0:Pad>>, Value1, <<0:Pad1>>, Value], Pos2}.
marshal_array(SubType, Value, Pos) ->
Pad = pad(uint32, Pos),
Pos0 = Pos + Pad div 8,
Pos1 = Pos0 + 4,
Pad1 = pad(SubType, Pos1),
Pos1b = Pos1 + Pad1 div 8,
{Value2, Pos2} = marshal_array_item(SubType, Value, Pos1b),
Length = Pos2 - Pos1b,
{Value1, Pos1} = marshal(uint32, Length, Pos0),
{[<<0:Pad>>, Value1, <<0:Pad1>>, Value2], Pos2}.
marshal_array_item(SubType, Array, Pos) ->
marshal_array_item(SubType, Array, Pos, []).
marshal_array_item(_SubType, [], Pos, Res) ->
{Res, Pos};
marshal_array_item(SubType, [ Value | R ], Pos, Res) ->
{Value1, Pos1} = marshal(SubType, Value, Pos),
marshal_array_item(SubType, R, Pos1, [Res, Value1]).
marshal_dict(KeyType, ValueType, Value, Pos) when is_map(Value) ->
marshal_array({struct, [KeyType, ValueType]}, maps:to_list(Value), Pos);
marshal_dict(KeyType, ValueType, Value, Pos) when is_list(Value) ->
marshal_array({struct, [KeyType, ValueType]}, Value, Pos);
marshal_dict(KeyType, ValueType, Value, Pos) when element(1, Value) == dict ->
marshal_array({struct, [KeyType, ValueType]}, dict:to_list(Value), Pos).
marshal_struct(SubTypes, Values, Pos) ->
Pad = pad(8, Pos),
{Values1, Pos1} = marshal_struct(SubTypes, Values, Pos + Pad div 8, []),
if
Pad == 0 ->
{Values1, Pos1};
Pad > 0 ->
{[<< 0:Pad >>, Values1], Pos1}
end.
marshal_struct([], [], Pos, Res) ->
{Res, Pos};
marshal_struct([SubType|R], [Value|V], Pos, Res) ->
{Value1, Pos1} = marshal(SubType, Value, Pos),
marshal_struct(R, V, Pos1, [Res, Value1]).
marshal_struct_signature([], Res) ->
Res;
marshal_struct_signature([SubType|R], Res) ->
marshal_struct_signature(R, [Res, marshal_signature(SubType)]).
unmarshal_data(<<>>, []) ->
more;
unmarshal_data(<<>>, Acc) ->
{ok, lists:reverse(Acc), <<>>};
unmarshal_data(Data, Acc) ->
try unmarshal_message(Data) of
{ok, #dbus_message{}=Msg, Rest} ->
unmarshal_data(Rest, [Msg | Acc]);
more when [] =:= Acc ->
more;
more ->
{ok, lists:reverse(Acc), Data};
_ ->
?error("Error parsing data~n", []),
throw(dbus_parse_error)
catch
{'EXIT', Err} ->
throw({dbus_parse_error, Err})
end.
unmarshal_message(<<>>) ->
more;
unmarshal_message(Data) when is_binary(Data) ->
case unmarshal_header(Data) of
more ->
more;
{ok, #dbus_header{endian=Endian, type=MsgType}=Header, BodyBin, Rest} ->
case dbus_message:find_field(?FIELD_SIGNATURE, Header) of
undefined ->
case BodyBin of
<<>> -> {ok, #dbus_message{header=Header, body=undefined}, Rest};
_ -> throw(body_parse_error)
end;
Signature ->
case unmarshal_body(MsgType, Signature, BodyBin, Endian) of
{ok, Body} -> {ok, #dbus_message{header=Header, body=Body}, Rest};
more -> more;
{error, Err} -> throw(Err)
end
end
end.
unmarshal_body(?TYPE_INVALID, _, _, _) ->
{ok, undefined};
unmarshal_body(_, SigBin, BodyBin, Endian) ->
case unmarshal_signature(SigBin) of
{ok, Sig} ->
case unmarshal_tuple(Sig, BodyBin, Endian) of
more -> more;
{ok, {}, <<>>, _Pos} ->
{ok, undefined};
{ok, {Body}, <<>>, _Pos} ->
{ok, Body};
{ok, Body, <<>>, _Pos} ->
{ok, Body};
{ok, _Body, _, _} -> {error, body_parse_error}
end;
more -> more
end.
unmarshal_header(Bin) when byte_size(Bin) < 16 ->
more;
unmarshal_header(<<Endian/integer, Type/integer, Flags/integer, ?DBUS_VERSION_MAJOR, Rest/bits>>) ->
unmarshal_header2(Rest, #dbus_header{endian=Endian, type=Type, flags=Flags});
unmarshal_header(_Data) ->
?debug("Bad message header: ~p~n", [_Data]),
throw(bad_header).
unmarshal_header2(<<Length:4/unsigned-little-integer-unit:8, Serial:4/unsigned-little-integer-unit:8, Bin/bits>>,
#dbus_header{endian=$l}=Header) ->
unmarshal_header_fields(Bin, Header#dbus_header{size=Length, serial=Serial});
unmarshal_header2(<<Length:4/unsigned-big-integer-unit:8, Serial:4/unsigned-big-integer-unit:8, Bin/bits>>,
#dbus_header{endian=$B}=Header) ->
unmarshal_header_fields(Bin, Header#dbus_header{size=Length, serial=Serial}).
unmarshal_header_fields(Bin, #dbus_header{endian=Endian, size=Size}=Header) ->
case unmarshal({array, {struct, [byte, variant]}}, Bin, 12, Endian) of
more ->
more;
{ok, [_, _, _, ?DBUS_VERSION_MAJOR, Size, _, _], Rest, _} when byte_size(Rest) < Size ->
more;
{ok, Fields, Rest, Pos} ->
Pad = pad(8, Pos),
if
byte_size(Rest) < Pad/8 + Size ->
more;
true ->
<<0:Pad, Body:Size/binary, Rest2/binary>> = Rest,
{ok, Header#dbus_header{fields=Fields}, Body, Rest2}
end
end.
unmarshal_single_type(<<>>) ->
empty;
unmarshal_single_type(Bin) when is_binary(Bin) ->
case unmarshal_signature(Bin, []) of
{ok, [Type], <<>>} -> {ok, Type};
{ok, _, _} -> throw({unmarshaling, signature, Bin});
more -> more
end.
unmarshal(_, <<>>, _, _) ->
more;
unmarshal(byte, Data, Pos, _) ->
<< Value:8, Data1/binary >> = Data,
{ok, Value, Data1, Pos + 1};
unmarshal(boolean, Data, Pos, Endian) ->
case unmarshal(uint32, Data, Pos, Endian) of
more -> more;
{ok, 1, Data1, Pos1} ->
{ok, true, Data1, Pos1};
{ok, 0, Data1, Pos1} ->
{ok, false, Data1, Pos1};
{ok, _, _, _} ->
throw({unmarshaling, boolean, Data})
end;
unmarshal(uint16, Data, Pos, Endian) ->
unmarshal_uint(2, Data, Pos, Endian);
unmarshal(uint32, Data, Pos, Endian) ->
unmarshal_uint(4, Data, Pos, Endian);
unmarshal(uint64, Data, Pos, Endian) ->
unmarshal_uint(8, Data, Pos, Endian);
unmarshal(int16, Data, Pos, Endian) ->
unmarshal_int(2, Data, Pos, Endian);
unmarshal(int32, Data, Pos, Endian) ->
unmarshal_int(4, Data, Pos, Endian);
unmarshal(int64, Data, Pos, Endian) ->
unmarshal_int(8, Data, Pos, Endian);
unmarshal(double, Data, _, _) when byte_size(Data) < 8 ->
more;
unmarshal(double, Data, Pos, Endian) ->
Pad = pad(8, Pos),
{Value, Data1} = case Endian of
$l ->
<< 0:Pad, V:64/little-float, D/binary >> = Data,
{V, D};
$B ->
<< 0:Pad, V:64/big-float, D/binary >> = Data,
{V, D}
end,
Pos1 = Pos + Pad div 8 + 8,
{ok, Value, Data1, Pos1};
unmarshal(signature, Data, Pos, Endian) ->
unmarshal_string(byte, Data, Pos, Endian);
unmarshal(string, Data, Pos, Endian) ->
unmarshal_string(uint32, Data, Pos, Endian);
unmarshal(object_path, Data, Pos, Endian) ->
unmarshal_string(uint32, Data, Pos, Endian);
unmarshal({array, SubType}, Data, Pos, Endian) ->
case unmarshal(uint32, Data, Pos, Endian) of
more ->
more;
{ok, Length, Rest, NewPos} ->
unmarshal_array(SubType, Length, Rest, NewPos, Endian)
end;
unmarshal({struct, _}, Data, _, _) when byte_size(Data) < 8 ->
more;
unmarshal({struct, SubTypes}, Data, Pos, Endian) ->
Pad = pad(8, Pos),
<< 0:Pad, Data1/binary >> = Data,
Pos1 = Pos + Pad div 8,
case unmarshal_struct(SubTypes, Data1, Pos1, Endian) of
more ->
more;
{ok, Res, Data2, Pos2} ->
{ok, list_to_tuple(Res), Data2, Pos2}
end;
unmarshal({dict, KeyType, ValueType}, Data, Pos, Endian) ->
case unmarshal(uint32, Data, Pos, Endian) of
more ->
more;
{ok, Length, Data1, Pos1} ->
case unmarshal_dict(KeyType, ValueType, Length, Data1, Pos1, Endian) of
more ->
more;
{ok, Res, Data2, Pos2} ->
{ok, Res, Data2, Pos2}
end
end;
unmarshal(variant, Data, Pos, Endian) ->
case unmarshal(signature, Data, Pos, Endian) of
more ->
more;
{ok, _, <<>>, _} ->
more;
{ok, Signature, Data1, Pos1} ->
case unmarshal_single_type(Signature) of
more -> more;
{ok, Type} ->
case unmarshal(Type, Data1, Pos1, Endian) of
more ->
more;
{ok, Value, Data2, Pos2} ->
{ok, Value, Data2, Pos2}
end
end
end.
unmarshal_uint(Len, Data, _, _) when is_integer(Len) andalso byte_size(Data) < Len ->
more;
unmarshal_uint(Len, Data, Pos, Endian) when is_integer(Len) ->
Bitlen = Len * 8,
Pad = pad(Len, Pos),
{Value, Data1} = case Endian of
$l ->
<< 0:Pad, V:Bitlen/little-unsigned, D/binary >> = Data,
{V, D};
$B ->
<< 0:Pad, V:Bitlen/big-unsigned, D/binary >> = Data,
{V, D}
end,
Pos1 = Pos + Pad div 8 + Len,
{ok, Value, Data1, Pos1}.
unmarshal_int(Len, Data, _, _) when is_integer(Len) andalso byte_size(Data) < Len ->
more;
unmarshal_int(Len, Data, Pos, Endian) ->
Bitlen = Len * 8,
Pad = pad(Len, Pos),
{Value, Data1} = case Endian of
$l ->
<< 0:Pad, V:Bitlen/little-signed, D/binary >> = Data,
{V, D};
$B ->
<< 0:Pad, V:Bitlen/big-signed, D/binary >> = Data,
{V, D}
end,
Pos1 = Pos + Pad div 8 + Len,
{ok, Value, Data1, Pos1}.
unmarshal_signature(<<>>, Acc) ->
{ok, lists:reverse(Acc), <<>>};
unmarshal_signature(<<$a, ${, KeySig, Rest/bits>>, Acc) ->
KeyType = unmarshal_type_code(KeySig),
case unmarshal_signature(Rest, []) of
{ok, [], _} ->
more;
{ok, [ValueType], Rest2} ->
unmarshal_signature(Rest2, [ {dict, KeyType, ValueType} | Acc ]);
{ok, _, _} ->
throw({unmarshaling, dict, KeySig, Rest});
more ->
more
end;
unmarshal_signature(<<$a, Rest/bits>>, Acc) ->
case unmarshal_array_signature(Rest) of
{ok, Type, Rest2} ->
unmarshal_signature(Rest2, [ {array, Type} | Acc ]);
more -> more
end;
unmarshal_signature(<<$(, Rest/bits>>, Acc) ->
case unmarshal_signature(Rest, []) of
{ok, [], _} -> more;
{ok, Types, Rest2} ->
unmarshal_signature(Rest2, [ {struct, Types} | Acc ]);
more -> more
end;
unmarshal_signature(<<$), Rest/bits>>, Acc) ->
{ok, lists:reverse(Acc), Rest};
unmarshal_signature(<<$}, Rest/bits>>, Acc) ->
{ok, Acc, Rest};
unmarshal_signature(<<C, Rest/bits>>, Acc) ->
Code = unmarshal_type_code(C),
unmarshal_signature(Rest, [Code | Acc]).
unmarshal_array_signature(<<>>) ->
more;
unmarshal_array_signature(<< $a, Rest/bits >>) ->
unmarshal_signature(<< $a, Rest/bits >>, []);
unmarshal_array_signature(<< $(, Rest/bits >>) ->
case unmarshal_signature(Rest, []) of
{ok, [], _} ->
more;
{ok, Types, Rest2} ->
{ok, {struct, Types}, Rest2};
more ->
more
end;
unmarshal_array_signature(<< C, Rest/bits >>) ->
Code = unmarshal_type_code(C),
{ok, Code, Rest}.
unmarshal_type_code($y) -> byte;
unmarshal_type_code($b) -> boolean;
unmarshal_type_code($n) -> int16;
unmarshal_type_code($q) -> uint16;
unmarshal_type_code($i) -> int32;
unmarshal_type_code($u) -> uint32;
unmarshal_type_code($x) -> int64;
unmarshal_type_code($t) -> uint64;
unmarshal_type_code($d) -> double;
unmarshal_type_code($s) -> string;
unmarshal_type_code($o) -> object_path;
unmarshal_type_code($g) -> signature;
unmarshal_type_code($r) -> struct;
unmarshal_type_code($v) -> variant;
unmarshal_type_code($e) -> dict_entry;
unmarshal_type_code($a) -> array;
unmarshal_type_code(_C) -> throw({bad_type_code, _C}).
unmarshal_struct(SubTypes, Data, Pos, Endian) ->
unmarshal_struct(SubTypes, Data, [], Pos, Endian).
unmarshal_struct([], Data, Acc, Pos, _) ->
{ok, lists:reverse(Acc), Data, Pos};
unmarshal_struct([SubType | S], Data, Acc, Pos, Endian) ->
case unmarshal(SubType, Data, Pos, Endian) of
more -> more;
{ok, Value, Data1, Pos1} ->
unmarshal_struct(S, Data1, [Value | Acc], Pos1, Endian)
end.
unmarshal_dict(KeyType, ValueType, Length, Data, Pos, Endian) ->
SubType = {struct, [KeyType, ValueType]},
Pad = pad(padding(SubType), Pos),
if
byte_size(Data) < Pad / 8 ->
more;
true ->
<< 0:Pad, Rest/binary >> = Data,
NewPos = Pos + Pad div 8,
unmarshal_dict(KeyType, ValueType, Length, Rest, #{}, NewPos, Endian)
end.
unmarshal_dict(_KeyType, _ValueType, 0, Data, Acc, Pos, _) ->
{ok, Acc, Data, Pos};
unmarshal_dict(KeyType, ValueType, Length, Data, Acc, Pos, Endian) when is_integer(Length), Length > 0 ->
SubType = {struct, [KeyType, ValueType]},
case unmarshal(SubType, Data, Pos, Endian) of
more ->
more;
{ok, {Key, Value}, Data1, Pos1} ->
Size = Pos1 - Pos,
unmarshal_dict(KeyType, ValueType, Length - Size, Data1, Acc#{ Key => Value }, Pos1, Endian)
end.
unmarshal_array(SubType, Length, Data, Pos, Endian) ->
Pad = pad(padding(SubType), Pos),
if
byte_size(Data) < Pad / 8 ->
more;
true ->
<< 0:Pad, Rest/binary >> = Data,
NewPos = Pos + Pad div 8,
unmarshal_array(SubType, Length, Rest, [], NewPos, Endian)
end.
unmarshal_array(_SubType, 0, Data, Acc, Pos, _) ->
{ok, lists:reverse(Acc), Data, Pos};
unmarshal_array(SubType, Length, Data, Acc, Pos, Endian) when is_integer(Length), Length > 0 ->
case unmarshal(SubType, Data, Pos, Endian) of
more ->
more;
{ok, Value, Data1, Pos1} ->
Size = Pos1 - Pos,
unmarshal_array(SubType, Length - Size, Data1, [Value | Acc], Pos1, Endian)
end.
unmarshal_tuple(Type, Data, Endian) when is_atom(Type), is_binary(Data), byte_size(Data) > 0 ->
unmarshal(Type, Data, 0, Endian);
unmarshal_tuple(Types, Data, Endian) when is_list(Types), is_binary(Data) ->
unmarshal_tuple(Types, Data, [], 0, Endian).
unmarshal_tuple([], Rest, Acc, Pos, _) ->
{ok, list_to_tuple(lists:reverse(Acc)), Rest, Pos};
unmarshal_tuple([Type|T], Data, Acc, Pos, Endian) when byte_size(Data) > 0 ->
case unmarshal(Type, Data, Pos, Endian) of
more ->
more;
{ok, Value, Rest, Pos1} ->
unmarshal_tuple(T, Rest, [Value | Acc], Pos1, Endian)
end.
unmarshal_string(LenType, Data, Pos, Endian) ->
case unmarshal(LenType, Data, Pos, Endian) of
more ->
more;
{ok, Length, Data1, _} when byte_size(Data1) < Length ->
more;
{ok, Length, Data1, Pos1} ->
<< String:Length/binary, 0, Data2/binary >> = Data1,
Pos2 = Pos1 + Length + 1,
{ok, String, Data2, Pos2}
end.
padding(byte) -> 1;
padding(boolean) -> 4;
padding(int16) -> 2;
padding(uint16) -> 2;
padding(int32) -> 4;
padding(uint32) -> 4;
padding(int64) -> 8;
padding(uint64) -> 8;
padding(double) -> 8;
padding(string) -> 4;
padding(object_path) -> 4;
padding(signature) -> 1;
padding({array, _Type}) -> 4;
padding({struct, _Types}) -> 8;
padding(variant) -> 1;
padding(dict) -> 4.
-spec pad(Size :: atom()|integer(), MessagePos :: integer()) ->
PaddingBits :: integer().
%% Returns the number of padding bits needed to align MessagePos to Size:
%% (Size - Pos rem Size) gives the padding except in the case where
%% Pos rem Size is 0, which will give Size instead of 0 -- hence the
%% outer rem Size. Sizes are in bytes and the result is in bits, so multiply by 8.
pad(Size, Pos) when is_integer(Size) ->
((Size - (Pos rem Size)) rem Size) * 8;
pad(Type, Pos) when is_atom(Type);
array =:= element(1, Type);
struct =:= element(1, Type)->
pad(padding(Type), Pos).
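%% Worked examples (added for illustration; not from the original source):
%%   pad(4, 0)      -> 0     already aligned, no padding
%%   pad(4, 6)      -> 16    2 bytes of padding = 16 bits
%%   pad(8, 12)     -> 32    4 bytes of padding = 32 bits
%%   pad(string, 2) -> 16    same as pad(4, 2), since padding(string) is 4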
-ifdef(TEST).
marshal_list_test() ->
{Bin, Pos} = marshal_list([string, {array, {struct, [byte, string, variant]}}, uint32, int32],
[<<"#compute">>, [], 1, -1]),
?assertMatch({<<
                   50:8/little-unsigned-unit:4, "#compute", 0
                   %% (the remaining bytes of the expected binary -- the empty
                   %% array, uint32 1 and int32 -1 -- were lost in this dump)
>>,
72},
{iolist_to_binary(Bin), Pos}),
{Bin2, Pos2} = marshal_list([string, {array, {struct, [byte, string, variant]}}, uint32, int32],
[<<"#compute">>,
[{1, <<"str">>, 24}], 1, -1]),
?assertMatch({<<
                   50:8/little-unsigned-unit:4, "#compute", 0
                   %% (the remaining bytes of the expected binary -- the
                   %% {byte, string, variant} element, uint32 1 and int32 -1 --
                   %% were lost in this dump)
>>,
92},
{iolist_to_binary(Bin2), Pos2}).
marshall_byte_test_() ->
[
?_assertMatch({<< 16#ff >>, 1}, marshal(byte, 16#ff, 0)),
?_assertThrow({marshaling, byte, 256}, marshal(byte, 256, 3))
].
marshall_boolean_test_() ->
[
?_assertMatch({<< 1:8/integer-little-unit:4 >>, 4}, marshal(boolean, true, 0)),
?_assertMatch({<< 0:8/integer-little-unit:4 >>, 4}, marshal(boolean, false, 0)),
?_assertThrow({marshaling, boolean, else}, marshal(boolean, else, 0))
].
marshall_int_test_() ->
[
?_assertMatch({<< 67:8/integer-little-signed-unit:2 >>, 2}, marshal(int16, 67, 0)),
?_assertMatch({<< -67:8/integer-little-signed-unit:2 >>, 2}, marshal(int16, -67, 0)),
?_assertThrow({marshaling, int16, 300000}, marshal(int16, 300000, 0)),
?_assertMatch({<< 67:8/integer-little-unsigned-unit:2 >>, 2}, marshal(uint16, 67, 0)),
?_assertThrow({marshaling, uint16, -67}, marshal(uint16, -67, 0)),
?_assertMatch({<< 2000000000:8/integer-little-signed-unit:4 >>, 4}, marshal(int32, 2000000000, 0)),
?_assertMatch({<< -2000000000:8/integer-little-signed-unit:4 >>, 4}, marshal(int32, -2000000000, 0)),
?_assertThrow({marshaling, int32, 3000000000}, marshal(int32, 3000000000, 0)),
?_assertMatch({<< 4000000:8/integer-little-unsigned-unit:4 >>, 4}, marshal(uint32, 4000000, 0)),
?_assertThrow({marshaling, uint32, -67}, marshal(uint32, -67, 0)),
?_assertMatch({<< 4000000000:8/integer-little-signed-unit:8 >>, 8}, marshal(int64, 4000000000, 0)),
?_assertMatch({<< 4000000000:8/integer-little-unsigned-unit:8 >>, 8}, marshal(uint64, 4000000000, 0)),
?_assertThrow({marshaling, uint64, -400000}, marshal(uint64, -400000, 0))
].
marshall_float_test_() ->
[
?_assertMatch({<< 67:64/float-little-signed-unit:1 >>, 8}, marshal(double, 67, 0)),
?_assertMatch({<< 0:8/unit:6, 67:64/float-little-signed-unit:1 >>, 16}, marshal(double, 67, 2))
].
marshall_string_test_() ->
[
?_assertMatch({[<< 9:8/integer-little-unsigned-unit:4 >>, <<"my string">>, 0 ], 14},
marshal(string, "my string", 0)),
?_assertMatch({[<< 9:8/integer-little-unsigned-unit:4 >>, <<"my string">>, 0 ], 14},
marshal(string, <<"my string">>, 0)),
?_assertMatch({[<< 7:8/integer-little-unsigned-unit:4 >>, <<"an_atom">>, 0 ], 12},
marshal(string, 'an_atom', 0)),
?_assertMatch({[<< 0:8/unit:2, 9:8/integer-little-unsigned-unit:4 >>, <<"my string">>, 0 ], 18},
marshal(string, "my string", 2))
].
marshall_object_path_test_() ->
[
?_assertMatch({[<< 10:8/integer-little-unsigned-unit:4 >>, <<"/my/string">>, 0 ], 15},
marshal(object_path, <<"/my/string">>, 0))
].
marshall_signature_test_() ->
[
?_assertMatch({[<< 6:8/integer-little-unsigned-unit:1 >>, <<"yasgoy">>, 0 ], 8},
marshal(signature, <<"yasgoy">>, 0))
].
marshall_array_test() ->
{Io, Pad} = marshal({array, string}, ["un", "deux", "trois"], 0),
?assertMatch({<<
30:8/integer-little-unsigned-unit:4,
2:8/integer-little-unsigned-unit:4, "un", 0, 0:8/unit:1,
4:8/integer-little-unsigned-unit:4, "deux", 0, 0:8/unit:3,
5:8/integer-little-unsigned-unit:4, "trois", 0
>>, 34},
{iolist_to_binary(Io), Pad}),
{Io2, Pad2} = marshal({array, string}, ["un", "deux", "trois"], 1),
?assertMatch({<<
0:8/unit:3, 30:8/integer-little-unsigned-unit:4,
2:8/integer-little-unsigned-unit:4, "un", 0, 0:8/unit:1,
4:8/integer-little-unsigned-unit:4, "deux", 0, 0:8/unit:3,
5:8/integer-little-unsigned-unit:4, "trois", 0
>>, 38},
{iolist_to_binary(Io2), Pad2}),
{Io3, Pad3} = marshal({array, uint64}, [500, 245], 0),
?assertMatch({<<
16:8/integer-little-unsigned-unit:4, 0:8/unit:4,
500:64/integer-little-unsigned-unit:1,
245:64/integer-little-unsigned-unit:1
>>, 24},
{iolist_to_binary(Io3), Pad3}).
unmarshal_byte_test_() ->
[
?_assertEqual({ok, 4, <<>>, 1}, unmarshal(byte, <<4>>, 0, $l))
,?_assertEqual({ok, 4, <<"xyz">>, 1}, unmarshal(byte, <<4, "xyz">>, 0, $l))
].
unmarshal_boolean_test_() ->
[
?_assertEqual({ok, true, <<>>, 4}, unmarshal(boolean, <<1,0,0,0>>, 0, $l))
,?_assertEqual({ok, true, <<"xyz">>, 4}, unmarshal(boolean, <<1,0,0,0,"xyz">>, 0, $l))
,?_assertEqual({ok, false, <<>>, 4}, unmarshal(boolean, <<0,0,0,0>>, 0, $l))
,?_assertEqual(more, unmarshal(boolean, <<"x">>, 0, $l))
,?_assertThrow({unmarshaling, boolean, <<2, 0, 0, 0>>}, unmarshal(boolean, <<2,0,0,0>>, 0, $l))
].
unmarshal_endian_test_() ->
[
?_assertEqual({ok, 1, <<>>, 4}, unmarshal(uint32, <<1,0,0,0>>, 0, $l))
,?_assertEqual({ok, 1, <<>>, 4}, unmarshal(uint32, <<0,0,0,1>>, 0, $B))
,?_assertEqual({ok, 1, <<"xyz">>, 4}, unmarshal(uint32, <<1,0,0,0, "xyz">>, 0, $l))
,?_assertEqual({ok, 1, <<"xyz">>, 4}, unmarshal(uint32, <<0,0,0,1, "xyz">>, 0, $B))
].
unmarshal_dict_test() ->
Bin = <<
29:8/integer-little-unsigned-unit:4, 0:8/unit:4,
$a, 0:8/unit:3,
4:8/integer-little-unsigned-unit:4, "plop", 0, 0:8/unit:3,
$b, 0:8/unit:3,
4:8/integer-little-unsigned-unit:4, "truc", 0
>>,
?assertMatch({ok, #{ $a := <<"plop">>, $b := <<"truc">> }, <<>>, 37},
unmarshal({dict, byte, string}, Bin, 0, $l)),
?assertMatch({ok, [ {$a, <<"plop">>}, {$b, <<"truc">>} ], <<>>, 37},
unmarshal({array, {struct, [byte, string]}}, Bin, 0, $l)),
DictVariant = <<
5:8/integer-little-unsigned-unit:1, "a{ys}", 0, 0:8/unit:1,
Bin/binary
>>,
?assertMatch({ok, #{ $a := <<"plop">>, $b := <<"truc">> }, <<>>, 45},
unmarshal(variant, DictVariant, 0, $l)),
ArrayVariant = <<
5:8/integer-little-unsigned-unit:1, "a(ys)", 0, 0:8/unit:1,
Bin/binary
>>,
?assertMatch({ok, [ {$a, <<"plop">>}, {$b, <<"truc">>} ], <<>>, 45},
unmarshal(variant, ArrayVariant, 0, $l)).
unmarshal_string_test_() ->
Bin = <<
8:8/integer-little-unsigned-unit:4,
"a string", 0
>>,
Variant = <<
1, $s, 0, 0:8/unit:1,
Bin/binary
>>,
[
?_assertMatch({ok, <<"a string">>, <<>>, 13},
unmarshal(string, Bin, 0, $l)),
?_assertMatch({ok, <<"a string">>, <<>>, 17},
unmarshal(variant, Variant, 0, $l))
].
unmarshal_signature_test() ->
[
?_assertMatch([
{array, {array, {array, string}}}, byte
], unmarshal_signature(<<"aaasy">>)),
?_assertMatch([
byte,
{dict, boolean, variant},
string,
string
], unmarshal_signature(<<"ya{bv}ss">>)),
?_assertMatch([
{array, {struct, [string, string, {array, string}, {dict, string, variant}, string]}},
string
], unmarshal_signature(<<"a(ssasa{sv}s)s">>))
].
-endif.
|
b1e5962cd6ec27ccb88b1cc974c1a82fa6b91bb4c889a78361c2cf4cf9dda889 | serokell/ariadne | Face.hs | module Ariadne.UI.Vty.Face
( UiFeatures (..)
, UiFace (..)
, UiLangFace (..)
, UiHistoryFace (..)
, UiEvent (..)
, UiCommandId (..)
, UiCommandEvent (..)
, UiCommandAction (..)
, UiBackendEvent (..)
, UiBackendStatusUpdate (..)
, UiWalletEvent (..)
, UiNewVersionEvent (..)
, UiPasswordEvent (..)
, UiConfirmEvent (..)
, UiConfirmationType (..)
, UiConfirmSendInfo (..)
, UiDeletingItem (..)
, UiCommand (..)
, UiSendOutput (..)
, UiSendArgs (..)
, UiFeeArgs (..)
, UiNewWalletArgs (..)
, UiNewAccountArgs (..)
, UiNewAddressArgs (..)
, UiRestoreWalletArgs (..)
, UiRenameArgs (..)
, UiChangePasswordArgs (..)
, UiCommandResult (..)
, UiBalanceCommandResult (..)
, UiTxHistoryRowPart (..)
, UiTxHistoryRow (..)
, UiTxHistoryCommandResult (..)
, UiSendCommandResult (..)
, UiFeeCommandResult (..)
, UiNewWalletCommandResult (..)
, UiNewAccountCommandResult (..)
, UiNewAddressCommandResult (..)
, UiRestoreWalletCommandResult (..)
, UiRenameCommandResult (..)
, UiRemoveCommandResult (..)
, UiExportCommandResult (..)
, UiChangePasswordCommandResult (..)
, UiTreeItem (..)
, UiTree
, UiTreeSelection(..)
, TreePath
, UiWalletInfo(..)
, UiAccountInfo(..)
, UiAddressInfo(..)
, UiSelectionInfo(..)
) where
import qualified Control.Concurrent.Event as CE
import Data.Loc (Loc, Span)
import qualified Data.Text.Buildable as Buildable
import Data.Tree (Tree)
import Data.Version (Version)
import Formatting (bprint, int, (%))
import Text.PrettyPrint.ANSI.Leijen (Doc)
import Ariadne.UX.PasswordManager (WalletId, PasswordRequestMode)
-- | UI library settings for a particular currency implementation
-- Mostly boolean flags for enabled widgets
data UiFeatures = UiFeatures
{ featureStatus :: !Bool
, featureExport :: !Bool
, featureAccounts :: !Bool
, featureTxHistory :: !Bool
, featureSecretKeyName :: !Text -- ^ "Secret key"/"Mnemonic"/etc
}
----------------------------------------------------------------------------
-- Faces
----------------------------------------------------------------------------
-- API for the UI.
data UiFace = UiFace
{ -- Update the user interface with an event. Does not block unless the
-- queue of events is full (should not normally happen).
putUiEvent :: UiEvent -> IO ()
}
-- The backend language (Knit by default) interface as perceived by the UI.
data UiLangFace = forall err expr. UiLangFace
{ langPutCommand :: expr -> IO UiCommandId
, langPutUiCommand :: UiCommand -> IO (Either Text UiCommandId)
, langPutUISilentCommand :: UiCommand -> IO (Either Text UiCommandId)
, langParse :: Text -> Either err expr
, langAutocomplete :: Loc -> Text -> [(Loc, Text)]
, langPpExpr :: expr -> Doc
, langPpParseError :: err -> Doc
, langParseErrSpans :: err -> [Span]
, langGetHelp :: [Doc]
}
-- Interface for the command history
data UiHistoryFace = UiHistoryFace
{ historyAddCommand :: Text -> IO ()
, historySetPrefix :: Text -> IO ()
, historyNextCommand :: IO (Maybe Text)
, historyPrevCommand :: IO (Maybe Text)
}
----------------------------------------------------------------------------
-- UI events and their payloads
----------------------------------------------------------------------------
-- | Events as perceived by the UI. They will be generated from backend-specific
-- events in the 'Glue' module. They must be independent from the backends and
-- capture /what the UI can handle/, not what the backends can generate.
data UiEvent
= UiCommandEvent UiCommandId UiCommandEvent
| UiCommandResult UiCommandId UiCommandResult
| UiCommandAction UiCommandAction
| UiBackendEvent UiBackendEvent
| UiWalletEvent UiWalletEvent
| UiNewVersionEvent UiNewVersionEvent
| UiPasswordEvent UiPasswordEvent
| UiConfirmEvent UiConfirmEvent
data UiCommandId = UiCommandId
  { -- This field is used to compare whether two command identifiers are equal.
-- The mapping from actual command identifiers to these integers must be
-- injective.
cmdIdEqObject :: Natural
, -- This field is the visual representation of a command identifier. The
-- mapping from actual command identifiers to text need not be injective,
-- but it would be very unfair to the user, as different command identifiers
-- would appear the same to her.
cmdTaskIdRendered :: Maybe Text
    -- Task identifier object.
, cmdTaskId :: Maybe Natural
}
instance Buildable UiCommandId where
build UiCommandId {..} =
case cmdTaskIdRendered of
Just rendered -> Buildable.build rendered
Nothing -> bprint ("EqObject:"%int) cmdIdEqObject
instance Eq UiCommandId where
a == b = cmdIdEqObject a == cmdIdEqObject b
-- A REPL command has either finished or sent some information.
data UiCommandEvent
= UiCommandSuccess Doc
| UiCommandFailure Doc
| UiCommandOutput Doc
| UiCommandWidget Doc
deriving (Show)
-- UI event triggered by REPL command
data UiCommandAction
= UiCommandHelp
| UiCommandLogs
-- Update current displayed slot, chain difficulty, etc
data UiBackendEvent
= UiBackendLogEvent Text
| UiBackendStatusUpdateEvent UiBackendStatusUpdate
data UiBackendStatusUpdate = UiBackendStatusUpdate
{ syncProgress :: Maybe Text
, blockchainLocal :: Text
, blockchainNetwork :: Text
}
-- Full Wallet update
data UiWalletEvent = UiWalletUpdate
{ wuTrees :: [UiTree]
, wuSelection :: Maybe UiTreeSelection
, wuSelectionInfo :: Maybe UiSelectionInfo
}
data UiNewVersionEvent = UiNewVersion
{ nvVersion :: Version
, nvUpdateURL :: Text
}
-- | Ui event triggered by the password manager
data UiPasswordEvent
= UiPasswordRequest PasswordRequestMode WalletId CE.Event
-- | Ui event to handle confirmations
data UiConfirmEvent
= UiConfirmRequest (MVar Bool) UiConfirmationType
data UiConfirmationType
= UiConfirmMnemonic [Text] -- ^ mnemonic
| UiConfirmRemove UiDeletingItem -- ^ selection
| UiConfirmSend [UiConfirmSendInfo] -- ^ lists of outputs
data UiConfirmSendInfo =
UiConfirmSendInfo
{ csiAddress :: Text
, csiAmount :: Text
, csiCoin :: Text
}
data UiDeletingItem
= UiDelWallet (Maybe Text)
| UiDelAccount (Maybe Text)
| UiDelUnknownKeys Text
| UiDelBrokenWallets Text
deriving Eq
----------------------------------------------------------------------------
-- UI commands
----------------------------------------------------------------------------
-- | Commands issued by the UI widgets
data UiCommand
= UiSelect [Word]
| UiBalance
| UiTxHistory
| UiSend UiSendArgs
| UiFee UiFeeArgs
| UiNewWallet UiNewWalletArgs
| UiNewAccount UiNewAccountArgs
| UiNewAddress UiNewAddressArgs
| UiRestoreWallet UiRestoreWalletArgs
| UiRename UiRenameArgs
| UiChangePassword UiChangePasswordArgs
| UiRemove
| UiExport
| UiKill Natural
data UiSendOutput = UiSendOutput
{ usoAddress :: !Text
, usoAmount :: !Text
}
data UiSendArgs = UiSendArgs
{ usaWalletIdx :: !(Maybe Word)
, usaAccounts :: ![Word32]
, usaOutputs :: [UiSendOutput]
, usaPassphrase :: !Text
}
data UiFeeArgs = UiFeeArgs
{ ufaWalletIdx :: !(Maybe Word)
, ufaAccounts :: ![Word32]
, ufaOutputs :: [UiSendOutput]
}
data UiNewWalletArgs = UiNewWalletArgs
{ unwaName :: !Text
, unwaPassphrase :: !Text
}
data UiNewAccountArgs = UiNewAccountArgs
{ unaaWalletIdx :: !(Maybe Word)
, unaaName :: !Text
}
data UiNewAddressArgs = UiNewAddressArgs
{ unadaWalletIdx :: !(Maybe Word)
, unadaAccountIdx :: !(Maybe Word)
}
data UiRestoreWalletArgs = UiRestoreWalletArgs
{ urwaName :: !Text
, urwaMnemonic :: !Text
, urwaPassphrase :: !Text
}
data UiRenameArgs = UiRenameArgs
{ uraName :: !Text
}
data UiChangePasswordArgs = UiChangePasswordArgs
----------------------------------------------------------------------------
-- UI command results
----------------------------------------------------------------------------
-- | Results of commands issued by the UI widgets
data UiCommandResult
= UiBalanceCommandResult UiBalanceCommandResult
| UiTxHistoryCommandResult UiTxHistoryCommandResult
| UiSendCommandResult UiSendCommandResult
| UiFeeCommandResult UiFeeCommandResult
| UiNewWalletCommandResult UiNewWalletCommandResult
| UiNewAccountCommandResult UiNewAccountCommandResult
| UiNewAddressCommandResult UiNewAddressCommandResult
| UiRestoreWalletCommandResult UiRestoreWalletCommandResult
| UiRenameCommandResult UiRenameCommandResult
| UiRemoveCommandResult UiRemoveCommandResult
| UiExportCommandResult UiExportCommandResult
| UiChangePasswordCommandResult UiChangePasswordCommandResult
data UiBalanceCommandResult
= UiBalanceCommandSuccess Text
| UiBalanceCommandFailure Text
data UiSendCommandResult
= UiSendCommandSuccess Text
| UiSendCommandFailure Text
data UiFeeCommandResult
= UiFeeCommandSuccess Text
| UiFeeCommandFailure Text
data UiTxHistoryRowPart = UiTxHistoryRowPart
{ uthrpAddress :: Text
, uthrpAmount :: Text
}
deriving (Eq, Show)
data UiTxHistoryRow = UiTxHistoryRow
{ uthrId :: Text
, uthrTotal :: Text
, uthrFrom :: [UiTxHistoryRowPart]
, uthrTo :: [UiTxHistoryRowPart]
}
deriving (Eq, Show)
data UiTxHistoryCommandResult
= UiTxHistoryCommandSuccess [UiTxHistoryRow]
| UiTxHistoryCommandFailure Text
data UiNewWalletCommandResult
= UiNewWalletCommandSuccess [Text]
| UiNewWalletCommandFailure Text
data UiNewAccountCommandResult
= UiNewAccountCommandSuccess
| UiNewAccountCommandFailure Text
data UiNewAddressCommandResult
= UiNewAddressCommandSuccess
| UiNewAddressCommandFailure Text
data UiRestoreWalletCommandResult
= UiRestoreWalletCommandSuccess
| UiRestoreWalletCommandFailure Text
data UiRenameCommandResult
= UiRenameCommandSuccess
| UiRenameCommandFailure Text
data UiRemoveCommandResult
= UiRemoveCommandSuccess
| UiRemoveCommandFailure Text
data UiExportCommandResult
= UiExportCommandSuccess Text
| UiExportCommandFailure Text
data UiChangePasswordCommandResult
= UiChangePasswordCommandSuccess
| UiChangePasswordCommandFailure Text
----------------------------------------------------------------------------
-- Wallet widget model
----------------------------------------------------------------------------
-- | A node in HD-wallet tree.
data UiTreeItem = UiTreeItem
{ wtiLabel :: !(Maybe Text)
    -- ^ Some text to display (name).
, wtiPath :: ![Word]
-- ^ Path to this node in the tree. Can be used as an identifier
-- (hopefully).
, wtiShowPath :: !Bool
-- ^ Whether the path should be displayed.
}
type UiTree = Tree UiTreeItem
-- | Path in a 'Tree'.
--
-- N.B. The head of this list is the index in root's children.
-- I find this order more intuitive, but if performance turns out
-- to be an issue, we may consider changing it.
type TreePath = [Word]
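-- A quick illustration (added here, not part of the original module): with
-- the first root-level wallet holding two accounts, the path [0, 1] selects
-- the second account of that wallet -- the head indexes the root's children,
-- each following element indexes the next level down.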
data UiTreeSelection = UiTreeSelection
{ wtsWalletIdx :: Word
, wtsPath :: TreePath
}
-- Display info for entities on all HD-wallet tree levels
data UiWalletInfo = UiWalletInfo
{ uwiLabel :: !(Maybe Text)
, uwiId :: !Text
, uwiWalletIdx :: !Word
, uwiBalance :: !(Maybe Text)
, uwiAccounts :: ![UiAccountInfo]
}
instance Eq UiWalletInfo where
a == b = uwiWalletIdx a == uwiWalletIdx b
data UiAccountInfo = UiAccountInfo
{ uaciLabel :: !(Maybe Text)
, uaciWalletIdx :: !Word
, uaciPath :: !TreePath
, uaciBalance :: !(Maybe Text)
, uaciAddresses :: ![UiAddressInfo]
}
instance Eq UiAccountInfo where
a == b =
uaciWalletIdx a == uaciWalletIdx b &&
uaciPath a == uaciPath b
data UiAddressInfo = UiAddressInfo
{ uadiWalletIdx :: !Word
, uadiPath :: !TreePath
, uadiAddress :: !Text
, uadiBalance :: !(Maybe Text)
}
instance Eq UiAddressInfo where
a == b =
uadiWalletIdx a == uadiWalletIdx b &&
uadiPath a == uadiPath b
-- | Info for currently selected tree item
data UiSelectionInfo
= UiSelectionWallet !UiWalletInfo
| UiSelectionAccount !UiAccountInfo
| null | https://raw.githubusercontent.com/serokell/ariadne/5f49ee53b6bbaf332cb6f110c75f7b971acdd452/ui/vty-lib/src/Ariadne/UI/Vty/Face.hs | haskell | | UI library settings for a particular currency implementation
Mostly boolean flags for enabled widgets
^ "Secret key"/"Mnemonic"/etc
--------------------------------------------------------------------------
Faces
--------------------------------------------------------------------------
API for the UI.
Update the user interface with an event. Does not block unless the
queue of events is full (should not normally happen).
Interface for the command history
--------------------------------------------------------------------------
UI events and their payloads
--------------------------------------------------------------------------
| Events as perceived by the UI. They will be generated from backend-specific
events in the 'Glue' module. They must be independent from the backends and
capture /what the UI can handle/, not what the backends can generate.
The mapping from actual command identifiers to these integers must be
injective.
This field is the visual representation of a command identifier. The
mapping from actual command identifiers to text need not be injective,
but it would be very unfair to the user, as different command identifiers
would appear the same to her.
A REPL command has either finished or sent some information.
UI event triggered by REPL command
Update current displayed slot, chain difficulty, etc
Full Wallet update
^ mnemonic
^ selection
^ lists of outputs
--------------------------------------------------------------------------
UI commands
--------------------------------------------------------------------------
| Commands issued by the UI widgets
--------------------------------------------------------------------------
UI command results
--------------------------------------------------------------------------
| Results of commands issued by the UI widgets
--------------------------------------------------------------------------
Wallet widget model
--------------------------------------------------------------------------
| A node in HD-wallet tree.
^ Path to this node in the tree. Can be used as an identifier
(hopefully).
^ Whether the path should be displayed.
| Path in a 'Tree'.
N.B. The head of this list is the index in root's children.
I find this order more intuitive, but if perfomance turns out
to be an issue, we may consider changing it.
Display info for entities on all HD-wallet tree levels
| Info for currently selected tree item | module Ariadne.UI.Vty.Face
( UiFeatures (..)
, UiFace (..)
, UiLangFace (..)
, UiHistoryFace (..)
, UiEvent (..)
, UiCommandId (..)
, UiCommandEvent (..)
, UiCommandAction (..)
, UiBackendEvent (..)
, UiBackendStatusUpdate (..)
, UiWalletEvent (..)
, UiNewVersionEvent (..)
, UiPasswordEvent (..)
, UiConfirmEvent (..)
, UiConfirmationType (..)
, UiConfirmSendInfo (..)
, UiDeletingItem (..)
, UiCommand (..)
, UiSendOutput (..)
, UiSendArgs (..)
, UiFeeArgs (..)
, UiNewWalletArgs (..)
, UiNewAccountArgs (..)
, UiNewAddressArgs (..)
, UiRestoreWalletArgs (..)
, UiRenameArgs (..)
, UiChangePasswordArgs (..)
, UiCommandResult (..)
, UiBalanceCommandResult (..)
, UiTxHistoryRowPart (..)
, UiTxHistoryRow (..)
, UiTxHistoryCommandResult (..)
, UiSendCommandResult (..)
, UiFeeCommandResult (..)
, UiNewWalletCommandResult (..)
, UiNewAccountCommandResult (..)
, UiNewAddressCommandResult (..)
, UiRestoreWalletCommandResult (..)
, UiRenameCommandResult (..)
, UiRemoveCommandResult (..)
, UiExportCommandResult (..)
, UiChangePasswordCommandResult (..)
, UiTreeItem (..)
, UiTree
, UiTreeSelection(..)
, TreePath
, UiWalletInfo(..)
, UiAccountInfo(..)
, UiAddressInfo(..)
, UiSelectionInfo(..)
) where
import qualified Control.Concurrent.Event as CE
import Data.Loc (Loc, Span)
import qualified Data.Text.Buildable as Buildable
import Data.Tree (Tree)
import Data.Version (Version)
import Formatting (bprint, int, (%))
import Text.PrettyPrint.ANSI.Leijen (Doc)
import Ariadne.UX.PasswordManager (WalletId, PasswordRequestMode)
data UiFeatures = UiFeatures
{ featureStatus :: !Bool
, featureExport :: !Bool
, featureAccounts :: !Bool
, featureTxHistory :: !Bool
}
data UiFace = UiFace
putUiEvent :: UiEvent -> IO ()
}
data UiLangFace = forall err expr. UiLangFace
{ langPutCommand :: expr -> IO UiCommandId
, langPutUiCommand :: UiCommand -> IO (Either Text UiCommandId)
, langPutUISilentCommand :: UiCommand -> IO (Either Text UiCommandId)
, langParse :: Text -> Either err expr
, langAutocomplete :: Loc -> Text -> [(Loc, Text)]
, langPpExpr :: expr -> Doc
, langPpParseError :: err -> Doc
, langParseErrSpans :: err -> [Span]
, langGetHelp :: [Doc]
}
data UiHistoryFace = UiHistoryFace
{ historyAddCommand :: Text -> IO ()
, historySetPrefix :: Text -> IO ()
, historyNextCommand :: IO (Maybe Text)
, historyPrevCommand :: IO (Maybe Text)
}
data UiEvent
= UiCommandEvent UiCommandId UiCommandEvent
| UiCommandResult UiCommandId UiCommandResult
| UiCommandAction UiCommandAction
| UiBackendEvent UiBackendEvent
| UiWalletEvent UiWalletEvent
| UiNewVersionEvent UiNewVersionEvent
| UiPasswordEvent UiPasswordEvent
| UiConfirmEvent UiConfirmEvent
data UiCommandId = UiCommandId
  { cmdIdEqObject :: Natural
  , cmdTaskIdRendered :: Maybe Text
, cmdTaskId :: Maybe Natural
}
instance Buildable UiCommandId where
build UiCommandId {..} =
case cmdTaskIdRendered of
Just rendered -> Buildable.build rendered
Nothing -> bprint ("EqObject:"%int) cmdIdEqObject
instance Eq UiCommandId where
a == b = cmdIdEqObject a == cmdIdEqObject b
data UiCommandEvent
= UiCommandSuccess Doc
| UiCommandFailure Doc
| UiCommandOutput Doc
| UiCommandWidget Doc
deriving (Show)
data UiCommandAction
= UiCommandHelp
| UiCommandLogs
data UiBackendEvent
= UiBackendLogEvent Text
| UiBackendStatusUpdateEvent UiBackendStatusUpdate
data UiBackendStatusUpdate = UiBackendStatusUpdate
{ syncProgress :: Maybe Text
, blockchainLocal :: Text
, blockchainNetwork :: Text
}
data UiWalletEvent = UiWalletUpdate
{ wuTrees :: [UiTree]
, wuSelection :: Maybe UiTreeSelection
, wuSelectionInfo :: Maybe UiSelectionInfo
}
data UiNewVersionEvent = UiNewVersion
{ nvVersion :: Version
, nvUpdateURL :: Text
}
data UiPasswordEvent
= UiPasswordRequest PasswordRequestMode WalletId CE.Event
data UiConfirmEvent
= UiConfirmRequest (MVar Bool) UiConfirmationType
data UiConfirmationType
data UiConfirmSendInfo =
UiConfirmSendInfo
{ csiAddress :: Text
, csiAmount :: Text
, csiCoin :: Text
}
data UiDeletingItem
= UiDelWallet (Maybe Text)
| UiDelAccount (Maybe Text)
| UiDelUnknownKeys Text
| UiDelBrokenWallets Text
deriving Eq
data UiCommand
= UiSelect [Word]
| UiBalance
| UiTxHistory
| UiSend UiSendArgs
| UiFee UiFeeArgs
| UiNewWallet UiNewWalletArgs
| UiNewAccount UiNewAccountArgs
| UiNewAddress UiNewAddressArgs
| UiRestoreWallet UiRestoreWalletArgs
| UiRename UiRenameArgs
| UiChangePassword UiChangePasswordArgs
| UiRemove
| UiExport
| UiKill Natural
data UiSendOutput = UiSendOutput
{ usoAddress :: !Text
, usoAmount :: !Text
}
data UiSendArgs = UiSendArgs
{ usaWalletIdx :: !(Maybe Word)
, usaAccounts :: ![Word32]
, usaOutputs :: [UiSendOutput]
, usaPassphrase :: !Text
}
data UiFeeArgs = UiFeeArgs
{ ufaWalletIdx :: !(Maybe Word)
, ufaAccounts :: ![Word32]
, ufaOutputs :: [UiSendOutput]
}
data UiNewWalletArgs = UiNewWalletArgs
{ unwaName :: !Text
, unwaPassphrase :: !Text
}
data UiNewAccountArgs = UiNewAccountArgs
{ unaaWalletIdx :: !(Maybe Word)
, unaaName :: !Text
}
data UiNewAddressArgs = UiNewAddressArgs
{ unadaWalletIdx :: !(Maybe Word)
, unadaAccountIdx :: !(Maybe Word)
}
data UiRestoreWalletArgs = UiRestoreWalletArgs
{ urwaName :: !Text
, urwaMnemonic :: !Text
, urwaPassphrase :: !Text
}
data UiRenameArgs = UiRenameArgs
{ uraName :: !Text
}
data UiChangePasswordArgs = UiChangePasswordArgs
data UiCommandResult
= UiBalanceCommandResult UiBalanceCommandResult
| UiTxHistoryCommandResult UiTxHistoryCommandResult
| UiSendCommandResult UiSendCommandResult
| UiFeeCommandResult UiFeeCommandResult
| UiNewWalletCommandResult UiNewWalletCommandResult
| UiNewAccountCommandResult UiNewAccountCommandResult
| UiNewAddressCommandResult UiNewAddressCommandResult
| UiRestoreWalletCommandResult UiRestoreWalletCommandResult
| UiRenameCommandResult UiRenameCommandResult
| UiRemoveCommandResult UiRemoveCommandResult
| UiExportCommandResult UiExportCommandResult
| UiChangePasswordCommandResult UiChangePasswordCommandResult
data UiBalanceCommandResult
= UiBalanceCommandSuccess Text
| UiBalanceCommandFailure Text
data UiSendCommandResult
= UiSendCommandSuccess Text
| UiSendCommandFailure Text
data UiFeeCommandResult
= UiFeeCommandSuccess Text
| UiFeeCommandFailure Text
data UiTxHistoryRowPart = UiTxHistoryRowPart
{ uthrpAddress :: Text
, uthrpAmount :: Text
}
deriving (Eq, Show)
data UiTxHistoryRow = UiTxHistoryRow
{ uthrId :: Text
, uthrTotal :: Text
, uthrFrom :: [UiTxHistoryRowPart]
, uthrTo :: [UiTxHistoryRowPart]
}
deriving (Eq, Show)
data UiTxHistoryCommandResult
= UiTxHistoryCommandSuccess [UiTxHistoryRow]
| UiTxHistoryCommandFailure Text
data UiNewWalletCommandResult
= UiNewWalletCommandSuccess [Text]
| UiNewWalletCommandFailure Text
data UiNewAccountCommandResult
= UiNewAccountCommandSuccess
| UiNewAccountCommandFailure Text
data UiNewAddressCommandResult
= UiNewAddressCommandSuccess
| UiNewAddressCommandFailure Text
data UiRestoreWalletCommandResult
= UiRestoreWalletCommandSuccess
| UiRestoreWalletCommandFailure Text
data UiRenameCommandResult
= UiRenameCommandSuccess
| UiRenameCommandFailure Text
data UiRemoveCommandResult
= UiRemoveCommandSuccess
| UiRemoveCommandFailure Text
data UiExportCommandResult
= UiExportCommandSuccess Text
| UiExportCommandFailure Text
data UiChangePasswordCommandResult
= UiChangePasswordCommandSuccess
| UiChangePasswordCommandFailure Text
data UiTreeItem = UiTreeItem
{ wtiLabel :: !(Maybe Text)
, wtiPath :: ![Word]
, wtiShowPath :: !Bool
}
type UiTree = Tree UiTreeItem
type TreePath = [Word]
data UiTreeSelection = UiTreeSelection
{ wtsWalletIdx :: Word
, wtsPath :: TreePath
}
data UiWalletInfo = UiWalletInfo
{ uwiLabel :: !(Maybe Text)
, uwiId :: !Text
, uwiWalletIdx :: !Word
, uwiBalance :: !(Maybe Text)
, uwiAccounts :: ![UiAccountInfo]
}
instance Eq UiWalletInfo where
a == b = uwiWalletIdx a == uwiWalletIdx b
data UiAccountInfo = UiAccountInfo
{ uaciLabel :: !(Maybe Text)
, uaciWalletIdx :: !Word
, uaciPath :: !TreePath
, uaciBalance :: !(Maybe Text)
, uaciAddresses :: ![UiAddressInfo]
}
instance Eq UiAccountInfo where
a == b =
uaciWalletIdx a == uaciWalletIdx b &&
uaciPath a == uaciPath b
data UiAddressInfo = UiAddressInfo
{ uadiWalletIdx :: !Word
, uadiPath :: !TreePath
, uadiAddress :: !Text
, uadiBalance :: !(Maybe Text)
}
instance Eq UiAddressInfo where
a == b =
uadiWalletIdx a == uadiWalletIdx b &&
uadiPath a == uadiPath b
data UiSelectionInfo
= UiSelectionWallet !UiWalletInfo
| UiSelectionAccount !UiAccountInfo
|
1b0a395381b4611934efa133c29a6056241963994561890856a74a4e5d245bd2 | emqx/emqx-auth-ldap | emqx_auth_ldap_cli.erl | %%--------------------------------------------------------------------
%% Copyright (c) 2020 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%%     http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_auth_ldap_cli).
-behaviour(ecpool_worker).
-include("emqx_auth_ldap.hrl").
-include_lib("emqx/include/emqx.hrl").
-include_lib("emqx/include/logger.hrl").
%% ecpool callback
-export([connect/1]).
-export([ search/3
, search/4
, post_bind/3
, init_args/1
]).
-import(proplists,
[ get_value/2
, get_value/3
]).
%%--------------------------------------------------------------------
%% LDAP Connect/Search
%%--------------------------------------------------------------------
connect(Opts) ->
Servers = get_value(servers, Opts, ["localhost"]),
Port = get_value(port, Opts, 389),
Timeout = get_value(timeout, Opts, 30),
BindDn = get_value(bind_dn, Opts),
BindPassword = get_value(bind_password, Opts),
LdapOpts = case get_value(ssl, Opts, false)of
true ->
SslOpts = get_value(sslopts, Opts),
[{port, Port}, {timeout, Timeout}, {sslopts, SslOpts}];
false ->
[{port, Port}, {timeout, Timeout}]
end,
?LOG(debug, "[LDAP] Connecting to OpenLDAP server: ~p, Opts:~p ...", [Servers, LdapOpts]),
case eldap2:open(Servers, LdapOpts) of
{ok, LDAP} ->
try eldap2:simple_bind(LDAP, BindDn, BindPassword) of
ok -> {ok, LDAP};
{error, Error} ->
?LOG(error, "[LDAP] Can't authenticated to OpenLDAP server: ~p", [Error]),
{error, Error}
catch
error:Reason ->
?LOG(error, "[LDAP] Can't authenticated to OpenLDAP server: ~p", [Reason]),
{error, Reason}
end;
{error, Reason} ->
?LOG(error, "[LDAP] Can't connect to OpenLDAP server: ~p", [Reason]),
{error, Reason}
end.
search(Pool, Base, Filter) ->
ecpool:with_client(Pool,
fun(C) ->
case application:get_env(?APP, bind_as_user) of
{ok, true} ->
{ok, Opts} = application:get_env(?APP, ldap),
BindDn = get_value(bind_dn, Opts),
BindPassword = get_value(bind_password, Opts),
try eldap2:simple_bind(C, BindDn, BindPassword) of
ok ->
eldap2:search(C, [{base, Base},
{filter, Filter},
{deref, eldap2:derefFindingBaseObj()}]);
{error, Error} ->
{error, Error}
catch
error:Reason -> {error, Reason}
end;
{ok, false} ->
eldap2:search(C, [{base, Base},
{filter, Filter},
{deref, eldap2:derefFindingBaseObj()}])
end
end).
search(Pool, Base, Filter, Attributes) ->
ecpool:with_client(Pool,
fun(C) ->
case application:get_env(?APP, bind_as_user) of
{ok, true} ->
{ok, Opts} = application:get_env(?APP, ldap),
BindDn = get_value(bind_dn, Opts),
BindPassword = get_value(bind_password, Opts),
try eldap2:simple_bind(C, BindDn, BindPassword) of
ok ->
eldap2:search(C, [{base, Base},
{filter, Filter},
{attributes, Attributes},
{deref, eldap2:derefFindingBaseObj()}]);
{error, Error} ->
{error, Error}
catch
error:Reason -> {error, Reason}
end;
{ok, false} ->
eldap2:search(C, [{base, Base},
{filter, Filter},
{attributes, Attributes},
{deref, eldap2:derefFindingBaseObj()}])
end
end).
post_bind(Pool, BindDn, BindPassword) ->
ecpool:with_client(Pool,
fun(C) ->
try eldap2:simple_bind(C, BindDn, BindPassword) of
ok -> ok;
{error, Error} ->
{error, Error}
catch
error:Reason -> {error, Reason}
end
end).
init_args(ENVS) ->
DeviceDn = get_value(device_dn, ENVS),
ObjectClass = get_value(match_objectclass, ENVS),
UidAttr = get_value(username_attr, ENVS),
PasswdAttr = get_value(password_attr, ENVS),
{ok, #{device_dn => DeviceDn,
match_objectclass => ObjectClass,
username_attr => UidAttr,
password_attr => PasswdAttr}}.
| null | https://raw.githubusercontent.com/emqx/emqx-auth-ldap/2b8a5cb782a4620f708ba7d0acac20264d2f54a8/src/emqx_auth_ldap_cli.erl | erlang | --------------------------------------------------------------------
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
--------------------------------------------------------------------
ecpool callback
--------------------------------------------------------------------
-------------------------------------------------------------------- | Copyright ( c ) 2020 EMQ Technologies Co. , Ltd. All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(emqx_auth_ldap_cli).
-behaviour(ecpool_worker).
-include("emqx_auth_ldap.hrl").
-include_lib("emqx/include/emqx.hrl").
-include_lib("emqx/include/logger.hrl").
-export([connect/1]).
-export([ search/3
, search/4
, post_bind/3
, init_args/1
]).
-import(proplists,
[ get_value/2
, get_value/3
]).
LDAP Connect / Search
connect(Opts) ->
Servers = get_value(servers, Opts, ["localhost"]),
Port = get_value(port, Opts, 389),
Timeout = get_value(timeout, Opts, 30),
BindDn = get_value(bind_dn, Opts),
BindPassword = get_value(bind_password, Opts),
LdapOpts = case get_value(ssl, Opts, false)of
true ->
SslOpts = get_value(sslopts, Opts),
[{port, Port}, {timeout, Timeout}, {sslopts, SslOpts}];
false ->
[{port, Port}, {timeout, Timeout}]
end,
?LOG(debug, "[LDAP] Connecting to OpenLDAP server: ~p, Opts:~p ...", [Servers, LdapOpts]),
case eldap2:open(Servers, LdapOpts) of
{ok, LDAP} ->
try eldap2:simple_bind(LDAP, BindDn, BindPassword) of
ok -> {ok, LDAP};
{error, Error} ->
?LOG(error, "[LDAP] Can't authenticated to OpenLDAP server: ~p", [Error]),
{error, Error}
catch
error:Reason ->
?LOG(error, "[LDAP] Can't authenticated to OpenLDAP server: ~p", [Reason]),
{error, Reason}
end;
{error, Reason} ->
?LOG(error, "[LDAP] Can't connect to OpenLDAP server: ~p", [Reason]),
{error, Reason}
end.
search(Pool, Base, Filter) ->
ecpool:with_client(Pool,
fun(C) ->
case application:get_env(?APP, bind_as_user) of
{ok, true} ->
{ok, Opts} = application:get_env(?APP, ldap),
BindDn = get_value(bind_dn, Opts),
BindPassword = get_value(bind_password, Opts),
try eldap2:simple_bind(C, BindDn, BindPassword) of
ok ->
eldap2:search(C, [{base, Base},
{filter, Filter},
{deref, eldap2:derefFindingBaseObj()}]);
{error, Error} ->
{error, Error}
catch
error:Reason -> {error, Reason}
end;
{ok, false} ->
eldap2:search(C, [{base, Base},
{filter, Filter},
{deref, eldap2:derefFindingBaseObj()}])
end
end).
search(Pool, Base, Filter, Attributes) ->
ecpool:with_client(Pool,
fun(C) ->
case application:get_env(?APP, bind_as_user) of
{ok, true} ->
{ok, Opts} = application:get_env(?APP, ldap),
BindDn = get_value(bind_dn, Opts),
BindPassword = get_value(bind_password, Opts),
try eldap2:simple_bind(C, BindDn, BindPassword) of
ok ->
eldap2:search(C, [{base, Base},
{filter, Filter},
{attributes, Attributes},
{deref, eldap2:derefFindingBaseObj()}]);
{error, Error} ->
{error, Error}
catch
error:Reason -> {error, Reason}
end;
{ok, false} ->
eldap2:search(C, [{base, Base},
{filter, Filter},
{attributes, Attributes},
{deref, eldap2:derefFindingBaseObj()}])
end
end).
post_bind(Pool, BindDn, BindPassword) ->
ecpool:with_client(Pool,
fun(C) ->
try eldap2:simple_bind(C, BindDn, BindPassword) of
ok -> ok;
{error, Error} ->
{error, Error}
catch
error:Reason -> {error, Reason}
end
end).
init_args(ENVS) ->
DeviceDn = get_value(device_dn, ENVS),
ObjectClass = get_value(match_objectclass, ENVS),
UidAttr = get_value(username_attr, ENVS),
PasswdAttr = get_value(password_attr, ENVS),
{ok, #{device_dn => DeviceDn,
match_objectclass => ObjectClass,
username_attr => UidAttr,
password_attr => PasswdAttr}}.
|
61369ee562b39b4e664a59c2cc3ed430fd8ac159c5feb73e72a28f50f2f76989 | mlin/ocaml-sqlite3EZ | unit_tests.ml | Printf.printf "Tests go here, please!\n"
| null | https://raw.githubusercontent.com/mlin/ocaml-sqlite3EZ/7071b0589dc6444cf988240229f03f4e0fb54f70/unit_tests.ml | ocaml | Printf.printf "Tests go here, please!\n"
|
|
dcbeef5f29d8cd612e9141606a0475ea8931b6ef941d4847d43c45bbdcfb6655 | flexsurfer/conduitrn | views.cljs | (ns conduit.ui.edit.views
(:require [re-frame.core :as re-frame]
[clojure.string :as string]
[conduit.ui.components :as ui]
[steroid.rn.core :as rn]))
(defn reset-fields [content refs]
(fn []
(re-frame/dispatch [:reset-edit-article])
(reset! content {})
(doseq [ref @refs]
(.clear ^js ref))))
(defn upsert-article [default content-atom refs slug]
(let [content (merge default @content-atom)]
(re-frame/dispatch
[:upsert-article
{:slug slug
:article {:title (string/trim (or (:title content) ""))
:description (string/trim (or (:description content) ""))
:body (string/trim (or (:body content) ""))
:tagList (string/split (:tagList content) #" ")}}
(reset-fields content-atom refs)])))
(defn editor []
(let [content (atom {})
refs (atom #{})]
(fn []
(let [{:keys [title description body tagList slug] :as active-article}
@(re-frame/subscribe [:edit-article])
tagList (string/join " " tagList)]
[ui/keyboard-avoiding-view {}
[ui/safe-area-consumer
[rn/scroll-view {:style {:flex 1}
:keyboardShouldPersistTaps :always}
[ui/text-input {:ref #(when % (swap! refs conj %))
:style {:margin-horizontal 20 :margin-vertical 10}
:on-change-text #(swap! content assoc :title %)
:placeholder "Article Title"
:default-value title}]
[ui/text-input {:ref #(when % (swap! refs conj %))
:style {:margin-horizontal 20 :margin-vertical 10}
:on-change-text #(swap! content assoc :description %)
:placeholder "What's this article about?"
:default-value description}]
[ui/text-input {:ref #(when % (swap! refs conj %))
:style {:margin-horizontal 20 :margin-vertical 10
:height 300}
:on-change-text #(swap! content assoc :body %)
:placeholder "Write your article (in markdown)"
:default-value body}]
[ui/text-input {:ref #(when % (swap! refs conj %))
:style {:margin-horizontal 20 :margin-top 10
:margin-bottom 40}
:on-change-text #(swap! content assoc :tagList %)
:placeholder "Enter tags"
:default-value (str tagList)}]
[ui/button {:on-press #(upsert-article {:title title :description description
:body body :tagList tagList :slug slug}
content
refs
slug)
:title (if active-article
"Update Article"
"Publish Article")}]
(when (or active-article (seq @content))
[rn/view {:style {:margin-top 40}}
[ui/button {:on-press (reset-fields content refs)
:color "#b85c5c"
:title "Cancel"}]])]]]))))
| null | https://raw.githubusercontent.com/flexsurfer/conduitrn/e0c4860a24a7413b08d807b3aa3f1a76d93a444b/src/conduit/ui/edit/views.cljs | clojure | (ns conduit.ui.edit.views
(:require [re-frame.core :as re-frame]
[clojure.string :as string]
[conduit.ui.components :as ui]
[steroid.rn.core :as rn]))
(defn reset-fields [content refs]
(fn []
(re-frame/dispatch [:reset-edit-article])
(reset! content {})
(doseq [ref @refs]
(.clear ^js ref))))
(defn upsert-article [default content-atom refs slug]
(let [content (merge default @content-atom)]
(re-frame/dispatch
[:upsert-article
{:slug slug
:article {:title (string/trim (or (:title content) ""))
:description (string/trim (or (:description content) ""))
:body (string/trim (or (:body content) ""))
:tagList (string/split (:tagList content) #" ")}}
(reset-fields content-atom refs)])))
(defn editor []
(let [content (atom {})
refs (atom #{})]
(fn []
(let [{:keys [title description body tagList slug] :as active-article}
@(re-frame/subscribe [:edit-article])
tagList (string/join " " tagList)]
[ui/keyboard-avoiding-view {}
[ui/safe-area-consumer
[rn/scroll-view {:style {:flex 1}
:keyboardShouldPersistTaps :always}
[ui/text-input {:ref #(when % (swap! refs conj %))
:style {:margin-horizontal 20 :margin-vertical 10}
:on-change-text #(swap! content assoc :title %)
:placeholder "Article Title"
:default-value title}]
[ui/text-input {:ref #(when % (swap! refs conj %))
:style {:margin-horizontal 20 :margin-vertical 10}
:on-change-text #(swap! content assoc :description %)
:placeholder "What's this article about?"
:default-value description}]
[ui/text-input {:ref #(when % (swap! refs conj %))
:style {:margin-horizontal 20 :margin-vertical 10
:height 300}
:on-change-text #(swap! content assoc :body %)
:placeholder "Write your article (in markdown)"
:default-value body}]
[ui/text-input {:ref #(when % (swap! refs conj %))
:style {:margin-horizontal 20 :margin-top 10
:margin-bottom 40}
:on-change-text #(swap! content assoc :tagList %)
:placeholder "Enter tags"
:default-value (str tagList)}]
[ui/button {:on-press #(upsert-article {:title title :description description
:body body :tagList tagList :slug slug}
content
refs
slug)
:title (if active-article
"Update Article"
"Publish Article")}]
(when (or active-article (seq @content))
[rn/view {:style {:margin-top 40}}
[ui/button {:on-press (reset-fields content refs)
:color "#b85c5c"
:title "Cancel"}]])]]]))))
|
|
8212b9dc498f18d3878e98d7df885dcf892f8b999abf89a972792aac63b4f0b0 | sfrank/minheap | binary-heap.lisp | MINHEAP is by < > , 2007 - 2012 .
;;;;
;;;; Permission is hereby granted, free of charge, to any person obtaining
;;;; a copy of this software and associated documentation files (the
;;;; "Software"), to deal in the Software without restriction, including
;;;; without limitation the rights to use, copy, modify, merge, publish,
;;;; distribute, sublicense, and/or sell copies of the Software, and to
;;;; permit persons to whom the Software is furnished to do so, subject to
;;;; the following conditions:
;;;;
;;;; The above copyright notice and this permission notice shall be included
;;;; in all copies or substantial portions of the Software.
;;;;
;;;; THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
;;;; EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
;;;; MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
;;;; IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
;;;; CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
;;;; TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
;;;; SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
(defpackage :binary-heap (:use :cl)
(:export
#:binary-heap
#:clear-heap
#:empty-p
#:insert
#:peek-min
#:extract-min
#:extract-node
#:heap-size
#:decrease-key
#:meld
#:alist-to-heap
))
(in-package :binary-heap)
(deftype array-index () `(integer 0 ,(1- array-dimension-limit)))
(defconstant +initial-size+ 50 "initial queue vector size")
;;;; binary min-heap
(defstruct (node (:constructor %make-node (key data index)))
(key 0 :type fixnum)
(index 0 :type array-index)
(data nil))
(defclass binary-heap ()
((array :accessor bin-heap-array
:type (vector (or null node))
:initarg :array
:initform (make-array +initial-size+
:adjustable t
:fill-pointer 0
:element-type '(or null node)
:initial-element nil))))
(defmethod print-object ((obj binary-heap) stream)
(print-unreadable-object (obj stream :type t :identity t)
(format stream "~4I~:_size: ~A~:_" (heap-size obj))))
;(declaim (inline parent left right %make-node))
(defun parent (k)
(declare (type array-index k))
(floor (1- k) 2))
(defun left (k)
(declare (type (integer 0 #.(floor array-dimension-limit 2)) k))
(1+ (* k 2)))
(defun right (k)
(declare (type (integer 0 #.(floor array-dimension-limit 2)) k))
(* (1+ k) 2))
(defun peek-min (heap)
(let ((node (aref (bin-heap-array heap) 0)))
(values (node-data node)
(node-key node))))
(defun clear-heap (heap)
(setf (fill-pointer (bin-heap-array heap)) 0))
(defun empty-p (heap)
(zerop (fill-pointer (bin-heap-array heap))))
(defun heap-size (heap)
(length (bin-heap-array heap)))
(defun extract-min (heap)
(let ((array (bin-heap-array heap))
(node (aref (bin-heap-array heap) 0)))
(assert node)
(setf (aref array 0) (aref array (1- (length array)))
(aref array (1- (length array))) nil)
(when (> (decf (fill-pointer array)) 1)
(sink array 0))
(values (node-data node)
(node-key node))))
;(declaim (inline swap-nodes))
(defun swap-nodes (array i j)
(declare (type array-index i j))
(setf (node-index (aref array i)) j
(node-index (aref array j)) i)
(rotatef (aref array i) (aref array j)))
(defun sink (array index)
(let ((maxindex (1- (length array))))
(if (zerop maxindex)
maxindex
(loop for i = index then j
with j = 0
while (<= (left i) maxindex) do
(cond
((< maxindex (right i))
(setf j (left i)))
((<= (node-key (aref array (left i)))
(node-key (aref array (right i))))
(setf j (left i)))
(t
(setf j (right i))))
(when (<= (node-key (aref array i))
(node-key (aref array j)))
(loop-finish))
(swap-nodes array i j)
finally (return array)))))
(defun perlocate-up (array vindex)
(loop for index = vindex then parent
for parent = (parent index)
with key = (node-key (aref array vindex))
while (and (>= parent 0)
(< key (node-key (aref array parent))))
do (swap-nodes array index parent)
finally (return (aref array index))))
(defun insert (heap key data)
(let ((node (%make-node key data 0))
(array (bin-heap-array heap)))
(perlocate-up array (setf (node-index node)
(vector-push-extend node array)))))
(defun decrease-key (heap node key)
(let ((vector (bin-heap-array heap)))
(when (< (node-key node) key)
(error "Cannot decrease key: new key greater than current key."))
(setf (node-key node) key)
(perlocate-up vector (node-index node))))
(defun extract-node (heap node)
(let ((key (node-key node))
(value (node-data node)))
(decrease-key heap node most-negative-fixnum)
(extract-min heap)
(values value key)))
(defun meld (heap-a heap-b)
  "Melds HEAP-B into HEAP-A and returns HEAP-A, which is destructively
updated to hold the union of both heaps."
(let ((vector (bin-heap-array heap-a)))
(loop for v across (bin-heap-array heap-b)
do (vector-push-extend v vector))
(vector-to-heapvector vector)
heap-a))
(defun alist-to-heap (alist)
"Coerces an ALIST of (KEY . VALUE) conses into a heap."
(let ((node-list (loop for (key . value) in alist
collect (%make-node key value 0)))
(length (length alist)))
(let ((vector (make-array length
:adjustable t
:fill-pointer length
:element-type '(or null node)
:initial-contents node-list)))
(make-instance 'binary-heap :array (vector-to-heapvector vector)))))
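;; Usage sketch (added for illustration; not part of the original file --
;; the keys and values here are made up):
;;
;;   (let ((h (alist-to-heap '((5 . :five) (1 . :one) (3 . :three)))))
;;     (loop :until (empty-p h)
;;           :collect (multiple-value-list (extract-min h))))
;;   ;; => ((:ONE 1) (:THREE 3) (:FIVE 5))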
(defun vector-to-heapvector (vector)
(loop for index from (floor (length vector) 2) downto 0
do (sink vector index)
finally (loop for n across vector
for i from 0 do (setf (node-index n) i))
(return vector)))
| null | https://raw.githubusercontent.com/sfrank/minheap/51cc9edcbbe13d9132fe12b0b197848f31513232/binary-heap.lisp | lisp |
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
without limitation the rights to use, copy, modify, merge, publish,
the following conditions:
The above copyright notice and this permission notice shall be included
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
binary min-heap
(declaim (inline parent left right %make-node))
(declaim (inline swap-nodes)) | MINHEAP is by < > , 2007 - 2012 .
" Software " ) , to deal in the Software without restriction , including
distribute , sublicense , and/or sell copies of the Software , and to
permit persons to whom the Software is furnished to do so , subject to
in all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND ,
MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT .
CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT ,
(defpackage :binary-heap (:use :cl)
(:export
#:binary-heap
#:clear-heap
#:empty-p
#:insert
#:peek-min
#:extract-min
#:extract-node
#:heap-size
#:decrease-key
#:meld
#:alist-to-heap
))
(in-package :binary-heap)
(deftype array-index () `(integer 0 ,(1- array-dimension-limit)))
(defconstant +initial-size+ 50 "initial queue vector size")
(defstruct (node (:constructor %make-node (key data index)))
(key 0 :type fixnum)
(index 0 :type array-index)
(data nil))
(defclass binary-heap ()
((array :accessor bin-heap-array
:type (vector (or null node))
:initarg :array
:initform (make-array +initial-size+
:adjustable t
:fill-pointer 0
:element-type '(or null node)
:initial-element nil))))
(defmethod print-object ((obj binary-heap) stream)
(print-unreadable-object (obj stream :type t :identity t)
(format stream "~4I~:_size: ~A~:_" (heap-size obj))))
(defun parent (k)
(declare (type array-index k))
(floor (1- k) 2))
(defun left (k)
(declare (type (integer 0 #.(floor array-dimension-limit 2)) k))
(1+ (* k 2)))
(defun right (k)
(declare (type (integer 0 #.(floor array-dimension-limit 2)) k))
(* (1+ k) 2))
(defun peek-min (heap)
(let ((node (aref (bin-heap-array heap) 0)))
(values (node-data node)
(node-key node))))
(defun clear-heap (heap)
(setf (fill-pointer (bin-heap-array heap)) 0))
(defun empty-p (heap)
(zerop (fill-pointer (bin-heap-array heap))))
(defun heap-size (heap)
(length (bin-heap-array heap)))
(defun extract-min (heap)
(let ((array (bin-heap-array heap))
(node (aref (bin-heap-array heap) 0)))
(assert node)
(setf (aref array 0) (aref array (1- (length array)))
(aref array (1- (length array))) nil)
(when (> (decf (fill-pointer array)) 1)
(sink array 0))
(values (node-data node)
(node-key node))))
(defun swap-nodes (array i j)
(declare (type array-index i j))
(setf (node-index (aref array i)) j
(node-index (aref array j)) i)
(rotatef (aref array i) (aref array j)))
(defun sink (array index)
(let ((maxindex (1- (length array))))
(if (zerop maxindex)
maxindex
(loop for i = index then j
with j = 0
while (<= (left i) maxindex) do
(cond
((< maxindex (right i))
(setf j (left i)))
((<= (node-key (aref array (left i)))
(node-key (aref array (right i))))
(setf j (left i)))
(t
(setf j (right i))))
(when (<= (node-key (aref array i))
(node-key (aref array j)))
(loop-finish))
(swap-nodes array i j)
finally (return array)))))
(defun perlocate-up (array vindex)
(loop for index = vindex then parent
for parent = (parent index)
with key = (node-key (aref array vindex))
while (and (>= parent 0)
(< key (node-key (aref array parent))))
do (swap-nodes array index parent)
finally (return (aref array index))))
(defun insert (heap key data)
(let ((node (%make-node key data 0))
(array (bin-heap-array heap)))
(perlocate-up array (setf (node-index node)
(vector-push-extend node array)))))
(defun decrease-key (heap node key)
(let ((vector (bin-heap-array heap)))
(when (< (node-key node) key)
(error "Cannot decrease key: new key greater than current key."))
(setf (node-key node) key)
(perlocate-up vector (node-index node))))
(defun extract-node (heap node)
(let ((key (node-key node))
(value (node-data node)))
(decrease-key heap node most-negative-fixnum)
(extract-min heap)
(values value key)))
(defun meld (heap-a heap-b)
"Melds HEAP-A and HEAP-B into a new heap and returns it. HEAP-A is
returned as new union of both heaps."
(let ((vector (bin-heap-array heap-a)))
(loop for v across (bin-heap-array heap-b)
do (vector-push-extend v vector))
(vector-to-heapvector vector)
heap-a))
(defun alist-to-heap (alist)
"Coerces an ALIST of (KEY . VALUE) conses into a heap."
(let ((node-list (loop for (key . value) in alist
collect (%make-node key value 0)))
(length (length alist)))
(let ((vector (make-array length
:adjustable t
:fill-pointer length
:element-type '(or null node)
:initial-contents node-list)))
(make-instance 'binary-heap :array (vector-to-heapvector vector)))))
(defun vector-to-heapvector (vector)
(loop for index from (floor (length vector) 2) downto 0
do (sink vector index)
finally (loop for n across vector
for i from 0 do (setf (node-index n) i))
(return vector)))
|
9b85eeaae43f1305dce9bd96ce53575414191739e66d1452a1dfda7ce3a8febd | kendru/restful-clojure | products.clj | (ns restful-clojure.models.products
(:use korma.core)
(:require [restful-clojure.entities :as e]))
(defn create [product]
(insert e/products
(values product)))
(defn find-all []
(select e/products))
(defn find-by [field value]
(first
(select e/products
(where {field value})
(limit 1))))
(defn find-all-by [field value]
(select e/products
(where {field value})))
(defn find-by-id [id]
(find-by :id id))
(defn count-products []
(let [agg (select e/products
(aggregate (count :*) :cnt))]
(get-in agg [0 :cnt] 0)))
(defn update-product [product]
(update e/products
(set-fields (dissoc product :id))
(where {:id (product :id)})))
(defn delete-product [product]
(delete e/products
(where {:id (product :id)}))) | null | https://raw.githubusercontent.com/kendru/restful-clojure/d321066aa368516aec28a4102cf013f764548505/restful-clojure/src/restful_clojure/models/products.clj | clojure | (ns restful-clojure.models.products
(:use korma.core)
(:require [restful-clojure.entities :as e]))
(defn create [product]
(insert e/products
(values product)))
(defn find-all []
(select e/products))
(defn find-by [field value]
(first
(select e/products
(where {field value})
(limit 1))))
(defn find-all-by [field value]
(select e/products
(where {field value})))
(defn find-by-id [id]
(find-by :id id))
(defn count-products []
(let [agg (select e/products
(aggregate (count :*) :cnt))]
(get-in agg [0 :cnt] 0)))
(defn update-product [product]
(update e/products
(set-fields (dissoc product :id))
(where {:id (product :id)})))
(defn delete-product [product]
(delete e/products
(where {:id (product :id)}))) |
|
92d41882f1b3df04b8f0df54b5705932cf8f29d7387feae6dd5f1b2311b98ad2 | coalton-lang/coalton | utilities.lisp | (in-package #:coalton-tests)
(defun run-coalton-tests ()
(run-package-tests
:packages '(:coalton-tests
:quil-coalton-tests
:thih-coalton-tests)
:interactive t))
(defun set-equalp (set1 set2)
(null (set-exclusive-or set1 set2 :test #'equalp)))
(defun dag-equalp (dag1 dag2)
;; XXX: This will not check ordering of edges within vertices
(set-equalp dag1 dag2))
(defun check-coalton-types (toplevel expected-types)
(multiple-value-bind (form env)
(coalton-impl::process-coalton-toplevel toplevel *package* coalton-impl::*global-environment*)
(declare (ignore form))
(loop :for (symbol . type) :in expected-types
:do (is (coalton-impl/typechecker::type-scheme=
(tc:lookup-value-type env symbol)
(tc:parse-and-resolve-type env type))))))
(defun run-coalton-toplevel-walker (toplevel)
(coalton-impl::collect-toplevel-forms toplevel))
(defun run-coalton-typechecker (toplevel)
(coalton-impl::process-coalton-toplevel toplevel *package* coalton-impl::*global-environment*))
(defun compile-and-load-forms (coalton-forms)
"Write the COALTON-FORMS to a temporary file, compile it to a fasl, then load the compiled file.
Returns (values SOURCE-PATHNAME COMPILED-PATHNAME)."
(uiop:with-temporary-file (:stream out-stream
:pathname input-file
:suffix "lisp"
:direction :output
:keep t)
(dolist (expr coalton-forms)
(prin1 expr out-stream)
(terpri out-stream))
:close-stream
(uiop:with-temporary-file (:pathname output-file
:type #+ccl (pathname-type ccl:*.fasl-pathname*)
#+(not ccl) "fasl"
:keep t)
(compile-file input-file :output-file output-file)
(load output-file)
(values input-file output-file))))
(defmacro with-coalton-compilation ((&key package (muffle 'cl:style-warning)) &body coalton-code)
`(handler-bind
((,muffle #'muffle-warning))
(compile-and-load-forms '(,@(when package `((cl:in-package ,package)))
,@coalton-code))))
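;;; Usage sketch (not part of the original file): compiles and loads a tiny
;;; program through the temporary-file helpers above.  The package name and
;;; the Coalton forms are illustrative assumptions, not taken from this repo.
(defun with-coalton-compilation-example ()
  (with-coalton-compilation (:package :coalton-user)
    (coalton-toplevel
      (define example-answer 42))))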
| null | https://raw.githubusercontent.com/coalton-lang/coalton/1d756c71c033fd750cd4757a3c2e91f6272e8814/tests/utilities.lisp | lisp | XXX: This will not check ordering of edges within vertices | (in-package #:coalton-tests)
(defun run-coalton-tests ()
(run-package-tests
:packages '(:coalton-tests
:quil-coalton-tests
:thih-coalton-tests)
:interactive t))
(defun set-equalp (set1 set2)
(null (set-exclusive-or set1 set2 :test #'equalp)))
(defun dag-equalp (dag1 dag2)
(set-equalp dag1 dag2))
(defun check-coalton-types (toplevel expected-types)
(multiple-value-bind (form env)
(coalton-impl::process-coalton-toplevel toplevel *package* coalton-impl::*global-environment*)
(declare (ignore form))
(loop :for (symbol . type) :in expected-types
:do (is (coalton-impl/typechecker::type-scheme=
(tc:lookup-value-type env symbol)
(tc:parse-and-resolve-type env type))))))
(defun run-coalton-toplevel-walker (toplevel)
(coalton-impl::collect-toplevel-forms toplevel))
(defun run-coalton-typechecker (toplevel)
(coalton-impl::process-coalton-toplevel toplevel *package* coalton-impl::*global-environment*))
(defun compile-and-load-forms (coalton-forms)
"Write the COALTON-FORMS to a temporary file, compile it to a fasl, then load the compiled file.
Returns (values SOURCE-PATHNAME COMPILED-PATHNAME)."
(uiop:with-temporary-file (:stream out-stream
:pathname input-file
:suffix "lisp"
:direction :output
:keep t)
(dolist (expr coalton-forms)
(prin1 expr out-stream)
(terpri out-stream))
:close-stream
(uiop:with-temporary-file (:pathname output-file
:type #+ccl (pathname-type ccl:*.fasl-pathname*)
#+(not ccl) "fasl"
:keep t)
(compile-file input-file :output-file output-file)
(load output-file)
(values input-file output-file))))
(defmacro with-coalton-compilation ((&key package (muffle 'cl:style-warning)) &body coalton-code)
`(handler-bind
((,muffle #'muffle-warning))
(compile-and-load-forms '(,@(when package `((cl:in-package ,package)))
,@coalton-code))))
|
91ae2f885ad8e00887e5f13600134bb905a814a6230d697a8c2ebb9559e73c35 | vascokk/rivus_cep | rivus_cep_event_creator_tests.erl | -module(rivus_cep_event_creator_tests).
-include_lib("eunit/include/eunit.hrl").
create_event_1_test() ->
{ok, Tokens, _Endline} = rivus_cep_scanner:string("define event10 as (attr1, attr2, attr3);"),
{ok, ParseRes} = rivus_cep_parser:parse(Tokens),
?assertEqual({event, {event10, [attr1, attr2, attr3]}}, ParseRes),
{event, EventDef} = ParseRes,
?assertEqual({module, event10}, rivus_cep_event_creator:load_event_mod(EventDef)),
Event = {event10, a, b, c},
?assertEqual(event10, event10:get_param_by_name(Event, name)),
?assertEqual(a, event10:get_param_by_name(Event, attr1)),
?assertEqual(b, event10:get_param_by_name(Event, attr2)),
?assertEqual(c, event10:get_param_by_name(Event, attr3)),
?assertError({case_clause,attr4}, event10:get_param_by_name(Event, attr4)),
?assertEqual([attr1, attr2, attr3], event10:get_param_names()).
| null | https://raw.githubusercontent.com/vascokk/rivus_cep/e9fe6ed79201d852065f7fb2a24a880414031d27/test/rivus_cep_event_creator_tests.erl | erlang | -module(rivus_cep_event_creator_tests).
-include_lib("eunit/include/eunit.hrl").
create_event_1_test() ->
{ok, Tokens, _Endline} = rivus_cep_scanner:string("define event10 as (attr1, attr2, attr3);"),
{ok, ParseRes} = rivus_cep_parser:parse(Tokens),
?assertEqual({event, {event10, [attr1, attr2, attr3]}}, ParseRes),
{event, EventDef} = ParseRes,
?assertEqual({module, event10}, rivus_cep_event_creator:load_event_mod(EventDef)),
Event = {event10, a, b, c},
?assertEqual(event10, event10:get_param_by_name(Event, name)),
?assertEqual(a, event10:get_param_by_name(Event, attr1)),
?assertEqual(b, event10:get_param_by_name(Event, attr2)),
?assertEqual(c, event10:get_param_by_name(Event, attr3)),
?assertError({case_clause,attr4}, event10:get_param_by_name(Event, attr4)),
?assertEqual([attr1, attr2, attr3], event10:get_param_names()).
|
|
86714ef7148fb01e147730ea8cd87ebf276c7b1376d907defa8b28077e154b79 | evolutics/haskell-formatter | TreeFormat.hs | {-|
Description : Parsing nested maps according to a format
-}
module Language.Haskell.Formatter.Internal.TreeFormat
(TreeFormat, Leaf(..), parseYamlFile) where
import qualified Control.Arrow as Arrow
import qualified Data.HashMap.Strict as HashMap
import qualified Data.Map.Strict as Map
import qualified Data.Monoid as Monoid
import qualified Data.Scientific as Scientific
import qualified Data.Text as Text
import qualified Data.Yaml as Yaml
import qualified Language.Haskell.Formatter.Internal.MapTree as MapTree
import qualified Language.Haskell.Formatter.Internal.Newline as Newline
type TreeFormat a = MapTree.MapForest String (Leaf a)
data Leaf a = Boolean (RawLeaf Bool a)
| LimitedInteger (RawLeaf Int a)
| SingleFloating (RawLeaf Float a)
type RawLeaf a b = a -> b -> b
parseYamlFile :: TreeFormat a -> a -> FilePath -> IO (Either String a)
parseYamlFile format ball file
= do maybeValue <- Yaml.decodeFileEither file
let interpretation
= case maybeValue of
Left exception -> Left $ show exception
Right value -> defaultInterpret format value ball
return $ Arrow.left fileError interpretation
where fileError message = Newline.joinSeparatedLines [introduction, message]
introduction = Monoid.mappend file ":"
defaultInterpret :: TreeFormat a -> Yaml.Value -> a -> Either String a
defaultInterpret format value ball
= if MapTree.isEmpty errors then Right interpretation else
Left $ MapTree.indentTree errors
where (errors, interpretation) = interpret format value ball
interpret ::
TreeFormat a -> Yaml.Value -> a -> (MapTree.MapTree String String, a)
interpret formatMap (Yaml.Object rawValueMap) ball = (errorNode, ball')
where errorNode = MapTree.Node $ Map.mapMaybe id errorTree
(ball', errorTree) = Map.mapAccumWithKey move ball valueMap
move ballPart key value = (ballPart', maybeErrors)
where (maybeErrors, ballPart') = matchTree maybeFormat value ballPart
maybeFormat = Map.lookup key formatMap
valueMap = Map.mapKeys Text.unpack $ orderedMap rawValueMap
orderedMap = Map.fromList . HashMap.toList
interpret _ value ball = (errorLeaf, ball)
where errorLeaf = MapTree.Leaf $ unexpectedMessage "a map" value
matchTree ::
Maybe (MapTree.MapTree String (Leaf a)) ->
Yaml.Value -> a -> (Maybe (MapTree.MapTree String String), a)
matchTree Nothing _ ball = (Just $ MapTree.Leaf message, ball)
where message = "Unexpected key."
matchTree (Just (MapTree.Leaf leaf)) value ball
= case matchLeaf leaf value ball of
Left message -> (Just $ MapTree.Leaf message, ball)
Right ball' -> (Nothing, ball')
matchTree (Just (MapTree.Node node)) value ball = (maybeErrors, ball')
where maybeErrors = if MapTree.isEmpty errors then Nothing else Just errors
(errors, ball') = interpret node value ball
matchLeaf :: Leaf a -> Yaml.Value -> a -> Either String a
matchLeaf (Boolean go) (Yaml.Bool boolean) ball = Right $ go boolean ball
matchLeaf (LimitedInteger go) value@(Yaml.Number number) ball
= case Scientific.toBoundedInteger number of
Nothing -> Left message
where message = unexpectedMessage "a limited integer" value
Just integer -> Right $ go integer ball
matchLeaf (SingleFloating go) (Yaml.Number number) ball
= Right $ go floating ball
where floating = Scientific.toRealFloat number
matchLeaf format value _ = Left $ unexpectedMessage expected value
where expected
= case format of
Boolean _ -> "a Boolean"
LimitedInteger _ -> "a limited integer"
SingleFloating _ -> "a single-precision floating-point number"
unexpectedMessage :: String -> Yaml.Value -> String
unexpectedMessage expected actualValue
= Newline.joinSeparatedLines [introduction, actual]
where introduction = concat ["Expected ", expected, ", but got:"]
actual = show $ Yaml.encode actualValue
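-- Usage sketch (not part of the original module). It relies on
-- 'MapTree.MapForest' being a 'Map.Map' of 'MapTree.MapTree' values with
-- 'MapTree.Leaf' and 'MapTree.Node' constructors, exactly as the
-- functions above already assume; the key names are illustrative only.
exampleFormat :: TreeFormat (Bool, Int)
exampleFormat =
  Map.fromList
    [ ("verbose", MapTree.Leaf (Boolean (\flag (_, n) -> (flag, n))))
    , ("limits", MapTree.Node (Map.fromList
        [("maximum", MapTree.Leaf (LimitedInteger (\n (flag, _) -> (flag, n))))]))
    ]
-- 'parseYamlFile exampleFormat (False, 0) "settings.yaml"' would then fold
-- a YAML file such as "verbose: true" and "limits: {maximum: 3}" into the
-- (Bool, Int) pair, reporting unexpected keys or types as a Left message.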
| null | https://raw.githubusercontent.com/evolutics/haskell-formatter/3919428e312db62b305de4dd1c84887e6cfa9478/src/library/Language/Haskell/Formatter/Internal/TreeFormat.hs | haskell | |
Description : Parsing nested maps according to a format
| module Language.Haskell.Formatter.Internal.TreeFormat
(TreeFormat, Leaf(..), parseYamlFile) where
import qualified Control.Arrow as Arrow
import qualified Data.HashMap.Strict as HashMap
import qualified Data.Map.Strict as Map
import qualified Data.Monoid as Monoid
import qualified Data.Scientific as Scientific
import qualified Data.Text as Text
import qualified Data.Yaml as Yaml
import qualified Language.Haskell.Formatter.Internal.MapTree as MapTree
import qualified Language.Haskell.Formatter.Internal.Newline as Newline
type TreeFormat a = MapTree.MapForest String (Leaf a)
data Leaf a = Boolean (RawLeaf Bool a)
| LimitedInteger (RawLeaf Int a)
| SingleFloating (RawLeaf Float a)
type RawLeaf a b = a -> b -> b
parseYamlFile :: TreeFormat a -> a -> FilePath -> IO (Either String a)
parseYamlFile format ball file
= do maybeValue <- Yaml.decodeFileEither file
let interpretation
= case maybeValue of
Left exception -> Left $ show exception
Right value -> defaultInterpret format value ball
return $ Arrow.left fileError interpretation
where fileError message = Newline.joinSeparatedLines [introduction, message]
introduction = Monoid.mappend file ":"
defaultInterpret :: TreeFormat a -> Yaml.Value -> a -> Either String a
defaultInterpret format value ball
= if MapTree.isEmpty errors then Right interpretation else
Left $ MapTree.indentTree errors
where (errors, interpretation) = interpret format value ball
interpret ::
TreeFormat a -> Yaml.Value -> a -> (MapTree.MapTree String String, a)
interpret formatMap (Yaml.Object rawValueMap) ball = (errorNode, ball')
where errorNode = MapTree.Node $ Map.mapMaybe id errorTree
(ball', errorTree) = Map.mapAccumWithKey move ball valueMap
move ballPart key value = (ballPart', maybeErrors)
where (maybeErrors, ballPart') = matchTree maybeFormat value ballPart
maybeFormat = Map.lookup key formatMap
valueMap = Map.mapKeys Text.unpack $ orderedMap rawValueMap
orderedMap = Map.fromList . HashMap.toList
interpret _ value ball = (errorLeaf, ball)
where errorLeaf = MapTree.Leaf $ unexpectedMessage "a map" value
matchTree ::
Maybe (MapTree.MapTree String (Leaf a)) ->
Yaml.Value -> a -> (Maybe (MapTree.MapTree String String), a)
matchTree Nothing _ ball = (Just $ MapTree.Leaf message, ball)
where message = "Unexpected key."
matchTree (Just (MapTree.Leaf leaf)) value ball
= case matchLeaf leaf value ball of
Left message -> (Just $ MapTree.Leaf message, ball)
Right ball' -> (Nothing, ball')
matchTree (Just (MapTree.Node node)) value ball = (maybeErrors, ball')
where maybeErrors = if MapTree.isEmpty errors then Nothing else Just errors
(errors, ball') = interpret node value ball
matchLeaf :: Leaf a -> Yaml.Value -> a -> Either String a
matchLeaf (Boolean go) (Yaml.Bool boolean) ball = Right $ go boolean ball
matchLeaf (LimitedInteger go) value@(Yaml.Number number) ball
= case Scientific.toBoundedInteger number of
Nothing -> Left message
where message = unexpectedMessage "a limited integer" value
Just integer -> Right $ go integer ball
matchLeaf (SingleFloating go) (Yaml.Number number) ball
= Right $ go floating ball
where floating = Scientific.toRealFloat number
matchLeaf format value _ = Left $ unexpectedMessage expected value
where expected
= case format of
Boolean _ -> "a Boolean"
LimitedInteger _ -> "a limited integer"
SingleFloating _ -> "a single-precision floating-point number"
unexpectedMessage :: String -> Yaml.Value -> String
unexpectedMessage expected actualValue
= Newline.joinSeparatedLines [introduction, actual]
where introduction = concat ["Expected ", expected, ", but got:"]
actual = show $ Yaml.encode actualValue
|
22456b3475b4a5c236b0e90d742f94c9638ee0c6e29ca5c322afd3e2b14f7acc | vikram/lisplibraries | glyf.lisp | Copyright ( c ) 2006 , All Rights Reserved
;;;
;;; Redistribution and use in source and binary forms, with or without
;;; modification, are permitted provided that the following conditions
;;; are met:
;;;
;;; * Redistributions of source code must retain the above copyright
;;; notice, this list of conditions and the following disclaimer.
;;;
;;; * Redistributions in binary form must reproduce the above
;;; copyright notice, this list of conditions and the following
;;; disclaimer in the documentation and/or other materials
;;; provided with the distribution.
;;;
;;; THIS SOFTWARE IS PROVIDED BY THE AUTHOR 'AS IS' AND ANY EXPRESSED
;;; OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
;;; WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
;;; ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
;;; DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
;;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
;;; GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
;;; INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
;;; WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
;;; NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
;;; SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
;;;
;;; Loading data from the 'glyf' table.
;;;
;;; glyf.lisp,v 1.13 2006/03/23 22:22:01 xach Exp
(in-package #:zpb-ttf)
(defclass control-point ()
((x :initarg :x :accessor x)
(y :initarg :y :accessor y)
(on-curve-p :initarg :on-curve-p :reader on-curve-p)))
(defun make-control-point (x y on-curve-p)
(make-instance 'control-point
:x x
:y y
:on-curve-p on-curve-p))
(defmethod print-object ((control-point control-point) stream)
(print-unreadable-object (control-point stream :type t)
(format stream "~D,~D~:[~;*~]"
(x control-point) (y control-point) (on-curve-p control-point))))
(defmacro do-contour-segments* ((p1 p2) contour &body body)
(let ((length (gensym))
(i (gensym))
(stack (gensym))
(next (gensym))
(next-point (gensym "NEXT-POINT"))
(midpoint (gensym "MIDPOINT"))
(contour* (gensym))
(loop (gensym "LOOP"))
(body-tag (gensym "BODY"))
(mid p1)
(end p2))
`(let* ((,i 1)
(,contour* ,contour)
(,length (length ,contour*))
,stack ,next ,mid ,end)
(unless (zerop ,length)
(flet ((,next-point ()
(when (< ,i ,length)
(prog1 (aref ,contour* ,i) (incf ,i))))
(,midpoint (p0 p1)
(make-control-point (/ (+ (x p0) (x p1)) 2)
(/ (+ (y p0) (y p1)) 2)
t)))
(tagbody
,loop
(setf ,mid nil
,next (,next-point))
(unless ,next
(setf ,mid ,stack
,end (aref ,contour* 0))
(go ,body-tag))
(if (on-curve-p ,next)
(setf ,end ,next
,mid ,stack
,stack nil)
(cond (,stack
(setf ,mid ,stack
,end (,midpoint ,stack ,next)
,stack ,next))
(t
(setf ,stack ,next)
(go ,loop))))
,body-tag
,@body
(when ,next
(go ,loop))))))))
(defmacro do-contour-segments ((p0 p1 p2) contour &body body)
"A contour is made up of segments. A segment may be a straight line
or a curve. For each segment, bind the P0 and P2 variables to the
start and end points of the segment. If the segment is a curve, set P1
to the control point of the curve, otherwise set P1 to NIL."
;; This macro started out life as a function and was converted.
(let ((start p0)
(contour* (gensym "CONTOUR")))
`(let ((,contour* ,contour))
(when (plusp (length ,contour*))
(let ((,start (aref ,contour* 0)))
(do-contour-segments* (,p1 ,p2)
,contour*
(progn ,@body)
(setf ,start ,p2)))))))
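;; Usage sketch (not part of the original file): walk one contour and print
;; each segment.  P1 is NIL for a straight segment and the off-curve control
;; point for a curve, as described in the docstring above.
(defun print-contour-segments (contour)
  (do-contour-segments (p0 p1 p2) contour
    (format t "~&~:[line~;curve~] from ~A to ~A~@[ via ~A~]~%"
            p1 p0 p2 p1)))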
(defun explicit-contour-points (contour)
(let ((new-contour (make-array (length contour)
:adjustable t
:fill-pointer 0)))
(when (plusp (length contour))
(vector-push-extend (aref contour 0) new-contour))
(do-contour-segments* (p1 p2)
contour
(when p1
(vector-push-extend p1 new-contour))
(vector-push-extend p2 new-contour))
new-contour))
;;; Locating a glyph's contours and bounding box in the font loader's
;;; stream, and loading them
(defparameter *empty-contours*
(make-array 0 :element-type '(signed-byte 16)))
(defparameter *empty-bounding-box*
(make-array 4
:initial-element 0
:element-type '(signed-byte 16)))
(defun empty-bounding-box ()
(copy-seq *empty-bounding-box*))
(defun empty-contours ()
(copy-seq *empty-contours*))
(defun dump-compound-flags (flags)
(format t "XXX flags=~16,'0B~%" flags)
(let ((meanings '((0 . ARG_1_AND_2_ARE_WORDS)
(1 . ARGS_ARE_XY_VALUES)
(2 . ROUND_XY_TO_GRID)
(3 . WE_HAVE_A_SCALE)
(4 . OBSOLETE)
(5 . MORE_COMPONENTS)
(6 . WE_HAVE_AN_X_AND_Y_SCALE)
(7 . WE_HAVE_A_TWO_BY_TWO)
(8 . WE_HAVE_INSTRUCTIONS)
(9 . USE_MY_METRICS)
(10 . OVERLAP_COMPOUND))))
(loop for ((bit . meaning)) on meanings
do (when (logbitp bit flags)
(format t "...~A~%" meaning)))))
(defun transform-option-count (flags)
(let ((scale-p 3)
(xy-scale-p 6)
(2*2-scale-p 7))
(cond ((logbitp scale-p flags) 1)
((logbitp xy-scale-p flags) 2)
((logbitp 2*2-scale-p flags) 4)
(t 0))))
(defun make-transformer (a b c d e f)
"Given the elements of the transformation matrix specified by A, B,
C, D, E, and F, return a function of two arguments that returns the
arguments transformed as multiple values.
Ref: "
(let ((m (max (abs a) (abs b)))
(n (max (abs c) (abs d))))
(when (<= (abs (- (abs a) (abs b))) 33/65536)
(setf m (* m 2)))
(when (<= (abs (- (abs c) (abs d))) 33/65536)
(setf n (* n 2)))
(lambda (x y)
(values (* m (+ (* (/ a m) x)
(* (/ c m) y)
e))
(* n (+ (* (/ b n) x)
(* (/ d n) y)
f))))))
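;; Worked example (not part of the original file): an identity scale with a
;; translation of (10, -5).  Here M = N = 1, so the point (100, 200) maps to
;; (110.0 195.0).
(defun translation-transformer-example ()
  (multiple-value-list
   (funcall (make-transformer 1.0 0.0 0.0 1.0 10 -5) 100 200)))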
(defun transform-contours (fn contours)
"Call FN with the X and Y coordinates of each point of each contour
in the vector CONTOURS. FN should return two values, which are used to
update the X and Y values of each point."
(loop for contour across contours do
(loop for p across contour do
(setf (values (x p) (y p))
(funcall fn (x p) (y p))))))
(defun merge-contours (contours-list)
(let* ((total-contours (loop for contours in contours-list
summing (length contours)))
(merged (make-array total-contours))
(i 0))
(dolist (contours contours-list merged)
(loop for contour across contours do
(setf (aref merged i) contour)
(incf i)))))
(defun read-compound-contours (loader)
(let ((contours-list '())
(stream (input-stream loader)))
(loop
(let ((flags (read-uint16 stream))
(font-index (read-uint16 stream)))
(let ((position (file-position stream))
(contours (read-contours-at-index font-index loader)))
(push contours contours-list)
(file-position stream position)
(let ((args-words-p (logbitp 0 flags))
(args-xy-values-p (logbitp 1 flags))
(more-components-p (logbitp 5 flags))
arg1 arg2)
(cond ((and args-words-p args-xy-values-p)
(setf arg1 (read-int16 stream)
arg2 (read-int16 stream)))
(args-words-p
(setf arg1 (read-uint16 stream)
arg2 (read-uint16 stream))
(error "Compound glyphs relative to indexes not yet supported"))
(args-xy-values-p
(setf arg1 (read-int8 stream)
arg2 (read-int8 stream)))
(t
(setf arg1 (read-uint8 stream)
arg2 (read-uint8 stream))
(error "Compound glyphs relative to indexes not yet supported")))
;; Transform according to the transformation matrix
(let ((a 1.0) (b 0.0) (c 0.0) (d 1.0)
(e arg1) (f arg2))
(ecase (transform-option-count flags)
(0)
(1
(setf a (setf d (read-fixed2.14 stream))))
(2
(setf a (read-fixed2.14 stream)
d (read-fixed2.14 stream)))
(4
(setf a (read-fixed2.14 stream)
b (read-fixed2.14 stream)
c (read-fixed2.14 stream)
d (read-fixed2.14 stream))))
(let ((transform-fn (make-transformer a b c d e f)))
(transform-contours transform-fn contours)))
(unless more-components-p
(return (merge-contours contours-list)))))))))
(defun read-points-vector (stream flags count axis)
(let ((points (make-array count :fill-pointer 0))
(short-index (if (eql axis :x) 1 2))
(same-index (if (eql axis :x) 4 5)))
(flet ((save-point (point)
(vector-push point points)))
(loop for flag across flags
for short-p = (logbitp short-index flag)
for same-p = (logbitp same-index flag)
do (cond (short-p
(let ((new-point (read-uint8 stream)))
(save-point (if same-p new-point (- new-point)))))
(t
(if same-p
(save-point 0)
(save-point (read-int16 stream)))))))
points))
(defun read-simple-contours (contour-count stream)
"With the stream positioned immediately after the glyph bounding
box, read the contours data from STREAM and return it as a vector."
(let ((contour-endpoint-indexes (make-array contour-count)))
(loop for i below contour-count
for endpoint-index = (read-uint16 stream)
do (setf (svref contour-endpoint-indexes i) endpoint-index))
;; instructions
(let ((n-points (1+ (svref contour-endpoint-indexes
(1- contour-count))))
(instruction-length (read-uint16 stream)))
(loop for i below instruction-length
do (read-byte stream))
;; read the flags
(let ((flags (make-array n-points)))
(loop with i = 0
while (< i n-points) do
(let ((flag-byte (read-uint8 stream)))
(setf (svref flags i) flag-byte)
(incf i)
(when (logbitp 3 flag-byte)
(let ((n-repeats (read-uint8 stream)))
(loop repeat n-repeats do
(setf (svref flags i) flag-byte)
(incf i))))))
(let ((x-points (read-points-vector stream flags n-points :x ))
(y-points (read-points-vector stream flags n-points :y))
(control-points (make-array n-points :fill-pointer 0))
(contours (make-array contour-count)))
(loop for x-point across x-points
for y-point across y-points
for flag across flags
for x = x-point then (+ x x-point)
for y = y-point then (+ y y-point)
do
(vector-push-extend (make-control-point x y
(logbitp 0 flag))
control-points))
(loop for start = 0 then (1+ end)
for end across contour-endpoint-indexes
for i from 0
do (setf (svref contours i)
(subseq control-points start (1+ end))))
contours)))))
(defun read-contours-at-index (index loader)
"Read the contours at glyph index INDEX, discarding bounding box
information."
(let ((stream (input-stream loader)))
(file-position stream (+ (table-position "glyf" loader)
(glyph-location index loader)))
(let ((contour-count (read-int16 stream))
(xmin (read-int16 stream))
(ymin (read-int16 stream))
(xmax (read-int16 stream))
(ymax (read-int16 stream)))
(declare (ignore xmin ymin xmax ymax))
(if (= contour-count -1)
(read-compound-contours loader)
(read-simple-contours contour-count stream)))))
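;; Usage sketch (not part of the original file).  Names outside this file,
;; OPEN-FONT-LOADER and CHAR-INDEX, are assumptions about the rest of the
;; zpb-ttf API and may differ in the actual library.
;; (let* ((loader (open-font-loader #p"times.ttf"))
;;        (contours (read-contours-at-index (char-index #\A loader) loader)))
;;   (loop for contour across contours sum (length contour)))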
| null | https://raw.githubusercontent.com/vikram/lisplibraries/105e3ef2d165275eb78f36f5090c9e2cdd0754dd/site/zpb-ttf-0.7/glyf.lisp | lisp |
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials
provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR 'AS IS' AND ANY EXPRESSED
OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Loading data from the 'glyf' table.
This macro started out life as a function and was converted.
Locating a glyph's contours and bounding box in the font loader's
stream, and loading them
instructions
read the flags | Copyright ( c ) 2006 , All Rights Reserved
DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL
INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY ,
glyf.lisp , v 1.13 2006/03/23 22:22:01 xach Exp
(in-package #:zpb-ttf)
(defclass control-point ()
((x :initarg :x :accessor x)
(y :initarg :y :accessor y)
(on-curve-p :initarg :on-curve-p :reader on-curve-p)))
(defun make-control-point (x y on-curve-p)
(make-instance 'control-point
:x x
:y y
:on-curve-p on-curve-p))
(defmethod print-object ((control-point control-point) stream)
(print-unreadable-object (control-point stream :type t)
(format stream "~D,~D~:[~;*~]"
(x control-point) (y control-point) (on-curve-p control-point))))
(defmacro do-contour-segments* ((p1 p2) contour &body body)
(let ((length (gensym))
(i (gensym))
(stack (gensym))
(next (gensym))
(next-point (gensym "NEXT-POINT"))
(midpoint (gensym "MIDPOINT"))
(contour* (gensym))
(loop (gensym "LOOP"))
(body-tag (gensym "BODY"))
(mid p1)
(end p2))
`(let* ((,i 1)
(,contour* ,contour)
(,length (length ,contour*))
,stack ,next ,mid ,end)
(unless (zerop ,length)
(flet ((,next-point ()
(when (< ,i ,length)
(prog1 (aref ,contour* ,i) (incf ,i))))
(,midpoint (p0 p1)
(make-control-point (/ (+ (x p0) (x p1)) 2)
(/ (+ (y p0) (y p1)) 2)
t)))
(tagbody
,loop
(setf ,mid nil
,next (,next-point))
(unless ,next
(setf ,mid ,stack
,end (aref ,contour* 0))
(go ,body-tag))
(if (on-curve-p ,next)
(setf ,end ,next
,mid ,stack
,stack nil)
(cond (,stack
(setf ,mid ,stack
,end (,midpoint ,stack ,next)
,stack ,next))
(t
(setf ,stack ,next)
(go ,loop))))
,body-tag
,@body
(when ,next
(go ,loop))))))))
(defmacro do-contour-segments ((p0 p1 p2) contour &body body)
"A contour is made up of segments. A segment may be a straight line
or a curve. For each segment, bind the P0 and P2 variables to the
start and end points of the segment. If the segment is a curve, set P1
to the control point of the curve, otherwise set P1 to NIL."
(let ((start p0)
(contour* (gensym "CONTOUR")))
`(let ((,contour* ,contour))
(when (plusp (length ,contour*))
(let ((,start (aref ,contour* 0)))
(do-contour-segments* (,p1 ,p2)
,contour*
(progn ,@body)
(setf ,start ,p2)))))))
(defun explicit-contour-points (contour)
(let ((new-contour (make-array (length contour)
:adjustable t
:fill-pointer 0)))
(when (plusp (length contour))
(vector-push-extend (aref contour 0) new-contour))
(do-contour-segments* (p1 p2)
contour
(when p1
(vector-push-extend p1 new-contour))
(vector-push-extend p2 new-contour))
new-contour))
(defparameter *empty-contours*
(make-array 0 :element-type '(signed-byte 16)))
(defparameter *empty-bounding-box*
(make-array 4
:initial-element 0
:element-type '(signed-byte 16)))
(defun empty-bounding-box ()
(copy-seq *empty-bounding-box*))
(defun empty-contours ()
(copy-seq *empty-contours*))
(defun dump-compound-flags (flags)
(format t "XXX flags=~16,'0B~%" flags)
(let ((meanings '((0 . ARG_1_AND_2_ARE_WORDS)
(1 . ARGS_ARE_XY_VALUES)
(2 . ROUND_XY_TO_GRID)
(3 . WE_HAVE_A_SCALE)
(4 . OBSOLETE)
(5 . MORE_COMPONENTS)
(6 . WE_HAVE_AN_X_AND_Y_SCALE)
(7 . WE_HAVE_A_TWO_BY_TWO)
(8 . WE_HAVE_INSTRUCTIONS)
(9 . USE_MY_METRICS)
(10 . OVERLAP_COMPOUND))))
(loop for ((bit . meaning)) on meanings
do (when (logbitp bit flags)
(format t "...~A~%" meaning)))))
(defun transform-option-count (flags)
(let ((scale-p 3)
(xy-scale-p 6)
(2*2-scale-p 7))
(cond ((logbitp scale-p flags) 1)
((logbitp xy-scale-p flags) 2)
((logbitp 2*2-scale-p flags) 4)
(t 0))))
(defun make-transformer (a b c d e f)
"Given the elements of the transformation matrix specified by A, B,
C, D, E, and F, return a function of two arguments that returns the
arguments transformed as multiple values.
Ref: "
(let ((m (max (abs a) (abs b)))
(n (max (abs c) (abs d))))
(when (<= (abs (- (abs a) (abs b))) 33/65536)
(setf m (* m 2)))
(when (<= (abs (- (abs c) (abs d))) 33/65536)
(setf n (* n 2)))
(lambda (x y)
(values (* m (+ (* (/ a m) x)
(* (/ c m) y)
e))
(* n (+ (* (/ b n) x)
(* (/ d n) y)
f))))))
(defun transform-contours (fn contours)
"Call FN with the X and Y coordinates of each point of each contour
in the vector CONTOURS. FN should return two values, which are used to
update the X and Y values of each point."
(loop for contour across contours do
(loop for p across contour do
(setf (values (x p) (y p))
(funcall fn (x p) (y p))))))
(defun merge-contours (contours-list)
(let* ((total-contours (loop for contours in contours-list
summing (length contours)))
(merged (make-array total-contours))
(i 0))
(dolist (contours contours-list merged)
(loop for contour across contours do
(setf (aref merged i) contour)
(incf i)))))
(defun read-compound-contours (loader)
(let ((contours-list '())
(stream (input-stream loader)))
(loop
(let ((flags (read-uint16 stream))
(font-index (read-uint16 stream)))
(let ((position (file-position stream))
(contours (read-contours-at-index font-index loader)))
(push contours contours-list)
(file-position stream position)
(let ((args-words-p (logbitp 0 flags))
(args-xy-values-p (logbitp 1 flags))
(more-components-p (logbitp 5 flags))
arg1 arg2)
(cond ((and args-words-p args-xy-values-p)
(setf arg1 (read-int16 stream)
arg2 (read-int16 stream)))
(args-words-p
(setf arg1 (read-uint16 stream)
arg2 (read-uint16 stream))
(error "Compound glyphs relative to indexes not yet supported"))
(args-xy-values-p
(setf arg1 (read-int8 stream)
arg2 (read-int8 stream)))
(t
(setf arg1 (read-uint8 stream)
arg2 (read-uint8 stream))
(error "Compound glyphs relative to indexes not yet supported")))
;; Transform according to the transformation matrix
(let ((a 1.0) (b 0.0) (c 0.0) (d 1.0)
(e arg1) (f arg2))
(ecase (transform-option-count flags)
(0)
(1
(setf a (setf d (read-fixed2.14 stream))))
(2
(setf a (read-fixed2.14 stream)
d (read-fixed2.14 stream)))
(4
(setf a (read-fixed2.14 stream)
b (read-fixed2.14 stream)
c (read-fixed2.14 stream)
d (read-fixed2.14 stream))))
(let ((transform-fn (make-transformer a b c d e f)))
(transform-contours transform-fn contours)))
(unless more-components-p
(return (merge-contours contours-list)))))))))
(defun read-points-vector (stream flags count axis)
(let ((points (make-array count :fill-pointer 0))
(short-index (if (eql axis :x) 1 2))
(same-index (if (eql axis :x) 4 5)))
(flet ((save-point (point)
(vector-push point points)))
(loop for flag across flags
for short-p = (logbitp short-index flag)
for same-p = (logbitp same-index flag)
do (cond (short-p
(let ((new-point (read-uint8 stream)))
(save-point (if same-p new-point (- new-point)))))
(t
(if same-p
(save-point 0)
(save-point (read-int16 stream)))))))
points))
(defun read-simple-contours (contour-count stream)
"With the stream positioned immediately after the glyph bounding
box, read the contours data from STREAM and return it as a vector."
(let ((contour-endpoint-indexes (make-array contour-count)))
(loop for i below contour-count
for endpoint-index = (read-uint16 stream)
do (setf (svref contour-endpoint-indexes i) endpoint-index))
(let ((n-points (1+ (svref contour-endpoint-indexes
(1- contour-count))))
(instruction-length (read-uint16 stream)))
(loop for i below instruction-length
do (read-byte stream))
(let ((flags (make-array n-points)))
(loop with i = 0
while (< i n-points) do
(let ((flag-byte (read-uint8 stream)))
(setf (svref flags i) flag-byte)
(incf i)
(when (logbitp 3 flag-byte)
(let ((n-repeats (read-uint8 stream)))
(loop repeat n-repeats do
(setf (svref flags i) flag-byte)
(incf i))))))
(let ((x-points (read-points-vector stream flags n-points :x ))
(y-points (read-points-vector stream flags n-points :y))
(control-points (make-array n-points :fill-pointer 0))
(contours (make-array contour-count)))
(loop for x-point across x-points
for y-point across y-points
for flag across flags
for x = x-point then (+ x x-point)
for y = y-point then (+ y y-point)
do
(vector-push-extend (make-control-point x y
(logbitp 0 flag))
control-points))
(loop for start = 0 then (1+ end)
for end across contour-endpoint-indexes
for i from 0
do (setf (svref contours i)
(subseq control-points start (1+ end))))
contours)))))
(defun read-contours-at-index (index loader)
"Read the contours at glyph index INDEX, discarding bounding box
information."
(let ((stream (input-stream loader)))
(file-position stream (+ (table-position "glyf" loader)
(glyph-location index loader)))
(let ((contour-count (read-int16 stream))
(xmin (read-int16 stream))
(ymin (read-int16 stream))
(xmax (read-int16 stream))
(ymax (read-int16 stream)))
(declare (ignore xmin ymin xmax ymax))
(if (= contour-count -1)
(read-compound-contours loader)
(read-simple-contours contour-count stream)))))
|
69e48b9157f2d3c97d6df95880bcec7a77eb931790b1c09225f6b37d895cab27 | pezipink/fairylog | info.rkt | #lang info
(define collection "fairylog")
(define version "0.0.1")
(define deps
'("base"))
(define build-deps
'("scribble-lib"
"racket-doc"))
(define pkg-desc "Verilog dialect")
(define pkg-authors '(""))
(define scribblings '(("scribblings/fairylog.scrbl" ())))
| null | https://raw.githubusercontent.com/pezipink/fairylog/f0c1d0d82e2ed9ff02486ddd91a0ede5c5483ef7/info.rkt | racket | #lang info
(define collection "fairylog")
(define version "0.0.1")
(define deps
'("base"))
(define build-deps
'("scribble-lib"
"racket-doc"))
(define pkg-desc "Verilog dialect")
(define pkg-authors '(""))
(define scribblings '(("scribblings/fairylog.scrbl" ())))
|
|
8342385841f6bf8c0a43eed41aa05d49159a9ed6016276e9a918ab3322f9c5d5 | tolysz/prepare-ghcjs | Void.hs | {-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE EmptyCase #-}
{-# LANGUAGE Safe #-}
{-# LANGUAGE StandaloneDeriving #-}
-----------------------------------------------------------------------------
-- |
-- Copyright : (C) 2008-2014
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : < >
-- Stability : provisional
-- Portability : portable
--
-- A logically uninhabited data type, used to indicate that a given
-- term should not exist.
--
-- @since 4.8.0.0
----------------------------------------------------------------------------
module Data.Void
( Void
, absurd
, vacuous
) where
import Control.Exception
import Data.Data
import Data.Ix
import GHC.Generics
-- | Uninhabited data type
--
-- @since 4.8.0.0
data Void deriving (Generic)
deriving instance Data Void
instance Eq Void where
_ == _ = True
instance Ord Void where
compare _ _ = EQ
-- | Reading a 'Void' value is always a parse error, considering
-- 'Void' as a data type with no constructors.
instance Read Void where
readsPrec _ _ = []
instance Show Void where
showsPrec _ = absurd
instance Ix Void where
range _ = []
index _ = absurd
inRange _ = absurd
rangeSize _ = 0
instance Exception Void
-- | Since 'Void' values logically don't exist, this witnesses the
-- logical reasoning tool of \"ex falso quodlibet\".
--
-- @since 4.8.0.0
absurd :: Void -> a
absurd a = case a of {}
-- | If 'Void' is uninhabited then any 'Functor' that holds only
-- values of type 'Void' is holding no values.
--
-- @since 4.8.0.0
vacuous :: Functor f => f Void -> f a
vacuous = fmap absurd
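-- Usage sketch (not part of the original module): 'absurd' discharges an
-- impossible alternative, for example when an 'Either' can only ever be
-- 'Right':
--
-- > unwrap :: Either Void a -> a
-- > unwrap (Left v)  = absurd v
-- > unwrap (Right x) = x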
| null | https://raw.githubusercontent.com/tolysz/prepare-ghcjs/8499e14e27854a366e98f89fab0af355056cf055/spec-lts8/base-pure/Data/Void.hs | haskell | # LANGUAGE DeriveDataTypeable #
# LANGUAGE Safe #
---------------------------------------------------------------------------
|
License : BSD-style (see the file libraries/base/LICENSE)
Stability : provisional
Portability : portable
A logically uninhabited data type, used to indicate that a given
term should not exist.
--------------------------------------------------------------------------
| Uninhabited data type
| Reading a 'Void' value is always a parse error, considering
'Void' as a data type with no constructors.
| Since 'Void' values logically don't exist, this witnesses the
| If 'Void' is uninhabited then any 'Functor' that holds only
values of type 'Void' is holding no values.
| {-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE EmptyCase #-}
{-# LANGUAGE StandaloneDeriving #-}
-- Copyright : (C) 2008-2014
-- Maintainer : < >
-- @since 4.8.0.0
module Data.Void
( Void
, absurd
, vacuous
) where
import Control.Exception
import Data.Data
import Data.Ix
import GHC.Generics
-- @since 4.8.0.0
data Void deriving (Generic)
deriving instance Data Void
instance Eq Void where
_ == _ = True
instance Ord Void where
compare _ _ = EQ
instance Read Void where
readsPrec _ _ = []
instance Show Void where
showsPrec _ = absurd
instance Ix Void where
range _ = []
index _ = absurd
inRange _ = absurd
rangeSize _ = 0
instance Exception Void
-- logical reasoning tool of \"ex falso quodlibet\".
-- @since 4.8.0.0
absurd :: Void -> a
absurd a = case a of {}
-- @since 4.8.0.0
vacuous :: Functor f => f Void -> f a
vacuous = fmap absurd
|
b39098c9eb0fba81545c6583196be24f3946091c71e710949871ef17e0a57e4c | shoreleave/shoreleave-browser | webstorage.cljs | (ns shoreleave.browser.storage.webstorage
"An idiomatic interface to the browser's storage mechanisms (local and sessions)"
(:require [cljs.reader :as reader]
[goog.storage.mechanism.HTML5WebStorage :as html5webstorage]
[goog.iter :as g-iter]))
;; Google Closure attaches a common prototype to all browser storage systems called, `WebStorage`.
;; Shoreleave extends this type, to extend ClojureScript functionality/interop to all browsers storages.
;; WebStorage support
;; ----------------------
;;
;; For general information on localStorage, please see the docs in `localstorage.cljs`
;; For general information on sessionStorage, please see the docs in `sessionstorage.cljs`
;;
;; Shoreleave's generic storage support is built against Closure's [ interface]( -library.googlecode.com/svn/docs/class_goog_storage_mechanism_HTML5WebStorage.html)
;;
;; The extension supports the following calls:
;;
;; * map-style lookup - `(:search-results storage "default value")`
;; * `get` lookups
;; * `(count storage)` - the number of things/keys stored
;; * `(assoc! storage :new-key "saved")` - update or add an item
;; * `(dissoc! storage :saved-results)` - remove an item
;; * `(empty! storage)` - Clear out the localStorage store
;;
;;
;; Using storage in Pub/Sub
;; -----------------------------
;;
;; There is PubSub support for the specific storage types.
;; Please see the details in those files.
;; You'll need to require them directly to get support.
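;; Usage sketch (not part of the original file).  It assumes Closure's
;; goog.storage.mechanism.HTML5LocalStorage (a subtype of HTML5WebStorage)
;; has been required and that the code runs in a browser:
;;
;;   (def storage (goog.storage.mechanism.HTML5LocalStorage.))
;;   (assoc! storage :search-results ["a" "b" "c"])
;;   (:search-results storage "default value")  ;=> ["a" "b" "c"]
;;   (count storage)                            ;=> number of stored keys
;;   (dissoc! storage :search-results)
;;   (empty! storage)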
(defn storage-keys [ls]
(g-iter/toArray (.__iterator__ ls true)))
(defn storage-values [ls]
(g-iter/toArray (.__iterator__ ls false)))
(defn as-hash-map
([storage]
(zipmap (storage-keys storage) (storage-values storage))))
(extend-type goog.storage.mechanism.HTML5WebStorage
ILookup
(-lookup
([ls k]
(-lookup ls k nil))
([ls k not-found]
(let [read-value (if-let [v (not-empty (.get ls (name k)))]
v
(pr-str not-found))]
(reader/read-string read-value))))
ISeqable
(-seq [ls]
(map vector (storage-keys ls) (storage-values ls)))
ICounted
(-count [ls] (.getCount ls))
IFn
(-invoke
([ls k]
(-lookup ls k))
([ls k not-found]
(-lookup ls k not-found)))
ITransientCollection
(-persistent! [ls] (as-hash-map ls))
;(-conj! [c v] nil)
ITransientAssociative
(-assoc! [ls k v]
(let [old-val (-lookup ls k)]
(.set ls (name k) (pr-str v))
(-notify-watches ls {k old-val} {k v})
ls))
ITransientMap
(-dissoc! [ls k]
(do
(.remove ls (name k))
ls))
;IPrintable
;(-pr-seq [ls opts]
; #_(let [pr-pair (fn [keyval] (pr-sequential pr-seq "" " " "" opts keyval))]
; (pr-sequential pr-pair "{" ", " "}" opts ls))
; (-pr-seq (-persistent! ls) opts))
IPrintWithWriter
(-pr-writer [ls writer opts]
(let [pers-st (-persistent! ls)]
(-write writer pers-st))))
(defn empty!
"Clear the storage"
[ls]
(.clear ls))
| null | https://raw.githubusercontent.com/shoreleave/shoreleave-browser/8623525398eaf3d7bfba88ee22f608f282df2af9/src/shoreleave/browser/storage/webstorage.cljs | clojure | ----------------------
For general information on localStorage, please see the docs in `localstorage.cljs`
For general information on sessionStorage, please see the docs in `sessionstorage.cljs`
The extension supports the following calls:
* map-style lookup - `(:search-results storage "default value")`
* `get` lookups
* `(count storage)` - the number of things/keys stored
* `(assoc! storage :new-key "saved")` - update or add an item
* `(dissoc! storage :saved-results)` - remove an item
* `(empty! storage)` - Clear out the localStorage store
-----------------------------
Please see the details in those files.
You'll need to require them directly to get support.
(-conj! [c v] nil)
IPrintable
(-pr-seq [ls opts]
#_(let [pr-pair (fn [keyval] (pr-sequential pr-seq "" " " "" opts keyval))]
(pr-sequential pr-pair "{" ", " "}" opts ls))
(-pr-seq (-persistent! ls) opts)) | (ns shoreleave.browser.storage.webstorage
"An idiomatic interface to the browser's storage mechanisms (local and sessions)"
(:require [cljs.reader :as reader]
[goog.storage.mechanism.HTML5WebStorage :as html5webstorage]
[goog.iter :as g-iter]))
Google Closure attaches a common prototype to all browser storage systems called , ` WebStorage ` .
Shoreleave extends this type , to extend ClojureScript functionality / interop to all browsers storages .
WebStorage support
Shoreleave 's generic storage support is built against Closure 's [ interface]( - library.googlecode.com / svn / docs / class_goog_storage_mechanism_HTML5WebStorage.html )
Using storage in Pub / Sub
There is PubSub support for the specific storage types .
(defn storage-keys [ls]
(g-iter/toArray (.__iterator__ ls true)))
(defn storage-values [ls]
(g-iter/toArray (.__iterator__ ls false)))
(defn as-hash-map
([storage]
(zipmap (storage-keys storage) (storage-values storage))))
(extend-type goog.storage.mechanism.HTML5WebStorage
ILookup
(-lookup
([ls k]
(-lookup ls k nil))
([ls k not-found]
(let [read-value (if-let [v (not-empty (.get ls (name k)))]
v
(pr-str not-found))]
(reader/read-string read-value))))
ISeqable
(-seq [ls]
(map vector (storage-keys ls) (storage-values ls)))
ICounted
(-count [ls] (.getCount ls))
IFn
(-invoke
([ls k]
(-lookup ls k))
([ls k not-found]
(-lookup ls k not-found)))
ITransientCollection
(-persistent! [ls] (as-hash-map ls))
ITransientAssociative
(-assoc! [ls k v]
(let [old-val (-lookup ls k)]
(.set ls (name k) (pr-str v))
(-notify-watches ls {k old-val} {k v})
ls))
ITransientMap
(-dissoc! [ls k]
(do
(.remove ls (name k))
ls))
IPrintWithWriter
(-pr-writer [ls writer opts]
(let [pers-st (-persistent! ls)]
(-write writer pers-st))))
(defn empty!
"Clear the storage"
[ls]
(.clear ls))
|
88251cc53efa2a407847d4a03610f20a72f152a1a062e9eb541cbc1ad442be2f | aryx/xix | queue.mli | (***********************************************************************)
(* *)
(* Objective Caml *)
(* *)
(* , projet Cristal, INRIA Rocquencourt *)
(* *)
(* Copyright 1996 Institut National de Recherche en Informatique et *)
(* Automatique. Distributed only by permission. *)
(* *)
(***********************************************************************)
(* $Id: queue.mli,v 1.7 1997/10/31 12:59:27 doligez Exp $ *)
(* Module [Queue]: first-in first-out queues *)
(* This module implements queues (FIFOs), with in-place modification. *)
type 'a t
(* The type of queues containing elements of type ['a]. *)
exception Empty
(* Raised when [take] is applied to an empty queue. *)
val create: unit -> 'a t
(* Return a new queue, initially empty. *)
val add: 'a -> 'a t -> unit
(* [add x q] adds the element [x] at the end of the queue [q]. *)
val take: 'a t -> 'a
(* [take q] removes and returns the first element in queue [q],
   or raises [Empty] if the queue is empty. *)
val peek: 'a t -> 'a
(* [peek q] returns the first element in queue [q], without removing
   it from the queue, or raises [Empty] if the queue is empty. *)
val clear : 'a t -> unit
(* Discard all elements from a queue. *)
val length: 'a t -> int
(* Return the number of elements in a queue. *)
val iter: ('a -> unit) -> 'a t -> unit
(* [iter f q] applies [f] in turn to all elements of [q],
from the least recently entered to the most recently entered.
The queue itself is unchanged. *)
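(* Usage sketch (not part of the original interface):
     let q = Queue.create () in
     Queue.add 1 q;
     Queue.add 2 q;
     assert (Queue.peek q = 1);
     assert (Queue.take q = 1);
     assert (Queue.length q = 1)
*)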
| null | https://raw.githubusercontent.com/aryx/xix/60ce1bd9a3f923e0e8bb2192f8938a9aa49c739c/lib_core/collections/todo/queue.mli | ocaml | *********************************************************************
Objective Caml
*********************************************************************
$Id: queue.mli,v 1.7 1997/10/31 12:59:27 doligez Exp $
This module implements queues (FIFOs), with in-place modification.
The type of queues containing elements of type ['a].
Raised when [take] is applied to an empty queue.
Return a new queue, initially empty.
[add x q] adds the element [x] at the end of the queue [q].
Discard all elements from a queue.
Return the number of elements in a queue.
[iter f q] applies [f] in turn to all elements of [q],
from the least recently entered to the most recently entered.
The queue itself is unchanged. | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
Automatique . Distributed only by permission .
Module [ Queue ] : first - in first - out queues
type 'a t
exception Empty
val create: unit -> 'a t
val add: 'a -> 'a t -> unit
val take: 'a t -> 'a
[ take q ] removes and returns the first element in queue [ q ] ,
or raises [ Empty ] if the queue is empty .
or raises [Empty] if the queue is empty. *)
val peek: 'a t -> 'a
[ peek q ] returns the first element in queue [ q ] , without removing
it from the queue , or raises [ Empty ] if the queue is empty .
it from the queue, or raises [Empty] if the queue is empty. *)
val clear : 'a t -> unit
val length: 'a t -> int
val iter: ('a -> unit) -> 'a t -> unit
|
0413d49ff12fb70471c9e8f88431ce299a895cc77aed291917feabd52018175e | hiredman/clojurebot | clojars.clj | (ns hiredman.clojurebot.clojars
(:use [hiredman.utilities :only (get-url)]
[clojure.set :only (difference)])
(:require [hiredman.clojurebot.core :as core]
[hiredman.utilities :as util]
[hiredman.schedule :as sched]))
(def recent (ref #{}))
(defn startparse-tagsoup [s ch]
(let [p (org.ccil.cowan.tagsoup.Parser.)]
(.setContentHandler p ch)
(.parse p s)))
(defn zip-soup [url]
(clojure.zip/xml-zip (clojure.xml/parse url startparse-tagsoup)))
(defn get-recent []
(-> "" zip-soup first
((partial tree-seq map? (comp seq :content)))
((partial filter #(= :ul (:tag %)))) last :content
((partial map (comp first :content first :content))) set))
(defn go []
(let [r (get-recent)
new (difference r @recent)]
(dosync (ref-set recent r))
(when (not (empty? new))
(str "recently on clojars.org: " (pr-str new)))))
| null | https://raw.githubusercontent.com/hiredman/clojurebot/1e8bde92f2dd45bb7928d4db17de8ec48557ead1/src/hiredman/clojurebot/clojars.clj | clojure | (ns hiredman.clojurebot.clojars
(:use [hiredman.utilities :only (get-url)]
[clojure.set :only (difference)])
(:require [hiredman.clojurebot.core :as core]
[hiredman.utilities :as util]
[hiredman.schedule :as sched]))
(def recent (ref #{}))
(defn startparse-tagsoup [s ch]
(let [p (org.ccil.cowan.tagsoup.Parser.)]
(.setContentHandler p ch)
(.parse p s)))
(defn zip-soup [url]
(clojure.zip/xml-zip (clojure.xml/parse url startparse-tagsoup)))
(defn get-recent []
(-> "" zip-soup first
((partial tree-seq map? (comp seq :content)))
((partial filter #(= :ul (:tag %)))) last :content
((partial map (comp first :content first :content))) set))
(defn go []
(let [r (get-recent)
new (difference r @recent)]
(dosync (ref-set recent r))
(when (not (empty? new))
(str "recently on clojars.org: " (pr-str new)))))
|
|
2c34b9e967d7409b94a3d3034331d70060f40bc2959653e998a32a73c731f7cd | open-company/open-company-web | site.clj | (ns oc.site
(:require [hiccup.page :as hp]
[environ.core :refer (env)]
[cuerdas.core :as string]
[oc.shared :as shared]
[oc.pages.not-found :as not-found]
[oc.pages.server-error :as server-error]
[oc.pages.about :as about]
[oc.pages.app-shell :as app-shell]
[oc.pages.index :as index]
[oc.pages.press-kit :as press-kit]
[oc.pages.pricing :as pricing]
[oc.pages.privacy :as privacy]
[oc.pages.slack :as slack]
[oc.pages.terms :as terms]))
(def contact-email "")
(def contact-mail-to (str "mailto:" contact-email))
(def oc-github "-company")
(def anonymous-title "Start free")
(def your-digest-title "Launch Carrot")
(def options {:contact-email contact-email
:contact-mail-to contact-mail-to
:oc-github oc-github
:anonymous-title anonymous-title
:your-digest-title your-digest-title})
(defn- body-wrapper [body page opts]
[:body
{:class (when (env :covid-banner) "covid-banner")}
shared/tag-manager-body
[:div
{:class "outer header"}
shared/ph-banner
(when (env :covid-banner)
(shared/covid-banner page))
(shared/nav (name page) opts)
(shared/mobile-menu (name page) opts)]
(if (fn? body) (body opts) body)
(shared/footer opts)
(shared/google-analytics-init)])
(def pages [{:page-name "404"
:page :404
:head shared/head
:body (partial body-wrapper not-found/not-found)
:title "Carrot | Page not found"
:target #{:dev :prod}}
{:page-name "500"
:page :500
:head shared/head
:body (partial body-wrapper server-error/server-error)
:title "Carrot | Internal server error"
:target #{:dev :prod}}
{:page-name "about"
:page :about
:head shared/head
:body (partial body-wrapper about/about)
:title "Carrot | About"
:target #{:dev :prod}}
{:page-name "app-shell"
:page :app-shell
:head (:head app-shell/app-shell)
:body (:body app-shell/app-shell)
:title "Carrot | Remote team communication"
:target #{:dev}}
{:page-name "app-shell"
:page :prod-app-shell
:head (:head app-shell/prod-app-shell)
:body (:body app-shell/prod-app-shell)
:title "Carrot | Remote team communication"
:target #{:prod}}
{:page-name "index"
:page :index
:head shared/head
:body (partial body-wrapper index/index)
:title "Carrot | Home"
:target #{:dev :prod}}
{:page-name "press-kit"
:page :press-kit
:head shared/head
:body (partial body-wrapper press-kit/press-kit)
:title "Carrot | Press kit"
:target #{:dev :prod}}
{:page-name "pricing"
:page :pricing
:head shared/head
:body (partial body-wrapper pricing/pricing)
:title "Carrot | Pricing"
:target #{:dev :prod}}
{:page-name "privacy"
:page :privacy
:head shared/head
:body (partial body-wrapper privacy/privacy)
:title "Carrot | Privacy Policy"
:target #{:dev :prod}}
{:page-name "slack"
:page :slack
:head shared/head
:body (partial body-wrapper slack/slack)
:title "Carrot | Slack"
:target #{:dev :prod}}
{:page-name "slack-lander"
:page :slack-lander
:head shared/head
:body (partial body-wrapper slack/slack-lander)
:title "Carrot | Slack lander"
:target #{:dev :prod}}
{:page-name "terms"
:page :terms
:head shared/head
:body (partial body-wrapper terms/terms)
:title "Carrot | Terms of Service"
:target #{:dev :prod}}])
(defn build-pages [env-kw]
(println (str "Building static pages (" env-kw "):"))
(doseq [{:keys [title head body page-name page target] :as p} pages
:let [filename (str "public/" page-name ".html")
opts (assoc options :env-kw env-kw)]]
(print (str "...page " (name page) (string/join "" (vec (take (- 15 (count (name page))) (repeat " ")))) " -> " filename "... "))
(if-not (env-kw target)
(println "skip!")
(do
(->> (hp/html5 {:lang "en"}
(if (fn? head) (head page opts) head)
(if (fn? body) (body page opts) body))
(spit filename))
(println "built!"))))
(println "Done!"))
(defn -main [& [env-name & args]]
(build-pages (or (string/keyword env-name) :dev))) | null | https://raw.githubusercontent.com/open-company/open-company-web/dfce3dd9bc115df91003179bceb87cca1f84b6cf/site/oc/site.clj | clojure | (ns oc.site
(:require [hiccup.page :as hp]
[environ.core :refer (env)]
[cuerdas.core :as string]
[oc.shared :as shared]
[oc.pages.not-found :as not-found]
[oc.pages.server-error :as server-error]
[oc.pages.about :as about]
[oc.pages.app-shell :as app-shell]
[oc.pages.index :as index]
[oc.pages.press-kit :as press-kit]
[oc.pages.pricing :as pricing]
[oc.pages.privacy :as privacy]
[oc.pages.slack :as slack]
[oc.pages.terms :as terms]))
(def contact-email "")
(def contact-mail-to (str "mailto:" contact-email))
(def oc-github "-company")
(def anonymous-title "Start free")
(def your-digest-title "Launch Carrot")
(def options {:contact-email contact-email
:contact-mail-to contact-mail-to
:oc-github oc-github
:anonymous-title anonymous-title
:your-digest-title your-digest-title})
(defn- body-wrapper [body page opts]
[:body
{:class (when (env :covid-banner) "covid-banner")}
shared/tag-manager-body
[:div
{:class "outer header"}
shared/ph-banner
(when (env :covid-banner)
(shared/covid-banner page))
(shared/nav (name page) opts)
(shared/mobile-menu (name page) opts)]
(if (fn? body) (body opts) body)
(shared/footer opts)
(shared/google-analytics-init)])
(def pages [{:page-name "404"
:page :404
:head shared/head
:body (partial body-wrapper not-found/not-found)
:title "Carrot | Page not found"
:target #{:dev :prod}}
{:page-name "500"
:page :500
:head shared/head
:body (partial body-wrapper server-error/server-error)
:title "Carrot | Internal server error"
:target #{:dev :prod}}
{:page-name "about"
:page :about
:head shared/head
:body (partial body-wrapper about/about)
:title "Carrot | About"
:target #{:dev :prod}}
{:page-name "app-shell"
:page :app-shell
:head (:head app-shell/app-shell)
:body (:body app-shell/app-shell)
:title "Carrot | Remote team communication"
:target #{:dev}}
{:page-name "app-shell"
:page :prod-app-shell
:head (:head app-shell/prod-app-shell)
:body (:body app-shell/prod-app-shell)
:title "Carrot | Remote team communication"
:target #{:prod}}
{:page-name "index"
:page :index
:head shared/head
:body (partial body-wrapper index/index)
:title "Carrot | Home"
:target #{:dev :prod}}
{:page-name "press-kit"
:page :press-kit
:head shared/head
:body (partial body-wrapper press-kit/press-kit)
:title "Carrot | Press kit"
:target #{:dev :prod}}
{:page-name "pricing"
:page :pricing
:head shared/head
:body (partial body-wrapper pricing/pricing)
:title "Carrot | Pricing"
:target #{:dev :prod}}
{:page-name "privacy"
:page :privacy
:head shared/head
:body (partial body-wrapper privacy/privacy)
:title "Carrot | Privacy Policy"
:target #{:dev :prod}}
{:page-name "slack"
:page :slack
:head shared/head
:body (partial body-wrapper slack/slack)
:title "Carrot | Slack"
:target #{:dev :prod}}
{:page-name "slack-lander"
:page :slack-lander
:head shared/head
:body (partial body-wrapper slack/slack-lander)
:title "Carrot | Slack lander"
:target #{:dev :prod}}
{:page-name "terms"
:page :terms
:head shared/head
:body (partial body-wrapper terms/terms)
:title "Carrot | Terms of Service"
:target #{:dev :prod}}])
(defn build-pages [env-kw]
(println (str "Building static pages (" env-kw "):"))
(doseq [{:keys [title head body page-name page target] :as p} pages
:let [filename (str "public/" page-name ".html")
opts (assoc options :env-kw env-kw)]]
(print (str "...page " (name page) (string/join "" (vec (take (- 15 (count (name page))) (repeat " ")))) " -> " filename "... "))
(if-not (env-kw target)
(println "skip!")
(do
(->> (hp/html5 {:lang "en"}
(if (fn? head) (head page opts) head)
(if (fn? body) (body page opts) body))
(spit filename))
(println "built!"))))
(println "Done!"))
(defn -main [& [env-name & args]]
(build-pages (or (string/keyword env-name) :dev))) |
|
af7109b4930882575b9468e886d2f67ad95e5155133ec110bce89dd60041394b | bcbio/bcbio.variation.recall | vcfheader.clj | (ns bcbio.variation.recall.vcfheader
"Create VCF headers using algorithms contained in Picard/Tribble tools.
This does the best job of cleanly merging and organizing headers from
multiple variant calling approaches."
(:import [htsjdk.variant.vcf VCFUtils VCFHeader]
[htsjdk.variant.variantcontext.writer VariantContextWriterFactory])
(:require [clojure.java.io :as io]
[bcbio.run.fsp :as fsp]
[bcbio.run.itx :as itx]
[bcbio.variation.variantcontext :as gvc]))
(defn merge-from-files
"Creates a merged VCF header from the supplied input VCFs."
[orig-files ref-file out-file]
(let [header-file (str (fsp/file-root out-file) "-header.vcf")
headers (map gvc/get-vcf-header orig-files)]
(with-open [vcf-writer (VariantContextWriterFactory/create (io/file header-file) (gvc/get-seq-dict ref-file)
VariantContextWriterFactory/NO_OPTIONS)]
(.writeHeader vcf-writer (VCFHeader. (VCFUtils/smartMergeHeaders headers false))))
header-file))
(defmacro with-merged
"Create a merged VCF header file from input VCFs, deleted on completion."
[[header-file orig-files ref-file out-file] & body]
`(let [~header-file (merge-from-files ~orig-files ~ref-file ~out-file)]
(try
(let [out# (do ~@body)]
out#)
(finally
(fsp/remove-path ~header-file)))))
| null | https://raw.githubusercontent.com/bcbio/bcbio.variation.recall/b7aa436dcb558535f87d004ba0abc5d7bc380b70/src/bcbio/variation/recall/vcfheader.clj | clojure | (ns bcbio.variation.recall.vcfheader
"Create VCF headers using algorithms contained in Picard/Tribble tools.
This does the best job of cleanly merging and organizing headers from
multiple variant calling approaches."
(:import [htsjdk.variant.vcf VCFUtils VCFHeader]
[htsjdk.variant.variantcontext.writer VariantContextWriterFactory])
(:require [clojure.java.io :as io]
[bcbio.run.fsp :as fsp]
[bcbio.run.itx :as itx]
[bcbio.variation.variantcontext :as gvc]))
(defn merge-from-files
"Creates a merged VCF header from the supplied input VCFs."
[orig-files ref-file out-file]
(let [header-file (str (fsp/file-root out-file) "-header.vcf")
headers (map gvc/get-vcf-header orig-files)]
(with-open [vcf-writer (VariantContextWriterFactory/create (io/file header-file) (gvc/get-seq-dict ref-file)
VariantContextWriterFactory/NO_OPTIONS)]
(.writeHeader vcf-writer (VCFHeader. (VCFUtils/smartMergeHeaders headers false))))
header-file))
(defmacro with-merged
"Create a merged VCF header file from input VCFs, deleted on completion."
[[header-file orig-files ref-file out-file] & body]
`(let [~header-file (merge-from-files ~orig-files ~ref-file ~out-file)]
(try
(let [out# (do ~@body)]
out#)
(finally
(fsp/remove-path ~header-file)))))
|
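A minimal usage sketch of the with-merged macro from the vcfheader.clj row above. The VCF paths, reference file and output name are hypothetical placeholders, and actually running it assumes the project's htsjdk dependencies are on the classpath:

(require '[bcbio.variation.recall.vcfheader :as vcfheader])

;; Binds header-file to a temporary VCF holding only the merged header of the
;; two inputs; the macro removes that file once the body has finished.
(vcfheader/with-merged [header-file ["sample1.vcf" "sample2.vcf"] "ref.fa" "merged.vcf"]
  (println "merged header written to" header-file))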
|
68d4dade547b352ebeb7589b30bd2caefefe50195243e8235f34259804563ef8 | LLNL/rhizome | ngram.clj | (ns rhizome.turbotopics.ngram
"Functions for recursively identifying n-grams"
(:use [clojure.contrib.seq-utils :only (indexed)])
(:use [rhizome.turbotopics.util :only (pfhmap pmapcat not-nil?)])
(:use [rhizome.turbotopics.bigram :only (get-occur-counts
valid-bigram?)])
(:use [rhizome.turbotopics.algorithm :only (process-topic)]))
;; Represents a single position within the corpus
(defrecord CorpusPosition [d z w])
(defn pospair-to-newterm
"Form a new term from a pair of corpus positions"
[vocab p1 p2]
(format "%s %s"
(-> p1 :w vocab)
(-> p2 :w vocab)))
(defn position-hit?
"Is this pair of positions a newly found significant bigram?"
[p1 p2 vocab vocab2idx]
(contains? vocab2idx (pospair-to-newterm vocab p1 p2)))
(defn get-new-terms
"Get all new terms from found, which maps z to SigBigrams"
[found]
(vec (set (mapcat (partial map :ngram) (vals found)))))
(defn transform-vocab
"Augment the vocabulary with newly found significant bigrams"
[vocab found]
(vec (concat vocab (get-new-terms found))))
(defn transform-positions
"Transform the corpus positions to take new n-grams into account"
[vocab positions]
(let [vocab2idx
(into (hash-map) (for [[idx term] (indexed vocab)] [term idx]))]
(loop [prev nil
cur (first positions)
remaining (rest positions)
newpositions (vector)]
(if (and (nil? cur) (nil? prev))
newpositions ;; we're done
(if (nil? prev) ;; either at start, or earlier symbol was bigram
(recur cur (first remaining) (rest remaining) newpositions)
(if (and (valid-bigram? [prev cur])
(position-hit? prev cur vocab vocab2idx)) ;; bigram hit
(recur nil (first remaining) (rest remaining)
(conj newpositions
(CorpusPosition.
(:d cur) (:z cur)
(vocab2idx (pospair-to-newterm vocab prev cur)))))
(recur cur (first remaining) (rest remaining)
(conj newpositions prev)))))))) ;; not a hit, emit prev
(defn transform-corpus
"Augment vocabulary with new n-grams and transform the corpus"
[corpus found]
(let [newvocab (transform-vocab (:vocab corpus) found)]
{:vocab newvocab,
:positions (transform-positions newvocab (:positions corpus))}))
(defn recur-ngrams
"Recursively find n-grams"
[params argcorpus]
(loop [corpus argcorpus
prevfound (hash-map)]
(let [found (pfhmap #(process-topic params %1 (:vocab corpus))
(get-occur-counts (:positions corpus)))]
(if (zero? (reduce + (map count (vals found)))) ;; no new n-grams, quit
prevfound
(recur
(transform-corpus corpus found)
(merge-with concat prevfound found))))))
| null | https://raw.githubusercontent.com/LLNL/rhizome/af8e00ac89a98e2d07fe7a6272857951c2781182/src/rhizome/turbotopics/ngram.clj | clojure | Represents a single position within the corpus
we're done
either at start, or earlier symbol was bigram
bigram hit
not a hit, emit prev
no new n-grams, quit | (ns rhizome.turbotopics.ngram
"Functions for recursively identifying n-grams"
(:use [clojure.contrib.seq-utils :only (indexed)])
(:use [rhizome.turbotopics.util :only (pfhmap pmapcat not-nil?)])
(:use [rhizome.turbotopics.bigram :only (get-occur-counts
valid-bigram?)])
(:use [rhizome.turbotopics.algorithm :only (process-topic)]))
(defrecord CorpusPosition [d z w])
(defn pospair-to-newterm
"Form a new term from a pair of corpus positions"
[vocab p1 p2]
(format "%s %s"
(-> p1 :w vocab)
(-> p2 :w vocab)))
(defn position-hit?
"Is this pair of positions a newly found significant bigram?"
[p1 p2 vocab vocab2idx]
(contains? vocab2idx (pospair-to-newterm vocab p1 p2)))
(defn get-new-terms
"Get all new terms from found, which maps z to SigBigrams"
[found]
(vec (set (mapcat (partial map :ngram) (vals found)))))
(defn transform-vocab
"Augment the vocabulary with newly found significant bigrams"
[vocab found]
(vec (concat vocab (get-new-terms found))))
(defn transform-positions
"Transform the corpus positions to take new n-grams into account"
[vocab positions]
(let [vocab2idx
(into (hash-map) (for [[idx term] (indexed vocab)] [term idx]))]
(loop [prev nil
cur (first positions)
remaining (rest positions)
newpositions (vector)]
(if (and (nil? cur) (nil? prev))
(recur cur (first remaining) (rest remaining) newpositions)
(if (and (valid-bigram? [prev cur])
(recur nil (first remaining) (rest remaining)
(conj newpositions
(CorpusPosition.
(:d cur) (:z cur)
(vocab2idx (pospair-to-newterm vocab prev cur)))))
(recur cur (first remaining) (rest remaining)
(defn transform-corpus
"Augment vocabulary with new n-grams and transform the corpus"
[corpus found]
(let [newvocab (transform-vocab (:vocab corpus) found)]
{:vocab newvocab,
:positions (transform-positions newvocab (:positions corpus))}))
(defn recur-ngrams
"Recursively find n-grams"
[params argcorpus]
(loop [corpus argcorpus
prevfound (hash-map)]
(let [found (pfhmap #(process-topic params %1 (:vocab corpus))
(get-occur-counts (:positions corpus)))]
prevfound
(recur
(transform-corpus corpus found)
(merge-with concat prevfound found))))))
|
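To make the ngram.clj helpers above concrete, here is a small hypothetical REPL sketch. The vocabulary, topic id and {:ngram ...} maps are invented for illustration, and loading the namespace still assumes its (old) clojure.contrib dependency is available:

(require '[rhizome.turbotopics.ngram :as ng])

(def vocab ["new" "york" "city"])

;; Two corpus positions in the same document (:d 0) and topic (:z 3) whose
;; word indices point at "new" and "york" in the vocabulary above.
(def p1 (ng/->CorpusPosition 0 3 0))
(def p2 (ng/->CorpusPosition 0 3 1))

(ng/pospair-to-newterm vocab p1 p2)
;; => "new york"

;; found maps a topic id to the significant n-grams discovered for it;
;; transform-vocab appends those new terms to the vocabulary.
(ng/transform-vocab vocab {3 [{:ngram "new york"}]})
;; => ["new" "york" "city" "new york"]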
fa728403f84fb7d22e69670e44336cc6b7a6fe64d7e5688aacb01402b69d483a | zenspider/schemers | exercise.3.43.scm | #lang racket/base
;; Exercise 3.43
;; Suppose that the balances in three accounts start out as $10, $20,
;; and $30, and that multiple processes run, exchanging the balances
;; in the accounts. Argue that if the processes are run sequentially,
;; after any number of concurrent exchanges, the account balances
;; should be $10, $20, and $30 in some order. Draw a timing diagram
;; like the one in *Note Figure 3-29:: to show how this condition can
;; be violated if the exchanges are implemented using the first
;; version of the account-exchange program in this section. On the
;; other hand, argue that even with this `exchange' program, the sum
;; of the balances in the accounts will be preserved. Draw a timing
;; diagram to show how even this condition would be violated if we did
;; not serialize the transactions on individual accounts.
;; no
| null | https://raw.githubusercontent.com/zenspider/schemers/2939ca553ac79013a4c3aaaec812c1bad3933b16/sicp/ch_3/exercise.3.43.scm | scheme | in the accounts. Argue that if the processes are run sequentially,
after any number of concurrent exchanges, the account balances
version of the account-exchange program in this section. On the
other hand, argue that even with this `exchange' program, the sum
of the balances in the accounts will be preserved. Draw a timing
diagram to show how even this condition would be violated if we did
not serialize the transactions on individual accounts.
no | #lang racket/base
Exercise 3.43
Suppose that the balances in three accounts start out as $ 10 , $ 20 ,
and $ 30 , and that multiple processes run , exchanging the balances
should be $ 10 , $ 20 , and $ 30 in some order . Draw a timing diagram
like the one in * Note Figure 3 - 29 : : to show how this condition can
be violated if the exchanges are implemented using the first
|
ac370de883a1989d9aac53d115b1618165f1cb3ed52fb79ce939914f4247305c | onedata/op-worker | middleware_worker_handlers.erl | %%%-------------------------------------------------------------------
%%% @author
%%% (C) 2021 ACK CYFRONET AGH
%%% This software is released under the MIT license
%%% cited in 'LICENSE.txt'.
%%% @end
%%%-------------------------------------------------------------------
%%% @doc
%%% This module routes middleware operations to corresponding handler modules.
%%% @end
%%%-------------------------------------------------------------------
-module(middleware_worker_handlers).
-author("Bartosz Walkowicz").
-include("middleware/middleware.hrl").
%% API
-export([execute/3]).
%%%===================================================================
%%% API
%%%===================================================================
Archives
-spec execute(
user_ctx:ctx(),
file_ctx:ctx(),
middleware_worker:operation()
) ->
ok | {ok, term()} | no_return().
execute(UserCtx, SpaceDirCtx, #archives_list_request{
dataset_id = DatasetId,
opts = Opts,
mode = ListingMode
}) ->
dataset_req:list_archives(SpaceDirCtx, DatasetId, Opts, ListingMode, UserCtx);
execute(UserCtx, SpaceDirCtx, #dataset_archive_request{
id = DatasetId,
config = Config,
preserved_callback = PreservedCallback,
deleted_callback = DeletedCallback,
description = Description
}) ->
dataset_req:create_archive(
SpaceDirCtx, DatasetId, Config, PreservedCallback, DeletedCallback, Description, UserCtx
);
execute(UserCtx, SpaceDirCtx, #archivisation_cancel_request{
id = ArchiveId,
preservation_policy = PreservationPolicy
}) ->
dataset_req:cancel_archivisation(SpaceDirCtx, ArchiveId, PreservationPolicy, UserCtx);
execute(UserCtx, SpaceDirCtx, #archive_info_get_request{id = ArchiveId}) ->
dataset_req:get_archive_info(SpaceDirCtx, ArchiveId, UserCtx);
execute(UserCtx, SpaceDirCtx, #archive_update_request{id = ArchiveId, diff = Diff}) ->
dataset_req:update_archive(SpaceDirCtx, ArchiveId, Diff, UserCtx);
execute(UserCtx, SpaceDirCtx, #archive_delete_request{id = ArchiveId, callback = CallbackUrl}) ->
dataset_req:init_archive_delete(SpaceDirCtx, ArchiveId, CallbackUrl, UserCtx);
execute(UserCtx, SpaceDirCtx, #archive_recall_request{
archive_id = ArchiveId, parent_directory_guid = ParentDirectoryGuid, target_filename = TargetName}
) ->
dataset_req:init_archive_recall(SpaceDirCtx, ArchiveId, ParentDirectoryGuid, TargetName, UserCtx);
execute(UserCtx, FileCtx, #archive_recall_cancel_request{id = Id}) ->
dataset_req:cancel_archive_recall(FileCtx, Id, UserCtx);
execute(UserCtx, FileCtx, #archive_recall_details_get_request{id = Id}) ->
dataset_req:get_archive_recall_details(FileCtx, Id, UserCtx);
execute(UserCtx, FileCtx, #archive_recall_progress_get_request{id = Id}) ->
dataset_req:get_archive_recall_progress(FileCtx, Id, UserCtx);
execute(UserCtx, FileCtx, #archive_recall_log_browse_request{id = Id, options = Options}) ->
dataset_req:browse_archive_recall_log(FileCtx, Id, UserCtx, Options);
%% Automation
execute(UserCtx, SpaceDirCtx, #atm_workflow_execution_schedule_request{
atm_workflow_schema_id = AtmWorkflowSchemaId,
atm_workflow_schema_revision_num = AtmWorkflowSchemaRevisionNum,
store_initial_content_overlay = AtmStoreInitialContentOverlay,
callback_url = CallbackUrl
}) ->
{ok, atm_workflow_execution_api:schedule(
UserCtx, file_ctx:get_space_id_const(SpaceDirCtx),
AtmWorkflowSchemaId, AtmWorkflowSchemaRevisionNum,
AtmStoreInitialContentOverlay, CallbackUrl
)};
execute(UserCtx, _SpaceDirCtx, #atm_workflow_execution_init_cancel_request{
atm_workflow_execution_id = AtmWorkflowExecutionId
}) ->
ok = atm_workflow_execution_api:init_cancel(UserCtx, AtmWorkflowExecutionId);
execute(UserCtx, _SpaceDirCtx, #atm_workflow_execution_init_pause_request{
atm_workflow_execution_id = AtmWorkflowExecutionId
}) ->
ok = atm_workflow_execution_api:init_pause(UserCtx, AtmWorkflowExecutionId);
execute(UserCtx, _SpaceDirCtx, #atm_workflow_execution_resume_request{
atm_workflow_execution_id = AtmWorkflowExecutionId
}) ->
ok = atm_workflow_execution_api:resume(UserCtx, AtmWorkflowExecutionId);
execute(UserCtx, _SpaceDirCtx, #atm_workflow_execution_repeat_request{
type = Type,
atm_workflow_execution_id = AtmWorkflowExecutionId,
atm_lane_run_selector = AtmLaneRunSelector
}) ->
ok = atm_workflow_execution_api:repeat(
UserCtx, Type, AtmLaneRunSelector, AtmWorkflowExecutionId
);
execute(_UserCtx, _SpaceDirCtx, #atm_workflow_execution_discard_request{
atm_workflow_execution_id = AtmWorkflowExecutionId
}) ->
ok = atm_workflow_execution_api:discard(AtmWorkflowExecutionId);
CDMI
execute(UserCtx, FileCtx, #transfer_encoding_get_request{}) ->
cdmi_metadata_req:get_transfer_encoding(UserCtx, FileCtx);
execute(UserCtx, FileCtx, #transfer_encoding_set_request{value = Encoding}) ->
cdmi_metadata_req:set_transfer_encoding(UserCtx, FileCtx, Encoding, false, false);
execute(UserCtx, FileCtx, #cdmi_completion_status_get_request{}) ->
cdmi_metadata_req:get_cdmi_completion_status(UserCtx, FileCtx);
execute(UserCtx, FileCtx, #cdmi_completion_status_set_request{value = CompletionStatus}) ->
cdmi_metadata_req:set_cdmi_completion_status(UserCtx, FileCtx, CompletionStatus, false, false);
execute(UserCtx, FileCtx, #mimetype_get_request{}) ->
cdmi_metadata_req:get_mimetype(UserCtx, FileCtx);
execute(UserCtx, FileCtx, #mimetype_set_request{value = CompletionStatus}) ->
cdmi_metadata_req:set_mimetype(UserCtx, FileCtx, CompletionStatus, false, false);
%% Datasets
execute(UserCtx, SpaceDirCtx, #top_datasets_list_request{state = State, opts = Opts, mode = ListingMode}) ->
SpaceId = file_ctx:get_space_id_const(SpaceDirCtx),
dataset_req:list_top_datasets(SpaceId, State, Opts, ListingMode, UserCtx);
execute(UserCtx, SpaceDirCtx, #children_datasets_list_request{
id = DatasetId,
opts = Opts,
mode = ListingMode
}) ->
dataset_req:list_children_datasets(SpaceDirCtx, DatasetId, Opts, ListingMode, UserCtx);
execute(UserCtx, FileCtx, #dataset_establish_request{protection_flags = ProtectionFlags}) ->
dataset_req:establish(FileCtx, ProtectionFlags, UserCtx);
execute(UserCtx, SpaceDirCtx, #dataset_info_get_request{id = DatasetId}) ->
dataset_req:get_info(SpaceDirCtx, DatasetId, UserCtx);
execute(UserCtx, SpaceDirCtx, #dataset_update_request{
id = DatasetId,
state = NewState,
flags_to_set = FlagsToSet,
flags_to_unset = FlagsToUnset
}) ->
dataset_req:update(SpaceDirCtx, DatasetId, NewState, FlagsToSet, FlagsToUnset, UserCtx);
execute(UserCtx, SpaceDirCtx, #dataset_remove_request{id = DatasetId}) ->
dataset_req:remove(SpaceDirCtx, DatasetId, UserCtx);
execute(UserCtx, FileCtx, #file_eff_dataset_summary_get_request{}) ->
dataset_req:get_file_eff_summary(FileCtx, UserCtx);
%% File metadata
execute(UserCtx, FileCtx, #custom_metadata_get_request{
type = Type,
query = Query,
inherited = Inherited
}) ->
metadata_req:get_metadata(UserCtx, FileCtx, Type, Query, Inherited);
execute(UserCtx, FileCtx, #custom_metadata_set_request{
type = Type,
query = Query,
value = Value
}) ->
metadata_req:set_metadata(UserCtx, FileCtx, Type, Value, Query, false, false);
execute(UserCtx, FileCtx, #custom_metadata_remove_request{type = Type}) ->
metadata_req:remove_metadata(UserCtx, FileCtx, Type);
execute(UserCtx, FileCtx, #data_distribution_gather_request{}) ->
data_distribution:gather(UserCtx, FileCtx);
execute(UserCtx, FileCtx, #historical_dir_size_stats_get_request{request = Request, provider_id = ProviderId}) ->
{ok, dir_size_stats_req:get_historical(UserCtx, FileCtx, ProviderId, Request)};
execute(UserCtx, FileCtx, #file_storage_locations_get_request{}) ->
data_distribution:gather_storage_locations(UserCtx, FileCtx);
QoS
execute(UserCtx, FileCtx, #qos_entry_add_request{
expression = Expression,
replicas_num = ReplicasNum,
entry_type = EntryType
}) ->
qos_req:add_qos_entry(UserCtx, FileCtx, Expression, ReplicasNum, EntryType);
execute(UserCtx, FileCtx, #effective_file_qos_get_request{}) ->
qos_req:get_effective_file_qos(UserCtx, FileCtx);
execute(UserCtx, FileCtx, #qos_entry_get_request{id = QosEntryId}) ->
qos_req:get_qos_entry(UserCtx, FileCtx, QosEntryId);
execute(UserCtx, FileCtx, #qos_entry_remove_request{id = QosEntryId}) ->
qos_req:remove_qos_entry(UserCtx, FileCtx, QosEntryId);
execute(UserCtx, FileCtx, #qos_status_check_request{qos_id = QosEntryId}) ->
qos_req:check_status(UserCtx, FileCtx, QosEntryId);
%% Shares
execute(UserCtx, FileCtx, #share_create_request{name = Name, description = Description}) ->
share_req:create_share(UserCtx, FileCtx, Name, Description);
execute(UserCtx, FileCtx, #share_remove_request{share_id = ShareId}) ->
share_req:remove_share(UserCtx, FileCtx, ShareId);
%% Transfers
execute(UserCtx, FileCtx, #file_transfer_schedule_request{
replicating_provider_id = ReplicatingProviderId,
evicting_provider_id = EvictingProviderId,
callback = Callback
}) ->
transfer_req:schedule_file_transfer(
UserCtx, FileCtx,
ReplicatingProviderId, EvictingProviderId,
Callback
);
execute(UserCtx, FileCtx, #view_transfer_schedule_request{
replicating_provider_id = ReplicatingProviderId,
evicting_provider_id = EvictingProviderId,
view_name = ViewName,
query_view_params = QueryViewParams,
callback = Callback
}) ->
transfer_req:schedule_view_transfer(
UserCtx, FileCtx,
ReplicatingProviderId, EvictingProviderId,
ViewName, QueryViewParams,
Callback
).
| null | https://raw.githubusercontent.com/onedata/op-worker/14d788c29cb80ac83bef9885c13daed68c68e559/src/middleware/worker/middleware_worker_handlers.erl | erlang | -------------------------------------------------------------------
@end
-------------------------------------------------------------------
@doc
This module routes middleware operations to corresponding handler modules.
@end
-------------------------------------------------------------------
API
===================================================================
API
===================================================================
Automation
Datasets
File metadata
Shares
Transfers | @author
( C ) 2021 ACK CYFRONET AGH
This software is released under the MIT license
cited in ' LICENSE.txt ' .
-module(middleware_worker_handlers).
-author("Bartosz Walkowicz").
-include("middleware/middleware.hrl").
-export([execute/3]).
Archives
-spec execute(
user_ctx:ctx(),
file_ctx:ctx(),
middleware_worker:operation()
) ->
ok | {ok, term()} | no_return().
execute(UserCtx, SpaceDirCtx, #archives_list_request{
dataset_id = DatasetId,
opts = Opts,
mode = ListingMode
}) ->
dataset_req:list_archives(SpaceDirCtx, DatasetId, Opts, ListingMode, UserCtx);
execute(UserCtx, SpaceDirCtx, #dataset_archive_request{
id = DatasetId,
config = Config,
preserved_callback = PreservedCallback,
deleted_callback = DeletedCallback,
description = Description
}) ->
dataset_req:create_archive(
SpaceDirCtx, DatasetId, Config, PreservedCallback, DeletedCallback, Description, UserCtx
);
execute(UserCtx, SpaceDirCtx, #archivisation_cancel_request{
id = ArchiveId,
preservation_policy = PreservationPolicy
}) ->
dataset_req:cancel_archivisation(SpaceDirCtx, ArchiveId, PreservationPolicy, UserCtx);
execute(UserCtx, SpaceDirCtx, #archive_info_get_request{id = ArchiveId}) ->
dataset_req:get_archive_info(SpaceDirCtx, ArchiveId, UserCtx);
execute(UserCtx, SpaceDirCtx, #archive_update_request{id = ArchiveId, diff = Diff}) ->
dataset_req:update_archive(SpaceDirCtx, ArchiveId, Diff, UserCtx);
execute(UserCtx, SpaceDirCtx, #archive_delete_request{id = ArchiveId, callback = CallbackUrl}) ->
dataset_req:init_archive_delete(SpaceDirCtx, ArchiveId, CallbackUrl, UserCtx);
execute(UserCtx, SpaceDirCtx, #archive_recall_request{
archive_id = ArchiveId, parent_directory_guid = ParentDirectoryGuid, target_filename = TargetName}
) ->
dataset_req:init_archive_recall(SpaceDirCtx, ArchiveId, ParentDirectoryGuid, TargetName, UserCtx);
execute(UserCtx, FileCtx, #archive_recall_cancel_request{id = Id}) ->
dataset_req:cancel_archive_recall(FileCtx, Id, UserCtx);
execute(UserCtx, FileCtx, #archive_recall_details_get_request{id = Id}) ->
dataset_req:get_archive_recall_details(FileCtx, Id, UserCtx);
execute(UserCtx, FileCtx, #archive_recall_progress_get_request{id = Id}) ->
dataset_req:get_archive_recall_progress(FileCtx, Id, UserCtx);
execute(UserCtx, FileCtx, #archive_recall_log_browse_request{id = Id, options = Options}) ->
dataset_req:browse_archive_recall_log(FileCtx, Id, UserCtx, Options);
execute(UserCtx, SpaceDirCtx, #atm_workflow_execution_schedule_request{
atm_workflow_schema_id = AtmWorkflowSchemaId,
atm_workflow_schema_revision_num = AtmWorkflowSchemaRevisionNum,
store_initial_content_overlay = AtmStoreInitialContentOverlay,
callback_url = CallbackUrl
}) ->
{ok, atm_workflow_execution_api:schedule(
UserCtx, file_ctx:get_space_id_const(SpaceDirCtx),
AtmWorkflowSchemaId, AtmWorkflowSchemaRevisionNum,
AtmStoreInitialContentOverlay, CallbackUrl
)};
execute(UserCtx, _SpaceDirCtx, #atm_workflow_execution_init_cancel_request{
atm_workflow_execution_id = AtmWorkflowExecutionId
}) ->
ok = atm_workflow_execution_api:init_cancel(UserCtx, AtmWorkflowExecutionId);
execute(UserCtx, _SpaceDirCtx, #atm_workflow_execution_init_pause_request{
atm_workflow_execution_id = AtmWorkflowExecutionId
}) ->
ok = atm_workflow_execution_api:init_pause(UserCtx, AtmWorkflowExecutionId);
execute(UserCtx, _SpaceDirCtx, #atm_workflow_execution_resume_request{
atm_workflow_execution_id = AtmWorkflowExecutionId
}) ->
ok = atm_workflow_execution_api:resume(UserCtx, AtmWorkflowExecutionId);
execute(UserCtx, _SpaceDirCtx, #atm_workflow_execution_repeat_request{
type = Type,
atm_workflow_execution_id = AtmWorkflowExecutionId,
atm_lane_run_selector = AtmLaneRunSelector
}) ->
ok = atm_workflow_execution_api:repeat(
UserCtx, Type, AtmLaneRunSelector, AtmWorkflowExecutionId
);
execute(_UserCtx, _SpaceDirCtx, #atm_workflow_execution_discard_request{
atm_workflow_execution_id = AtmWorkflowExecutionId
}) ->
ok = atm_workflow_execution_api:discard(AtmWorkflowExecutionId);
CDMI
execute(UserCtx, FileCtx, #transfer_encoding_get_request{}) ->
cdmi_metadata_req:get_transfer_encoding(UserCtx, FileCtx);
execute(UserCtx, FileCtx, #transfer_encoding_set_request{value = Encoding}) ->
cdmi_metadata_req:set_transfer_encoding(UserCtx, FileCtx, Encoding, false, false);
execute(UserCtx, FileCtx, #cdmi_completion_status_get_request{}) ->
cdmi_metadata_req:get_cdmi_completion_status(UserCtx, FileCtx);
execute(UserCtx, FileCtx, #cdmi_completion_status_set_request{value = CompletionStatus}) ->
cdmi_metadata_req:set_cdmi_completion_status(UserCtx, FileCtx, CompletionStatus, false, false);
execute(UserCtx, FileCtx, #mimetype_get_request{}) ->
cdmi_metadata_req:get_mimetype(UserCtx, FileCtx);
execute(UserCtx, FileCtx, #mimetype_set_request{value = CompletionStatus}) ->
cdmi_metadata_req:set_mimetype(UserCtx, FileCtx, CompletionStatus, false, false);
execute(UserCtx, SpaceDirCtx, #top_datasets_list_request{state = State, opts = Opts, mode = ListingMode}) ->
SpaceId = file_ctx:get_space_id_const(SpaceDirCtx),
dataset_req:list_top_datasets(SpaceId, State, Opts, ListingMode, UserCtx);
execute(UserCtx, SpaceDirCtx, #children_datasets_list_request{
id = DatasetId,
opts = Opts,
mode = ListingMode
}) ->
dataset_req:list_children_datasets(SpaceDirCtx, DatasetId, Opts, ListingMode, UserCtx);
execute(UserCtx, FileCtx, #dataset_establish_request{protection_flags = ProtectionFlags}) ->
dataset_req:establish(FileCtx, ProtectionFlags, UserCtx);
execute(UserCtx, SpaceDirCtx, #dataset_info_get_request{id = DatasetId}) ->
dataset_req:get_info(SpaceDirCtx, DatasetId, UserCtx);
execute(UserCtx, SpaceDirCtx, #dataset_update_request{
id = DatasetId,
state = NewState,
flags_to_set = FlagsToSet,
flags_to_unset = FlagsToUnset
}) ->
dataset_req:update(SpaceDirCtx, DatasetId, NewState, FlagsToSet, FlagsToUnset, UserCtx);
execute(UserCtx, SpaceDirCtx, #dataset_remove_request{id = DatasetId}) ->
dataset_req:remove(SpaceDirCtx, DatasetId, UserCtx);
execute(UserCtx, FileCtx, #file_eff_dataset_summary_get_request{}) ->
dataset_req:get_file_eff_summary(FileCtx, UserCtx);
execute(UserCtx, FileCtx, #custom_metadata_get_request{
type = Type,
query = Query,
inherited = Inherited
}) ->
metadata_req:get_metadata(UserCtx, FileCtx, Type, Query, Inherited);
execute(UserCtx, FileCtx, #custom_metadata_set_request{
type = Type,
query = Query,
value = Value
}) ->
metadata_req:set_metadata(UserCtx, FileCtx, Type, Value, Query, false, false);
execute(UserCtx, FileCtx, #custom_metadata_remove_request{type = Type}) ->
metadata_req:remove_metadata(UserCtx, FileCtx, Type);
execute(UserCtx, FileCtx, #data_distribution_gather_request{}) ->
data_distribution:gather(UserCtx, FileCtx);
execute(UserCtx, FileCtx, #historical_dir_size_stats_get_request{request = Request, provider_id = ProviderId}) ->
{ok, dir_size_stats_req:get_historical(UserCtx, FileCtx, ProviderId, Request)};
execute(UserCtx, FileCtx, #file_storage_locations_get_request{}) ->
data_distribution:gather_storage_locations(UserCtx, FileCtx);
QoS
execute(UserCtx, FileCtx, #qos_entry_add_request{
expression = Expression,
replicas_num = ReplicasNum,
entry_type = EntryType
}) ->
qos_req:add_qos_entry(UserCtx, FileCtx, Expression, ReplicasNum, EntryType);
execute(UserCtx, FileCtx, #effective_file_qos_get_request{}) ->
qos_req:get_effective_file_qos(UserCtx, FileCtx);
execute(UserCtx, FileCtx, #qos_entry_get_request{id = QosEntryId}) ->
qos_req:get_qos_entry(UserCtx, FileCtx, QosEntryId);
execute(UserCtx, FileCtx, #qos_entry_remove_request{id = QosEntryId}) ->
qos_req:remove_qos_entry(UserCtx, FileCtx, QosEntryId);
execute(UserCtx, FileCtx, #qos_status_check_request{qos_id = QosEntryId}) ->
qos_req:check_status(UserCtx, FileCtx, QosEntryId);
execute(UserCtx, FileCtx, #share_create_request{name = Name, description = Description}) ->
share_req:create_share(UserCtx, FileCtx, Name, Description);
execute(UserCtx, FileCtx, #share_remove_request{share_id = ShareId}) ->
share_req:remove_share(UserCtx, FileCtx, ShareId);
execute(UserCtx, FileCtx, #file_transfer_schedule_request{
replicating_provider_id = ReplicatingProviderId,
evicting_provider_id = EvictingProviderId,
callback = Callback
}) ->
transfer_req:schedule_file_transfer(
UserCtx, FileCtx,
ReplicatingProviderId, EvictingProviderId,
Callback
);
execute(UserCtx, FileCtx, #view_transfer_schedule_request{
replicating_provider_id = ReplicatingProviderId,
evicting_provider_id = EvictingProviderId,
view_name = ViewName,
query_view_params = QueryViewParams,
callback = Callback
}) ->
transfer_req:schedule_view_transfer(
UserCtx, FileCtx,
ReplicatingProviderId, EvictingProviderId,
ViewName, QueryViewParams,
Callback
).
|
ac5be38b4b712ededfdb095aae693c6bbdb04565f2467f3aa1c21652e48209a3 | schemedoc/ffi-cookbook | iota-gambit.scm | ;; TODO: Are we using still vs movable objects correctly?
;;
;; Relevant example from the manual:
;; /~gambit/doc/gambit.html#c_002ddefine_002dtype
(define c-iota
(c-lambda (int) scheme-object "
___SCMOBJ list = ___NUL;
___SCMOBJ new_list;
int n = ___arg1;
while (n > 0) {
n--;
new_list = ___EXT(___make_pair) (___PSTATE, ___FIX(n), list);
___EXT(___release_scmobj) (list);
list = new_list;
if (___FIXNUMP(list)) ___return(___FAL); /* allocation failed */
}
___return(list);
"))
(display (c-iota 100))
(newline)
| null | https://raw.githubusercontent.com/schemedoc/ffi-cookbook/75d3594135b5a4c5deea9a064a1aef5a95312f85/return-list/iota-gambit.scm | scheme | TODO: Are we using still vs movable objects correctly?
Relevant example from the manual:
/~gambit/doc/gambit.html#c_002ddefine_002dtype
/* allocation failed */
|
(define c-iota
(c-lambda (int) scheme-object "
while (n > 0) {
}
"))
(display (c-iota 100))
(newline)
|
ef2f266ad0884d06f87598e70366d67446829b467a811f01a4f3029dbbf255f0 | phadej/cabal-fmt | Type.hs | -- |
-- License: GPL-3.0-or-later
Copyright :
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
module CabalFmt.Refactoring.Type (
FieldRefactoring,
CommentsPragmas,
rewriteFields,
) where
import qualified Distribution.Fields as C
import CabalFmt.Comments
import CabalFmt.Monad
import CabalFmt.Pragma
-------------------------------------------------------------------------------
-- Refactoring type
-------------------------------------------------------------------------------
type CommentsPragmas = (Comments, [FieldPragma])
type FieldRefactoring
= forall r m. MonadCabalFmt r m
=> (C.Field CommentsPragmas -> m (Maybe (C.Field CommentsPragmas)))
-------------------------------------------------------------------------------
-- Traversing refactoring
-------------------------------------------------------------------------------
-- | A top-to-bottom rewrite of sections and fields
rewriteFields
:: MonadCabalFmt r m
=> (C.Field CommentsPragmas -> m (Maybe (C.Field CommentsPragmas)))
-> [C.Field CommentsPragmas] -> m [C.Field CommentsPragmas]
rewriteFields f = goMany where
goMany = traverse go
go x = do
m <- f x
case m of
Just y -> return y
Nothing -> case x of
C.Field {} -> return x
C.Section name args fs -> C.Section name args <$> goMany fs
| null | https://raw.githubusercontent.com/phadej/cabal-fmt/ead940a3dd955a2c7b32b8817b03885ff550c128/src/CabalFmt/Refactoring/Type.hs | haskell | |
License: GPL-3.0-or-later
# LANGUAGE OverloadedStrings #
# LANGUAGE RankNTypes #
-----------------------------------------------------------------------------
Refactoring type
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
Traversing refactoring
-----------------------------------------------------------------------------
| A top-to-bottom rewrite of sections and fields | Copyright :
module CabalFmt.Refactoring.Type (
FieldRefactoring,
CommentsPragmas,
rewriteFields,
) where
import qualified Distribution.Fields as C
import CabalFmt.Comments
import CabalFmt.Monad
import CabalFmt.Pragma
type CommentsPragmas = (Comments, [FieldPragma])
type FieldRefactoring
= forall r m. MonadCabalFmt r m
=> (C.Field CommentsPragmas -> m (Maybe (C.Field CommentsPragmas)))
rewriteFields
:: MonadCabalFmt r m
=> (C.Field CommentsPragmas -> m (Maybe (C.Field CommentsPragmas)))
-> [C.Field CommentsPragmas] -> m [C.Field CommentsPragmas]
rewriteFields f = goMany where
goMany = traverse go
go x = do
m <- f x
case m of
Just y -> return y
Nothing -> case x of
C.Field {} -> return x
C.Section name args fs -> C.Section name args <$> goMany fs
|
3caaa5409c6fcb900bd24afbe1e1d717447d589d1ac6957a346d20bee0e83efb | alertlogic/rebar3_erllambda | rebar3_erllambda_release.erl | %%%---------------------------------------------------------------------------
%%% @doc rebar3_erllambda_release - Build a erlang lambda release
%%
%% This module will build an erllambda release on top of a standard relx
%% release so that the result can just be started.
%%
%%
%% 2017 Alert Logic, Inc
%%%---------------------------------------------------------------------------
-module(rebar3_erllambda_release).
-author('Paul Fisher <>').
-behaviour(provider).
-export([init/1, do/1, format_error/1]).
%%============================================================================
%% Constant Definitions
%%============================================================================
-define(PROVIDER, release).
-define(NAMESPACE, erllambda).
-define(DEPS, [{default, compile}]).
%%============================================================================
%% API Functions
%%============================================================================
%%%---------------------------------------------------------------------------
-spec init( State :: rebar_state:t() ) -> {ok, rebar_state:t()}.
%%%---------------------------------------------------------------------------
%% @doc Initialize the release provider
%%
init( State ) ->
Options = [
{name, ?PROVIDER},
{module, ?MODULE},
{namespace, ?NAMESPACE},
{bare, true},
{deps, ?DEPS},
{example, "rebar3 erllambda release"},
{opts, rebar3_erllambda:opt_spec_list()},
{short_desc, "Rebar3 erllambda release provider"},
{desc,
"Performs erllamba specific release generation on top of the "
"standard rebar3 release generation."}
],
Provider = providers:create( Options ),
{ok, rebar_state:add_provider(State, Provider)}.
%%%---------------------------------------------------------------------------
-spec do( State :: rebar_state:t() ) ->
{ok, rebar_state:t()} | {error, string()}.
%%%---------------------------------------------------------------------------
%% @doc Initialize the release provider
%%
do( State ) ->
try
rebar_api:info("running erllambda release generator", []),
ErllambdaDir = rebar3_erllambda:erllambda_dir( State ),
StartScript = start_script( ErllambdaDir ),
{Command, _} = handler_info( State ),
TargetDir = rebar3_erllambda:target_dir( State ),
generate_start_script( TargetDir, Command, StartScript ),
{ok, State}
catch
throw:Error ->
{error, format_error(Error)}
end.
%%%---------------------------------------------------------------------------
-spec format_error( Error :: any() ) -> iolist().
%%%---------------------------------------------------------------------------
%% @doc Format error for output
%%
format_error( Error ) ->
rebar3_erllambda:format_error( Error ).
%%============================================================================
%% Internal Functions
%%============================================================================
generate_start_script( Dir, Command, Script ) ->
rebar_api:info( "generating start script bin/~s", [Command] ),
Filename = filename:join( [Dir, rebar3_erllambda:list(Command)] ),
BootFilename = filename:join( [Dir, "bootstrap"]),
case file:write_file( Filename, Script ) of
ok ->
ok = make_executable(Filename ),
% it can already exist from previous run. remove it
% technically we can just keep it
TODO rework for NO symlinks
file:delete(BootFilename),
%% create necessary symlink
ok = file:make_symlink( "/var/task/" ++ rebar3_erllambda:list(Command), BootFilename);
{error, Reason} ->
throw( {generate_start_script_failed, Reason} )
end.
make_executable(Filename ) ->
Mode = 8#00755,
case file:change_mode( Filename, Mode ) of
ok -> ok;
{error, Reason} -> throw( {generate_start_script_failed, Reason} )
end.
handler_info( State ) ->
DefaultName = rebar3_erllambda:release_name( State ),
Config = rebar_state:get(State, erllambda, []),
Module = proplists:get_value( module, Config, DefaultName ),
{["bin/", DefaultName], Module}.
start_script( ErllambdaDir ) ->
ScriptFile = filename:join( [ErllambdaDir, "priv", "erlang-start"] ),
case file:read_file( ScriptFile ) of
{ok, Script} -> Script;
{error, Reason} ->
throw( {erllambda_script_missing, Reason} )
end.
| null | https://raw.githubusercontent.com/alertlogic/rebar3_erllambda/62b40daacf57ca63995c8354eb29bdf58bcdf543/src/rebar3_erllambda_release.erl | erlang | ---------------------------------------------------------------------------
This module will build an erllambda release on top of a standard relx
release so that the result can just be started.
---------------------------------------------------------------------------
============================================================================
Constant Definitions
============================================================================
============================================================================
API Functions
============================================================================
---------------------------------------------------------------------------
---------------------------------------------------------------------------
@doc Initialize the release provider
---------------------------------------------------------------------------
---------------------------------------------------------------------------
@doc Initialize the release provider
---------------------------------------------------------------------------
---------------------------------------------------------------------------
@doc Format error for output
============================================================================
Internal Functions
============================================================================
it can already exist from previous run. remove it
technically we can just keep it
create necessary symlink | @doc rebar3_erllambda_release - Build a erlang lambda release
2017 Alert Logic , Inc
-module(rebar3_erllambda_release).
-author('Paul Fisher <>').
-behaviour(provider).
-export([init/1, do/1, format_error/1]).
-define(PROVIDER, release).
-define(NAMESPACE, erllambda).
-define(DEPS, [{default, compile}]).
-spec init( State :: rebar_state:t() ) -> {ok, rebar_state:t()}.
init( State ) ->
Options = [
{name, ?PROVIDER},
{module, ?MODULE},
{namespace, ?NAMESPACE},
{bare, true},
{deps, ?DEPS},
{example, "rebar3 erllambda release"},
{opts, rebar3_erllambda:opt_spec_list()},
{short_desc, "Rebar3 erllambda release provider"},
{desc,
"Performs erllamba specific release generation on top of the "
"standard rebar3 release generation."}
],
Provider = providers:create( Options ),
{ok, rebar_state:add_provider(State, Provider)}.
-spec do( State :: rebar_state:t() ) ->
{ok, rebar_state:t()} | {error, string()}.
do( State ) ->
try
rebar_api:info("running erllambda release generator", []),
ErllambdaDir = rebar3_erllambda:erllambda_dir( State ),
StartScript = start_script( ErllambdaDir ),
{Command, _} = handler_info( State ),
TargetDir = rebar3_erllambda:target_dir( State ),
generate_start_script( TargetDir, Command, StartScript ),
{ok, State}
catch
throw:Error ->
{error, format_error(Error)}
end.
-spec format_error( Error :: any() ) -> iolist().
format_error( Error ) ->
rebar3_erllambda:format_error( Error ).
generate_start_script( Dir, Command, Script ) ->
rebar_api:info( "generating start script bin/~s", [Command] ),
Filename = filename:join( [Dir, rebar3_erllambda:list(Command)] ),
BootFilename = filename:join( [Dir, "bootstrap"]),
case file:write_file( Filename, Script ) of
ok ->
ok = make_executable(Filename ),
TODO rework for NO symlinks
file:delete(BootFilename),
ok = file:make_symlink( "/var/task/" ++ rebar3_erllambda:list(Command), BootFilename);
{error, Reason} ->
throw( {generate_start_script_failed, Reason} )
end.
make_executable(Filename ) ->
Mode = 8#00755,
case file:change_mode( Filename, Mode ) of
ok -> ok;
{error, Reason} -> throw( {generate_start_script_failed, Reason} )
end.
handler_info( State ) ->
DefaultName = rebar3_erllambda:release_name( State ),
Config = rebar_state:get(State, erllambda, []),
Module = proplists:get_value( module, Config, DefaultName ),
{["bin/", DefaultName], Module}.
start_script( ErllambdaDir ) ->
ScriptFile = filename:join( [ErllambdaDir, "priv", "erlang-start"] ),
case file:read_file( ScriptFile ) of
{ok, Script} -> Script;
{error, Reason} ->
throw( {erllambda_script_missing, Reason} )
end.
|
7b52cd4122d074f48ccf1cd4a63631e85dd06e363397df182ec6cfa291b60dc1 | juspay/atlas | Issue.hs | |
Copyright 2022 Juspay Technologies Pvt Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Module : Types.Issue
Copyright : (C) Juspay Technologies Pvt Ltd 2019-2022
License : Apache 2.0 (see the file LICENSE)
Maintainer :
Stability : experimental
Portability : non-portable
-}
module Types.Issue where
import Beckn.Types.Predicate
import Beckn.Utils.Predicates
import Beckn.Utils.Validation
import Data.OpenApi (ToSchema)
import Data.Text
import EulerHS.Prelude
data Issue = Issue
{ reason :: Text,
description :: Text
}
deriving (Generic, Show, ToJSON, FromJSON, ToSchema)
validateIssue :: Validate Issue
validateIssue Issue {..} =
sequenceA_
[ validateField "reason" reason $ LengthInRange 2 255 `And` text,
validateField "description" description $ LengthInRange 2 255 `And` text
]
where
text = star $ alphanum \/ " " \/ ","
| null | https://raw.githubusercontent.com/juspay/atlas/e64b227dc17887fb01c2554db21c08284d18a806/app/app-backend/src/Types/Issue.hs | haskell | |
Copyright 2022 Juspay Technologies Pvt Ltd
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
you may not use this file except in compliance with the License .
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing , software
distributed under the License is distributed on an " AS IS " BASIS ,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied .
See the License for the specific language governing permissions and
limitations under the License .
Module : Types . Issue
Copyright : ( C ) Juspay Technologies Pvt Ltd 2019 - 2022
License : Apache 2.0 ( see the file LICENSE )
Maintainer :
Stability : experimental
Portability : non - portable
Copyright 2022 Juspay Technologies Pvt Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Module : Types.Issue
Copyright : (C) Juspay Technologies Pvt Ltd 2019-2022
License : Apache 2.0 (see the file LICENSE)
Maintainer :
Stability : experimental
Portability : non-portable
-}
module Types.Issue where
import Beckn.Types.Predicate
import Beckn.Utils.Predicates
import Beckn.Utils.Validation
import Data.OpenApi (ToSchema)
import Data.Text
import EulerHS.Prelude
data Issue = Issue
{ reason :: Text,
description :: Text
}
deriving (Generic, Show, ToJSON, FromJSON, ToSchema)
validateIssue :: Validate Issue
validateIssue Issue {..} =
sequenceA_
[ validateField "reason" reason $ LengthInRange 2 255 `And` text,
validateField "description" description $ LengthInRange 2 255 `And` text
]
where
text = star $ alphanum \/ " " \/ ","
|