_id (string, 64) | repository (string, 6–84) | name (string, 4–110) | content (string, 0–248k) | license (null) | download_url (string, 89–454) | language (7 classes) | comments (string, 0–74.6k) | code (string, 0–248k)
---|---|---|---|---|---|---|---|---|
b6a713e2799d344b2a3e86ac87c19cbda5fcf5c9e1cf647851bba46480b40eb4 | superhuman/rxxr2 | Run.ml | (* © Copyright University of Birmingham, UK *)
open ParsingData
open Nfa
open Common
let rec print_exp e = match e with
|Zero -> Printf.sprintf "\\0"
|One -> Printf.sprintf "\\1"
|Dot -> Printf.sprintf "."
|Pred(Bol) -> Printf.sprintf "\\p(^)"
|Pred(Eol) -> Printf.sprintf "\\p($)"
|Pred(Wordb) -> Printf.sprintf "\\p(b)"
|Pred(NWordb) -> Printf.sprintf "\\p(B)"
|Pred(Boi) -> Printf.sprintf "\\p(A)"
|Pred(Eom) -> Printf.sprintf "\\p(G)"
|Pred(Eoi1) -> Printf.sprintf "\\p(Z)"
|Pred(Eoi2) -> Printf.sprintf "\\p(z)"
|Atom(Char c) -> zprint c
|Atom(Cls cls) -> cls_print cls
|Group(CAP(_), _, _, r) -> Printf.sprintf "(%s)" (print_exp (fst r))
|Backref(i) -> Printf.sprintf "(?:\\%d)" i
|Group(MODS, m_on, m_off, _) -> Printf.sprintf "(?%d-%d)" m_on m_off
|Group(NOCAP, m_on, m_off, r) -> Printf.sprintf "(?%d-%d:%s)" m_on m_off (print_exp (fst r))
|Group(PLA, _, _, r) -> Printf.sprintf "(?=%s)" (print_exp (fst r))
|Group(NLA, _, _, r) -> Printf.sprintf "(?!%s)" (print_exp (fst r))
|Group(PLB, _, _, r) -> Printf.sprintf "(?<=%s)" (print_exp (fst r))
|Group(NLB, _, _, r) -> Printf.sprintf "(?<!%s)" (print_exp (fst r))
|Group(ATOMIC, _, _, r) -> Printf.sprintf "(?>%s)" (print_exp (fst r))
|Conc(r1, r2) -> Printf.sprintf "%s%s" (print_exp (fst r1)) (print_exp (fst r2))
|Alt(r1, r2) -> Printf.sprintf "(%s|%s)" (print_exp (fst r1)) (print_exp (fst r2))
|Kleene(Gq, r1) -> Printf.sprintf "%s*" (print_exp (fst r1))
|Kleene(Rq, r1) -> Printf.sprintf "%s*?" (print_exp (fst r1));;
let print_regex r = Printf.sprintf "%s" (print_exp (fst r));;
let print_pattern p = Printf.printf "/%s/%d\n" (print_regex (fst p)) (snd p);;
let print_nfa nfa =
let _ = Printf.printf "= (root: %d) =\n" (Nfa.root nfa) in
let print_state s = match s with
|End -> "END"
|Kill -> "KILL"
|Pass i -> Printf.sprintf "PASS, %d" i
|Match (cls, i) -> Printf.sprintf "MATCH %s, %d" (cls_print cls) i
|CheckPred (P_BOL ul, i) -> Printf.sprintf "P(BOL, %B), %d" ul i
|CheckPred (P_EOL ul, i) -> Printf.sprintf "P(EOL, %B), %d" ul i
|CheckPred (P_WB, i) -> Printf.sprintf "P(WB), %d" i
|CheckPred (P_NWB, i) -> Printf.sprintf "P(NWB), %d" i
|CheckPred (P_BOI, i) -> Printf.sprintf "P(BOI), %d" i
|CheckPred (P_EOM, i) -> Printf.sprintf "P(EOM), %d" i
|CheckPred (P_EOI, i) -> Printf.sprintf "P(EOI), %d" i
|CheckPred (P_EOIX ul, i) -> Printf.sprintf "P(EOIX, %B), %d" ul i
|CheckBackref (i, j) -> Printf.sprintf "\\%d, %d" i j
|BeginCap (i, j) -> Printf.sprintf "BEGIN_CAP %d, %d" i j
|EndCap (i, j) -> Printf.sprintf "END_CAP %d, %d" i j
|BranchAlt (i, j) -> Printf.sprintf "ALT (%d|%d)" i j
|MakeB (i) -> Printf.sprintf "MAKE_B, %d" i
|EvalB (i) -> Printf.sprintf "EVAL_B, %d" i
|BranchKln (true, i, j) -> Printf.sprintf "KLN_G (%d|%d)" i j
|BranchKln (false, i, j) -> Printf.sprintf "KLN_R (%d|%d)" i j in
let rec print_states i =
if i == Nfa.size nfa then () else
let _ = Printf.printf "%d: %s\n" i (print_state (Nfa.get_state nfa i)) in
print_states (i + 1) in
print_states 0;;
let print_flags f =
let s = if Flags.is_interrupted f then "INTERRUPTED, " else "" in
if Flags.is_pruned f then Printf.sprintf "%sPRUNED" s else s;;
let std_scan _ =
while true do
let lexbuf = Lexing.from_channel stdin in
let p = ParsingMain.parse_pattern lexbuf in
let nfa = Nfa.make p in
(*
let _ = print_nfa nfa in
print_newline ();
*)
Printf.printf "= Analysis =\n%!";
AnalyserMain.enumerate_verbose nfa;
(*
begin
(*match AnalyserMain.search_exhaustive nfa with*)
let ts = Unix.gettimeofday () in
match AnalyserMain.search_optimized nfa slim with
|(f, _, None) -> Printf.printf "None.\nF : {%s}\nT : %f (s)\n" (print_flags f) (Unix.gettimeofday () -. ts)
|(f, _, Some (ik, x, y, z)) -> Printf.printf "KLEENE: %d\nPREFIX : %s\nPUMPABLE : %s\nSUFFIX : %s\nFLAGS : {%s}\nTIME : %f (s)\n"
ik (Word.print x) (Word.print y) (Word.print z) (print_flags f) (Unix.gettimeofday () -. ts)
end;
*)
print_newline ();
flush stdout
done;;
let batch_scan fname zlim =
let rs = RegexScanner.make fname in
let c_total = ref 0 in
let c_parsed = ref 0 in
let c_pumpable = ref 0 in
let c_vulnerable = ref 0 in
let c_interrupted = ref 0 in
let c_pruned = ref 0 in
let t_total = ref 0.0 in
let t_max = ref 0.0 in
let s_max = ref 0 in
let s_total = ref 0 in
let rec scan () = match RegexScanner.next rs with
|RegexScanner.Eof ->
Printf.printf ">> TOTAL: %d\n" !c_total;
Printf.printf ">> PARSED: %d\n" !c_parsed;
Printf.printf ">> MAX NFA SIZE: %d\n" !s_max;
Printf.printf ">> AVG NFA SIZE: %d\n" (!s_total / !c_parsed);
Printf.printf ">> PUMPABLE: %d\n" !c_pumpable;
Printf.printf ">> VULNERABLE: %d\n" !c_vulnerable;
Printf.printf ">> INTERRUPTED: %d\n" !c_interrupted;
Printf.printf ">> PRUNED: %d\n" !c_pruned;
Printf.printf ">> TIME TOTAL: %f (s)\n" !t_total;
Printf.printf ">> TIME MAX: %f (s)\n" !t_max;
|RegexScanner.Error (e, s) ->
c_total := !c_total + 1;
Printf.printf "= [%d] =\n" !c_total;
Printf.printf "INPUT: %s\n" s;
Printf.printf "PARSE: ERROR {%s}\n%!" (Printexc.to_string e);
scan ()
|RegexScanner.Regex (nfa, s) ->
c_total := !c_total + 1;
c_parsed := !c_parsed + 1;
s_total := !s_total + (Nfa.size nfa);
s_max := if (Nfa.size nfa > !s_max) then (Nfa.size nfa) else !s_max;
Printf.printf "= [%d] =\n" !c_total;
Printf.printf "INPUT: %s\n" s;
Printf.printf "PARSE: OK\n";
Printf.printf "SIZE: %d\n%!" (Nfa.size nfa);
begin
let ts = Unix.gettimeofday () in
match AnalyserMain.search_optimized nfa zlim with
|(f, kset, None) ->
let t_this = Unix.gettimeofday () -. ts in
let _ = if IntSet.is_empty kset then (
Printf.printf "PUMPABLE: NO\n"
) else (
c_pumpable := !c_pumpable + 1;
Printf.printf "PUMPABLE: YES\n";
Printf.printf "VULNERABLE: NO {%s}\n" (print_flags f)
) in
c_interrupted := if Flags.is_interrupted f then !c_interrupted + 1 else !c_interrupted;
c_pruned := if Flags.is_pruned f then !c_pruned + 1 else !c_pruned;
t_total := !t_total +. t_this;
t_max := if t_this > !t_max then t_this else !t_max;
Printf.printf "TIME: %f (s)\n" t_this
|(f, _, Some (ik, x, y, z)) ->
c_pumpable := !c_pumpable + 1;
c_vulnerable := !c_vulnerable + 1;
c_pruned := if Flags.is_pruned f then !c_pruned + 1 else !c_pruned;
Printf.printf "PUMPABLE: YES\n";
Printf.printf "VULNERABLE: YES {%s}\n" (print_flags f);
Printf.printf "KLEENE: %s\n" (let (i, j) = Nfa.get_subexp_location nfa ik in String.sub s i (j - i + 1));
Printf.printf "PREFIX: %s\n" (Word.print_select x [('\x21', '\x7e')]);
Printf.printf "PUMPABLE: %s\n" (Word.print_select y [('\x21', '\x7e')]);
Printf.printf "SUFFIX: %s\n" (Word.print_select z [('\x21', '\x7e')]);
Printf.printf "TIME: %f (s)\n" (Unix.gettimeofday () -. ts)
end;
scan () in
scan ();;
let snort_scan fname slim qmode =
let rs = RuleScanner.make fname in
let total_regexes = ref 0 in
let analysed_regexes = ref 0 in
let vulnerable_regexes = ref 0 in
let unknown_regexes = ref 0 in
let current_file = ref "" in
let rec scan () = match RuleScanner.next rs with
|None -> Printf.printf "TOTAL: %d\nANALYSED: %d\nVULNERABLE: %d\nUNKNOWN: %d\n" !total_regexes !analysed_regexes !vulnerable_regexes !unknown_regexes
|Some (file, line, regex_string) ->
total_regexes := !total_regexes + 1;
if (!current_file != file) then (
current_file := file;
Printf.printf "Processing file: %s\n" file
);
let lexbuf = Lexing.from_string (Printf.sprintf "%s\n" regex_string) in
try
let p = ParsingMain.parse_pattern lexbuf in
let nfa = Nfa.make p in
begin
match AnalyserMain.search_optimized nfa slim with
|(f, _, None) when (Flags.is_empty f) -> ()
|(f, _, None) ->
unknown_regexes := !unknown_regexes + 1;
Printf.printf "Line: %d, Flags : {%s}\n%!" line (print_flags f)
|(f, _, Some (ik, x, y, z)) ->
vulnerable_regexes := !vulnerable_regexes + 1;
let (ks, ke) = Nfa.get_subexp_location nfa ik in
Printf.printf "Line: %d\n KLEENE: %s\n PREFIX : %s\n PUMPABLE : %s\n SUFFIX : %s\n FLAGS : {%s}\n%!"
line (String.sub regex_string (ks + 1) (ke - ks + 1)) (Word.print x) (Word.print y) (Word.print z) (print_flags f)
end;
analysed_regexes := !analysed_regexes + 1;
scan ()
with e ->
unknown_regexes := !unknown_regexes + 1;
if not qmode then Printf.printf "Line: %d, Parsing error (%s)\n%!" line (Printexc.to_string e);
scan () in
scan ();;
let slim = ref 100 in (* default z search limit, rarely touched *)
let input_file = ref None in
let snort_mode = ref false in
let quiet_mode = ref false in
let spec = Arg.align [("-slim", Arg.Int (fun i -> if i > 1 then slim := i), "<n> Abandon current search path after this many unstable xy derivations");
("-i", Arg.String (fun s -> input_file := Some s), "<file> Analyse regular expressions from this input file");
("-q", Arg.Unit (fun () -> quiet_mode := true), " Quiet mode (hide parsing erros)");
("-snort", Arg.Unit (fun () -> snort_mode := true), " Snort rule processing mode (use -i to specify the rules file / directory)")] in
let message = "USAGE: run.bin [-slim n] [-i file] [-snort]" in
let _ = Arg.parse spec (fun _ -> ()) message in
match !input_file with
|Some f when !snort_mode -> snort_scan f !slim !quiet_mode
|Some f -> batch_scan f !slim
|None ->
if !snort_mode then
Arg.usage spec message
else
std_scan !slim;;
| null | https://raw.githubusercontent.com/superhuman/rxxr2/0eea5e9f0e0cde6c39e0fc12614f64edb6189cd5/code/Run.ml | ocaml |
let _ = print_nfa nfa in
print_newline ();
match AnalyserMain.search_exhaustive nfa with
default z search limit, rarely touched | (* © Copyright University of Birmingham, UK *)
open ParsingData
open Nfa
open Common
let rec print_exp e = match e with
|Zero -> Printf.sprintf "\\0"
|One -> Printf.sprintf "\\1"
|Dot -> Printf.sprintf "."
|Pred(Bol) -> Printf.sprintf "\\p(^)"
|Pred(Eol) -> Printf.sprintf "\\p($)"
|Pred(Wordb) -> Printf.sprintf "\\p(b)"
|Pred(NWordb) -> Printf.sprintf "\\p(B)"
|Pred(Boi) -> Printf.sprintf "\\p(A)"
|Pred(Eom) -> Printf.sprintf "\\p(G)"
|Pred(Eoi1) -> Printf.sprintf "\\p(Z)"
|Pred(Eoi2) -> Printf.sprintf "\\p(z)"
|Atom(Char c) -> zprint c
|Atom(Cls cls) -> cls_print cls
|Group(CAP(_), _, _, r) -> Printf.sprintf "(%s)" (print_exp (fst r))
|Backref(i) -> Printf.sprintf "(?:\\%d)" i
|Group(MODS, m_on, m_off, _) -> Printf.sprintf "(?%d-%d)" m_on m_off
|Group(NOCAP, m_on, m_off, r) -> Printf.sprintf "(?%d-%d:%s)" m_on m_off (print_exp (fst r))
|Group(PLA, _, _, r) -> Printf.sprintf "(?=%s)" (print_exp (fst r))
|Group(NLA, _, _, r) -> Printf.sprintf "(?!%s)" (print_exp (fst r))
|Group(PLB, _, _, r) -> Printf.sprintf "(?<=%s)" (print_exp (fst r))
|Group(NLB, _, _, r) -> Printf.sprintf "(?<!%s)" (print_exp (fst r))
|Group(ATOMIC, _, _, r) -> Printf.sprintf "(?>%s)" (print_exp (fst r))
|Conc(r1, r2) -> Printf.sprintf "%s%s" (print_exp (fst r1)) (print_exp (fst r2))
|Alt(r1, r2) -> Printf.sprintf "(%s|%s)" (print_exp (fst r1)) (print_exp (fst r2))
|Kleene(Gq, r1) -> Printf.sprintf "%s*" (print_exp (fst r1))
|Kleene(Rq, r1) -> Printf.sprintf "%s*?" (print_exp (fst r1));;
let print_regex r = Printf.sprintf "%s" (print_exp (fst r));;
let print_pattern p = Printf.printf "/%s/%d\n" (print_regex (fst p)) (snd p);;
let print_nfa nfa =
let _ = Printf.printf "= (root: %d) =\n" (Nfa.root nfa) in
let print_state s = match s with
|End -> "END"
|Kill -> "KILL"
|Pass i -> Printf.sprintf "PASS, %d" i
|Match (cls, i) -> Printf.sprintf "MATCH %s, %d" (cls_print cls) i
|CheckPred (P_BOL ul, i) -> Printf.sprintf "P(BOL, %B), %d" ul i
|CheckPred (P_EOL ul, i) -> Printf.sprintf "P(EOL, %B), %d" ul i
|CheckPred (P_WB, i) -> Printf.sprintf "P(WB), %d" i
|CheckPred (P_NWB, i) -> Printf.sprintf "P(NWB), %d" i
|CheckPred (P_BOI, i) -> Printf.sprintf "P(BOI), %d" i
|CheckPred (P_EOM, i) -> Printf.sprintf "P(EOM), %d" i
|CheckPred (P_EOI, i) -> Printf.sprintf "P(EOI), %d" i
|CheckPred (P_EOIX ul, i) -> Printf.sprintf "P(EOIX, %B), %d" ul i
|CheckBackref (i, j) -> Printf.sprintf "\\%d, %d" i j
|BeginCap (i, j) -> Printf.sprintf "BEGIN_CAP %d, %d" i j
|EndCap (i, j) -> Printf.sprintf "END_CAP %d, %d" i j
|BranchAlt (i, j) -> Printf.sprintf "ALT (%d|%d)" i j
|MakeB (i) -> Printf.sprintf "MAKE_B, %d" i
|EvalB (i) -> Printf.sprintf "EVAL_B, %d" i
|BranchKln (true, i, j) -> Printf.sprintf "KLN_G (%d|%d)" i j
|BranchKln (false, i, j) -> Printf.sprintf "KLN_R (%d|%d)" i j in
let rec print_states i =
if i == Nfa.size nfa then () else
let _ = Printf.printf "%d: %s\n" i (print_state (Nfa.get_state nfa i)) in
print_states (i + 1) in
print_states 0;;
let print_flags f =
let s = if Flags.is_interrupted f then "INTERRUPTED, " else "" in
if Flags.is_pruned f then Printf.sprintf "%sPRUNED" s else s;;
let std_scan _ =
while true do
let lexbuf = Lexing.from_channel stdin in
let p = ParsingMain.parse_pattern lexbuf in
let nfa = Nfa.make p in
Printf.printf "= Analysis =\n%!";
AnalyserMain.enumerate_verbose nfa;
(*
begin
let ts = Unix.gettimeofday () in
match AnalyserMain.search_optimized nfa slim with
|(f, _, None) -> Printf.printf "None.\nF : {%s}\nT : %f (s)\n" (print_flags f) (Unix.gettimeofday () -. ts)
|(f, _, Some (ik, x, y, z)) -> Printf.printf "KLEENE: %d\nPREFIX : %s\nPUMPABLE : %s\nSUFFIX : %s\nFLAGS : {%s}\nTIME : %f (s)\n"
ik (Word.print x) (Word.print y) (Word.print z) (print_flags f) (Unix.gettimeofday () -. ts)
end;
*)
print_newline ();
flush stdout
done;;
let batch_scan fname zlim =
let rs = RegexScanner.make fname in
let c_total = ref 0 in
let c_parsed = ref 0 in
let c_pumpable = ref 0 in
let c_vulnerable = ref 0 in
let c_interrupted = ref 0 in
let c_pruned = ref 0 in
let t_total = ref 0.0 in
let t_max = ref 0.0 in
let s_max = ref 0 in
let s_total = ref 0 in
let rec scan () = match RegexScanner.next rs with
|RegexScanner.Eof ->
Printf.printf ">> TOTAL: %d\n" !c_total;
Printf.printf ">> PARSED: %d\n" !c_parsed;
Printf.printf ">> MAX NFA SIZE: %d\n" !s_max;
Printf.printf ">> AVG NFA SIZE: %d\n" (!s_total / !c_parsed);
Printf.printf ">> PUMPABLE: %d\n" !c_pumpable;
Printf.printf ">> VULNERABLE: %d\n" !c_vulnerable;
Printf.printf ">> INTERRUPTED: %d\n" !c_interrupted;
Printf.printf ">> PRUNED: %d\n" !c_pruned;
Printf.printf ">> TIME TOTAL: %f (s)\n" !t_total;
Printf.printf ">> TIME MAX: %f (s)\n" !t_max;
|RegexScanner.Error (e, s) ->
c_total := !c_total + 1;
Printf.printf "= [%d] =\n" !c_total;
Printf.printf "INPUT: %s\n" s;
Printf.printf "PARSE: ERROR {%s}\n%!" (Printexc.to_string e);
scan ()
|RegexScanner.Regex (nfa, s) ->
c_total := !c_total + 1;
c_parsed := !c_parsed + 1;
s_total := !s_total + (Nfa.size nfa);
s_max := if (Nfa.size nfa > !s_max) then (Nfa.size nfa) else !s_max;
Printf.printf "= [%d] =\n" !c_total;
Printf.printf "INPUT: %s\n" s;
Printf.printf "PARSE: OK\n";
Printf.printf "SIZE: %d\n%!" (Nfa.size nfa);
begin
let ts = Unix.gettimeofday () in
match AnalyserMain.search_optimized nfa zlim with
|(f, kset, None) ->
let t_this = Unix.gettimeofday () -. ts in
let _ = if IntSet.is_empty kset then (
Printf.printf "PUMPABLE: NO\n"
) else (
c_pumpable := !c_pumpable + 1;
Printf.printf "PUMPABLE: YES\n";
Printf.printf "VULNERABLE: NO {%s}\n" (print_flags f)
) in
c_interrupted := if Flags.is_interrupted f then !c_interrupted + 1 else !c_interrupted;
c_pruned := if Flags.is_pruned f then !c_pruned + 1 else !c_pruned;
t_total := !t_total +. t_this;
t_max := if t_this > !t_max then t_this else !t_max;
Printf.printf "TIME: %f (s)\n" t_this
|(f, _, Some (ik, x, y, z)) ->
c_pumpable := !c_pumpable + 1;
c_vulnerable := !c_vulnerable + 1;
c_pruned := if Flags.is_pruned f then !c_pruned + 1 else !c_pruned;
Printf.printf "PUMPABLE: YES\n";
Printf.printf "VULNERABLE: YES {%s}\n" (print_flags f);
Printf.printf "KLEENE: %s\n" (let (i, j) = Nfa.get_subexp_location nfa ik in String.sub s i (j - i + 1));
Printf.printf "PREFIX: %s\n" (Word.print_select x [('\x21', '\x7e')]);
Printf.printf "PUMPABLE: %s\n" (Word.print_select y [('\x21', '\x7e')]);
Printf.printf "SUFFIX: %s\n" (Word.print_select z [('\x21', '\x7e')]);
Printf.printf "TIME: %f (s)\n" (Unix.gettimeofday () -. ts)
end;
scan () in
scan ();;
let snort_scan fname slim qmode =
let rs = RuleScanner.make fname in
let total_regexes = ref 0 in
let analysed_regexes = ref 0 in
let vulnerable_regexes = ref 0 in
let unknown_regexes = ref 0 in
let current_file = ref "" in
let rec scan () = match RuleScanner.next rs with
|None -> Printf.printf "TOTAL: %d\nANALYSED: %d\nVULNERABLE: %d\nUNKNOWN: %d\n" !total_regexes !analysed_regexes !vulnerable_regexes !unknown_regexes
|Some (file, line, regex_string) ->
total_regexes := !total_regexes + 1;
if (!current_file != file) then (
current_file := file;
Printf.printf "Processing file: %s\n" file
);
let lexbuf = Lexing.from_string (Printf.sprintf "%s\n" regex_string) in
try
let p = ParsingMain.parse_pattern lexbuf in
let nfa = Nfa.make p in
begin
match AnalyserMain.search_optimized nfa slim with
|(f, _, None) when (Flags.is_empty f) -> ()
|(f, _, None) ->
unknown_regexes := !unknown_regexes + 1;
Printf.printf "Line: %d, Flags : {%s}\n%!" line (print_flags f)
|(f, _, Some (ik, x, y, z)) ->
vulnerable_regexes := !vulnerable_regexes + 1;
let (ks, ke) = Nfa.get_subexp_location nfa ik in
Printf.printf "Line: %d\n KLEENE: %s\n PREFIX : %s\n PUMPABLE : %s\n SUFFIX : %s\n FLAGS : {%s}\n%!"
line (String.sub regex_string (ks + 1) (ke - ks + 1)) (Word.print x) (Word.print y) (Word.print z) (print_flags f)
end;
analysed_regexes := !analysed_regexes + 1;
scan ()
with e ->
unknown_regexes := !unknown_regexes + 1;
if not qmode then Printf.printf "Line: %d, Parsing error (%s)\n%!" line (Printexc.to_string e);
scan () in
scan ();;
let input_file = ref None in
let snort_mode = ref false in
let quiet_mode = ref false in
let spec = Arg.align [("-slim", Arg.Int (fun i -> if i > 1 then slim := i), "<n> Abandon current search path after this many unstable xy derivations");
("-i", Arg.String (fun s -> input_file := Some s), "<file> Analyse regular expressions from this input file");
("-q", Arg.Unit (fun () -> quiet_mode := true), " Quiet mode (hide parsing erros)");
("-snort", Arg.Unit (fun () -> snort_mode := true), " Snort rule processing mode (use -i to specify the rules file / directory)")] in
let message = "USAGE: run.bin [-slim n] [-i file] [-snort]" in
let _ = Arg.parse spec (fun _ -> ()) message in
match !input_file with
|Some f when !snort_mode -> snort_scan f !slim !quiet_mode
|Some f -> batch_scan f !slim
|None ->
if !snort_mode then
Arg.usage spec message
else
std_scan !slim;;
|
376df0d707c47ad1e7d3b697aafbd4ad08ad3a70db7f751a14095c4e581bb3eb | windorg/app-old | Board.hs | module Web.Controller.Board where
import qualified Optics
import Web.Controller.Authorization
import Web.Controller.Prelude
import Web.Helper.Common
import Web.View.Board.Edit
import Web.View.Board.Index
import Web.View.Board.New
import Web.View.Board.Show
import Data.Text (strip)
import Named
instance Controller BoardController where
action BoardsAction = do
let augmentBoard board = do
user <- fetch (get #ownerId board)
pure (board, #handle (get #handle user), #displayName (get #displayName user))
case mbCurrentUserId of
Just currentUid -> do
ownBoards <-
query @Board
|> filterWhere (#ownerId, currentUid)
|> orderByDesc #createdAt
|> fetch
othersBoards <-
query @Board
|> filterWhereNot (#ownerId, currentUid)
|> orderByDesc #createdAt
|> fetch
>>= filterM (userCanView @Board . get #id)
>>= mapM augmentBoard
render IndexViewUser{..}
Nothing -> do
allBoards <-
query @Board
|> orderByDesc #createdAt
|> fetch
>>= filterM (userCanView @Board . get #id)
>>= mapM augmentBoard
render IndexViewGuest{..}
action NewBoardAction = do
ensureIsUser
let board = newRecord :: Board
render NewView{..}
action ShowBoardAction{boardId} = do
accessDeniedUnless =<< userCanView @Board boardId
board <- fetch boardId
owner <- fetch (get #ownerId board)
cards <-
get #cards board
|> orderByDesc #createdAt
|> fetch
>>= filterM (userCanView @Card . get #id)
counts <- forM cards $ \card ->
sqlQueryScalar "SELECT COUNT(*) FROM card_updates WHERE card_id = ?" [get #id card]
render ShowView{cards = zip cards counts, ..}
action EditBoardAction{boardId} = do
accessDeniedUnless =<< userCanEdit @Board boardId
board <- fetch boardId
owner <- fetch (get #ownerId board)
render EditView{..}
action UpdateBoardAction{boardId} = do
accessDeniedUnless =<< userCanEdit @Board boardId
board <- fetch boardId
owner <- fetch (get #ownerId board)
board
|> buildBoard
|> ifValid \case
Left board -> render EditView{..}
Right board -> do
board <- board |> updateRecord
redirectTo BoardsAction
action CreateBoardAction = do
ensureIsUser
let board = (newRecord :: Board) |> set #ownerId currentUserId
board
|> buildBoard
|> modify #title strip
|> ifValid \case
Left board -> render NewView{..}
Right board -> do
board <- board |> createRecord
redirectTo BoardsAction
action DeleteBoardAction{boardId} = do
accessDeniedUnless =<< userCanEdit @Board boardId
board <- fetch boardId
deleteRecord board
redirectTo BoardsAction
buildBoard board =
board
|> fill @'["title"]
|> Optics.over #settings_ \settings ->
(settings :: BoardSettings)
{ visibility = if paramOrDefault False "private" then VisibilityPrivate else VisibilityPublic
}
| null | https://raw.githubusercontent.com/windorg/app-old/ed9c5322c8ab8a0275bdcd479be12a3f230da8c9/Web/Controller/Board.hs | haskell | module Web.Controller.Board where
import qualified Optics
import Web.Controller.Authorization
import Web.Controller.Prelude
import Web.Helper.Common
import Web.View.Board.Edit
import Web.View.Board.Index
import Web.View.Board.New
import Web.View.Board.Show
import Data.Text (strip)
import Named
instance Controller BoardController where
action BoardsAction = do
let augmentBoard board = do
user <- fetch (get #ownerId board)
pure (board, #handle (get #handle user), #displayName (get #displayName user))
case mbCurrentUserId of
Just currentUid -> do
ownBoards <-
query @Board
|> filterWhere (#ownerId, currentUid)
|> orderByDesc #createdAt
|> fetch
othersBoards <-
query @Board
|> filterWhereNot (#ownerId, currentUid)
|> orderByDesc #createdAt
|> fetch
>>= filterM (userCanView @Board . get #id)
>>= mapM augmentBoard
render IndexViewUser{..}
Nothing -> do
allBoards <-
query @Board
|> orderByDesc #createdAt
|> fetch
>>= filterM (userCanView @Board . get #id)
>>= mapM augmentBoard
render IndexViewGuest{..}
action NewBoardAction = do
ensureIsUser
let board = newRecord :: Board
render NewView{..}
action ShowBoardAction{boardId} = do
accessDeniedUnless =<< userCanView @Board boardId
board <- fetch boardId
owner <- fetch (get #ownerId board)
cards <-
get #cards board
|> orderByDesc #createdAt
|> fetch
>>= filterM (userCanView @Card . get #id)
counts <- forM cards $ \card ->
sqlQueryScalar "SELECT COUNT(*) FROM card_updates WHERE card_id = ?" [get #id card]
render ShowView{cards = zip cards counts, ..}
action EditBoardAction{boardId} = do
accessDeniedUnless =<< userCanEdit @Board boardId
board <- fetch boardId
owner <- fetch (get #ownerId board)
render EditView{..}
action UpdateBoardAction{boardId} = do
accessDeniedUnless =<< userCanEdit @Board boardId
board <- fetch boardId
owner <- fetch (get #ownerId board)
board
|> buildBoard
|> ifValid \case
Left board -> render EditView{..}
Right board -> do
board <- board |> updateRecord
redirectTo BoardsAction
action CreateBoardAction = do
ensureIsUser
let board = (newRecord :: Board) |> set #ownerId currentUserId
board
|> buildBoard
|> modify #title strip
|> ifValid \case
Left board -> render NewView{..}
Right board -> do
board <- board |> createRecord
redirectTo BoardsAction
action DeleteBoardAction{boardId} = do
accessDeniedUnless =<< userCanEdit @Board boardId
board <- fetch boardId
deleteRecord board
redirectTo BoardsAction
buildBoard board =
board
|> fill @'["title"]
|> Optics.over #settings_ \settings ->
(settings :: BoardSettings)
{ visibility = if paramOrDefault False "private" then VisibilityPrivate else VisibilityPublic
}
|
|
a8452438993ef6536e270ed1cc9cd27b0cc520bd444bcb90e60591b8366b1bf1 | jwiegley/parsec-free | Perm.hs | -----------------------------------------------------------------------------
-- |
-- Module : Text.Parsec.Perm
-- Copyright : (c) 1999-2001, (c) 2007
-- License : BSD-style (see the file libraries/parsec/LICENSE)
--
-- Maintainer :
-- Stability : provisional
-- Portability : non-portable (uses existentially quantified data constructors)
--
-- This module implements permutation parsers. The algorithm used
-- is fairly complex since we push the type system to its limits :-)
-- The algorithm is described in:
--
-- /Parsing Permutation Phrases,/
-- by Arthur Baars, Andres Loh and Doaitse Swierstra.
-- Published as a functional pearl at the Haskell Workshop 2001.
--
-----------------------------------------------------------------------------
{-# LANGUAGE ExistentialQuantification, StandaloneDeriving #-}
module Text.Parsec.Perm
( PermParser
, StreamPermParser -- abstract
, permute
, (<||>), (<$$>)
, (<|?>), (<$?>)
) where
import Text.Parsec
import Control.Monad.Identity
import Data.Typeable ( Typeable )
#if !(MIN_VERSION_base(4,7,0))
-- For GHC 7.6
import Data.Typeable ( Typeable3 )
#endif
infixl 1 <||>, <|?>
infixl 2 <$$>, <$?>
{---------------------------------------------------------------
test -- parse a permutation of
* an optional string of 'a's
* a required 'b'
* an optional 'c'
---------------------------------------------------------------}
{-
test input
= parse (do{ x <- ptest; eof; return x }) "" input
ptest :: Parser (String,Char,Char)
ptest
= permute $
(,,) <$?> ("",many1 (char 'a'))
<||> char 'b'
<|?> ('_',char 'c')
-}
{---------------------------------------------------------------
Building a permutation parser
---------------------------------------------------------------}
-- | The expression @perm \<||> p@ adds parser @p@ to the permutation
-- parser @perm@. The parser @p@ is not allowed to accept empty input -
-- use the optional combinator ('<|?>') instead. Returns a
-- new permutation parser that includes @p@.
(<||>) :: (Stream s Identity tok) => StreamPermParser s st (a -> b) -> Parsec s st a -> StreamPermParser s st b
(<||>) perm p = add perm p
-- | The expression @f \<$$> p@ creates a fresh permutation parser
-- consisting of parser @p@. The final result of the permutation
-- parser is the function @f@ applied to the return value of @p@. The
-- parser @p@ is not allowed to accept empty input - use the optional
-- combinator ('<$?>') instead.
--
-- If the function @f@ takes more than one parameter, the type variable
-- @b@ is instantiated to a functional type which combines nicely with
-- the adds parser @p@ to the ('<||>') combinator. This
-- results in stylized code where a permutation parser starts with a
-- combining function @f@ followed by the parsers. The function @f@
-- gets its parameters in the order in which the parsers are specified,
-- but actual input can be in any order.
(<$$>) :: (Stream s Identity tok) => (a -> b) -> Parsec s st a -> StreamPermParser s st b
(<$$>) f p = newperm f <||> p
-- | The expression @perm \<||> (x,p)@ adds parser @p@ to the
-- permutation parser @perm@. The parser @p@ is optional - if it can
-- not be applied, the default value @x@ will be used instead. Returns
-- a new permutation parser that includes the optional parser @p@.
(<|?>) :: (Stream s Identity tok) => StreamPermParser s st (a -> b) -> (a, Parsec s st a) -> StreamPermParser s st b
(<|?>) perm (x,p) = addopt perm x p
-- | The expression @f \<$?> (x,p)@ creates a fresh permutation parser
-- consisting of parser @p@. The final result of the permutation
-- parser is the function @f@ applied to the return value of @p@. The
-- parser @p@ is optional - if it can not be applied, the default value
-- @x@ will be used instead.
(<$?>) :: (Stream s Identity tok) => (a -> b) -> (a, Parsec s st a) -> StreamPermParser s st b
(<$?>) f (x,p) = newperm f <|?> (x,p)
{---------------------------------------------------------------
The permutation tree
---------------------------------------------------------------}
-- | Provided for backwards compatibility. The tok type is ignored.
type PermParser tok st a = StreamPermParser String st a
-- | The type @StreamPermParser s st a@ denotes a permutation parser that,
-- when converted by the 'permute' function, parses
-- @s@ streams with user state @st@ and returns a value of
-- type @a@ on success.
--
-- Normally, a permutation parser is first built with special operators
-- like ('<||>') and then transformed into a normal parser
-- using 'permute'.
data StreamPermParser s st a = Perm (Maybe a) [StreamBranch s st a]
#if MIN_VERSION_base(4,7,0)
deriving ( Typeable )
#else
deriving instance Typeable3 StreamPermParser
#endif
type Branch st a = StreamBranch String st a
data StreamBranch s st a = forall b. Branch (StreamPermParser s st (b -> a)) (Parsec s st b)
#if MIN_VERSION_base(4,7,0)
deriving ( Typeable )
#else
deriving instance Typeable3 StreamBranch
#endif
-- | The parser @permute perm@ parses a permutation of parser described
-- by @perm@. For example, suppose we want to parse a permutation of:
-- an optional string of @a@'s, the character @b@ and an optional @c@.
-- This can be described by:
--
-- > test = permute (tuple <$?> ("",many1 (char 'a'))
-- > <||> char 'b'
-- > <|?> ('_',char 'c'))
-- > where
-- > tuple a b c = (a,b,c)
-- transform a permutation tree into a normal parser
permute :: (Stream s Identity tok) => StreamPermParser s st a -> Parsec s st a
permute (Perm def xs)
= choice (map branch xs ++ empty)
where
empty
= case def of
Nothing -> []
Just x -> [return x]
branch (Branch perm p)
= do{ x <- p
; f <- permute perm
; return (f x)
}
-- build permutation trees
newperm :: (Stream s Identity tok) => (a -> b) -> StreamPermParser s st (a -> b)
newperm f
= Perm (Just f) []
add :: (Stream s Identity tok) => StreamPermParser s st (a -> b) -> Parsec s st a -> StreamPermParser s st b
add perm@(Perm _mf fs) p
= Perm Nothing (first:map insert fs)
where
first = Branch perm p
insert (Branch perm' p')
= Branch (add (mapPerms flip perm') p) p'
addopt :: (Stream s Identity tok) => StreamPermParser s st (a -> b) -> a -> Parsec s st a -> StreamPermParser s st b
addopt perm@(Perm mf fs) x p
= Perm (fmap ($ x) mf) (first:map insert fs)
where
first = Branch perm p
insert (Branch perm' p')
= Branch (addopt (mapPerms flip perm') x p) p'
mapPerms :: (Stream s Identity tok) => (a -> b) -> StreamPermParser s st a -> StreamPermParser s st b
mapPerms f (Perm x xs)
= Perm (fmap f x) (map mapBranch xs)
where
mapBranch (Branch perm p)
= Branch (mapPerms (f.) perm) p
| null | https://raw.githubusercontent.com/jwiegley/parsec-free/b8bbaaa0457e1871747af889fea4be97d20b41b1/Text/Parsec/Perm.hs | haskell | ---------------------------------------------------------------------------
|
License : BSD-style (see the file libraries/parsec/LICENSE)
Maintainer :
Stability : provisional
Portability : non-portable (uses existentially quantified data constructors)
This module implements permutation parsers. The algorithm used
is fairly complex since we push the type system to its limits :-)
The algorithm is described in:
/Parsing Permutation Phrases,/
---------------------------------------------------------------------------
abstract
--------------------------------------------------------------
test -- parse a permutation of
* an optional string of 'a's
* a required 'b'
* an optional 'c'
--------------------------------------------------------------
--------------------------------------------------------------
Building a permutation parser
--------------------------------------------------------------
| The expression @perm \<||> p@ adds parser @p@ to the permutation
parser @perm@. The parser @p@ is not allowed to accept empty input -
use the optional combinator ('<|?>') instead. Returns a
| The expression @f \<$$> p@ creates a fresh permutation parser
consisting of parser @p@. The the final result of the permutation
parser @p@ is not allowed to accept empty input - use the optional
combinator ('<$?>') instead.
the adds parser @p@ to the ('<||>') combinator. This
results in stylized code where a permutation parser starts with a
gets its parameters in the order in which the parsers are specified,
but actual input can be in any order.
| The expression @perm \<||> (x,p)@ adds parser @p@ to the
permutation parser @perm@. The parser @p@ is optional - if it can
not be applied, the default value @x@ will be used instead. Returns
a new permutation parser that includes the optional parser @p@.
consisting of parser @p@. The the final result of the permutation
parser @p@ is optional - if it can not be applied, the default value
@x@ will be used instead.
--------------------------------------------------------------
The permutation tree
--------------------------------------------------------------
| Provided for backwards compatibility. The tok type is ignored.
| The type @StreamPermParser s st a@ denotes a permutation parser that,
when converted by the 'permute' function, parses
@s@ streams with user state @st@ and returns a value of
type @a@ on success.
like ('<||>') and than transformed into a normal parser
using 'permute'.
| The parser @permute perm@ parses a permutation of parser described
by @perm@. For example, suppose we want to parse a permutation of:
This can be described by:
> test = permute (tuple <$?> ("",many1 (char 'a'))
> <||> char 'b'
> <|?> ('_',char 'c'))
> where
> tuple a b c = (a,b,c)
transform a permutation tree into a normal parser
build permutation trees | Module : Text . Parsec . Perm
Copyright : ( c ) 1999 - 2001 , ( c ) 2007
by , and .
Published as a functional pearl at the Haskell Workshop 2001 .
# LANGUAGE ExistentialQuantification , StandaloneDeriving #
module Text.Parsec.Perm
( PermParser
, permute
, (<||>), (<$$>)
, (<|?>), (<$?>)
) where
import Text.Parsec
import Control.Monad.Identity
import Data.Typeable ( Typeable )
#if !(MIN_VERSION_base(4,7,0))
For GHC 7.6
import Data.Typeable ( Typeable3 )
#endif
infixl 1 <||>, <|?>
infixl 2 <$$>, <$?>
test input
= parse ( do { x < - ptest ; eof ; return x } ) " " input
ptest : : ( String , , )
ptest
= permute $
( , , ) < $ ? > ( " " , many1 ( char ' a ' ) )
< || > char ' b '
< | ? > ( ' _ ' , char ' c ' )
test input
= parse (do{ x <- ptest; eof; return x }) "" input
ptest :: Parser (String,Char,Char)
ptest
= permute $
(,,) <$?> ("",many1 (char 'a'))
<||> char 'b'
<|?> ('_',char 'c')
-}
new permutation parser that includes @p@.
(<||>) :: (Stream s Identity tok) => StreamPermParser s st (a -> b) -> Parsec s st a -> StreamPermParser s st b
(<||>) perm p = add perm p
parser is the function @f@ applied to the return value of @p@. The
If the function @f@ takes more than one parameter , the type variable
@b@ is instantiated to a functional type which combines nicely with
combining function @f@ followed by the parsers . The function @f@
(<$$>) :: (Stream s Identity tok) => (a -> b) -> Parsec s st a -> StreamPermParser s st b
(<$$>) f p = newperm f <||> p
(<|?>) :: (Stream s Identity tok) => StreamPermParser s st (a -> b) -> (a, Parsec s st a) -> StreamPermParser s st b
(<|?>) perm (x,p) = addopt perm x p
| The expression ? > ( x , p)@ creates a fresh permutation parser
parser is the function @f@ applied to the return value of @p@. The
(<$?>) :: (Stream s Identity tok) => (a -> b) -> (a, Parsec s st a) -> StreamPermParser s st b
(<$?>) f (x,p) = newperm f <|?> (x,p)
type PermParser tok st a = StreamPermParser String st a
Normally , a permutation parser is first build with special operators
data StreamPermParser s st a = Perm (Maybe a) [StreamBranch s st a]
#if MIN_VERSION_base(4,7,0)
deriving ( Typeable )
#else
deriving instance Typeable3 StreamPermParser
#endif
type Branch st a = StreamBranch String st a
data StreamBranch s st a = forall b. Branch (StreamPermParser s st (b -> a)) (Parsec s st b)
#if MIN_VERSION_base(4,7,0)
deriving ( Typeable )
#else
deriving instance Typeable3 StreamBranch
#endif
an optional string of @a@ 's , the character @b@ and an optional @c@.
permute :: (Stream s Identity tok) => StreamPermParser s st a -> Parsec s st a
permute (Perm def xs)
= choice (map branch xs ++ empty)
where
empty
= case def of
Nothing -> []
Just x -> [return x]
branch (Branch perm p)
= do{ x <- p
; f <- permute perm
; return (f x)
}
newperm :: (Stream s Identity tok) => (a -> b) -> StreamPermParser s st (a -> b)
newperm f
= Perm (Just f) []
add :: (Stream s Identity tok) => StreamPermParser s st (a -> b) -> Parsec s st a -> StreamPermParser s st b
add perm@(Perm _mf fs) p
= Perm Nothing (first:map insert fs)
where
first = Branch perm p
insert (Branch perm' p')
= Branch (add (mapPerms flip perm') p) p'
addopt :: (Stream s Identity tok) => StreamPermParser s st (a -> b) -> a -> Parsec s st a -> StreamPermParser s st b
addopt perm@(Perm mf fs) x p
= Perm (fmap ($ x) mf) (first:map insert fs)
where
first = Branch perm p
insert (Branch perm' p')
= Branch (addopt (mapPerms flip perm') x p) p'
mapPerms :: (Stream s Identity tok) => (a -> b) -> StreamPermParser s st a -> StreamPermParser s st b
mapPerms f (Perm x xs)
= Perm (fmap f x) (map mapBranch xs)
where
mapBranch (Branch perm p)
= Branch (mapPerms (f.) perm) p
|
35c52b7bd230d5b5a827c5eb49d3baf4e86ab41d6ff7235ba05ac2e642701987 | cojna/iota | SieveSpec.hs | module Math.Prime.SieveSpec (main, spec) where
import qualified Data.Vector.Unboxed as U
import Math.Prime
import Math.Prime.Sieve
import Test.Prelude
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "withPrimes 46337 generates smallPrimes" $
it "equal to smallPrimes" $
withPrimes 46337 U.toList `shouldBe` smallPrimes
| null | https://raw.githubusercontent.com/cojna/iota/6d2ad5b71b1b50bca9136d6ed84f80a0b7713d7c/test/Math/Prime/SieveSpec.hs | haskell | module Math.Prime.SieveSpec (main, spec) where
import qualified Data.Vector.Unboxed as U
import Math.Prime
import Math.Prime.Sieve
import Test.Prelude
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "withPrimes 46337 generates smallPrimes" $
it "equal to smallPrimes" $
withPrimes 46337 U.toList `shouldBe` smallPrimes
|
|
2528cb866a72dd0ab9705bfab9c3d6e2f1e707b2f88bfaf31fbeb411c868c514 | SonyCSLParis/BENG | config.lisp | ;;; Copyright (C) 2019 Sony Computer Science Laboratories Paris
;;; Remi van Trijp (www.remivantrijp.eu)
;;;
;;; This program is free software: you can redistribute it and/or modify
;;; it under the terms of the GNU General Public License as published by
;;; the Free Software Foundation, version 3 of the License.
;;;
;;; This program is distributed in the hope that it will be useful,
;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU General Public License for more details.
;;;
;;; You should have received a copy of the GNU General Public License
;;; along with this program. If not, see </>.
;;; ----------------------------------------------------------------------------
(in-package :beng)
;;;;; Helper function for using the BENG pathname.
;;;;; --------------------------------------------------------------------------------
(defun beng-pathname (&key directory name type)
"Helper function for accessing files of the English grammar."
(merge-pathnames (make-pathname :directory (cons :relative directory)
:name name :type type)
*beng-pathname*))
;; (beng-pathname)
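;; For illustration (hypothetical arguments, resolved against *beng-pathname*):
;; (beng-pathname :directory '("grammar" "lexicon") :name "verbs" :type "lisp")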
;;;;; Configuration utilities for manipulating the configuration of the grammar.
;;;;; --------------------------------------------------------------------------------
(defun set-parse-order (order &optional (cxn-inventory *fcg-constructions*))
(set-configuration cxn-inventory :parse-order order)
(set-configuration (processing-cxn-inventory cxn-inventory)
:parse-order order))
;; (set-parse-order '(hashed-string hashed-lex-id arg-cxn marked-phrasal unmarked-phrasal))
(defun set-production-order (order &optional (cxn-inventory *fcg-constructions*))
(set-configuration cxn-inventory :production-order order)
(set-configuration (processing-cxn-inventory cxn-inventory)
:production-order order))
;; (set-production-order '(hashed-meaning arg-cxn phrasal hashed-lex-id))
;;;;; Header used for writing files.
;;;;; --------------------------------------------------------------------------------
(defun add-license-and-copyright-header (out)
(format out ";;; Copyright (C) 2019 Sony Computer Science Laboratories Paris")
(format out "~%;;; Remi van Trijp (www.remivantrijp.eu)")
(format out "~%;;; ")
(format out "~%;;; This program is free software: you can redistribute it and/or modify")
(format out "~%;;; it under the terms of the GNU General Public License as published by")
(format out "~%;;; the Free Software Foundation, version 3 of the License.")
(format out "~%;;; ")
(format out "~%;;; This program is distributed in the hope that it will be useful,")
(format out "~%;;; but WITHOUT ANY WARRANTY; without even the implied warranty of")
(format out "~%;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the")
(format out "~%;;; GNU General Public License for more details.")
(format out "~%;;; ")
(format out "~%;;; You should have received a copy of the GNU General Public License")
(format out "~%;;; along with this program. If not, see </>.")
(format out "~%;;; ----------------------------------------------------------------------------")) | null | https://raw.githubusercontent.com/SonyCSLParis/BENG/1eb525e132d2b1f74ae18532657561725f6fbb78/signature/config.lisp | lisp |
This program is free software: you can redistribute it and/or modify
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with this program. If not, see </>.
----------------------------------------------------------------------------
--------------------------------------------------------------------------------
Configuration utilities for manipulating the configuration of the grammar.
--------------------------------------------------------------------------------
(set-parse-order '(hashed-string hashed-lex-id arg-cxn marked-phrasal unmarked-phrasal))
(set-production-order '(hashed-meaning arg-cxn phrasal hashed-lex-id))
Header used for writing files.
-------------------------------------------------------------------------------- | Copyright ( C ) 2019 Sony Computer Science Laboratories Paris
( www.remivantrijp.eu )
it under the terms of the GNU General Public License as published by
the Free Software Foundation , version 3 of the License .
You should have received a copy of the GNU General Public License
(in-package :beng)
Helper function for using the BENG pathname .
(defun beng-pathname (&key directory name type)
"Helper function for accessing files of the English grammar."
(merge-pathnames (make-pathname :directory (cons :relative directory)
:name name :type type)
*beng-pathname*))
( beng - pathname )
(defun set-parse-order (order &optional (cxn-inventory *fcg-constructions*))
(set-configuration cxn-inventory :parse-order order)
(set-configuration (processing-cxn-inventory cxn-inventory)
:parse-order order))
(defun set-production-order (order &optional (cxn-inventory *fcg-constructions*))
(set-configuration cxn-inventory :production-order order)
(set-configuration (processing-cxn-inventory cxn-inventory)
:production-order order))
(defun add-license-and-copyright-header (out)
(format out ";;; Copyright (C) 2019 Sony Computer Science Laboratories Paris")
(format out "~%;;; Remi van Trijp (www.remivantrijp.eu)")
(format out "~%;;; ")
(format out "~%;;; This program is free software: you can redistribute it and/or modify")
(format out "~%;;; it under the terms of the GNU General Public License as published by")
(format out "~%;;; the Free Software Foundation, version 3 of the License.")
(format out "~%;;; ")
(format out "~%;;; This program is distributed in the hope that it will be useful,")
(format out "~%;;; but WITHOUT ANY WARRANTY; without even the implied warranty of")
(format out "~%;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the")
(format out "~%;;; GNU General Public License for more details.")
(format out "~%;;; ")
(format out "~%;;; You should have received a copy of the GNU General Public License")
(format out "~%;;; along with this program. If not, see </>.")
(format out "~%;;; ----------------------------------------------------------------------------")) |
814b64d0a9dec64888afd8a9f56615889c15eee9060328bb73d475a53ce6ef06 | mdsebald/link_blox_app | lblx_select_pri.erl | %%% @doc
%%% BLOCKTYPE
%%% Select Highest Priority Active Input Value
%%% DESCRIPTION
%%% Set the block output value to the highest priority active input value
%%% LINKS
%%% @end
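%%% Usage sketch (illustrative only; the block name below is invented):
%%%   {Config, Inputs, Outputs} = lblx_select_pri:create(select_pri_1, "pick first active input"),
%%%   %% after linking the inputs array, execute/2 sets the output value to the
%%%   %% lowest-indexed input that is not null, or null when none is active.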
-module(lblx_select_pri).
-author("Mark Sebald").
-include("../block_state.hrl").
%% ====================================================================
%% API functions
%% ====================================================================
-export([groups/0, version/0]).
-export([create/2, create/4, create/5, upgrade/1, initialize/1, execute/2, delete/1]).
groups() -> [select].
version() -> "0.1.0".
%% Merge the block type specific, Config, Input, and Output attributes
%% with the common Config, Input, and Output attributes, that all block types have
-spec default_configs(BlockName :: block_name(),
Description :: string()) -> config_attribs().
default_configs(BlockName, Description) ->
attrib_utils:merge_attribute_lists(
block_common:configs(BlockName, ?MODULE, version(), Description),
[
{num_of_inputs, {3}} %| int | 3 | 1..99 |
]).
-spec default_inputs() -> input_attribs().
default_inputs() ->
attrib_utils:merge_attribute_lists(
block_common:inputs(),
[
{inputs, [{empty, {empty}}]} %| any array | empty | N/A |
]).
-spec default_outputs() -> output_attribs().
default_outputs() ->
attrib_utils:merge_attribute_lists(
block_common:outputs(),
[
]).
%%
%% Create a set of block attributes for this block type.
%% Init attributes are used to override the default attribute values
%% and to add attributes to the lists of default attributes
%%
-spec create(BlockName :: block_name(),
Description :: string()) -> block_defn().
create(BlockName, Description) ->
create(BlockName, Description, [], [], []).
-spec create(BlockName :: block_name(),
Description :: string(),
InitConfig :: config_attribs(),
InitInputs :: input_attribs()) -> block_defn().
create(BlockName, Description, InitConfig, InitInputs) ->
create(BlockName, Description, InitConfig, InitInputs, []).
-spec create(BlockName :: block_name(),
Description :: string(),
InitConfig :: config_attribs(),
InitInputs :: input_attribs(),
InitOutputs :: output_attribs()) -> block_defn().
create(BlockName, Description, InitConfig, InitInputs, InitOutputs) ->
% Update Default Config, Input, Output, and Private attribute values
% with the initial values passed into this function.
%
% If any of the initial attributes do not already exist in the
% default attribute lists, merge_attribute_lists() will create them.
Config = attrib_utils:merge_attribute_lists(default_configs(BlockName, Description), InitConfig),
Inputs = attrib_utils:merge_attribute_lists(default_inputs(), InitInputs),
Outputs = attrib_utils:merge_attribute_lists(default_outputs(), InitOutputs),
% This is the block definition,
{Config, Inputs, Outputs}.
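% Example (hypothetical name and values): a 5-input selector overriding the
% default num_of_inputs of 3.
%   lblx_select_pri:create(sel_5, "five-way selector", [{num_of_inputs, {5}}], []).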
%%
%% Upgrade block attribute values, when block code and block data versions are different
%%
-spec upgrade(BlockDefn :: block_defn()) -> {ok, block_defn()} | {error, atom()}.
upgrade({Config, Inputs, Outputs}) ->
ModuleVer = version(),
{BlockName, BlockModule, ConfigVer} = config_utils:name_module_version(Config),
BlockType = type_utils:type_name(BlockModule),
case attrib_utils:set_value(Config, version, version()) of
{ok, UpdConfig} ->
m_logger:info(block_type_upgraded_from_ver_to,
[BlockName, BlockType, ConfigVer, ModuleVer]),
{ok, {UpdConfig, Inputs, Outputs}};
{error, Reason} ->
m_logger:error(err_upgrading_block_type_from_ver_to,
[Reason, BlockName, BlockType, ConfigVer, ModuleVer]),
{error, Reason}
end.
%%
%% Initialize block values
%% Perform any setup here as needed before starting execution
%%
-spec initialize(BlockState :: block_state()) -> block_state().
initialize({Config, Inputs, Outputs, Private}) ->
% Check the config values
case config_utils:get_integer_range(Config, num_of_inputs, 1, 99) of
{ok, NumOfInputs} ->
% All config values are OK
% Create N inputs
BlockName = config_utils:name(Config),
Inputs1 = input_utils:resize_attribute_array_value(BlockName, Inputs,
inputs, NumOfInputs, {empty, {empty}}),
% Initialize output values
Value = null,
Status = initialed;
{error, Reason} ->
Inputs1 = Inputs,
{Value, Status} = config_utils:log_error(Config, num_of_inputs, Reason)
end,
Outputs1 = output_utils:set_value_status(Outputs, Value, Status),
% This is the block state
{Config, Inputs1, Outputs1, Private}.
%%
%% Execute the block specific functionality
%%
-spec execute(BlockState :: block_state(),
ExecMethod :: exec_method()) -> block_state().
execute({Config, Inputs, Outputs, Private}, disable) ->
Outputs1 = output_utils:update_all_outputs(Outputs, null, disabled),
{Config, Inputs, Outputs1, Private};
execute({Config, Inputs, Outputs, Private}, _ExecMethod) ->
{ok, NumOfInputs} = attrib_utils:get_value(Config, num_of_inputs),
case highest_priority_input(Inputs, 1, NumOfInputs) of
{ok, null} ->
Value = null,
Status = no_input;
{ok, Value} ->
Status = normal;
{error, Reason} ->
{Value, Status} = input_utils:log_error(Config, inputs, Reason)
end,
Outputs1 = output_utils:set_value_status(Outputs, Value, Status),
% Return updated block state
{Config, Inputs, Outputs1, Private}.
%%
%% Delete the block
%%
-spec delete(BlockState :: block_state()) -> block_defn().
delete({Config, Inputs, Outputs, _Private}) ->
{Config, Inputs, Outputs}.
%% ====================================================================
%% Internal functions
%% ====================================================================
% Find the value of the highest priority (lowest index) input value
highest_priority_input(Inputs, Index, NumOfInputs) when Index =< NumOfInputs ->
case input_utils:get_any_type(Inputs, {inputs, Index}) of
% Input value is null, get next input value
{ok, null} -> highest_priority_input(Inputs, Index+1, NumOfInputs);
% Got an active input value, return it
{ok, Value} -> {ok, Value};
% Error input value, stop looking, put block in error state
{error, Reason} -> {error, Reason}
end;
% None of the inputs have active value, return null
highest_priority_input(_Inputs, _Index, _NumOfInputs) -> {ok, null}.
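% Worked example (illustrative): with NumOfInputs = 3 and input values
% [null, 7, 3], index 1 is skipped, index 2 matches and {ok, 7} is returned;
% if every input is null the final clause above yields {ok, null}.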
%% ====================================================================
%% Tests
%% ====================================================================
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-include("block_io_test_gen.hrl").
test_sets() ->
[
{[{num_of_inputs, 10}], [], [{status, no_input}, {value, null}]},
{[{{inputs, 1}, null}, {{inputs, 2}, 2}, {{inputs, 10}, 10}], [{status, normal}, {value, 2}]},
{[{{inputs, 1}, 1}, {{inputs, 2}, 2}, {{inputs, 10}, 10}], [{status, normal}, {value, 1}]},
{[{{inputs, 1}, null}, {{inputs, 2}, null}], [{status, normal}, {value, 10}]}
].
-endif. | null | https://raw.githubusercontent.com/mdsebald/link_blox_app/64034fa5854759ad16625b93e3dde65a9c65f615/src/block_types/lblx_select_pri.erl | erlang | @doc
DESCRIPTION
Set the block output value to the highest priority actvive input value
LINKS
@end
====================================================================
API functions
====================================================================
Merge the block type specific, Config, Input, and Output attributes
with the common Config, Input, and Output attributes, that all block types have
| int | 3 | 1..99 |
| any array | empty | N/A |
Create a set of block attributes for this block type.
and to add attributes to the lists of default attributes
Update Default Config, Input, Output, and Private attribute values
with the initial values passed into this function.
If any of the intial attributes do not already exist in the
default attribute lists, merge_attribute_lists() will create them.
This is the block definition,
Upgrade block attribute values, when block code and block data versions are different
Perform any setup here as needed before starting execution
Check the config values
All config values are OK
This is the block state
Execute the block specific functionality
Return updated block state
Delete the block
====================================================================
====================================================================
Find the value of the highest priority (lowest index) input value
Input value is null, get next input value
Got an active input value, return it
Error input value, stop looking, put block in error state
None of the inputs have active value, return null
====================================================================
Tests
==================================================================== | BLOCKTYPE
Select Higest Priority Active Input Value
-module(lblx_select_pri).
-author("Mark Sebald").
-include("../block_state.hrl").
-export([groups/0, version/0]).
-export([create/2, create/4, create/5, upgrade/1, initialize/1, execute/2, delete/1]).
groups() -> [select].
version() -> "0.1.0".
-spec default_configs(BlockName :: block_name(),
Description :: string()) -> config_attribs().
default_configs(BlockName, Description) ->
attrib_utils:merge_attribute_lists(
block_common:configs(BlockName, ?MODULE, version(), Description),
[
]).
-spec default_inputs() -> input_attribs().
default_inputs() ->
attrib_utils:merge_attribute_lists(
block_common:inputs(),
[
]).
-spec default_outputs() -> output_attribs().
default_outputs() ->
attrib_utils:merge_attribute_lists(
block_common:outputs(),
[
]).
Init attributes are used to override the default attribute values
-spec create(BlockName :: block_name(),
Description :: string()) -> block_defn().
create(BlockName, Description) ->
create(BlockName, Description, [], [], []).
-spec create(BlockName :: block_name(),
Description :: string(),
InitConfig :: config_attribs(),
InitInputs :: input_attribs()) -> block_defn().
create(BlockName, Description, InitConfig, InitInputs) ->
create(BlockName, Description, InitConfig, InitInputs, []).
-spec create(BlockName :: block_name(),
Description :: string(),
InitConfig :: config_attribs(),
InitInputs :: input_attribs(),
InitOutputs :: output_attribs()) -> block_defn().
create(BlockName, Description, InitConfig, InitInputs, InitOutputs) ->
Config = attrib_utils:merge_attribute_lists(default_configs(BlockName, Description), InitConfig),
Inputs = attrib_utils:merge_attribute_lists(default_inputs(), InitInputs),
Outputs = attrib_utils:merge_attribute_lists(default_outputs(), InitOutputs),
{Config, Inputs, Outputs}.
-spec upgrade(BlockDefn :: block_defn()) -> {ok, block_defn()} | {error, atom()}.
upgrade({Config, Inputs, Outputs}) ->
ModuleVer = version(),
{BlockName, BlockModule, ConfigVer} = config_utils:name_module_version(Config),
BlockType = type_utils:type_name(BlockModule),
case attrib_utils:set_value(Config, version, version()) of
{ok, UpdConfig} ->
m_logger:info(block_type_upgraded_from_ver_to,
[BlockName, BlockType, ConfigVer, ModuleVer]),
{ok, {UpdConfig, Inputs, Outputs}};
{error, Reason} ->
m_logger:error(err_upgrading_block_type_from_ver_to,
[Reason, BlockName, BlockType, ConfigVer, ModuleVer]),
{error, Reason}
end.
Initialize block values
-spec initialize(BlockState :: block_state()) -> block_state().
initialize({Config, Inputs, Outputs, Private}) ->
case config_utils:get_integer_range(Config, num_of_inputs, 1, 99) of
{ok, NumOfInputs} ->
Create N inputs
BlockName = config_utils:name(Config),
Inputs1 = input_utils:resize_attribute_array_value(BlockName, Inputs,
inputs, NumOfInputs, {empty, {empty}}),
Initialize output values
Value = null,
Status = initialed;
{error, Reason} ->
Inputs1 = Inputs,
{Value, Status} = config_utils:log_error(Config, num_of_inputs, Reason)
end,
Outputs1 = output_utils:set_value_status(Outputs, Value, Status),
{Config, Inputs1, Outputs1, Private}.
-spec execute(BlockState :: block_state(),
ExecMethod :: exec_method()) -> block_state().
execute({Config, Inputs, Outputs, Private}, disable) ->
Outputs1 = output_utils:update_all_outputs(Outputs, null, disabled),
{Config, Inputs, Outputs1, Private};
execute({Config, Inputs, Outputs, Private}, _ExecMethod) ->
{ok, NumOfInputs} = attrib_utils:get_value(Config, num_of_inputs),
case highest_priority_input(Inputs, 1, NumOfInputs) of
{ok, null} ->
Value = null,
Status = no_input;
{ok, Value} ->
Status = normal;
{error, Reason} ->
{Value, Status} = input_utils:log_error(Config, inputs, Reason)
end,
Outputs1 = output_utils:set_value_status(Outputs, Value, Status),
{Config, Inputs, Outputs1, Private}.
-spec delete(BlockState :: block_state()) -> block_defn().
delete({Config, Inputs, Outputs, _Private}) ->
{Config, Inputs, Outputs}.
%% Internal functions
highest_priority_input(Inputs, Index, NumOfInputs) when Index =< NumOfInputs ->
case input_utils:get_any_type(Inputs, {inputs, Index}) of
{ok, null} -> highest_priority_input(Inputs, Index+1, NumOfInputs);
{ok, Value} -> {ok, Value};
{error, Reason} -> {error, Reason}
end;
highest_priority_input(_Inputs, _Index, _NumOfInputs) -> {ok, null}.
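%% Illustrative sketch (not part of the original module): with
%% {inputs, 1} = null, {inputs, 2} = 7 and {inputs, 3} = 3, the scan above
%% stops at the first non-null slot and returns {ok, 7}; only when every
%% slot is null does it fall through to the final clause and return
%% {ok, null}, which execute/2 then reports as status no_input.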
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-include("block_io_test_gen.hrl").
test_sets() ->
[
{[{num_of_inputs, 10}], [], [{status, no_input}, {value, null}]},
{[{{inputs, 1}, null}, {{inputs, 2}, 2}, {{inputs, 10}, 10}], [{status, normal}, {value, 2}]},
{[{{inputs, 1}, 1}, {{inputs, 2}, 2}, {{inputs, 10}, 10}], [{status, normal}, {value, 1}]},
{[{{inputs, 1}, null}, {{inputs, 2}, null}], [{status, normal}, {value, 10}]}
].
-endif. |
8940185e3f690583849e6b129bea00ac242cac4e116e1d9fb4ba59632d4413fe | witan-org/witan | stdlib.ml | (********************************************************************)
(* *)
(*  The Why3 Verification Platform   /   The Why3 Development Team     *)
(*  Copyright 2010-2017   --   INRIA - CNRS - Paris-Sud University     *)
(* *)
(* This software is distributed under the terms of the GNU Lesser *)
(*  General Public License version 2.1, with the special exception     *)
(* on linking described in file LICENSE. *)
(* *)
(********************************************************************)
module Map = Extmap
module XHashtbl = Exthtbl.Hashtbl
(* Set, Map, Hashtbl on structures with a unique tag *)
module type TaggedType =
sig
type t
val tag : t -> int
val pp: t Pp.pp
end
module type OrderedHashedType =
sig
type t
val hash : t -> int
val equal : t -> t -> bool
val compare : t -> t -> int
val pp: t Pp.pp
end
module OrderedHashed (X : TaggedType) =
struct
type t = X.t
let hash = X.tag
let equal ts1 ts2 = X.tag ts1 == X.tag ts2 (** Todo ts1 == ts2? *)
let compare ts1 ts2 = Pervasives.compare (X.tag ts1) (X.tag ts2)
let pp = X.pp
end
module OrderedHashedList (X : TaggedType) =
struct
type t = X.t list
let hash = Lists.hash X.tag 3
let equ_ts ts1 ts2 = X.tag ts1 == X.tag ts2
let equal = Lists.equal equ_ts
let cmp_ts ts1 ts2 = Pervasives.compare (X.tag ts1) (X.tag ts2)
let compare = Lists.compare cmp_ts
let pp = Pp.list Pp.comma X.pp
end
module MakeMSH (X : TaggedType) =
struct
module T = OrderedHashed(X)
include T
module MGen = Intmap.Make(struct
include X
let equal ts1 ts2 = X.tag ts1 == X.tag ts2
end)
module M = MGen.NT
module S = Extset.MakeOfMap(M)
module H = XHashtbl.Make(T)
end
module MakeTagged (X : Weakhtbl.Weakey) =
struct
type t = X.t
let tag t = Weakhtbl.tag_hash (X.tag t)
let pp = X.pp
end
module MakeMSHW (X : Weakhtbl.Weakey) =
struct
module T = OrderedHashed(MakeTagged(X))
module M = Map.Make(T)
module S = Extset.MakeOfMap(M)
module H = XHashtbl.Make(T)
module W = Weakhtbl.Make(X)
end
module type Datatype = sig
include OrderedHashedType
module M : Map_intf.PMap with type key = t
module S : Map_intf.Set with type 'a M.t = 'a M.t
and type M.key = M.key
module H : Exthtbl.Hashtbl.S with type key = t
end
module type Printable = sig
include OrderedHashedType
val pp: t Pp.pp
end
module MkDatatype(T : OrderedHashedType) = struct
module M = Map.Make(T)
module S = Extset.MakeOfMap(M)
module H = XHashtbl.Make(T)
end
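(* A minimal usage sketch (not part of the original source): any module
   satisfying OrderedHashedType can be turned into map/set/hashtbl
   companions in one step.  "IntDT" is a hypothetical name.

   module IntDT = MkDatatype(struct
       type t = int
       let hash x = x
       let equal (a : int) b = a = b
       let compare (a : int) b = Pervasives.compare a b
       let pp = Pp.int
     end)

   IntDT.M, IntDT.S and IntDT.H are then the derived map, set and
   hash-table modules keyed by int. *)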
(* Set, Map, Hashtbl on ints and strings *)
module Int = struct
type t = int
let compare (x : int) (y : int) = Pervasives.compare x y
let equal (x : int) y = x = y
let hash (x : int) = x
let tag x = x
let pp = Pp.int
end
module DInt = struct
include Int
let pp fmt x = Format.pp_print_int fmt x
module GM = Intmap.Make(Int)
module M = GM.NT
module S = Extset.MakeOfMap(M)
module H = XHashtbl.Make(Int)
end
module DIntOrd = DInt
module DUnit = Unit
module Bool = struct
type t = bool
let compare (x : bool) (y : bool) = Pervasives.compare x y
let equal (x : bool) y = x = y
let hash (x : bool) = (Obj.magic x : int)
let pp = Format.pp_print_bool
end
module DBool = struct
include Bool
module M = Map.Make(Bool)
module S = Extset.MakeOfMap(M)
module H = XHashtbl.Make(Bool)
end
module DStr = struct
module Str = struct
type t = String.t
let compare = String.compare
let hash = (Hashtbl.hash : string -> int)
let equal = ((=) : string -> string -> bool)
let pp = Format.pp_print_string
end
include Str
module M = Map.Make(Str)
module S = Extset.MakeOfMap(M)
module H = XHashtbl.Make(Str)
end
module DFloat = struct
module Float = struct
type t = float
let compare (x : float) y = Pervasives.compare x y
let equal (x : float) y = x = y
let hash (x : float) = XHashtbl.hash x
let pp = Format.pp_print_float
end
include Float
module M = Map.Make(Float)
module S = Extset.MakeOfMap(M)
module H = XHashtbl.Make(Float)
end
| null | https://raw.githubusercontent.com/witan-org/witan/d26f9f810fc34bf44daccb91f71ad3258eb62037/src/popop_lib/stdlib.ml | ocaml | ******************************************************************
This software is distributed under the terms of the GNU Lesser
on linking described in file LICENSE.
******************************************************************
* Todo ts1 == ts2? | The Why3 Verification Platform / The Why3 Development Team
(*  Copyright 2010-2017   --   INRIA - CNRS - Paris-Sud University     *)
(*  General Public License version 2.1, with the special exception     *)
module Map = Extmap
module XHashtbl = Exthtbl.Hashtbl
(* Set, Map, Hashtbl on structures with a unique tag *)
module type TaggedType =
sig
type t
val tag : t -> int
val pp: t Pp.pp
end
module type OrderedHashedType =
sig
type t
val hash : t -> int
val equal : t -> t -> bool
val compare : t -> t -> int
val pp: t Pp.pp
end
module OrderedHashed (X : TaggedType) =
struct
type t = X.t
let hash = X.tag
let compare ts1 ts2 = Pervasives.compare (X.tag ts1) (X.tag ts2)
let pp = X.pp
end
module OrderedHashedList (X : TaggedType) =
struct
type t = X.t list
let hash = Lists.hash X.tag 3
let equ_ts ts1 ts2 = X.tag ts1 == X.tag ts2
let equal = Lists.equal equ_ts
let cmp_ts ts1 ts2 = Pervasives.compare (X.tag ts1) (X.tag ts2)
let compare = Lists.compare cmp_ts
let pp = Pp.list Pp.comma X.pp
end
module MakeMSH (X : TaggedType) =
struct
module T = OrderedHashed(X)
include T
module MGen = Intmap.Make(struct
include X
let equal ts1 ts2 = X.tag ts1 == X.tag ts2
end)
module M = MGen.NT
module S = Extset.MakeOfMap(M)
module H = XHashtbl.Make(T)
end
module MakeTagged (X : Weakhtbl.Weakey) =
struct
type t = X.t
let tag t = Weakhtbl.tag_hash (X.tag t)
let pp = X.pp
end
module MakeMSHW (X : Weakhtbl.Weakey) =
struct
module T = OrderedHashed(MakeTagged(X))
module M = Map.Make(T)
module S = Extset.MakeOfMap(M)
module H = XHashtbl.Make(T)
module W = Weakhtbl.Make(X)
end
module type Datatype = sig
include OrderedHashedType
module M : Map_intf.PMap with type key = t
module S : Map_intf.Set with type 'a M.t = 'a M.t
and type M.key = M.key
module H : Exthtbl.Hashtbl.S with type key = t
end
module type Printable = sig
include OrderedHashedType
val pp: t Pp.pp
end
module MkDatatype(T : OrderedHashedType) = struct
module M = Map.Make(T)
module S = Extset.MakeOfMap(M)
module H = XHashtbl.Make(T)
end
(* Set, Map, Hashtbl on ints and strings *)
module Int = struct
type t = int
let compare (x : int) (y : int) = Pervasives.compare x y
let equal (x : int) y = x = y
let hash (x : int) = x
let tag x = x
let pp = Pp.int
end
module DInt = struct
include Int
let pp fmt x = Format.pp_print_int fmt x
module GM = Intmap.Make(Int)
module M = GM.NT
module S = Extset.MakeOfMap(M)
module H = XHashtbl.Make(Int)
end
module DIntOrd = DInt
module DUnit = Unit
module Bool = struct
type t = bool
let compare (x : bool) (y : bool) = Pervasives.compare x y
let equal (x : bool) y = x = y
let hash (x : bool) = (Obj.magic x : int)
let pp = Format.pp_print_bool
end
module DBool = struct
include Bool
module M = Map.Make(Bool)
module S = Extset.MakeOfMap(M)
module H = XHashtbl.Make(Bool)
end
module DStr = struct
module Str = struct
type t = String.t
let compare = String.compare
let hash = (Hashtbl.hash : string -> int)
let equal = ((=) : string -> string -> bool)
let pp = Format.pp_print_string
end
include Str
module M = Map.Make(Str)
module S = Extset.MakeOfMap(M)
module H = XHashtbl.Make(Str)
end
module DFloat = struct
module Float = struct
type t = float
let compare (x : float) y = Pervasives.compare x y
let equal (x : float) y = x = y
let hash (x : float) = XHashtbl.hash x
let pp = Format.pp_print_float
end
include Float
module M = Map.Make(Float)
module S = Extset.MakeOfMap(M)
module H = XHashtbl.Make(Float)
end
|
8b90897afaa59e0b68d583bebb88f3b1b7dab72aadc079739eecb5c8ced5a14a | lindenbaum/lbm_kv | lbm_kv_vclock.erl | %% -------------------------------------------------------------------
%%
%% riak_core: Core Riak Application
%%
%% Copyright (c) 2007-2010 Basho Technologies, Inc.  All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License.  You may obtain
%% a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% @doc A simple Erlang implementation of vector clocks as inspired by Lamport logical clocks.
%%
%% Leslie Lamport (1978). "Time, clocks, and the ordering of events
%% in a distributed system". Communications of the ACM 21 (7): 558-565.
%%
%% Friedemann Mattern (1988). "Virtual Time and Global States of
%% Distributed Systems". Workshop on Parallel and Distributed Algorithms:
%% pp. 215-226
%%
%% This module is a renamed version of the `vclock.erl' module shipped with
%% `riak_core'.
%% @see
-module(lbm_kv_vclock).
-export([fresh/0,
fresh/2,
descends/2,
dominates/2,
descends_dot/2,
pure_dot/1,
merge/1,
get_counter/2,
get_timestamp/2,
get_dot/2,
valid_dot/1,
increment/2,
increment/3,
all_nodes/1,
equal/2,
prune/3,
timestamp/0]).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-export_type([vclock/0, timestamp/0, vclock_node/0, dot/0, pure_dot/0]).
-type vclock() :: [dot()].
-type dot() :: {vclock_node(), {counter(), timestamp()}}.
-type pure_dot() :: {vclock_node(), counter()}.
% Nodes can have any term() as a name, but they must differ from each other.
-type vclock_node() :: term().
-type counter() :: integer().
-type timestamp() :: integer().
% @doc Create a brand new vclock.
-spec fresh() -> vclock().
fresh() ->
[].
-spec fresh(vclock_node(), counter()) -> vclock().
fresh(Node, Count) ->
[{Node, {Count, timestamp()}}].
%% @doc Return true if Va is a direct descendant of Vb, else false -- remember, a vclock is its own descendant!
-spec descends(Va :: vclock(), Vb :: vclock()) -> boolean().
descends(_, []) ->
% all vclocks descend from the empty vclock
true;
descends(Va, Vb) ->
[{NodeB, {CtrB, _T}}|RestB] = Vb,
case lists:keyfind(NodeB, 1, Va) of
false ->
false;
{_, {CtrA, _TSA}} ->
(CtrA >= CtrB) andalso descends(Va,RestB)
end.
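%% Illustrative sketch (not part of the original module):
%%   A0 = lbm_kv_vclock:fresh(),
%%   A1 = lbm_kv_vclock:increment(a, A0),
%%   A2 = lbm_kv_vclock:increment(b, A1),
%%   true  = lbm_kv_vclock:descends(A2, A1),
%%   false = lbm_kv_vclock:descends(A1, A2).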
%% @doc does the given `vclock()' descend from the given `dot()'. The
%% `dot()' can be any vclock entry returned from
%% `get_entry/2'. returns `true' if the `vclock()' has an entry for
%% the `actor' in the `dot()', and that the counter for that entry is
%% at least that of the given `dot()'. False otherwise. Call with a
%% valid entry or you'll get an error.
%%
%% @see descends/2
%% @see get_entry/3
%% @see dominates/2
-spec descends_dot(vclock(), dot()) -> boolean().
descends_dot(Vclock, Dot) ->
descends(Vclock, [Dot]).
%% @doc in some cases the dot without timestamp data is needed.
-spec pure_dot(dot()) -> pure_dot().
pure_dot({N, {C, _TS}}) ->
{N, C}.
%% @doc true if `A' strictly dominates `B'. Note: ignores
%% timestamps. In riak it is possible to have vclocks that are
%% identical except for timestamps. When two vclocks descend each
%% other, but are not equal, they are concurrent. See source comment
%% for more details. (Actually you can have indentical clocks
%% including timestamps, that represent different events, but let's
%% not go there.)
%%
-spec dominates(vclock(), vclock()) -> boolean().
dominates(A, B) ->
%% In a sane world if two vclocks descend each other they MUST be
%% equal. In riak they can descend each other and have different
%% timestamps(!) How? Deleted keys, re-written, then restored is
%% one example. See riak_kv#679 for others. This is why we must
%% check descends both ways rather than checking descends(A, B)
%% and not equal(A, B). Do not "optimise" this to dodge the second
%% descends call! I know that the laws of causality say that each
%% actor must act serially, but Riak breaks that.
descends(A, B) andalso not descends(B, A).
% @doc Combine all VClocks in the input list into their least possible
% common descendant.
-spec merge(VClocks :: [vclock()]) -> vclock().
merge([]) -> [];
merge([SingleVclock]) -> SingleVclock;
merge([First|Rest]) -> merge(Rest, lists:keysort(1, First)).
merge([], NClock) -> NClock;
merge([AClock|VClocks],NClock) ->
merge(VClocks, merge(lists:keysort(1, AClock), NClock, [])).
merge([], [], AccClock) -> lists:reverse(AccClock);
merge([], Left, AccClock) -> lists:reverse(AccClock, Left);
merge(Left, [], AccClock) -> lists:reverse(AccClock, Left);
merge(V=[{Node1,{Ctr1,TS1}=CT1}=NCT1|VClock],
N=[{Node2,{Ctr2,TS2}=CT2}=NCT2|NClock], AccClock) ->
if Node1 < Node2 ->
merge(VClock, N, [NCT1|AccClock]);
Node1 > Node2 ->
merge(V, NClock, [NCT2|AccClock]);
true ->
({_Ctr,_TS} = CT) = if Ctr1 > Ctr2 -> CT1;
Ctr1 < Ctr2 -> CT2;
true -> {Ctr1, erlang:max(TS1,TS2)}
end,
merge(VClock, NClock, [{Node1,CT}|AccClock])
end.
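%% Illustrative sketch (not part of the original module): merging
%% [{a, {2, T1}}] with [{a, {1, T2}}, {b, {1, T2}}] keeps the larger
%% counter per node and yields [{a, {2, _}}, {b, {1, _}}] -- the least
%% clock that descends from both inputs.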
% @doc Get the counter value in VClock set from Node.
-spec get_counter(Node :: vclock_node(), VClock :: vclock()) -> counter().
get_counter(Node, VClock) ->
case lists:keyfind(Node, 1, VClock) of
{_, {Ctr, _TS}} -> Ctr;
false -> 0
end.
% @doc Get the timestamp value in a VClock set from Node.
-spec get_timestamp(Node :: vclock_node(), VClock :: vclock()) -> timestamp() | undefined.
get_timestamp(Node, VClock) ->
case lists:keyfind(Node, 1, VClock) of
{_, {_Ctr, TS}} -> TS;
false -> undefined
end.
% @doc Get the entry `dot()' for `vclock_node()' from `vclock()'.
-spec get_dot(Node :: vclock_node(), VClock :: vclock()) -> {ok, dot()} | undefined.
get_dot(Node, VClock) ->
case lists:keyfind(Node, 1, VClock) of
false -> undefined;
Entry -> {ok, Entry}
end.
%% @doc is the given argument a valid dot, or entry?
-spec valid_dot(dot()) -> boolean().
valid_dot({_, {Cnt, TS}}) when is_integer(Cnt), is_integer(TS) ->
true;
valid_dot(_) ->
false.
% @doc Increment VClock at Node.
-spec increment(Node :: vclock_node(), VClock :: vclock()) -> vclock().
increment(Node, VClock) ->
increment(Node, timestamp(), VClock).
% @doc Increment VClock at Node.
-spec increment(Node :: vclock_node(), IncTs :: timestamp(),
VClock :: vclock()) -> vclock().
increment(Node, IncTs, VClock) ->
{{_Ctr, _TS}=C1,NewV} = case lists:keytake(Node, 1, VClock) of
false ->
{{1, IncTs}, VClock};
{value, {_N, {C, _T}}, ModV} ->
{{C + 1, IncTs}, ModV}
end,
[{Node,C1}|NewV].
% @doc Return the list of all nodes that have ever incremented VClock.
-spec all_nodes(VClock :: vclock()) -> [vclock_node()].
all_nodes(VClock) ->
[X || {X,{_,_}} <- VClock].
-define(DAYS_FROM_GREGORIAN_BASE_TO_EPOCH, (1970*365+478)).
-define(SECONDS_FROM_GREGORIAN_BASE_TO_EPOCH,
(?DAYS_FROM_GREGORIAN_BASE_TO_EPOCH * 24*60*60)
%% == calendar:datetime_to_gregorian_seconds({{1970,1,1},{0,0,0}})
).
% @doc Return a timestamp for a vector clock
-spec timestamp() -> timestamp().
timestamp() ->
%% Same as calendar:datetime_to_gregorian_seconds(erlang:universaltime()),
%% but significantly faster.
{MegaSeconds, Seconds, _} = os:timestamp(),
?SECONDS_FROM_GREGORIAN_BASE_TO_EPOCH + MegaSeconds*1000000 + Seconds.
% @doc Compares two VClocks for equality.
-spec equal(VClockA :: vclock(), VClockB :: vclock()) -> boolean().
equal(VA,VB) ->
lists:sort(VA) =:= lists:sort(VB).
% @doc Possibly shrink the size of a vclock, depending on current age and size.
-spec prune(V::vclock(), Now::integer(), BucketProps::term()) -> vclock().
prune(V,Now,BucketProps) ->
%% This sort need to be deterministic, to avoid spurious merge conflicts later.
%% We achieve this by using the node ID as secondary key.
SortV = lists:sort(fun({N1,{_,T1}},{N2,{_,T2}}) -> {T1,N1} < {T2,N2} end, V),
prune_vclock1(SortV,Now,BucketProps).
%% @private
prune_vclock1(V,Now,BProps) ->
case length(V) =< get_property(small_vclock, BProps) of
true -> V;
false ->
{_,{_,HeadTime}} = hd(V),
case (Now - HeadTime) < get_property(young_vclock,BProps) of
true -> V;
false -> prune_vclock1(V,Now,BProps,HeadTime)
end
end.
%% @private
prune_vclock1(V,Now,BProps,HeadTime) ->
% has a precondition that V is longer than small and older than young
case (length(V) > get_property(big_vclock,BProps)) orelse
((Now - HeadTime) > get_property(old_vclock,BProps)) of
true -> prune_vclock1(tl(V),Now,BProps);
false -> V
end.
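%% Illustrative sketch (not part of the original module): with bucket
%% properties such as
%%   [{small_vclock,2}, {young_vclock,20}, {big_vclock,5}, {old_vclock,86400}]
%% a clock is left alone while it has at most 2 entries or its oldest
%% entry is younger than 20 seconds; otherwise entries are dropped from
%% the oldest end until the clock is no longer "big" and no remaining
%% entry is older than old_vclock.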
get_property(Key, PairList) ->
case lists:keyfind(Key, 1, PairList) of
{_Key, Value} ->
Value;
false ->
undefined
end.
%% ===================================================================
%% EUnit tests
%% ===================================================================
-ifdef(TEST).
% doc Serves as both a trivial test and some example code.
example_test() ->
A = lbm_kv_vclock:fresh(),
B = lbm_kv_vclock:fresh(),
A1 = lbm_kv_vclock:increment(a, A),
B1 = lbm_kv_vclock:increment(b, B),
true = lbm_kv_vclock:descends(A1,A),
true = lbm_kv_vclock:descends(B1,B),
false = lbm_kv_vclock:descends(A1,B1),
A2 = lbm_kv_vclock:increment(a, A1),
C = lbm_kv_vclock:merge([A2, B1]),
C1 = lbm_kv_vclock:increment(c, C),
true = lbm_kv_vclock:descends(C1, A2),
true = lbm_kv_vclock:descends(C1, B1),
false = lbm_kv_vclock:descends(B1, C1),
false = lbm_kv_vclock:descends(B1, A1),
ok.
prune_small_test() ->
% vclock with less entries than small_vclock will be untouched
Now = timestamp(),
OldTime = Now - 32000000,
SmallVC = [{<<"1">>, {1, OldTime}},
{<<"2">>, {2, OldTime}},
{<<"3">>, {3, OldTime}}],
Props = [{small_vclock,4}],
?assertEqual(lists:sort(SmallVC), lists:sort(prune(SmallVC, Now, Props))).
prune_young_test() ->
% vclock with all entries younger than young_vclock will be untouched
Now = timestamp(),
NewTime = Now - 1,
VC = [{<<"1">>, {1, NewTime}},
{<<"2">>, {2, NewTime}},
{<<"3">>, {3, NewTime}}],
Props = [{small_vclock,1},{young_vclock,1000}],
?assertEqual(lists:sort(VC), lists:sort(prune(VC, Now, Props))).
prune_big_test() ->
% vclock not preserved by small or young will be pruned down to
% no larger than big_vclock entries
Now = timestamp(),
NewTime = Now - 1000,
VC = [{<<"1">>, {1, NewTime}},
{<<"2">>, {2, NewTime}},
{<<"3">>, {3, NewTime}}],
Props = [{small_vclock,1},{young_vclock,1},
{big_vclock,2},{old_vclock,100000}],
?assert(length(prune(VC, Now, Props)) =:= 2).
prune_old_test() ->
% vclock not preserved by small or young will be pruned down to
% no larger than big_vclock and no entries more than old_vclock ago
Now = timestamp(),
NewTime = Now - 1000,
OldTime = Now - 100000,
VC = [{<<"1">>, {1, NewTime}},
{<<"2">>, {2, OldTime}},
{<<"3">>, {3, OldTime}}],
Props = [{small_vclock,1},{young_vclock,1},
{big_vclock,2},{old_vclock,10000}],
?assert(length(prune(VC, Now, Props)) =:= 1).
prune_order_test() ->
% vclock with two nodes of the same timestamp will be pruned down
% to the same node
Now = timestamp(),
OldTime = Now - 100000,
VC1 = [{<<"1">>, {1, OldTime}},
{<<"2">>, {2, OldTime}}],
VC2 = lists:reverse(VC1),
Props = [{small_vclock,1},{young_vclock,1},
{big_vclock,2},{old_vclock,10000}],
?assertEqual(prune(VC1, Now, Props), prune(VC2, Now, Props)).
accessor_test() ->
VC = [{<<"1">>, {1, 1}},
{<<"2">>, {2, 2}}],
?assertEqual(1, get_counter(<<"1">>, VC)),
?assertEqual(1, get_timestamp(<<"1">>, VC)),
?assertEqual(2, get_counter(<<"2">>, VC)),
?assertEqual(2, get_timestamp(<<"2">>, VC)),
?assertEqual(0, get_counter(<<"3">>, VC)),
?assertEqual(undefined, get_timestamp(<<"3">>, VC)),
?assertEqual([<<"1">>, <<"2">>], all_nodes(VC)).
merge_test() ->
VC1 = [{<<"1">>, {1, 1}},
{<<"2">>, {2, 2}},
{<<"4">>, {4, 4}}],
VC2 = [{<<"3">>, {3, 3}},
{<<"4">>, {3, 3}}],
?assertEqual([], merge(lbm_kv_vclock:fresh())),
?assertEqual([{<<"1">>,{1,1}},{<<"2">>,{2,2}},{<<"3">>,{3,3}},{<<"4">>,{4,4}}],
merge([VC1, VC2])).
merge_less_left_test() ->
VC1 = [{<<"5">>, {5, 5}}],
VC2 = [{<<"6">>, {6, 6}}, {<<"7">>, {7, 7}}],
?assertEqual([{<<"5">>, {5, 5}},{<<"6">>, {6, 6}}, {<<"7">>, {7, 7}}],
lbm_kv_vclock:merge([VC1, VC2])).
merge_less_right_test() ->
VC1 = [{<<"6">>, {6, 6}}, {<<"7">>, {7, 7}}],
VC2 = [{<<"5">>, {5, 5}}],
?assertEqual([{<<"5">>, {5, 5}},{<<"6">>, {6, 6}}, {<<"7">>, {7, 7}}],
lbm_kv_vclock:merge([VC1, VC2])).
merge_same_id_test() ->
VC1 = [{<<"1">>, {1, 2}},{<<"2">>,{1,4}}],
VC2 = [{<<"1">>, {1, 3}},{<<"3">>,{1,5}}],
?assertEqual([{<<"1">>, {1, 3}},{<<"2">>,{1,4}},{<<"3">>,{1,5}}],
lbm_kv_vclock:merge([VC1, VC2])).
get_entry_test() ->
VC = lbm_kv_vclock:fresh(),
VC1 = increment(a, increment(c, increment(b, increment(a, VC)))),
?assertMatch({ok, {a, {2, _}}}, get_dot(a, VC1)),
?assertMatch({ok, {b, {1, _}}}, get_dot(b, VC1)),
?assertMatch({ok, {c, {1, _}}}, get_dot(c, VC1)),
?assertEqual(undefined, get_dot(d, VC1)).
valid_entry_test() ->
VC = lbm_kv_vclock:fresh(),
VC1 = increment(c, increment(b, increment(a, VC))),
[begin
{ok, E} = get_dot(Actor, VC1),
?assert(valid_dot(E))
end || Actor <- [a, b, c]],
?assertNot(valid_dot(undefined)),
?assertNot(valid_dot("huffle-puff")),
?assertNot(valid_dot([])).
-endif.
| null | https://raw.githubusercontent.com/lindenbaum/lbm_kv/121467ae4766753b052b923d202e8b17dc14e054/src/lbm_kv_vclock.erl | erlang | -------------------------------------------------------------------
riak_core: Core Riak Application
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-------------------------------------------------------------------
Distributed Systems". Workshop on Parallel and Distributed Algorithms:
This module is a renamed version of the `vclock.erl' module shipped with
`riak_core'.
@see
Nodes can have any term() as a name, but they must differ from each other.
@doc Create a brand new vclock.
all vclocks descend from the empty vclock
@doc does the given `vclock()' descend from the given `dot()'. The
`dot()' can be any vclock entry returned from
`get_entry/2'. returns `true' if the `vclock()' has an entry for
the `actor' in the `dot()', and that the counter for that entry is
at least that of the given `dot()'. False otherwise. Call with a
valid entry or you'll get an error.
@see get_entry/3
@see dominates/2
@doc in some cases the dot without timestamp data is needed.
@doc true if `A' strictly dominates `B'. Note: ignores
other, but are not equal, they are concurrent. See source comment
for more details. (Actually you can have indentical clocks
including timestamps, that represent different events, but let's
not go there.)
equal. In riak they can descend each other and have different
timestamps(!) How? Deleted keys, re-written, then restored is
check descends both ways rather than checking descends(A, B)
descends call! I know that the laws of causality say that each
@doc Combine all VClocks in the input list into their least possible
common descendant.
@doc Get the entry `dot()' for `vclock_node()' from `vclock()'.
@doc is the given argument a valid dot, or entry?
== calendar:datetime_to_gregorian_seconds({{1970,1,1},{0,0,0}})
@doc Return a timestamp for a vector clock
Same as calendar:datetime_to_gregorian_seconds(erlang:universaltime()),
but significantly faster.
@doc Possibly shrink the size of a vclock, depending on current age and size.
This sort need to be deterministic, to avoid spurious merge conflicts later.
We achieve this by using the node ID as secondary key.
has a precondition that V is longer than small and older than young
===================================================================
===================================================================
doc Serves as both a trivial test and some example code.
vclock with less entries than small_vclock will be untouched
vclock with all entries younger than young_vclock will be untouched
vclock not preserved by small or young will be pruned down to
no larger than big_vclock entries
vclock not preserved by small or young will be pruned down to
no larger than big_vclock and no entries more than old_vclock ago
to the same node | Copyright ( c ) 2007 - 2010 Basho Technologies , Inc. All Rights Reserved .
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
@doc A simple Erlang implementation of vector clocks as inspired by Lamport logical clocks .
( 1978 ) . " Time , clocks , and the ordering of events
in a distributed system " . Communications of the ACM 21 ( 7 ): 558 - 565 .
( 1988 ) . " Virtual Time and Global States of
pp . 215 - 226
-module(lbm_kv_vclock).
-export([fresh/0,
fresh/2,
descends/2,
dominates/2,
descends_dot/2,
pure_dot/1,
merge/1,
get_counter/2,
get_timestamp/2,
get_dot/2,
valid_dot/1,
increment/2,
increment/3,
all_nodes/1,
equal/2,
prune/3,
timestamp/0]).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-export_type([vclock/0, timestamp/0, vclock_node/0, dot/0, pure_dot/0]).
-type vclock() :: [dot()].
-type dot() :: {vclock_node(), {counter(), timestamp()}}.
-type pure_dot() :: {vclock_node(), counter()}.
-type vclock_node() :: term().
-type counter() :: integer().
-type timestamp() :: integer().
-spec fresh() -> vclock().
fresh() ->
[].
-spec fresh(vclock_node(), counter()) -> vclock().
fresh(Node, Count) ->
[{Node, {Count, timestamp()}}].
%% @doc Return true if Va is a direct descendant of Vb, else false -- remember, a vclock is its own descendant!
-spec descends(Va :: vclock(), Vb :: vclock()) -> boolean().
descends(_, []) ->
true;
descends(Va, Vb) ->
[{NodeB, {CtrB, _T}}|RestB] = Vb,
case lists:keyfind(NodeB, 1, Va) of
false ->
false;
{_, {CtrA, _TSA}} ->
(CtrA >= CtrB) andalso descends(Va,RestB)
end.
%% @see descends/2
-spec descends_dot(vclock(), dot()) -> boolean().
descends_dot(Vclock, Dot) ->
descends(Vclock, [Dot]).
-spec pure_dot(dot()) -> pure_dot().
pure_dot({N, {C, _TS}}) ->
{N, C}.
%% timestamps. In riak it is possible to have vclocks that are
%% identical except for timestamps. When two vclocks descend each
-spec dominates(vclock(), vclock()) -> boolean().
dominates(A, B) ->
%% In a sane world if two vclocks descend each other they MUST be
%% one example. See riak_kv#679 for others. This is why we must
%% and not equal(A, B). Do not "optimise" this to dodge the second
%% actor must act serially, but Riak breaks that.
descends(A, B) andalso not descends(B, A).
-spec merge(VClocks :: [vclock()]) -> vclock().
merge([]) -> [];
merge([SingleVclock]) -> SingleVclock;
merge([First|Rest]) -> merge(Rest, lists:keysort(1, First)).
merge([], NClock) -> NClock;
merge([AClock|VClocks],NClock) ->
merge(VClocks, merge(lists:keysort(1, AClock), NClock, [])).
merge([], [], AccClock) -> lists:reverse(AccClock);
merge([], Left, AccClock) -> lists:reverse(AccClock, Left);
merge(Left, [], AccClock) -> lists:reverse(AccClock, Left);
merge(V=[{Node1,{Ctr1,TS1}=CT1}=NCT1|VClock],
N=[{Node2,{Ctr2,TS2}=CT2}=NCT2|NClock], AccClock) ->
if Node1 < Node2 ->
merge(VClock, N, [NCT1|AccClock]);
Node1 > Node2 ->
merge(V, NClock, [NCT2|AccClock]);
true ->
({_Ctr,_TS} = CT) = if Ctr1 > Ctr2 -> CT1;
Ctr1 < Ctr2 -> CT2;
true -> {Ctr1, erlang:max(TS1,TS2)}
end,
merge(VClock, NClock, [{Node1,CT}|AccClock])
end.
% @doc Get the counter value in VClock set from Node.
-spec get_counter(Node :: vclock_node(), VClock :: vclock()) -> counter().
get_counter(Node, VClock) ->
case lists:keyfind(Node, 1, VClock) of
{_, {Ctr, _TS}} -> Ctr;
false -> 0
end.
% @doc Get the timestamp value in a VClock set from Node.
-spec get_timestamp(Node :: vclock_node(), VClock :: vclock()) -> timestamp() | undefined.
get_timestamp(Node, VClock) ->
case lists:keyfind(Node, 1, VClock) of
{_, {_Ctr, TS}} -> TS;
false -> undefined
end.
-spec get_dot(Node :: vclock_node(), VClock :: vclock()) -> {ok, dot()} | undefined.
get_dot(Node, VClock) ->
case lists:keyfind(Node, 1, VClock) of
false -> undefined;
Entry -> {ok, Entry}
end.
-spec valid_dot(dot()) -> boolean().
valid_dot({_, {Cnt, TS}}) when is_integer(Cnt), is_integer(TS) ->
true;
valid_dot(_) ->
false.
% @doc Increment VClock at Node.
-spec increment(Node :: vclock_node(), VClock :: vclock()) -> vclock().
increment(Node, VClock) ->
increment(Node, timestamp(), VClock).
% @doc Increment VClock at Node.
-spec increment(Node :: vclock_node(), IncTs :: timestamp(),
VClock :: vclock()) -> vclock().
increment(Node, IncTs, VClock) ->
{{_Ctr, _TS}=C1,NewV} = case lists:keytake(Node, 1, VClock) of
false ->
{{1, IncTs}, VClock};
{value, {_N, {C, _T}}, ModV} ->
{{C + 1, IncTs}, ModV}
end,
[{Node,C1}|NewV].
% @doc Return the list of all nodes that have ever incremented VClock.
-spec all_nodes(VClock :: vclock()) -> [vclock_node()].
all_nodes(VClock) ->
[X || {X,{_,_}} <- VClock].
-define(DAYS_FROM_GREGORIAN_BASE_TO_EPOCH, (1970*365+478)).
-define(SECONDS_FROM_GREGORIAN_BASE_TO_EPOCH,
(?DAYS_FROM_GREGORIAN_BASE_TO_EPOCH * 24*60*60)
).
-spec timestamp() -> timestamp().
timestamp() ->
{MegaSeconds, Seconds, _} = os:timestamp(),
?SECONDS_FROM_GREGORIAN_BASE_TO_EPOCH + MegaSeconds*1000000 + Seconds.
% @doc Compares two VClocks for equality.
-spec equal(VClockA :: vclock(), VClockB :: vclock()) -> boolean().
equal(VA,VB) ->
lists:sort(VA) =:= lists:sort(VB).
-spec prune(V::vclock(), Now::integer(), BucketProps::term()) -> vclock().
prune(V,Now,BucketProps) ->
SortV = lists:sort(fun({N1,{_,T1}},{N2,{_,T2}}) -> {T1,N1} < {T2,N2} end, V),
prune_vclock1(SortV,Now,BucketProps).
%% @private
prune_vclock1(V,Now,BProps) ->
case length(V) =< get_property(small_vclock, BProps) of
true -> V;
false ->
{_,{_,HeadTime}} = hd(V),
case (Now - HeadTime) < get_property(young_vclock,BProps) of
true -> V;
false -> prune_vclock1(V,Now,BProps,HeadTime)
end
end.
%% @private
prune_vclock1(V,Now,BProps,HeadTime) ->
case (length(V) > get_property(big_vclock,BProps)) orelse
((Now - HeadTime) > get_property(old_vclock,BProps)) of
true -> prune_vclock1(tl(V),Now,BProps);
false -> V
end.
get_property(Key, PairList) ->
case lists:keyfind(Key, 1, PairList) of
{_Key, Value} ->
Value;
false ->
undefined
end.
%% EUnit tests
-ifdef(TEST).
example_test() ->
A = lbm_kv_vclock:fresh(),
B = lbm_kv_vclock:fresh(),
A1 = lbm_kv_vclock:increment(a, A),
B1 = lbm_kv_vclock:increment(b, B),
true = lbm_kv_vclock:descends(A1,A),
true = lbm_kv_vclock:descends(B1,B),
false = lbm_kv_vclock:descends(A1,B1),
A2 = lbm_kv_vclock:increment(a, A1),
C = lbm_kv_vclock:merge([A2, B1]),
C1 = lbm_kv_vclock:increment(c, C),
true = lbm_kv_vclock:descends(C1, A2),
true = lbm_kv_vclock:descends(C1, B1),
false = lbm_kv_vclock:descends(B1, C1),
false = lbm_kv_vclock:descends(B1, A1),
ok.
prune_small_test() ->
Now = timestamp(),
OldTime = Now - 32000000,
SmallVC = [{<<"1">>, {1, OldTime}},
{<<"2">>, {2, OldTime}},
{<<"3">>, {3, OldTime}}],
Props = [{small_vclock,4}],
?assertEqual(lists:sort(SmallVC), lists:sort(prune(SmallVC, Now, Props))).
prune_young_test() ->
Now = timestamp(),
NewTime = Now - 1,
VC = [{<<"1">>, {1, NewTime}},
{<<"2">>, {2, NewTime}},
{<<"3">>, {3, NewTime}}],
Props = [{small_vclock,1},{young_vclock,1000}],
?assertEqual(lists:sort(VC), lists:sort(prune(VC, Now, Props))).
prune_big_test() ->
Now = timestamp(),
NewTime = Now - 1000,
VC = [{<<"1">>, {1, NewTime}},
{<<"2">>, {2, NewTime}},
{<<"3">>, {3, NewTime}}],
Props = [{small_vclock,1},{young_vclock,1},
{big_vclock,2},{old_vclock,100000}],
?assert(length(prune(VC, Now, Props)) =:= 2).
prune_old_test() ->
Now = timestamp(),
NewTime = Now - 1000,
OldTime = Now - 100000,
VC = [{<<"1">>, {1, NewTime}},
{<<"2">>, {2, OldTime}},
{<<"3">>, {3, OldTime}}],
Props = [{small_vclock,1},{young_vclock,1},
{big_vclock,2},{old_vclock,10000}],
?assert(length(prune(VC, Now, Props)) =:= 1).
prune_order_test() ->
% vclock with two nodes of the same timestamp will be pruned down
Now = timestamp(),
OldTime = Now - 100000,
VC1 = [{<<"1">>, {1, OldTime}},
{<<"2">>, {2, OldTime}}],
VC2 = lists:reverse(VC1),
Props = [{small_vclock,1},{young_vclock,1},
{big_vclock,2},{old_vclock,10000}],
?assertEqual(prune(VC1, Now, Props), prune(VC2, Now, Props)).
accessor_test() ->
VC = [{<<"1">>, {1, 1}},
{<<"2">>, {2, 2}}],
?assertEqual(1, get_counter(<<"1">>, VC)),
?assertEqual(1, get_timestamp(<<"1">>, VC)),
?assertEqual(2, get_counter(<<"2">>, VC)),
?assertEqual(2, get_timestamp(<<"2">>, VC)),
?assertEqual(0, get_counter(<<"3">>, VC)),
?assertEqual(undefined, get_timestamp(<<"3">>, VC)),
?assertEqual([<<"1">>, <<"2">>], all_nodes(VC)).
merge_test() ->
VC1 = [{<<"1">>, {1, 1}},
{<<"2">>, {2, 2}},
{<<"4">>, {4, 4}}],
VC2 = [{<<"3">>, {3, 3}},
{<<"4">>, {3, 3}}],
?assertEqual([], merge(lbm_kv_vclock:fresh())),
?assertEqual([{<<"1">>,{1,1}},{<<"2">>,{2,2}},{<<"3">>,{3,3}},{<<"4">>,{4,4}}],
merge([VC1, VC2])).
merge_less_left_test() ->
VC1 = [{<<"5">>, {5, 5}}],
VC2 = [{<<"6">>, {6, 6}}, {<<"7">>, {7, 7}}],
?assertEqual([{<<"5">>, {5, 5}},{<<"6">>, {6, 6}}, {<<"7">>, {7, 7}}],
lbm_kv_vclock:merge([VC1, VC2])).
merge_less_right_test() ->
VC1 = [{<<"6">>, {6, 6}}, {<<"7">>, {7, 7}}],
VC2 = [{<<"5">>, {5, 5}}],
?assertEqual([{<<"5">>, {5, 5}},{<<"6">>, {6, 6}}, {<<"7">>, {7, 7}}],
lbm_kv_vclock:merge([VC1, VC2])).
merge_same_id_test() ->
VC1 = [{<<"1">>, {1, 2}},{<<"2">>,{1,4}}],
VC2 = [{<<"1">>, {1, 3}},{<<"3">>,{1,5}}],
?assertEqual([{<<"1">>, {1, 3}},{<<"2">>,{1,4}},{<<"3">>,{1,5}}],
lbm_kv_vclock:merge([VC1, VC2])).
get_entry_test() ->
VC = lbm_kv_vclock:fresh(),
VC1 = increment(a, increment(c, increment(b, increment(a, VC)))),
?assertMatch({ok, {a, {2, _}}}, get_dot(a, VC1)),
?assertMatch({ok, {b, {1, _}}}, get_dot(b, VC1)),
?assertMatch({ok, {c, {1, _}}}, get_dot(c, VC1)),
?assertEqual(undefined, get_dot(d, VC1)).
valid_entry_test() ->
VC = lbm_kv_vclock:fresh(),
VC1 = increment(c, increment(b, increment(a, VC))),
[begin
{ok, E} = get_dot(Actor, VC1),
?assert(valid_dot(E))
end || Actor <- [a, b, c]],
?assertNot(valid_dot(undefined)),
?assertNot(valid_dot("huffle-puff")),
?assertNot(valid_dot([])).
-endif.
|
522068b730c21f06da6567f420ef2976e96e25c8e64a3c2d57a0653dc285603a | purebred-mua/purebred | Main.hs | -- This file is part of purebred
-- Copyright (C) 2017-2019 Róman Joost and Fraser Tweedale
--
-- purebred is free software: you can redistribute it and/or modify
-- it under the terms of the GNU Affero General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU Affero General Public License for more details.
--
-- You should have received a copy of the GNU Affero General Public License
-- along with this program. If not, see </>.
import Test.Tasty (defaultMain, testGroup)
import qualified LazyVector
import qualified TestMail
import qualified TestActions
import qualified TestTagParser
import qualified TestAddressBook
main :: IO ()
main = defaultMain $ testGroup "unit tests"
[ TestMail.tests
, TestTagParser.tests
, TestActions.tests
, TestAddressBook.addressbookTests
, LazyVector.tests
]
| null | https://raw.githubusercontent.com/purebred-mua/purebred/14f50e8406dd81f6f506f1352f623948a1ea63ed/test/Main.hs | haskell | This file is part of purebred
purebred is free software: you can redistribute it and/or modify
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
along with this program. If not, see </>. | Copyright ( C ) 2017 - 2019 and
-- it under the terms of the GNU Affero General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- You should have received a copy of the GNU Affero General Public License
import Test.Tasty (defaultMain, testGroup)
import qualified LazyVector
import qualified TestMail
import qualified TestActions
import qualified TestTagParser
import qualified TestAddressBook
main :: IO ()
main = defaultMain $ testGroup "unit tests"
[ TestMail.tests
, TestTagParser.tests
, TestActions.tests
, TestAddressBook.addressbookTests
, LazyVector.tests
]
|
6294ba8da81739875b0bec8e12ddb40eff29236c507e9eeb41ba0c60acf5e930 | awolven/cl-vulkan | render-pass.lisp | Copyright 2019 , 2020
;;
;; Permission is hereby granted, free of charge, to any person obtaining
;; a copy of this software and associated documentation files (the
" Software " ) , to deal in the Software without restriction , including
;; without limitation the rights to use, copy, modify, merge, publish,
;; distribute, sublicense, and/or sell copies of the Software, and to
;; permit persons to whom the Software is furnished to do so, subject to
;; the following conditions:
;;
;; The above copyright notice and this permission notice shall be
;; included in all copies or substantial portions of the Software.
;;
;; THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
;; EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
;; MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
;; NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
;; LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
;; OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
;; WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
(in-package :vk)
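;; A minimal usage sketch (not part of the original file) for the
;; functions defined below.  The *device* and *surface-format* names are
;; hypothetical placeholders for objects obtained elsewhere from this
;; library, and render-frames stands for arbitrary caller code:
;;
;;   (let ((render-pass (create-render-pass *device* *surface-format*)))
;;     (unwind-protect
;;          (render-frames render-pass)
;;       (destroy-render-pass render-pass)))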
(defun create-render-pass (device surface-format &key (allocator +null-allocator+)
(color-attachments (list (make-instance 'color-attachment
:name :default-color-attachment
:format (surface-format-format surface-format))))
(depth-attachments (list (make-instance 'depth-attachment
:name :default-depth-stencil-attachment
:format (find-supported-depth-format (physical-device device)))))
(subpasses (list (make-instance 'subpass
:name :default-subpass
:color-attachments (list :default-color-attachment)
:depth-stencil-attachment :default-depth-stencil-attachment))))
(let ((attachment-count (+ (length color-attachments) (length depth-attachments)))
(pointers ()))
(with-foreign-object (p-attachments '(:struct VkAttachmentDescription) attachment-count) ;; todo: make with-vk-struct take a count. Long overdue.
(loop for i from 0 for attachment in (append color-attachments depth-attachments)
do (zero-struct (mem-aptr p-attachments '(:struct VkAttachmentDescription) i) '(:struct VkAttachmentDescription))
(with-foreign-slots ((%vk::format
%vk::samples
%vk::loadOp
%vk::storeOp
%vk::stencilLoadOp
%vk::stencilStoreOp
%vk::initialLayout
%vk::finalLayout)
(mem-aptr p-attachments '(:struct VkAttachmentDescription) i) (:struct VkAttachmentDescription))
(setf %vk::format (attachment-format attachment)
%vk::samples (samples attachment)
%vk::loadOp (load-op attachment)
%vk::storeOp (store-op attachment)
%vk::stencilLoadOp (stencil-load-op attachment)
%vk::stencilStoreOp (stencil-store-op attachment)
%vk::initialLayout (initial-layout attachment)
%vk::finalLayout (final-layout attachment))))
(let ((subpass-count (length subpasses)))
(unwind-protect
(with-foreign-object (p-subpasses '(:struct VkSubpassDescription) subpass-count)
(loop for i from 0 for subpass in subpasses
do (zero-struct (mem-aptr p-subpasses '(:struct VkSubpassDescription) i) '(:struct VkSubpassDescription))
(with-foreign-slots ((%vk::pipelineBindPoint
%vk::colorAttachmentCount
%vk::pColorAttachments
%vk::pDepthStencilAttachment)
(mem-aptr p-subpasses '(:struct VkSubpassDescription) i)
(:struct VkSubpassDescription))
(let* ((color-attachment-references (color-attachments subpass))
(reference-count (length color-attachment-references)))
(let ((p-attachment-refs (foreign-alloc '(:struct VkAttachmentReference) :count reference-count)))
(push p-attachment-refs pointers)
(loop for reference in color-attachment-references for i from 0
do (%vk::zero-struct (mem-aptr p-attachment-refs '(:struct VkAttachmentReference) i) '(:struct VkAttachmentReference))
(with-foreign-slots ((%vk::attachment %vk::layout)
(mem-aptr p-attachment-refs '(:struct VkAttachmentReference) i)
(:struct VkAttachmentReference))
(setf %vk::attachment (position reference color-attachments :key #'attachment-name)
%vk::layout (reference-layout (find reference color-attachments :key #'attachment-name)))))
(let ((p-depth-attachment-ref (foreign-alloc '(:struct VkAttachmentReference))))
(zero-struct p-depth-attachment-ref '(:struct VkAttachmentReference))
(push p-depth-attachment-ref pointers)
(with-foreign-slots ((%vk::attachment %vk::layout)
p-depth-attachment-ref
(:struct VkAttachmentReference))
(setf %vk::attachment (+ (length color-attachments) (position :default-depth-stencil-attachment depth-attachments :key #'attachment-name))
%vk::layout (reference-layout (find :default-depth-stencil-attachment depth-attachments :key #'attachment-name))))
(setf %vk::pipelineBindPoint (pipeline-bind-point subpass)
%vk::colorAttachmentCount reference-count
%vk::pColorAttachments p-attachment-refs
%vk::pDepthStencilAttachment p-depth-attachment-ref))))))
(with-vk-struct (p-info VkRenderPassCreateInfo)
(with-foreign-slots ((%vk::attachmentCount
%vk::pAttachments
%vk::subpassCount
%vk::pSubpasses
%vk::dependencyCount
%vk::pDependencies)
p-info (:struct VkRenderPassCreateInfo))
(setf %vk::attachmentCount attachment-count
%vk::pAttachments p-attachments
%vk::subpassCount subpass-count
%vk::pSubpasses p-subpasses
%vk::dependencyCount 0
%vk::pDependencies +nullptr+))
(with-foreign-object (p-render-pass 'VkRenderPass)
(check-vk-result (vkCreateRenderPass (h device) p-info (h allocator) p-render-pass))
(make-instance 'render-pass :handle (mem-aref p-render-pass 'VkRenderPass)
:device device :allocator allocator))))
(mapcar #'foreign-free pointers))))))
(defun destroy-render-pass (render-pass)
(with-slots (device) render-pass
(with-slots (allocator) device
(vkDestroyRenderPass (h device) (h render-pass) (h allocator))))
(values))
| null | https://raw.githubusercontent.com/awolven/cl-vulkan/dd64af0ceb57cfc4aedbd3ba8d3340e2e922e16f/src/render-pass.lisp | lisp |
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
without limitation the rights to use, copy, modify, merge, publish,
the following conditions:
The above copyright notice and this permission notice shall be
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
todo: make with-vk-struct take a count. Long overdue. | Copyright 2019 , 2020
" Software " ) , to deal in the Software without restriction , including
;; distribute, sublicense, and/or sell copies of the Software, and to
;; permit persons to whom the Software is furnished to do so, subject to
;; included in all copies or substantial portions of the Software.
;; THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
;; LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
(in-package :vk)
(defun create-render-pass (device surface-format &key (allocator +null-allocator+)
(color-attachments (list (make-instance 'color-attachment
:name :default-color-attachment
:format (surface-format-format surface-format))))
(depth-attachments (list (make-instance 'depth-attachment
:name :default-depth-stencil-attachment
:format (find-supported-depth-format (physical-device device)))))
(subpasses (list (make-instance 'subpass
:name :default-subpass
:color-attachments (list :default-color-attachment)
:depth-stencil-attachment :default-depth-stencil-attachment))))
(let ((attachment-count (+ (length color-attachments) (length depth-attachments)))
(pointers ()))
(loop for i from 0 for attachment in (append color-attachments depth-attachments)
do (zero-struct (mem-aptr p-attachments '(:struct VkAttachmentDescription) i) '(:struct VkAttachmentDescription))
(with-foreign-slots ((%vk::format
%vk::samples
%vk::loadOp
%vk::storeOp
%vk::stencilLoadOp
%vk::stencilStoreOp
%vk::initialLayout
%vk::finalLayout)
(mem-aptr p-attachments '(:struct VkAttachmentDescription) i) (:struct VkAttachmentDescription))
(setf %vk::format (attachment-format attachment)
%vk::samples (samples attachment)
%vk::loadOp (load-op attachment)
%vk::storeOp (store-op attachment)
%vk::stencilLoadOp (stencil-load-op attachment)
%vk::stencilStoreOp (stencil-store-op attachment)
%vk::initialLayout (initial-layout attachment)
%vk::finalLayout (final-layout attachment))))
(let ((subpass-count (length subpasses)))
(unwind-protect
(with-foreign-object (p-subpasses '(:struct VkSubpassDescription) subpass-count)
(loop for i from 0 for subpass in subpasses
do (zero-struct (mem-aptr p-subpasses '(:struct VkSubpassDescription) i) '(:struct VkSubpassDescription))
(with-foreign-slots ((%vk::pipelineBindPoint
%vk::colorAttachmentCount
%vk::pColorAttachments
%vk::pDepthStencilAttachment)
(mem-aptr p-subpasses '(:struct VkSubpassDescription) i)
(:struct VkSubpassDescription))
(let* ((color-attachment-references (color-attachments subpass))
(reference-count (length color-attachment-references)))
(let ((p-attachment-refs (foreign-alloc '(:struct VkAttachmentReference) :count reference-count)))
(push p-attachment-refs pointers)
(loop for reference in color-attachment-references for i from 0
do (%vk::zero-struct (mem-aptr p-attachment-refs '(:struct VkAttachmentReference) i) '(:struct VkAttachmentReference))
(with-foreign-slots ((%vk::attachment %vk::layout)
(mem-aptr p-attachment-refs '(:struct VkAttachmentReference) i)
(:struct VkAttachmentReference))
(setf %vk::attachment (position reference color-attachments :key #'attachment-name)
%vk::layout (reference-layout (find reference color-attachments :key #'attachment-name)))))
(let ((p-depth-attachment-ref (foreign-alloc '(:struct VkAttachmentReference))))
(zero-struct p-depth-attachment-ref '(:struct VkAttachmentReference))
(push p-depth-attachment-ref pointers)
(with-foreign-slots ((%vk::attachment %vk::layout)
p-depth-attachment-ref
(:struct VkAttachmentReference))
(setf %vk::attachment (+ (length color-attachments) (position :default-depth-stencil-attachment depth-attachments :key #'attachment-name))
%vk::layout (reference-layout (find :default-depth-stencil-attachment depth-attachments :key #'attachment-name))))
(setf %vk::pipelineBindPoint (pipeline-bind-point subpass)
%vk::colorAttachmentCount reference-count
%vk::pColorAttachments p-attachment-refs
%vk::pDepthStencilAttachment p-depth-attachment-ref))))))
(with-vk-struct (p-info VkRenderPassCreateInfo)
(with-foreign-slots ((%vk::attachmentCount
%vk::pAttachments
%vk::subpassCount
%vk::pSubpasses
%vk::dependencyCount
%vk::pDependencies)
p-info (:struct VkRenderPassCreateInfo))
(setf %vk::attachmentCount attachment-count
%vk::pAttachments p-attachments
%vk::subpassCount subpass-count
%vk::pSubpasses p-subpasses
%vk::dependencyCount 0
%vk::pDependencies +nullptr+))
(with-foreign-object (p-render-pass 'VkRenderPass)
(check-vk-result (vkCreateRenderPass (h device) p-info (h allocator) p-render-pass))
(make-instance 'render-pass :handle (mem-aref p-render-pass 'VkRenderPass)
:device device :allocator allocator))))
(mapcar #'foreign-free pointers))))))
(defun destroy-render-pass (render-pass)
(with-slots (device) render-pass
(with-slots (allocator) device
(vkDestroyRenderPass (h device) (h render-pass) (h allocator))))
(values))
|
0d73dd4963bd8f73aa333b2b0d80760fb34f7e491ab56f6f3db802d2770a2762 | threatgrid/ctia | keyword.clj | (ns ctia.lib.keyword
(:require [clojure.string :as str]))
(defn singular
"remove the last s of a keyword see test for an example."
[k]
(-> k
name
(str/replace #"s$" "")
keyword))
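;; Illustrative sketch (not part of the original namespace):
;;   (singular :sightings) ;=> :sighting
;;   (singular :actors)    ;=> :actor
;;   (singular :actor)     ;=> :actor  ; keywords without a trailing s pass through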
| null | https://raw.githubusercontent.com/threatgrid/ctia/32857663cdd7ac385161103dbafa8dc4f98febf0/src/ctia/lib/keyword.clj | clojure | (ns ctia.lib.keyword
(:require [clojure.string :as str]))
(defn singular
"remove the last s of a keyword see test for an example."
[k]
(-> k
name
(str/replace #"s$" "")
keyword))
|
|
a48c759695bac0daaa49a0a4387cbcdd44a9d984199a175b80f2700dcb297e5c | haskell-numerics/hmatrix | LinearAlgebra.hs | # LANGUAGE CPP #
{-# LANGUAGE FlexibleContexts #-}
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
-----------------------------------------------------------------------------
{- |
Module : Numeric.LinearAlgebra
Copyright : (c) Alberto Ruiz 2006-15
License : BSD3
Maintainer : Alberto Ruiz
Stability : provisional
-}
-----------------------------------------------------------------------------
module Numeric.LinearAlgebra (
-- * Basic types and data manipulation
-- | This package works with 2D ('Matrix') and 1D ('Vector')
-- arrays of real ('R') or complex ('C') double precision numbers.
-- Single precision and machine integers are also supported for
-- basic arithmetic and data manipulation.
module Numeric.LinearAlgebra.Data,
-- * Numeric classes
-- |
-- The standard numeric classes are defined elementwise (commonly referred to
-- as the Hadamard product or the Schur product):
--
> > > vector [ 1,2,3 ] * vector [ 3,0,-2 ]
[ 3.0,0.0,-6.0 ]
-- it :: Vector R
--
> > > matrix 3 [ 1 .. 9 ] * ident 3
( 3><3 )
[ 1.0 , 0.0 , 0.0
, 0.0 , 5.0 , 0.0
, 0.0 , 0.0 , 9.0 ]
-- * Autoconformable dimensions
-- |
-- In most operations, single-element vectors and matrices
-- (created from numeric literals or using 'scalar'), and matrices
-- with just one row or column, automatically
-- expand to match the dimensions of the other operand:
--
-- >>> 5 + 2*ident 3 :: Matrix Double
( 3><3 )
[ 7.0 , 5.0 , 5.0
, 5.0 , 7.0 , 5.0
, 5.0 , 5.0 , 7.0 ]
--
> > > ( 4><3 ) [ 1 .. ] + row [ 10,20,30 ]
( 4><3 )
[ 11.0 , 22.0 , 33.0
, 14.0 , 25.0 , 36.0
, 17.0 , 28.0 , 39.0
, 20.0 , 31.0 , 42.0 ]
--
-- * Products
-- ** Dot
dot, (<.>),
-- ** Matrix-vector
(#>), (<#), (!#>),
-- ** Matrix-matrix
(<>),
-- | The matrix product is also implemented in the "Data.Monoid" instance, where
-- single-element matrices (created from numeric literals or using 'scalar')
-- are used for scaling.
--
> > > import Data . Monoid as M
> > > let m = matrix 3 [ 1 .. 6 ]
> > > m M. < > 2 M. < > diagl[0.5,1,0 ]
( 2><3 )
[ 1.0 , 4.0 , 0.0
, 4.0 , 10.0 , 0.0 ]
--
-- 'mconcat' uses 'optimiseMult' to get the optimal association order.
-- ** Other
outer, kronecker, cross,
scale, add,
sumElements, prodElements,
-- * Linear systems
-- ** General
(<\>),
linearSolveLS,
linearSolveSVD,
-- ** Determined
linearSolve,
luSolve, luPacked,
luSolve', luPacked',
-- ** Symmetric indefinite
ldlSolve, ldlPacked,
-- ** Positive definite
cholSolve,
-- ** Triangular
UpLo(..),
triSolve,
-- ** Tridiagonal
triDiagSolve,
    -- ** Sparse
cgSolve,
cgSolve',
-- * Inverse and pseudoinverse
inv, pinv, pinvTol,
-- * Determinant and rank
rcond, rank,
det, invlndet,
-- * Norms
Normed(..),
norm_Frob, norm_nuclear,
    -- * Nullspace and range
orth,
nullspace, null1, null1sym,
-- * Singular value decomposition
svd,
thinSVD,
compactSVD,
compactSVDTol,
singularValues,
leftSV, rightSV,
    -- * Eigendecomposition
eig, geig, eigSH,
eigenvalues, geigenvalues, eigenvaluesSH,
geigSH,
-- * QR
qr, thinQR, rq, thinRQ, qrRaw, qrgr,
-- * Cholesky
chol, mbChol,
-- * LU
lu, luFact,
-- * Hessenberg
hess,
    -- * Schur
schur,
-- * Matrix functions
expm,
sqrtm,
matFunc,
-- * Correlation and convolution
corr, conv, corrMin, corr2, conv2,
-- * Random arrays
Seed, RandDist(..), randomVector, rand, randn, gaussianSample, uniformSample,
-- * Misc
meanCov, rowOuters, pairwiseD2, normalize, peps, relativeError, magnit,
haussholder, optimiseMult, udot, nullspaceSVD, orthSVD, ranksv,
iC, sym, mTm, trustSym, unSym,
-- * Auxiliary classes
Element, Container, Product, Numeric, LSDiv, Herm,
Complexable, RealElement,
RealOf, ComplexOf, SingleOf, DoubleOf,
IndexOf,
Field, Linear(), Additive(),
Transposable,
LU(..),
LDL(..),
QR(..),
CGState(..),
Testable(..)
) where
import Numeric.LinearAlgebra.Data
import Numeric.Matrix()
import Numeric.Vector()
import Internal.Matrix
import Internal.Container hiding ((<>))
import Internal.Numeric hiding (mul)
import Internal.Algorithms hiding (linearSolve,Normed,orth,luPacked',linearSolve',luSolve',ldlPacked')
import qualified Internal.Algorithms as A
import Internal.Util
import Internal.Random
import Internal.Sparse((!#>))
import Internal.CG
import Internal.Conversion
#if MIN_VERSION_base(4,11,0)
import Prelude hiding ((<>))
#endif
{- | dense matrix product

>>> let a = (3><5) [1..]
>>> a
(3><5)
[ 1.0, 2.0, 3.0, 4.0, 5.0
, 6.0, 7.0, 8.0, 9.0, 10.0
, 11.0, 12.0, 13.0, 14.0, 15.0 ]
>>> let b = (5><2) [1,3, 0,2, -1,5, 7,7, 6,0]
>>> b
(5><2)
[ 1.0, 3.0
, 0.0, 2.0
, -1.0, 5.0
, 7.0, 7.0
, 6.0, 0.0 ]
>>> a <> b
(3><2)
[ 56.0, 50.0
, 121.0, 135.0
, 186.0, 220.0 ]
-}
(<>) :: Numeric t => Matrix t -> Matrix t -> Matrix t
(<>) = mXm
infixr 8 <>
{- | Solve a linear system (for square coefficient matrix and several right-hand sides) using the LU decomposition, returning Nothing for a singular system. For underconstrained or overconstrained systems use 'linearSolveLS' or 'linearSolveSVD'.

@
a = (2><2)
[ 1.0, 2.0
, 3.0, 5.0 ]
@
@
b = (2><3)
[ 6.0, 1.0, 10.0
, 15.0, 3.0, 26.0 ]
@
>>> linearSolve a b
Just (2><3)
[ -1.4802973661668753e-15, 0.9999999999999997, 1.999999999999997
, 3.000000000000001, 1.6653345369377348e-16, 4.000000000000002 ]
>>> let Just x = it
>>> disp 5 x
2x3
-0.00000 1.00000 2.00000
3.00000 0.00000 4.00000
>>> a <> x
(2><3)
[ 6.0, 1.0, 10.0
, 15.0, 3.0, 26.0 ]
-}
linearSolve m b = A.mbLinearSolve m b
-- | return an orthonormal basis of the null space of a matrix. See also 'nullspaceSVD'.
nullspace m = nullspaceSVD (Left (1*eps)) m (rightSV m)
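-- A minimal sketch (not part of the original file): for a rank-deficient
-- matrix the nullspace basis below has a single column.
--
-- >>> let m = (2><2) [1,2,2,4] :: Matrix Double
-- >>> cols (nullspace m)
-- 1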
-- | return an orthonormal basis of the range space of a matrix. See also 'orthSVD'.
orth m = orthSVD (Left (1*eps)) m (leftSV m)
| null | https://raw.githubusercontent.com/haskell-numerics/hmatrix/2694f776c7b5034d239acb5d984c489417739225/packages/base/src/Numeric/LinearAlgebra.hs | haskell | ---------------------------------------------------------------------------
---------------------------------------------------------------------------
* Basic types and data manipulation
| This package works with 2D ('Matrix') and 1D ('Vector')
arrays of real ('R') or complex ('C') double precision numbers.
Single precision and machine integers are also supported for
basic arithmetic and data manipulation.
* Numeric classes
|
The standard numeric classes are defined elementwise (commonly referred to
it :: Vector R
* Autoconformable dimensions
|
In most operations, single-element vectors and matrices
(created from numeric literals or using 'scalar'), and matrices
with just one row or column, automatically
expand to match the dimensions of the other operand:
>>> 5 + 2*ident 3 :: Matrix Double
* Products
** Dot
** Matrix-vector
** Matrix-matrix
single-element matrices (created from numeric literals or using 'scalar')
are used for scaling.
'mconcat' uses 'optimiseMult' to get the optimal association order.
** Other
* Linear systems
** General
** Determined
** Symmetric indefinite
** Positive definite
** Triangular
** Tridiagonal
* Inverse and pseudoinverse
* Determinant and rank
* Norms
* Singular value decomposition
* QR
* Cholesky
* LU
* Hessenberg
* Matrix functions
* Correlation and convolution
* Random arrays
* Misc
* Auxiliary classes
| return an orthonormal basis of the null space of a matrix. See also 'nullspaceSVD'.
| return an orthonormal basis of the range space of a matrix. See also 'orthSVD'. | {-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleContexts #-}
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
{- |
Module : Numeric.LinearAlgebra
Copyright : (c) Alberto Ruiz 2006-15
License : BSD3
Maintainer : Alberto Ruiz
Stability : provisional
-}
module Numeric.LinearAlgebra (
module Numeric.LinearAlgebra.Data,
as the Hadamard product or the product ):
>>> vector [1,2,3] * vector [3,0,-2]
[3.0,0.0,-6.0]

>>> matrix 3 [1..9] * ident 3
(3><3)
[ 1.0, 0.0, 0.0
, 0.0, 5.0, 0.0
, 0.0, 0.0, 9.0 ]

(3><3)
[ 7.0, 5.0, 5.0
, 5.0, 7.0, 5.0
, 5.0, 5.0, 7.0 ]

>>> (4><3) [1..] + row [10,20,30]
(4><3)
[ 11.0, 22.0, 33.0
, 14.0, 25.0, 36.0
, 17.0, 28.0, 39.0
, 20.0, 31.0, 42.0 ]
dot, (<.>),
(#>), (<#), (!#>),
(<>),
| The matrix product is also implemented in the "Data.Monoid" instance, where

>>> import Data.Monoid as M
>>> let m = matrix 3 [1..6]
>>> m M.<> 2 M.<> diagl [0.5,1,0]
(2><3)
[ 1.0,  4.0, 0.0
, 4.0, 10.0, 0.0 ]
outer, kronecker, cross,
scale, add,
sumElements, prodElements,
(<\>),
linearSolveLS,
linearSolveSVD,
linearSolve,
luSolve, luPacked,
luSolve', luPacked',
ldlSolve, ldlPacked,
cholSolve,
UpLo(..),
triSolve,
triDiagSolve,
* *
cgSolve,
cgSolve',
inv, pinv, pinvTol,
rcond, rank,
det, invlndet,
Normed(..),
norm_Frob, norm_nuclear,
* and range
orth,
nullspace, null1, null1sym,
svd,
thinSVD,
compactSVD,
compactSVDTol,
singularValues,
leftSV, rightSV,
*
eig, geig, eigSH,
eigenvalues, geigenvalues, eigenvaluesSH,
geigSH,
qr, thinQR, rq, thinRQ, qrRaw, qrgr,
chol, mbChol,
lu, luFact,
hess,
*
schur,
expm,
sqrtm,
matFunc,
corr, conv, corrMin, corr2, conv2,
Seed, RandDist(..), randomVector, rand, randn, gaussianSample, uniformSample,
meanCov, rowOuters, pairwiseD2, normalize, peps, relativeError, magnit,
haussholder, optimiseMult, udot, nullspaceSVD, orthSVD, ranksv,
iC, sym, mTm, trustSym, unSym,
Element, Container, Product, Numeric, LSDiv, Herm,
Complexable, RealElement,
RealOf, ComplexOf, SingleOf, DoubleOf,
IndexOf,
Field, Linear(), Additive(),
Transposable,
LU(..),
LDL(..),
QR(..),
CGState(..),
Testable(..)
) where
import Numeric.LinearAlgebra.Data
import Numeric.Matrix()
import Numeric.Vector()
import Internal.Matrix
import Internal.Container hiding ((<>))
import Internal.Numeric hiding (mul)
import Internal.Algorithms hiding (linearSolve,Normed,orth,luPacked',linearSolve',luSolve',ldlPacked')
import qualified Internal.Algorithms as A
import Internal.Util
import Internal.Random
import Internal.Sparse((!#>))
import Internal.CG
import Internal.Conversion
#if MIN_VERSION_base(4,11,0)
import Prelude hiding ((<>))
#endif
{- | dense matrix product

>>> let a = (3><5) [1..]
>>> a
(3><5)
[ 1.0, 2.0, 3.0, 4.0, 5.0
, 6.0, 7.0, 8.0, 9.0, 10.0
, 11.0, 12.0, 13.0, 14.0, 15.0 ]
>>> let b = (5><2) [1,3, 0,2, -1,5, 7,7, 6,0]
>>> b
(5><2)
[ 1.0, 3.0
, 0.0, 2.0
, -1.0, 5.0
, 7.0, 7.0
, 6.0, 0.0 ]
>>> a <> b
(3><2)
[ 56.0, 50.0
, 121.0, 135.0
, 186.0, 220.0 ]
-}
(<>) :: Numeric t => Matrix t -> Matrix t -> Matrix t
(<>) = mXm
infixr 8 <>
{- | Solve a linear system (for square coefficient matrix and several right-hand sides) using the LU decomposition, returning Nothing for a singular system. For underconstrained or overconstrained systems use 'linearSolveLS' or 'linearSolveSVD'.

@
a = (2><2)
[ 1.0, 2.0
, 3.0, 5.0 ]
@
@
b = (2><3)
[ 6.0, 1.0, 10.0
, 15.0, 3.0, 26.0 ]
@
>>> linearSolve a b
Just (2><3)
[ -1.4802973661668753e-15, 0.9999999999999997, 1.999999999999997
, 3.000000000000001, 1.6653345369377348e-16, 4.000000000000002 ]
>>> let Just x = it
>>> disp 5 x
2x3
-0.00000 1.00000 2.00000
3.00000 0.00000 4.00000
>>> a <> x
(2><3)
[ 6.0, 1.0, 10.0
, 15.0, 3.0, 26.0 ]
-}
linearSolve m b = A.mbLinearSolve m b
nullspace m = nullspaceSVD (Left (1*eps)) m (rightSV m)
orth m = orthSVD (Left (1*eps)) m (leftSV m)
|
ecf714fe02996f01bf2d870c11aba857b49e1df90d0f73486d12085f59a5a788 | poscat0x04/telegram-types | File.hs | module Web.Telegram.Types.Internal.File where
import Common
-- | a file ready to be downloaded. The file can be downloaded via the link
-- @/file/bot<token>/<file_path>@. It is guaranteed
-- that the link will be valid for at least 1 hour. When the link expires,
-- a new one can be requested by calling [getFile](#getfile).
--
-- __Note:__ Maximum file size to download is 20 MB
data File = File
{ -- | Identifier for this file, which can be used to download or reuse the file
fileId :: Text,
-- | Unique identifier for this file, which is supposed to be the same over time
-- and for different bots. Can't be used to download or reuse the file.
fileUniqueId :: Text,
-- | File size, if known
fileSize :: Maybe Int,
-- | File path. Use @/file/bot<token>/<file_path>@ to get the file.
filePath :: Maybe Text
}
deriving stock (Show, Eq)
mkLabel ''File
deriveJSON snake ''File
| null | https://raw.githubusercontent.com/poscat0x04/telegram-types/c09ccc81cff10399538894cf2d1273022c797e18/src/Web/Telegram/Types/Internal/File.hs | haskell | | a file ready to be downloaded. The file can be downloaded via the link
a new one can be requested by calling [getFile](#getfile).
| Identifier for this file, which can be used to download or reuse the file
| Unique identifier for this file, which is supposed to be the same over time
and for different bots. Can't be used to download or reuse the file.
| File size, if known | module Web.Telegram.Types.Internal.File where
import Common
@ / file / bot < token>/<file_path>@. It is guaranteed
that the link will be valid for at least 1 hour . When the link expires ,
_ _ Note : _ _ Maximum file size to download is 20 MB
data File = File
fileId :: Text,
fileUniqueId :: Text,
fileSize :: Maybe Int,
| File path . Use @ / file / bot < token>/<file_path>@ to get the file .
filePath :: Maybe Text
}
deriving stock (Show, Eq)
mkLabel ''File
deriveJSON snake ''File
|
4bd11ad8718507d14e549ea1f85e7c54e99adda066cbaa80e0a47cef940e8d7c | sonowz/advent-of-code-haskell | Day15.hs | module Y2021.Day15 where
import Algebra.Graph.Label (Distance, distance, finite, getDistance, getFinite)
import qualified Algebra.Graph.Labelled.AdjacencyMap as A
import Data.Vector (Vector)
import qualified Data.Vector as V
import Lib.Graph (dijkstra)
import Lib.IO
import Lib.Types
import Lib.Vector2D
import Relude
import Relude.Extra.Bifunctor
import Relude.Extra.Foldable1
import Relude.Extra.Map
import Relude.Extra.Newtype
import Relude.Extra.Tuple
-----------------------
-- Type declarations --
-----------------------
newtype RiskLevel = RiskLevel Int deriving (Show, Eq, Ord, Enum, Num, Real, Integral) via Int
type CaveMap = Vector (Vector RiskLevel)
newtype Pos = Pos (Int, Int) deriving (Pos2D, Ord, Eq, Show) via (Int, Int)
instance Num Pos where
Pos (x1, y1) + Pos (x2, y2) = Pos (x1 + x2, y1 + y2)
Pos (x1, y1) - Pos (x2, y2) = Pos (x1 - x2, y1 - y2)
Pos (x1, y1) * Pos (x2, y2) = Pos (x1 * x2, y1 * y2)
abs (Pos (x, y)) = Pos (abs x, abs y)
signum (Pos (x, y)) = Pos (signum x, signum y)
fromInteger x = Pos (fromInteger x, fromInteger x)
------------
-- Part 1 --
------------
solve1 :: CaveMap -> RiskLevel
solve1 caveMap = maybe 0 fromDistance answer where
!graph = caveMapToGraph caveMap
!distanceMap = dijkstra graph (Pos (0, 0)) :: Map Pos (Distance RiskLevel)
destination = Pos (size2D caveMap) - Pos (1, 1)
answer = lookup destination distanceMap :: Maybe (Distance RiskLevel)
caveMapToGraph :: CaveMap -> A.AdjacencyMap (Distance RiskLevel) Pos
caveMapToGraph caveMap = A.edges (join edgeList) where
edgeList = imapMaybeL2D (\pos _ -> Just $ makeEdges pos) caveMap
makeEdges :: Pos -> [(Distance RiskLevel, Pos, Pos)]
makeEdges pos = catMaybes $ zipWith zipFn riskLevels (adjacent pos) where
riskLevels = (toDistance <=< at caveMap) <$> adjacent pos :: [Maybe (Distance RiskLevel)]
zipFn (Just riskLevel) adjPos = Just (riskLevel, pos, adjPos)
zipFn Nothing _ = Nothing
adjacent :: Pos -> [Pos]
adjacent (Pos (x, y)) = [Pos (x - 1, y), Pos (x + 1, y), Pos (x, y - 1), Pos (x, y + 1)]
toDistance :: RiskLevel -> Maybe (Distance RiskLevel)
toDistance = fmap distance . finite
fromDistance :: Distance RiskLevel -> RiskLevel
fromDistance = fromMaybe (error "Negative number!") . getFinite . getDistance
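-- Added illustration (not in the original solution): 'adjacent' lists the four
-- orthogonal neighbours that 'makeEdges' turns into weighted edges; positions
-- outside the grid are dropped later because 'at' returns Nothing for them.
--
-- >>> map (\(Pos p) -> p) (adjacent (Pos (1, 1)))
-- [(0,1),(2,1),(1,0),(1,2)]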
------------
-- Part 2 --
------------
-- This solution takes about 5 seconds
-- where 'dijkstra' function takes majority of time
solve2 :: CaveMap -> RiskLevel
solve2 = solve1 . enlargeCaveMap
enlargeCaveMap :: CaveMap -> CaveMap
enlargeCaveMap caveMap = foldl1' concatVertical $ foldl1' concatHorizontal <$> enlargedRaw' where
-- [[0,1,2,3,4],[1,2,3,4,5],[2,3,4,5,6],[3,4,5,6,7],[4,5,6,7,8]]
offsetMatrix = take 5 $ iterate (fmap (+ 1)) [0 .. 4] :: [[RiskLevel]]
enlargedRaw = (\ofs -> imap2D (plusFn ofs) caveMap) <<$>> offsetMatrix :: [[CaveMap]]
plusFn offset = \(_ :: Pos) x -> let x' = x + offset in if x' > 9 then x' - 9 else x'
enlargedRaw' =
fromJust $ traverse nonEmpty enlargedRaw >>= nonEmpty :: NonEmpty (NonEmpty CaveMap)
fromJust = fromMaybe (error "Impossible!")
concatHorizontal :: CaveMap -> CaveMap -> CaveMap
concatHorizontal = V.zipWith (<>)
concatVertical :: CaveMap -> CaveMap -> CaveMap
concatVertical = (<>)
--------------------
-- Main & Parsing --
--------------------
main' :: IO ()
main' = do
caveMap <- parseCaveMap <$> readFileLines "inputs/Y2021/Day15.txt" :: IO CaveMap
print $ solve1 caveMap
print $ solve2 caveMap
parseCaveMap :: [Text] -> CaveMap
parseCaveMap lines = fromList $ parseLine <$> lines where
parseLine :: Text -> Vector RiskLevel
parseLine line = fromList $ fmap (RiskLevel . readInt . toText . toList) (toString line)
toList = one :: a -> [a]
| null | https://raw.githubusercontent.com/sonowz/advent-of-code-haskell/d725764162d2d3886dfbecf9e8a10305e8868329/src/Y2021/Day15.hs | haskell | ---------------------
Type declarations --
---------------------
----------
Part 1 --
----------
----------
----------
where 'dijkstra' function takes majority of time
[[0,1,2,3,4],[1,2,3,4,5],[2,3,4,5,6],[3,4,5,6,7],[4,5,6,7,8]]
------------------
------------------ | module Y2021.Day15 where
import Algebra.Graph.Label (Distance, distance, finite, getDistance, getFinite)
import qualified Algebra.Graph.Labelled.AdjacencyMap as A
import Data.Vector (Vector)
import qualified Data.Vector as V
import Lib.Graph (dijkstra)
import Lib.IO
import Lib.Types
import Lib.Vector2D
import Relude
import Relude.Extra.Bifunctor
import Relude.Extra.Foldable1
import Relude.Extra.Map
import Relude.Extra.Newtype
import Relude.Extra.Tuple
newtype RiskLevel = RiskLevel Int deriving (Show, Eq, Ord, Enum, Num, Real, Integral) via Int
type CaveMap = Vector (Vector RiskLevel)
newtype Pos = Pos (Int, Int) deriving (Pos2D, Ord, Eq, Show) via (Int, Int)
instance Num Pos where
Pos (x1, y1) + Pos (x2, y2) = Pos (x1 + x2, y1 + y2)
Pos (x1, y1) - Pos (x2, y2) = Pos (x1 - x2, y1 - y2)
Pos (x1, y1) * Pos (x2, y2) = Pos (x1 * x2, y1 * y2)
abs (Pos (x, y)) = Pos (abs x, abs y)
signum (Pos (x, y)) = Pos (signum x, signum y)
fromInteger x = Pos (fromInteger x, fromInteger x)
solve1 :: CaveMap -> RiskLevel
solve1 caveMap = maybe 0 fromDistance answer where
!graph = caveMapToGraph caveMap
!distanceMap = dijkstra graph (Pos (0, 0)) :: Map Pos (Distance RiskLevel)
destination = Pos (size2D caveMap) - Pos (1, 1)
answer = lookup destination distanceMap :: Maybe (Distance RiskLevel)
caveMapToGraph :: CaveMap -> A.AdjacencyMap (Distance RiskLevel) Pos
caveMapToGraph caveMap = A.edges (join edgeList) where
edgeList = imapMaybeL2D (\pos _ -> Just $ makeEdges pos) caveMap
makeEdges :: Pos -> [(Distance RiskLevel, Pos, Pos)]
makeEdges pos = catMaybes $ zipWith zipFn riskLevels (adjacent pos) where
riskLevels = (toDistance <=< at caveMap) <$> adjacent pos :: [Maybe (Distance RiskLevel)]
zipFn (Just riskLevel) adjPos = Just (riskLevel, pos, adjPos)
zipFn Nothing _ = Nothing
adjacent :: Pos -> [Pos]
adjacent (Pos (x, y)) = [Pos (x - 1, y), Pos (x + 1, y), Pos (x, y - 1), Pos (x, y + 1)]
toDistance :: RiskLevel -> Maybe (Distance RiskLevel)
toDistance = fmap distance . finite
fromDistance :: Distance RiskLevel -> RiskLevel
fromDistance = fromMaybe (error "Negative number!") . getFinite . getDistance
This solution takes about 5 seconds
solve2 :: CaveMap -> RiskLevel
solve2 = solve1 . enlargeCaveMap
enlargeCaveMap :: CaveMap -> CaveMap
enlargeCaveMap caveMap = foldl1' concatVertical $ foldl1' concatHorizontal <$> enlargedRaw' where
offsetMatrix = take 5 $ iterate (fmap (+ 1)) [0 .. 4] :: [[RiskLevel]]
enlargedRaw = (\ofs -> imap2D (plusFn ofs) caveMap) <<$>> offsetMatrix :: [[CaveMap]]
plusFn offset = \(_ :: Pos) x -> let x' = x + offset in if x' > 9 then x' - 9 else x'
enlargedRaw' =
fromJust $ traverse nonEmpty enlargedRaw >>= nonEmpty :: NonEmpty (NonEmpty CaveMap)
fromJust = fromMaybe (error "Impossible!")
concatHorizontal :: CaveMap -> CaveMap -> CaveMap
concatHorizontal = V.zipWith (<>)
concatVertical :: CaveMap -> CaveMap -> CaveMap
concatVertical = (<>)
main' :: IO ()
main' = do
caveMap <- parseCaveMap <$> readFileLines "inputs/Y2021/Day15.txt" :: IO CaveMap
print $ solve1 caveMap
print $ solve2 caveMap
parseCaveMap :: [Text] -> CaveMap
parseCaveMap lines = fromList $ parseLine <$> lines where
parseLine :: Text -> Vector RiskLevel
parseLine line = fromList $ fmap (RiskLevel . readInt . toText . toList) (toString line)
toList = one :: a -> [a]
|
843309a502bb935cb838159ed0d7178485c40264ad04130a83cadf5779cd00de | senapk/fn_2020_1 | Main.hs | soma x y = x + y
-- 1 1
-- 2,3,4,4,4
key last qtd = if qtd /= 1 then [[qtd, last]] else [[last]]
compac' [] last qtd = key last qtd
compac' (x:xs) last qtd = if x == last
then compac' xs last (qtd + 1)
else key last qtd ++ compac' xs x 1
compac (x:xs) = compac' xs x 1
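-- Worked example added as a comment (not in the original file), matching the
-- sample input hinted at above:
--
-- >>> compac [2,3,4,4,4]
-- [[2],[3],[3,4]]
--
-- i.e. a lone element stays as [x], while a run becomes [count, value].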
{--
print $ soImpares [1..10] == [1,3,5,7,9]
print $ soImpares [1..20] == [1,3..20]
print $ soImpares [-10..20] == [-9,-7..20]
main = do
line <- getLine
let vet = [read x :: Int | x <- words line]
print $ soImpares vet
--} | null | https://raw.githubusercontent.com/senapk/fn_2020_1/dd30b5d49300459a62b00299d65309f09638d8fc/s04e02_colaboracao/Main.hs | haskell | } | soma x y = x + y
1 1
2,3,4,4,4
key last qtd = if qtd /= 1 then [[qtd, last]] else [[last]]
compac' [] last qtd = key last qtd
compac' (x:xs) last qtd = if x == last
then compac' xs last (qtd + 1)
else key last qtd ++ compac' xs x 1
compac (x:xs) = compac' xs x 1
-
soImparesTeste = do
print $ soImpares [ 1 .. 10 ] = = [ 1,3,5,7,9 ]
print $ soImpares [ 1 .. 20 ] = = [ 1,3 .. 20 ]
print $ soImpares [ -10 .. 20 ] = = [ -9,-7 .. 20 ]
main = do
line < - getLine
let vet = [ read x : : Int | x < - words line ]
print $ soImpares vet
-
soImparesTeste = do
print $ soImpares [1..10] == [1,3,5,7,9]
print $ soImpares [1..20] == [1,3..20]
print $ soImpares [-10..20] == [-9,-7..20]
main = do
line <- getLine
let vet = [read x :: Int | x <- words line]
print $ soImpares vet |
042ff52e42d098f1bfc175eefc2684e814973dda5220dd5c0b66c135721615c5 | mbutterick/brag | colorer.rkt | #lang racket/base
(require brag/support (submod brag/rules/lexer lex-abbrevs) racket/match)
(provide color-brag)
(define brag-syntax-lexer
(lexer-srcloc
[(eof) (return-without-srcloc eof)]
; need to lex whitespace to keep accurate
; (for DrRacket selections etc)
[whitespace (token 'WHITE lexeme)]
[(:or (:: "\"" "\\" "\"" "\"") ; string containing double-quote = "\""
(from/to "'" "'")
(from/to "\"" "\"")) (token 'LIT lexeme)]
[(:or "()" "Ø" "∅") (token 'NO-COLOR lexeme)] ; empty set symbols
[(:or (char-set "()[]{}|+*:?") hide-char splice-char "::=") (token 'MISC lexeme)]
[(from/to "(*" "*)") (token 'COMMENT lexeme)]
[(:seq (:or "#" ";") (complement (:seq (:* any-char) NL (:* any-char))) (:or NL "")) (token 'COMMENT lexeme)]
[id (token 'ID lexeme)]
[any-char (token 'OTHER lexeme)]))
(define (color-brag port)
(define srcloc-tok (brag-syntax-lexer port))
(cond
[(eof-object? srcloc-tok) (values srcloc-tok 'eof #f #f #f)]
[else
(match-define (srcloc-token (token-struct type val _ _ _ _ _) (srcloc _ _ _ posn span)) srcloc-tok)
(match-define (list start end) (list posn (+ posn span)))
(values val (case type
[(ID) 'symbol]
[(LIT) 'string]
[(MISC) 'parenthesis]
[(WHITE) 'whitespace]
[(COMMENT) 'comment]
[else 'no-color]) #f start end)]))
(module+ test
(require rackunit)
(define-syntax-rule (values->list EXPR) (call-with-values (λ () EXPR) list))
(define (apply-colorer str)
(for/list ([annotation (in-port (λ (p)
(let ([xs (values->list (color-brag p))])
(if (eof-object? (car xs)) eof xs)))
(open-input-string str))])
annotation))
(check-equal? (apply-colorer "foo") `(("foo" symbol #f 1 4)))
(check-equal? (apply-colorer "'str'") `(("'str'" string #f 1 6)))
(check-equal? (apply-colorer "(foo)+") `(("(" parenthesis #f 1 2)
("foo" symbol #f 2 5)
(")" parenthesis #f 5 6)
("+" parenthesis #f 6 7)))
(check-equal? (apply-colorer "; rem") `(("; rem" comment #f 1 6)))
(check-equal? (apply-colorer "◊") `(("◊" no-color #f 1 4))))
| null | https://raw.githubusercontent.com/mbutterick/brag/6c161ae31df9b4ae7f55a14f754c0b216b60c9a6/brag-lib/brag/private/colorer.rkt | racket | empty set symbols | #lang racket/base
(require brag/support (submod brag/rules/lexer lex-abbrevs) racket/match)
(provide color-brag)
(define brag-syntax-lexer
(lexer-srcloc
[(eof) (return-without-srcloc eof)]
need to lex whitespace to keep accurate
( for DrRacket selections etc )
[whitespace (token 'WHITE lexeme)]
[(:or (:: "\"" "\\" "\"" "\"") ; string containg double-quote = "\""
(from/to "'" "'")
(from/to "\"" "\"")) (token 'LIT lexeme)]
[(:or (char-set "()[]{}|+*:?") hide-char splice-char "::=") (token 'MISC lexeme)]
[(from/to "(*" "*)") (token 'COMMENT lexeme)]
[(:seq (:or "#" ";") (complement (:seq (:* any-char) NL (:* any-char))) (:or NL "")) (token 'COMMENT lexeme)]
[id (token 'ID lexeme)]
[any-char (token 'OTHER lexeme)]))
(define (color-brag port)
(define srcloc-tok (brag-syntax-lexer port))
(cond
[(eof-object? srcloc-tok) (values srcloc-tok 'eof #f #f #f)]
[else
(match-define (srcloc-token (token-struct type val _ _ _ _ _) (srcloc _ _ _ posn span)) srcloc-tok)
(match-define (list start end) (list posn (+ posn span)))
(values val (case type
[(ID) 'symbol]
[(LIT) 'string]
[(MISC) 'parenthesis]
[(WHITE) 'whitespace]
[(COMMENT) 'comment]
[else 'no-color]) #f start end)]))
(module+ test
(require rackunit)
(define-syntax-rule (values->list EXPR) (call-with-values (λ () EXPR) list))
(define (apply-colorer str)
(for/list ([annotation (in-port (λ (p)
(let ([xs (values->list (color-brag p))])
(if (eof-object? (car xs)) eof xs)))
(open-input-string str))])
annotation))
(check-equal? (apply-colorer "foo") `(("foo" symbol #f 1 4)))
(check-equal? (apply-colorer "'str'") `(("'str'" string #f 1 6)))
(check-equal? (apply-colorer "(foo)+") `(("(" parenthesis #f 1 2)
("foo" symbol #f 2 5)
(")" parenthesis #f 5 6)
("+" parenthesis #f 6 7)))
(check-equal? (apply-colorer "; rem") `(("; rem" comment #f 1 6)))
(check-equal? (apply-colorer "◊") `(("◊" no-color #f 1 4))))
|
7a68c203ce7543f02f101f1cd66606ad1607c23d29de4294ea031f72ad0c5b97 | cac-t-u-s/om-sharp | graphics.lisp | ;=========================================================================
; OM API
; Multiplatform API for OpenMusic
; LispWorks Implementation
;=========================================================================
;
; This program is free software: you can redistribute it and/or modify
; it under the terms of the GNU General Public License as published by
; the Free Software Foundation, either version 3 of the License, or
; (at your option) any later version.
;
; This program is distributed in the hope that it will be useful,
; but WITHOUT ANY WARRANTY; without even the implied warranty of
; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
; GNU General Public License for more details.
;
; You should have received a copy of the GNU General Public License
; along with this program. If not, see </>.
;
;=========================================================================
; Authors: ,
;=========================================================================
;;===========================================================================
; GRAPHIC STRUCTURES (points, colors, fonts)
;;===========================================================================
(in-package :om-api)
;;;=========================
;;; export :
;;;=========================
(export '(
ompoint
om-make-point omp
om-point-p
om-points-equal-p
om-add-points
om-subtract-points
om-point-x
om-point-y
om-point-*
om-point-mv
om-point-set
om-point-set-values-from-point
om-max-point
om-min-point
om-borne-point
om-round-point
om-def-point
om-make-color
om-gray-color
om-make-color-alpha
om-color-p
om-color-r
om-color-g
om-color-b
om-color-a
om-color-null-p
om-def-color
om-color-equal
om-make-font
om-font-p
om-font-face
om-font-size
om-font-style
om-def-font
om-lambda
om-font-equal
om-list-all-fonts
om-string-size
om-string-wrap
om-correct-point
om-correct-font
om-correct-color
) :om-api)
;;;=========================
;;; DUMMY VIEW
;;; THIS VIEW IS USED BY SEVERAL CAPI FUNCTIONS TO INITIALIZE GRAPHICS COMPONENTS
;;;=========================
(defvar *dummy-view* nil)
(defun init-dummy-view ()
(let* ((pl (make-instance 'capi:pinboard-layout)))
(capi:display (make-instance 'capi:interface
:display-state :hidden
:layout pl))
(setf *dummy-view* pl)))
(om-api-add-init-fun 'init-dummy-view)
;;;=========================
;;; COMPATIBILITY
;;;=========================
(defun om-correct-color (color)
(if (om-color-p color) color (om-def-color :gray)))
(defun om-correct-point (point)
(cond ((om-point-p point) point)
((null point) point)
((numberp point) (om-make-point (- point (ash (ash point -16) 16)) (ash point -16)))
((consp point) (om-make-point (car point) (cadr point)))
(t nil)))
(defun om-correct-font (font)
(if (om-font-p font) font (om-def-font :normal)))
;;;=========================
;;;POINTS
;;;=========================
(defstruct ompoint (x 0) (y 0))
;;; LW facilities are allowed (e.g. NIL, (:character 4), etc.)
(defun om-make-point (x y)
(make-ompoint :x x :y y))
(defmacro omp (x y) `(om-make-point ,x ,y))
(defmethod make-load-form ((self ompoint) &optional env)
(declare (ignore env))
`(make-ompoint :x ,(ompoint-x self) :y ,(ompoint-y self)))
(defmethod om-point-p ((self t)) (ompoint-p self))
;;; COMPAT
(defmethod om-point-h ((point ompoint)) (ompoint-x point))
(defmethod om-point-v ((point ompoint)) (ompoint-y point))
(defmethod om-point-x ((point ompoint))
(ompoint-x point))
(defmethod om-point-y ((point ompoint))
(ompoint-y point))
(defmethod om-add-points (point1 point2)
(make-ompoint :x (+ (ompoint-x point1) (ompoint-x point2))
:y (+ (ompoint-y point1) (ompoint-y point2))))
(defmethod om-subtract-points (point1 point2)
(make-ompoint :x (- (ompoint-x point1) (ompoint-x point2))
:y (- (ompoint-y point1) (ompoint-y point2))))
(defmethod om-points-equal-p (point1 point2) nil)
(defmethod om-points-equal-p ((point1 ompoint) (point2 ompoint))
(and (= (ompoint-x point1) (ompoint-x point2))
(= (ompoint-y point1) (ompoint-y point2))))
(defmethod om-point-* ((point ompoint) fact)
(make-ompoint :x (* (ompoint-x point) fact)
:y (* (om-point-y point) fact)))
(defmethod om-point-mv ((point ompoint) &key x y)
(if x (setf (ompoint-x point) (+ (ompoint-x point) x)))
(if y (setf (ompoint-y point) (+ (ompoint-y point) y)))
point)
(defmethod om-point-set ((point ompoint) &key x y)
(if x (setf (ompoint-x point) x))
(if y (setf (ompoint-y point) y))
point)
(defmethod om-point-set-values-from-point ((point ompoint) (from ompoint))
(setf (ompoint-x point) (ompoint-x from))
(setf (ompoint-y point) (ompoint-y from))
point)
(defun max-null (a b)
(cond ((and a b) (max a b))
((null a) b)
((null b) a)))
(defun min-null (a b)
(cond ((and a b) (min a b))
((null a) b)
((null b) a)))
(defmethod om-max-point ((p1 ompoint) (p2 ompoint))
(make-ompoint :x (max-null (ompoint-x p1) (ompoint-x p2))
:y (max-null (ompoint-y p1) (ompoint-y p2))))
(defmethod om-max-point ((p1 ompoint) (p2 null)) p1)
(defmethod om-max-point ((p1 null) (p2 ompoint)) p2)
(defmethod om-min-point ((p1 ompoint) (p2 ompoint))
(make-ompoint :x (min-null (ompoint-x p1) (ompoint-x p2))
:y (min-null (ompoint-y p1) (ompoint-y p2))))
(defmethod om-min-point ((p1 ompoint) (p2 null)) p1)
(defmethod om-min-point ((p1 null) (p2 ompoint)) p2)
(defun om-borne-point (p pmin pmax)
(om-min-point (om-max-point p pmin) pmax))
(defun om-round-point (p)
(make-ompoint :x (round (ompoint-x p)) :y (round (ompoint-y p))))
(defun om-def-point (p defp)
(make-ompoint :x (or (ompoint-x p) (ompoint-x defp)) :y (or (ompoint-y p) (ompoint-y defp))))
;;;=========================
;;;COLORS
;;;=========================
(defstruct omcolor
(c (color:make-rgb 0 0 0)))
(defun om-make-color (r g b &optional a)
(make-omcolor :c (color:make-rgb r g b a)))
(defun om-gray-color (val &optional a)
(make-omcolor :c (color:make-rgb val val val a)))
(defmethod om-make-color-alpha ((color omcolor) alpha)
(make-omcolor :c (color::color-with-alpha (omcolor-c color) alpha)))
(defmethod make-load-form ((self omcolor) &optional env)
(declare (ignore env))
`(make-omcolor :c ,(omcolor-c self)))
(defmethod om-color-p ((self t)) nil)
(defmethod om-color-p ((self omcolor)) t)
(defun om-color-r (color)
(color::color-red (omcolor-c color)))
(defun om-color-g (color)
(color::color-green (omcolor-c color)))
(defun om-color-b (color)
(color::color-blue (omcolor-c color)))
(defun om-color-a (color)
(color::color-alpha (omcolor-c color)))
(defun om-color-null-p (color)
(or (null color)
(= (color::color-alpha (omcolor-c color)) 0)))
(defun om-color-equal (c1 c2)
(and (= (om-color-r c1) (om-color-r c2))
(= (om-color-g c1) (om-color-g c2))
(= (om-color-b c1) (om-color-b c2))
(= (om-color-a c1) (om-color-a c2))))
(defun om-def-color (c)
(case c
(:light-gray (make-omcolor :c (color:make-rgb 0.9 0.9 0.9)))
(:gray (make-omcolor :c (color:make-rgb 0.6 0.6 0.6)))
(:dark-gray (make-omcolor :c (color:make-rgb 0.3 0.3 0.3)))
(:dark-red (make-omcolor :c (color:make-rgb 0.9 0.3 0.3)))
(:dark-blue (make-omcolor :c (color:make-rgb 0.2 0.4 0.5)))
;(:window (make-omcolor :c (color::get-color-spec #+cocoa :transparent #-cocoa :gray90)))
(:window (make-omcolor :c (color::get-color-spec #+cocoa :transparent #-cocoa :background)))
(:selection (make-omcolor :c #+win32 (color::make-rgb 0.87058825 0.87058825 0.87058825 1)
#-win32 (color::make-rgb 0.5 0.5 0.5 1)))
(:selection-inv (make-omcolor :c (color::make-rgb 0.9 0.9 0.9)))
(:selection-a (make-omcolor :c (color::make-rgb 0.7 0.7 0.7 0.2)))
(:toolbar-color (make-omcolor :c (color:make-rgb 0.85 0.85 0.85)))
(:text-selection (let ((selectcolor (om-def-color :selection)))
(make-omcolor :c (color:make-rgb (/ (om-color-r selectcolor) 2)
(/ (om-color-g selectcolor) 2)
(/ (om-color-b selectcolor) 2)
0.7))))
supported symbols = : black : : red ... : transparent
(otherwise (make-omcolor :c (color::get-color-spec c)))
))
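;; Usage sketch (added comment, not part of the original API file); the RGB
;; values below are arbitrary examples:
;; (om-make-color 0.2 0.4 0.5) ; opaque custom color
;; (om-make-color-alpha (om-def-color :gray) 0.5) ; gray with 50% alpha
;; (om-color-r (om-def-color :dark-red)) ; => 0.9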
;;;=========================
;;;=========================
(defmethod om-font-p ((self t)) (gp::font-description-p self))
;; &allow-other-keys is for compatibility with OM6 patches
(defun om-make-font (face size &key (style nil) &allow-other-keys)
(gp::make-font-description
;:name face ; --> name is not portable for find-best-font process
:family face
:size (round size)
:slant (if (member :italic style) :italic :roman)
:weight (if (member :bold style) :bold :normal)
:charset :ansi
))
(defun om-font-face (font)
(gp::font-description-attribute-value font :family))
(defun om-font-size (font)
(gp::font-description-attribute-value font :size))
(defun om-font-style (font)
(cond ((and (equal (gp::font-description-attribute-value font :weight) :bold)
(equal (gp::font-description-attribute-value font :slant) :italic))
'(:bold :italic))
((equal (gp::font-description-attribute-value font :slant) :italic)
'(:italic))
((equal (gp::font-description-attribute-value font :weight) :bold)
'(:bold))
(t '(:plain))))
(defun om-font-equal (f1 f2)
(and (string-equal (om-font-face f1) (om-font-face f2))
(= (om-font-size f2) (om-font-size f2))
(equal (om-font-style f1) (om-font-style f2))))
(defun om-string-size (str font)
(if str
(multiple-value-bind (left top right bottom)
(gp::get-string-extent
*dummy-view* str
(gp::find-best-font *dummy-view* font))
(values (round (- right left)) (- bottom top)))
(values 0 0)))
; (om-string-size "--" (om-def-font :large))
; (om-string-wrap "azertyuiop qsdfghjklm wxcvbn" 10 (om-def-font :large))
(defun om-string-wrap (str width font)
(declare (special *curstream* *dummy-view*))
(let* ((view (or *curstream* *dummy-view*))
(w (max width (om-string-size "--" font))))
(capi::wrap-text-for-pane
view str
:visible-width w
:font (gp::find-best-font view font)
)))
(defparameter *def-font*
72 ppi
96 ppi
96 ppi
)
(defparameter *mono-font*
#+macosx '("Courier New" 12)
#+mswindows '("Courier New" 8)
#+linux '("Courier" 10)
)
(defparameter *gui-font*
#+macosx '("Lucida Grande" (11 12))
#+mswindows '("Segoe UI" (8 9))
#+linux '("Bistream Vera Sans" (9 10))
)
(defparameter *score-font*
'("Times New Roman" 10))
(defparameter *fonts-table*
(let ((def-face (car *def-font*))
(sizes (cadr *def-font*)))
`((:tiny ,(om-make-font def-face (nth 0 sizes)))
(:small ,(om-make-font def-face (nth 1 sizes)))
(:small-b ,(om-make-font def-face (nth 1 sizes) :style '(:bold)))
(:normal ,(om-make-font def-face (nth 2 sizes)))
(:normal-b ,(om-make-font def-face (nth 2 sizes) :style '(:bold)))
(:large ,(om-make-font def-face (nth 3 sizes)))
(:large-b ,(om-make-font def-face (nth 3 sizes) :style '(:bold)))
(:gui ,(om-make-font (car *gui-font*) (nth 0 (cadr *gui-font*))))
(:gui-b ,(om-make-font (car *gui-font*) (nth 0 (cadr *gui-font*)) :style '(:bold)))
(:gui-title ,(om-make-font (car *gui-font*) (nth 1 (cadr *gui-font*)) :style '(:bold)))
(:score ,(apply #'om-make-font *score-font*))
(:mono ,(apply #'om-make-font *mono-font*)))))
(defun om-def-font (font-id &key face size style)
(let ((font (cadr (find font-id *fonts-table* :key 'car))))
(when font
(when face (setf font (gp::augment-font-description font :family face)))
(when size (setf font (gp::augment-font-description font :size size)))
(when style (setf font (gp::augment-font-description
font
:slant (if (member :italic style) :italic :roman)
:weight (if (member :bold style) :bold :normal)))))
font))
;;; a special font / char code to write a lambda
(defparameter *lambda-font*
(om-make-font "Times" 10))
(defun om-lambda (&optional size)
(let ((font *lambda-font*))
(when size (setf font (gp::augment-font-description font :size size)))
(values (code-char 955) font)))
;;; #+win32 (gp::font-description capi-win32-lib::*win32-default-gui-font*))
(defun om-list-all-fonts ()
(mapcar #'(lambda (font)
(gp::font-description-attribute-value font :family))
(gp:list-all-font-names *dummy-view*)))
| null | https://raw.githubusercontent.com/cac-t-u-s/om-sharp/80f9537368471d0e6e4accdc9fff01ed277b879e/src/api/om-api-LW/graphics.lisp | lisp | =========================================================================
=========================================================================
This program is free software: you can redistribute it and/or modify
(at your option) any later version.
This program is distributed; in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with this program. If not, see </>.
=========================================================================
=========================================================================
===========================================================================
GRAPHIC STRUCTURES (points, colors, fonts)
===========================================================================
=========================
export :
=========================
=========================
DUMMY VIEW
THIS VIEW IS USED BY SEVERAL CAPI FUNCTIONS TO INITIALIZE GRAPHICS COMPONENTS
=========================
=========================
COMPATIBILITY
=========================
=========================
POINTS
=========================
COMPAT
=========================
COLORS
=========================
=========================
=========================
&allow-other-keys is for compatibility with OM6 patches
:name face ; --> name is not portable for find-best-font process
(om-string-size "--" (om-def-font :large))
a special font / char code to write a lambda
#+win32 (gp::font-description capi-win32-lib::*win32-default-gui-font*))
| OM API
Multiplatform API for OpenMusic
LispWorks Implementation
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU General Public License
Authors : ,
(in-package :om-api)
(export '(
ompoint
om-make-point omp
om-point-p
om-points-equal-p
om-add-points
om-subtract-points
om-point-x
om-point-y
om-point-*
om-point-mv
om-point-set
om-point-set-values-from-point
om-max-point
om-min-point
om-borne-point
om-round-point
om-def-point
om-make-color
om-gray-color
om-make-color-alpha
om-color-p
om-color-r
om-color-g
om-color-b
om-color-a
om-color-null-p
om-def-color
om-color-equal
om-make-font
om-font-p
om-font-face
om-font-size
om-font-style
om-def-font
om-lambda
om-font-equal
om-list-all-fonts
om-string-size
om-string-wrap
om-correct-point
om-correct-font
om-correct-color
) :om-api)
(defvar *dummy-view* nil)
(defun init-dummy-view ()
(let* ((pl (make-instance 'capi:pinboard-layout)))
(capi:display (make-instance 'capi:interface
:display-state :hidden
:layout pl))
(setf *dummy-view* pl)))
(om-api-add-init-fun 'init-dummy-view)
(defun om-correct-color (color)
(if (om-color-p color) color (om-def-color :gray)))
(defun om-correct-point (point)
(cond ((om-point-p point) point)
((null point) point)
((numberp point) (om-make-point (- point (ash (ash point -16) 16)) (ash point -16)))
((consp point) (om-make-point (car point) (cadr point)))
(t nil)))
(defun om-correct-font (font)
(if (om-font-p font) font (om-def-font :normal)))
(defstruct ompoint (x 0) (y 0))
LW facilities are allowed ( e.g. NIL , (: character 4 ) , etc . )
(defun om-make-point (x y)
(make-ompoint :x x :y y))
(defmacro omp (x y) `(om-make-point ,x ,y))
(defmethod make-load-form ((self ompoint) &optional env)
(declare (ignore env))
`(make-ompoint :x ,(ompoint-x self) :y ,(ompoint-y self)))
(defmethod om-point-p ((self t)) (ompoint-p self))
(defmethod om-point-h ((point ompoint)) (ompoint-x point))
(defmethod om-point-v ((point ompoint)) (ompoint-y point))
(defmethod om-point-x ((point ompoint))
(ompoint-x point))
(defmethod om-point-y ((point ompoint))
(ompoint-y point))
(defmethod om-add-points (point1 point2)
(make-ompoint :x (+ (ompoint-x point1) (ompoint-x point2))
:y (+ (ompoint-y point1) (ompoint-y point2))))
(defmethod om-subtract-points (point1 point2)
(make-ompoint :x (- (ompoint-x point1) (ompoint-x point2))
:y (- (ompoint-y point1) (ompoint-y point2))))
(defmethod om-points-equal-p (point1 point2) nil)
(defmethod om-points-equal-p ((point1 ompoint) (point2 ompoint))
(and (= (ompoint-x point1) (ompoint-x point2))
(= (ompoint-y point1) (ompoint-y point2))))
(defmethod om-point-* ((point ompoint) fact)
(make-ompoint :x (* (ompoint-x point) fact)
:y (* (om-point-y point) fact)))
(defmethod om-point-mv ((point ompoint) &key x y)
(if x (setf (ompoint-x point) (+ (ompoint-x point) x)))
(if y (setf (ompoint-y point) (+ (ompoint-y point) y)))
point)
(defmethod om-point-set ((point ompoint) &key x y)
(if x (setf (ompoint-x point) x))
(if y (setf (ompoint-y point) y))
point)
(defmethod om-point-set-values-from-point ((point ompoint) (from ompoint))
(setf (ompoint-x point) (ompoint-x from))
(setf (ompoint-y point) (ompoint-y from))
point)
(defun max-null (a b)
(cond ((and a b) (max a b))
((null a) b)
((null b) a)))
(defun min-null (a b)
(cond ((and a b) (min a b))
((null a) b)
((null b) a)))
(defmethod om-max-point ((p1 ompoint) (p2 ompoint))
(make-ompoint :x (max-null (ompoint-x p1) (ompoint-x p2))
:y (max-null (ompoint-y p1) (ompoint-y p2))))
(defmethod om-max-point ((p1 ompoint) (p2 null)) p1)
(defmethod om-max-point ((p1 null) (p2 ompoint)) p2)
(defmethod om-min-point ((p1 ompoint) (p2 ompoint))
(make-ompoint :x (min-null (ompoint-x p1) (ompoint-x p2))
:y (min-null (ompoint-y p1) (ompoint-y p2))))
(defmethod om-min-point ((p1 ompoint) (p2 null)) p1)
(defmethod om-min-point ((p1 null) (p2 ompoint)) p2)
(defun om-borne-point (p pmin pmax)
(om-min-point (om-max-point p pmin) pmax))
(defun om-round-point (p)
(make-ompoint :x (round (ompoint-x p)) :y (round (ompoint-y p))))
(defun om-def-point (p defp)
(make-ompoint :x (or (ompoint-x p) (ompoint-x defp)) :y (or (ompoint-y p) (ompoint-y defp))))
(defstruct omcolor
(c (color:make-rgb 0 0 0)))
(defun om-make-color (r g b &optional a)
(make-omcolor :c (color:make-rgb r g b a)))
(defun om-gray-color (val &optional a)
(make-omcolor :c (color:make-rgb val val val a)))
(defmethod om-make-color-alpha ((color omcolor) alpha)
(make-omcolor :c (color::color-with-alpha (omcolor-c color) alpha)))
(defmethod make-load-form ((self omcolor) &optional env)
(declare (ignore env))
`(make-omcolor :c ,(omcolor-c self)))
(defmethod om-color-p ((self t)) nil)
(defmethod om-color-p ((self omcolor)) t)
(defun om-color-r (color)
(color::color-red (omcolor-c color)))
(defun om-color-g (color)
(color::color-green (omcolor-c color)))
(defun om-color-b (color)
(color::color-blue (omcolor-c color)))
(defun om-color-a (color)
(color::color-alpha (omcolor-c color)))
(defun om-color-null-p (color)
(or (null color)
(= (color::color-alpha (omcolor-c color)) 0)))
(defun om-color-equal (c1 c2)
(and (= (om-color-r c1) (om-color-r c2))
(= (om-color-g c1) (om-color-g c2))
(= (om-color-b c1) (om-color-b c2))
(= (om-color-a c1) (om-color-a c2))))
(defun om-def-color (c)
(case c
(:light-gray (make-omcolor :c (color:make-rgb 0.9 0.9 0.9)))
(:gray (make-omcolor :c (color:make-rgb 0.6 0.6 0.6)))
(:dark-gray (make-omcolor :c (color:make-rgb 0.3 0.3 0.3)))
(:dark-red (make-omcolor :c (color:make-rgb 0.9 0.3 0.3)))
(:dark-blue (make-omcolor :c (color:make-rgb 0.2 0.4 0.5)))
(: window ( make - omcolor : c ( color::get - color - spec # + cocoa : transparent # -cocoa : gray90 ) ) )
(:window (make-omcolor :c (color::get-color-spec #+cocoa :transparent #-cocoa :background)))
(:selection (make-omcolor :c #+win32 (color::make-rgb 0.87058825 0.87058825 0.87058825 1)
#-win32 (color::make-rgb 0.5 0.5 0.5 1)))
(:selection-inv (make-omcolor :c (color::make-rgb 0.9 0.9 0.9)))
(:selection-a (make-omcolor :c (color::make-rgb 0.7 0.7 0.7 0.2)))
(:toolbar-color (make-omcolor :c (color:make-rgb 0.85 0.85 0.85)))
(:text-selection (let ((selectcolor (om-def-color :selection)))
(make-omcolor :c (color:make-rgb (/ (om-color-r selectcolor) 2)
(/ (om-color-g selectcolor) 2)
(/ (om-color-b selectcolor) 2)
0.7))))
supported symbols = : black : : red ... : transparent
(otherwise (make-omcolor :c (color::get-color-spec c)))
))
(defmethod om-font-p ((self t)) (gp::font-description-p self))
(defun om-make-font (face size &key (style nil) &allow-other-keys)
(gp::make-font-description
:family face
:size (round size)
:slant (if (member :italic style) :italic :roman)
:weight (if (member :bold style) :bold :normal)
:charset :ansi
))
(defun om-font-face (font)
(gp::font-description-attribute-value font :family))
(defun om-font-size (font)
(gp::font-description-attribute-value font :size))
(defun om-font-style (font)
(cond ((and (equal (gp::font-description-attribute-value font :weight) :bold)
(equal (gp::font-description-attribute-value font :slant) :italic))
'(:bold :italic))
((equal (gp::font-description-attribute-value font :slant) :italic)
'(:italic))
((equal (gp::font-description-attribute-value font :weight) :bold)
'(:bold))
(t '(:plain))))
(defun om-font-equal (f1 f2)
(and (string-equal (om-font-face f1) (om-font-face f2))
(= (om-font-size f2) (om-font-size f2))
(equal (om-font-style f1) (om-font-style f2))))
(defun om-string-size (str font)
(if str
(multiple-value-bind (left top right bottom)
(gp::get-string-extent
*dummy-view* str
(gp::find-best-font *dummy-view* font))
(values (round (- right left)) (- bottom top)))
(values 0 0)))
( om - string - wrap " azertyuiop qsdfghjklm wxcvbn " 10 ( om - def - font : large ) )
(defun om-string-wrap (str width font)
(declare (special *curstream* *dummy-view*))
(let* ((view (or *curstream* *dummy-view*))
(w (max width (om-string-size "--" font))))
(capi::wrap-text-for-pane
view str
:visible-width w
:font (gp::find-best-font view font)
)))
(defparameter *def-font*
72 ppi
96 ppi
96 ppi
)
(defparameter *mono-font*
#+macosx '("Courier New" 12)
#+mswindows '("Courier New" 8)
#+linux '("Courier" 10)
)
(defparameter *gui-font*
#+macosx '("Lucida Grande" (11 12))
#+mswindows '("Segoe UI" (8 9))
#+linux '("Bistream Vera Sans" (9 10))
)
(defparameter *score-font*
'("Times New Roman" 10))
(defparameter *fonts-table*
(let ((def-face (car *def-font*))
(sizes (cadr *def-font*)))
`((:tiny ,(om-make-font def-face (nth 0 sizes)))
(:small ,(om-make-font def-face (nth 1 sizes)))
(:small-b ,(om-make-font def-face (nth 1 sizes) :style '(:bold)))
(:normal ,(om-make-font def-face (nth 2 sizes)))
(:normal-b ,(om-make-font def-face (nth 2 sizes) :style '(:bold)))
(:large ,(om-make-font def-face (nth 3 sizes)))
(:large-b ,(om-make-font def-face (nth 3 sizes) :style '(:bold)))
(:gui ,(om-make-font (car *gui-font*) (nth 0 (cadr *gui-font*))))
(:gui-b ,(om-make-font (car *gui-font*) (nth 0 (cadr *gui-font*)) :style '(:bold)))
(:gui-title ,(om-make-font (car *gui-font*) (nth 1 (cadr *gui-font*)) :style '(:bold)))
(:score ,(apply #'om-make-font *score-font*))
(:mono ,(apply #'om-make-font *mono-font*)))))
(defun om-def-font (font-id &key face size style)
(let ((font (cadr (find font-id *fonts-table* :key 'car))))
(when font
(when face (setf font (gp::augment-font-description font :family face)))
(when size (setf font (gp::augment-font-description font :size size)))
(when style (setf font (gp::augment-font-description
font
:slant (if (member :italic style) :italic :roman)
:weight (if (member :bold style) :bold :normal)))))
font))
(defparameter *lambda-font*
(om-make-font "Times" 10))
(defun om-lambda (&optional size)
(let ((font *lambda-font*))
(when size (setf font (gp::augment-font-description font :size size)))
(values (code-char 955) font)))
(defun om-list-all-fonts ()
(mapcar #'(lambda (font)
(gp::font-description-attribute-value font :family))
(gp:list-all-font-names *dummy-view*)))
|
697c15cb0319520699da45799bd1ca2d835287b7ef78058cb4daf75185fbea59 | rnons/lord | User.hs | # LANGUAGE DeriveGeneric #
module Web.Radio.EightTracks.User where
import Data.Aeson (FromJSON)
import GHC.Generics (Generic)
data User = User
{ user_token:: String
, id :: Int
, login :: String
, web_path :: String
} deriving (Show, Generic)
instance FromJSON User
data Session = Session
{ user :: User
, status :: String
, errors :: Maybe String
, notices :: Maybe String
, logged_in :: Bool
, api_version :: Int
} deriving (Show, Generic)
instance FromJSON Session
| null | https://raw.githubusercontent.com/rnons/lord/31d632306c3a972da2c3fd1f5359277543bfa669/Web/Radio/EightTracks/User.hs | haskell | # LANGUAGE DeriveGeneric #
module Web.Radio.EightTracks.User where
import Data.Aeson (FromJSON)
import GHC.Generics (Generic)
data User = User
{ user_token:: String
, id :: Int
, login :: String
, web_path :: String
} deriving (Show, Generic)
instance FromJSON User
data Session = Session
{ user :: User
, status :: String
, errors :: Maybe String
, notices :: Maybe String
, logged_in :: Bool
, api_version :: Int
} deriving (Show, Generic)
instance FromJSON Session
|
|
8e550b8e444643a3e04406fb6345b1ab8ce2ecede6b47190b32fc552483f7afe | project-oak/hafnium-verification | frontend.ml |
(*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
(** Translate to LLAIR *)
let pp_lltype fs t = Format.pp_print_string fs (Llvm.string_of_lltype t)
(* WARNING: SLOW on instructions and functions *)
let pp_llvalue fs t = Format.pp_print_string fs (Llvm.string_of_llvalue t)
let pp_llblock fs t =
Format.pp_print_string fs (Llvm.string_of_llvalue (Llvm.value_of_block t))
exception Invalid_llvm of string
let invalid_llvm : string -> 'a =
fun msg ->
let first_line =
Option.value_map ~default:msg ~f:(String.prefix msg)
(String.index msg '\n')
in
Format.printf "@\n%s@\n" msg ;
raise (Invalid_llvm first_line)
(* gather names and debug locations *)
let sym_tbl : (Llvm.llvalue, string * Loc.t) Hashtbl.t =
Hashtbl.Poly.create ~size:4_194_304 ()
let scope_tbl :
( [`Fun of Llvm.llvalue | `Mod of Llvm.llmodule]
, int ref * (string, int) Hashtbl.t )
Hashtbl.t =
Hashtbl.Poly.create ~size:32_768 ()
open struct
open struct
let loc_of_global g =
Loc.mk
?dir:(Llvm.get_debug_loc_directory g)
?file:(Llvm.get_debug_loc_filename g)
~line:(Llvm.get_debug_loc_line g)
?col:None
let loc_of_function f =
Loc.mk
?dir:(Llvm.get_debug_loc_directory f)
?file:(Llvm.get_debug_loc_filename f)
~line:(Llvm.get_debug_loc_line f)
?col:None
let loc_of_instr i =
Loc.mk
?dir:(Llvm.get_debug_loc_directory i)
?file:(Llvm.get_debug_loc_filename i)
~line:(Llvm.get_debug_loc_line i)
~col:(Llvm.get_debug_loc_column i)
let add_sym llv loc =
let maybe_scope =
match Llvm.classify_value llv with
| Argument -> Some (`Fun (Llvm.param_parent llv))
| BasicBlock ->
Some (`Fun (Llvm.block_parent (Llvm.block_of_value llv)))
| Instruction _ ->
Some (`Fun (Llvm.block_parent (Llvm.instr_parent llv)))
| GlobalVariable | Function -> Some (`Mod (Llvm.global_parent llv))
| UndefValue -> None
| ConstantExpr -> None
| ConstantPointerNull -> None
| _ ->
warn "Unexpected type of llv, might crash: %a" pp_llvalue llv () ;
Some (`Mod (Llvm.global_parent llv))
in
match maybe_scope with
| None -> ()
| Some scope ->
let next, void_tbl =
Hashtbl.find_or_add scope_tbl scope ~default:(fun () ->
(ref 0, Hashtbl.Poly.create ()) )
in
let name =
match Llvm.classify_type (Llvm.type_of llv) with
| Void -> (
let fname =
match Llvm.classify_value llv with
| Instruction (Call | Invoke) -> (
match
Llvm.value_name
(Llvm.operand llv (Llvm.num_operands llv - 1))
with
| "" -> Int.to_string (!next - 1)
| s -> s )
| _ -> "void"
in
match Hashtbl.find void_tbl fname with
| None ->
Hashtbl.set void_tbl ~key:fname ~data:1 ;
fname ^ ".void"
| Some count ->
Hashtbl.set void_tbl ~key:fname ~data:(count + 1) ;
String.concat_array
[|fname; ".void."; Int.to_string count|] )
| _ -> (
match Llvm.value_name llv with
| "" ->
(* anonymous values take the next SSA name *)
let name = !next in
next := name + 1 ;
Int.to_string name
| name -> (
match Int.of_string name with
| _ ->
(* escape to avoid clash with names of anonymous values *)
String.concat_array [|"\""; name; "\""|]
| exception _ -> name ) )
in
Hashtbl.set sym_tbl ~key:llv ~data:(name, loc)
end
let scan_names_and_locs : Llvm.llmodule -> unit =
fun m ->
let scan_global g = add_sym g (loc_of_global g) in
let scan_instr i =
let loc = loc_of_instr i in
add_sym i loc ;
match Llvm.instr_opcode i with
| Call -> (
match Llvm.(value_name (operand i (num_arg_operands i))) with
| "llvm.dbg.declare" ->
let md = Llvm.(get_mdnode_operands (operand i 0)) in
if not (Array.is_empty md) then add_sym md.(0) loc
else
warn
"could not find variable for debug info at %a with \
metadata %a"
Loc.pp loc (List.pp ", " pp_llvalue) (Array.to_list md) ()
| _ -> () )
| _ -> ()
in
let scan_block b =
add_sym (Llvm.value_of_block b) Loc.none ;
Llvm.iter_instrs scan_instr b
in
let scan_function f =
Llvm.iter_params (fun prm -> add_sym prm Loc.none) f ;
add_sym f (loc_of_function f) ;
Llvm.iter_blocks scan_block f
in
Llvm.iter_globals scan_global m ;
Llvm.iter_functions scan_function m
let find_name : Llvm.llvalue -> string =
fun v -> fst (Hashtbl.find_exn sym_tbl v)
let find_loc : Llvm.llvalue -> Loc.t =
fun v -> snd (Hashtbl.find_exn sym_tbl v)
end
let label_of_block : Llvm.llbasicblock -> string =
fun blk -> find_name (Llvm.value_of_block blk)
let anon_struct_name : (Llvm.lltype, string) Hashtbl.t =
Hashtbl.Poly.create ()
let struct_name : Llvm.lltype -> string =
fun llt ->
match Llvm.struct_name llt with
| Some name -> name
| None ->
Hashtbl.find_or_add anon_struct_name llt ~default:(fun () ->
Int.to_string (Hashtbl.length anon_struct_name) )
type x =
{ llcontext: Llvm.llcontext
; llmodule: Llvm.llmodule
; lldatalayout: Llvm_target.DataLayout.t }
let ptr_siz : x -> int =
fun x -> Llvm_target.DataLayout.pointer_size x.lldatalayout
let size_of, bit_size_of =
let size_to_int size_of x llt =
if Llvm.type_is_sized llt then
match Int64.to_int (size_of llt x.lldatalayout) with
| Some n -> n
| None -> fail "type size too large: %a" pp_lltype llt ()
else fail "types with undetermined size: %a" pp_lltype llt ()
in
( size_to_int Llvm_target.DataLayout.abi_size
, size_to_int Llvm_target.DataLayout.size_in_bits )
let memo_type : (Llvm.lltype, Typ.t) Hashtbl.t = Hashtbl.Poly.create ()
let rec xlate_type : x -> Llvm.lltype -> Typ.t =
fun x llt ->
let xlate_type_ llt =
if Llvm.type_is_sized llt then
let byts = size_of x llt in
let bits = bit_size_of x llt in
match Llvm.classify_type llt with
| Half | Float | Double | Fp128 -> Typ.float ~bits ~byts ~enc:`IEEE
| X86fp80 -> Typ.float ~bits ~byts ~enc:`Extended
| Ppc_fp128 -> Typ.float ~bits ~byts ~enc:`Pair
| Integer -> Typ.integer ~bits ~byts
| X86_mmx -> Typ.integer ~bits ~byts
| Pointer ->
if byts <> ptr_siz x then
todo "non-integral pointer types: %a" pp_lltype llt () ;
let elt = xlate_type x (Llvm.element_type llt) in
Typ.pointer ~elt
| Vector ->
let elt = xlate_type x (Llvm.element_type llt) in
let len = Llvm.vector_size llt in
Typ.array ~elt ~len ~bits ~byts
| Array ->
let elt = xlate_type x (Llvm.element_type llt) in
let len = Llvm.array_length llt in
Typ.array ~elt ~len ~bits ~byts
| Struct ->
let llelts = Llvm.struct_element_types llt in
let len = Array.length llelts in
let packed = Llvm.is_packed llt in
if Llvm.is_literal llt then
let elts =
Vector.map ~f:(xlate_type x) (Vector.of_array llelts)
in
Typ.tuple elts ~bits ~byts ~packed
else
let name = struct_name llt in
let elts =
Vector.init len ~f:(fun i -> lazy (xlate_type x llelts.(i)))
in
Typ.struct_ ~name elts ~bits ~byts ~packed
| Function -> fail "expected to be unsized: %a" pp_lltype llt ()
| Void | Label | Metadata | Token -> assert false
else
match Llvm.classify_type llt with
| Function ->
let return = xlate_type_opt x (Llvm.return_type llt) in
let llargs = Llvm.param_types llt in
let len = Array.length llargs in
let args =
Vector.init len ~f:(fun i -> xlate_type x llargs.(i))
in
Typ.function_ ~return ~args
| Struct when Llvm.is_opaque llt -> Typ.opaque ~name:(struct_name llt)
| Token -> Typ.opaque ~name:"token"
| Vector | Array | Struct ->
todo "unsized non-opaque aggregate types: %a" pp_lltype llt ()
| Half | Float | Double | X86fp80 | Fp128 | Ppc_fp128 | Integer
|X86_mmx | Pointer ->
fail "expected to be sized: %a" pp_lltype llt ()
| Void | Label | Metadata -> assert false
in
Hashtbl.find_or_add memo_type llt ~default:(fun () ->
[%Trace.call fun {pf} -> pf "%a" pp_lltype llt]
;
xlate_type_ llt
|>
[%Trace.retn fun {pf} -> pf "%a" Typ.pp_defn] )
and xlate_type_opt : x -> Llvm.lltype -> Typ.t option =
fun x llt ->
match Llvm.classify_type llt with
| Void -> None
| _ -> Some (xlate_type x llt)
let i32 x = xlate_type x (Llvm.i32_type x.llcontext)
let suffix_after_last_space : string -> string =
fun str -> String.drop_prefix str (String.rindex_exn str ' ' + 1)
let xlate_int : x -> Llvm.llvalue -> Exp.t =
fun x llv ->
let llt = Llvm.type_of llv in
let typ = xlate_type x llt in
let data =
match Llvm.int64_of_const llv with
| Some n -> Z.of_int64 n
| None ->
Z.of_string (suffix_after_last_space (Llvm.string_of_llvalue llv))
in
Exp.integer typ data
let xlate_float : x -> Llvm.llvalue -> Exp.t =
fun x llv ->
let llt = Llvm.type_of llv in
let typ = xlate_type x llt in
let data = suffix_after_last_space (Llvm.string_of_llvalue llv) in
Exp.float typ data
let xlate_name x ?global : Llvm.llvalue -> Reg.t =
fun llv ->
let typ = xlate_type x (Llvm.type_of llv) in
Reg.program ?global typ (find_name llv)
let xlate_name_opt : x -> Llvm.llvalue -> Reg.t option =
fun x instr ->
let llt = Llvm.type_of instr in
match Llvm.classify_type llt with
| Void -> None
| _ -> Some (xlate_name x instr)
let memo_value : (bool * Llvm.llvalue, Exp.t) Hashtbl.t =
Hashtbl.Poly.create ()
let memo_global : (Llvm.llvalue, Global.t) Hashtbl.t =
Hashtbl.Poly.create ()
let should_inline : Llvm.llvalue -> bool =
fun llv ->
match Llvm.use_begin llv with
| Some use -> (
match Llvm.use_succ use with
| Some _ -> (
match Llvm.classify_value llv with
| Instruction
( Trunc | ZExt | SExt | FPToUI | FPToSI | UIToFP | SIToFP
| FPTrunc | FPExt | PtrToInt | IntToPtr | BitCast | AddrSpaceCast
) ->
true (* inline casts *)
| _ -> false (* do not inline if >= 2 uses *) )
| None -> true )
| None -> true
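(* Added note, not part of the original source: [should_inline] answers true
   when a value has at most one use, or when the value itself is one of the
   cast instructions listed above (casts are always inlined). Callers such as
   [xlate_value] use it as in this sketch:

     if should_inline llv then xlate_opcode x llv opcode
     else Exp.reg (xlate_name x llv)
*)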
module Llvalue = struct
type t = Llvm.llvalue
let hash = Hashtbl.hash
let compare = Poly.compare
let sexp_of_t llv = Sexp.Atom (Llvm.string_of_llvalue llv)
end
let struct_rec = Staged.unstage (Exp.struct_rec (module Llvalue))
let ptr_fld x ~ptr ~fld ~lltyp =
let offset =
Llvm_target.DataLayout.offset_of_element lltyp fld x.lldatalayout
in
Exp.add ~typ:Typ.ptr ptr (Exp.integer Typ.siz (Z.of_int64 offset))
let ptr_idx x ~ptr ~idx ~llelt =
let stride = Llvm_target.DataLayout.abi_size llelt x.lldatalayout in
Exp.add ~typ:Typ.ptr ptr
(Exp.mul ~typ:Typ.siz (Exp.integer Typ.siz (Z.of_int64 stride)) idx)
let convert_to_siz =
let siz_bits = Typ.bit_size_of Typ.siz in
fun typ arg ->
match (typ : Typ.t) with
| Integer {bits} ->
if siz_bits < bits then Exp.signed siz_bits arg ~to_:Typ.siz
else if siz_bits > bits then Exp.signed bits arg ~to_:Typ.siz
else arg
| _ -> fail "convert_to_siz: %a" Typ.pp typ ()
let xlate_llvm_eh_typeid_for : x -> Typ.t -> Exp.t -> Exp.t =
fun x typ arg -> Exp.convert typ ~to_:(i32 x) arg
let rec xlate_intrinsic_exp : string -> (x -> Llvm.llvalue -> Exp.t) option
=
fun name ->
match name with
| "llvm.eh.typeid.for" ->
Some
(fun x llv ->
let rand = Llvm.operand llv 0 in
let arg = xlate_value x rand in
let src = xlate_type x (Llvm.type_of rand) in
xlate_llvm_eh_typeid_for x src arg )
| _ -> None
and xlate_value ?(inline = false) : x -> Llvm.llvalue -> Exp.t =
fun x llv ->
let xlate_value_ llv =
match Llvm.classify_value llv with
| Instruction Call -> (
let func = Llvm.operand llv (Llvm.num_arg_operands llv) in
let fname = Llvm.value_name func in
match xlate_intrinsic_exp fname with
| Some intrinsic when inline || should_inline llv -> intrinsic x llv
| _ -> Exp.reg (xlate_name x llv) )
| Instruction (Invoke | Alloca | Load | PHI | LandingPad | VAArg)
|Argument ->
Exp.reg (xlate_name x llv)
| Function | GlobalVariable -> Exp.reg (xlate_global x llv).reg
| GlobalAlias -> xlate_value x (Llvm.operand llv 0)
| ConstantInt -> xlate_int x llv
| ConstantFP -> xlate_float x llv
| ConstantPointerNull -> Exp.null
| ConstantAggregateZero -> (
let typ = xlate_type x (Llvm.type_of llv) in
match typ with
| Integer _ -> Exp.integer typ Z.zero
| Pointer _ -> Exp.null
| Array _ | Tuple _ | Struct _ ->
Exp.splat typ (Exp.integer Typ.byt Z.zero)
| _ -> fail "ConstantAggregateZero of type %a" Typ.pp typ () )
| ConstantVector | ConstantArray ->
let typ = xlate_type x (Llvm.type_of llv) in
let len = Llvm.num_operands llv in
let f i = xlate_value x (Llvm.operand llv i) in
Exp.record typ (Vector.init len ~f)
| ConstantDataVector ->
let typ = xlate_type x (Llvm.type_of llv) in
let len = Llvm.vector_size (Llvm.type_of llv) in
let f i = xlate_value x (Llvm.const_element llv i) in
Exp.record typ (Vector.init len ~f)
| ConstantDataArray ->
let typ = xlate_type x (Llvm.type_of llv) in
let len = Llvm.array_length (Llvm.type_of llv) in
let f i = xlate_value x (Llvm.const_element llv i) in
Exp.record typ (Vector.init len ~f)
| ConstantStruct ->
let typ = xlate_type x (Llvm.type_of llv) in
let is_recursive =
Llvm.fold_left_uses
(fun b use -> b || llv == Llvm.used_value use)
false llv
in
if is_recursive then
let elt_thks =
Vector.init (Llvm.num_operands llv) ~f:(fun i ->
lazy (xlate_value x (Llvm.operand llv i)) )
in
struct_rec ~id:llv typ elt_thks
else
Exp.record typ
(Vector.init (Llvm.num_operands llv) ~f:(fun i ->
xlate_value x (Llvm.operand llv i) ))
| BlockAddress ->
let parent = find_name (Llvm.operand llv 0) in
let name = find_name (Llvm.operand llv 1) in
Exp.label ~parent ~name
| UndefValue ->
let typ = xlate_type x (Llvm.type_of llv) in
Exp.nondet typ (Llvm.string_of_llvalue llv)
| Instruction
( ( Trunc | ZExt | SExt | FPToUI | FPToSI | UIToFP | SIToFP
| FPTrunc | FPExt | PtrToInt | IntToPtr | BitCast | AddrSpaceCast
| Add | FAdd | Sub | FSub | Mul | FMul | UDiv | SDiv | FDiv | URem
| SRem | FRem | Shl | LShr | AShr | And | Or | Xor | ICmp | FCmp
| Select | GetElementPtr | ExtractElement | InsertElement
| ShuffleVector | ExtractValue | InsertValue ) as opcode ) ->
if inline || should_inline llv then xlate_opcode x llv opcode
else Exp.reg (xlate_name x llv)
| ConstantExpr -> xlate_opcode x llv (Llvm.constexpr_opcode llv)
| GlobalIFunc -> todo "ifuncs: %a" pp_llvalue llv ()
| Instruction (CatchPad | CleanupPad | CatchSwitch) ->
todo "windows exception handling: %a" pp_llvalue llv ()
| Instruction
( Invalid | Ret | Br | Switch | IndirectBr | Invalid2 | Unreachable
| Store | UserOp1 | UserOp2 | Fence | AtomicCmpXchg | AtomicRMW
| Resume | CleanupRet | CatchRet )
|NullValue | BasicBlock | InlineAsm | MDNode | MDString ->
fail "xlate_value: %a" pp_llvalue llv ()
in
Hashtbl.find_or_add memo_value (inline, llv) ~default:(fun () ->
[%Trace.call fun {pf} -> pf "%a" pp_llvalue llv]
;
xlate_value_ llv
|>
[%Trace.retn fun {pf} exp -> pf "%a" Exp.pp exp] )
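(** Translate the application of [opcode] in instruction or constant
    expression [llv] to an expression. *)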
and xlate_opcode : x -> Llvm.llvalue -> Llvm.Opcode.t -> Exp.t =
fun x llv opcode ->
[%Trace.call fun {pf} -> pf "%a" pp_llvalue llv]
;
let xlate_rand i = xlate_value x (Llvm.operand llv i) in
let typ = lazy (xlate_type x (Llvm.type_of llv)) in
let check_vector =
lazy
( if Poly.equal (Llvm.classify_type (Llvm.type_of llv)) Vector then
todo "vector operations: %a" pp_llvalue llv () )
in
let convert opcode =
let dst = Lazy.force typ in
let rand = Llvm.operand llv 0 in
let src = xlate_type x (Llvm.type_of rand) in
let arg = xlate_value x rand in
match (opcode : Llvm.Opcode.t) with
| Trunc -> Exp.signed (Typ.bit_size_of dst) arg ~to_:dst
| SExt -> Exp.signed (Typ.bit_size_of src) arg ~to_:dst
| ZExt -> Exp.unsigned (Typ.bit_size_of src) arg ~to_:dst
| (BitCast | AddrSpaceCast) when Typ.equal dst src -> arg
| FPToUI | FPToSI | UIToFP | SIToFP | FPTrunc | FPExt | PtrToInt
|IntToPtr | BitCast | AddrSpaceCast ->
Exp.convert src ~to_:dst arg
| _ -> fail "convert: %a" pp_llvalue llv ()
in
let binary (mk : ?typ:_ -> _) =
Lazy.force check_vector ;
let typ = xlate_type x (Llvm.type_of (Llvm.operand llv 0)) in
mk ~typ (xlate_rand 0) (xlate_rand 1)
in
let unordered_or mk =
binary (fun ?typ e f ->
Exp.or_ ~typ:Typ.bool (Exp.uno ?typ e f) (mk ?typ e f) )
in
( match opcode with
| Trunc | ZExt | SExt | FPToUI | FPToSI | UIToFP | SIToFP | FPTrunc
|FPExt | PtrToInt | IntToPtr | BitCast | AddrSpaceCast ->
convert opcode
| ICmp -> (
match Option.value_exn (Llvm.icmp_predicate llv) with
| Eq -> binary Exp.eq
| Ne -> binary Exp.dq
| Sgt -> binary Exp.gt
| Sge -> binary Exp.ge
| Slt -> binary Exp.lt
| Sle -> binary Exp.le
| Ugt -> binary Exp.ugt
| Uge -> binary Exp.uge
| Ult -> binary Exp.ult
| Ule -> binary Exp.ule )
| FCmp -> (
match Llvm.fcmp_predicate llv with
| None | Some False -> binary (fun ?typ:_ _ _ -> Exp.false_)
| Some Oeq -> binary Exp.eq
| Some Ogt -> binary Exp.gt
| Some Oge -> binary Exp.ge
| Some Olt -> binary Exp.lt
| Some Ole -> binary Exp.le
| Some One -> binary Exp.dq
| Some Ord -> binary Exp.ord
| Some Uno -> binary Exp.uno
| Some Ueq -> unordered_or Exp.eq
| Some Ugt -> unordered_or Exp.gt
| Some Uge -> unordered_or Exp.ge
| Some Ult -> unordered_or Exp.lt
| Some Ule -> unordered_or Exp.le
| Some Une -> unordered_or Exp.dq
| Some True -> binary (fun ?typ:_ _ _ -> Exp.true_) )
| Add | FAdd -> binary Exp.add
| Sub | FSub -> binary Exp.sub
| Mul | FMul -> binary Exp.mul
| SDiv | FDiv -> binary Exp.div
| UDiv -> binary Exp.udiv
| SRem | FRem -> binary Exp.rem
| URem -> binary Exp.urem
| Shl -> binary Exp.shl
| LShr -> binary Exp.lshr
| AShr -> binary Exp.ashr
| And -> binary Exp.and_
| Or -> binary Exp.or_
| Xor -> binary Exp.xor
| Select ->
let typ = xlate_type x (Llvm.type_of (Llvm.operand llv 1)) in
Exp.conditional ~typ ~cnd:(xlate_rand 0) ~thn:(xlate_rand 1)
~els:(xlate_rand 2)
| ExtractElement | InsertElement -> (
let typ =
let lltyp = Llvm.type_of (Llvm.operand llv 0) in
let llelt = Llvm.element_type lltyp in
let elt = xlate_type x llelt in
let len = Llvm.vector_size llelt in
let byts = size_of x lltyp in
let bits = bit_size_of x lltyp in
Typ.array ~elt ~len ~bits ~byts
in
let idx i =
match (xlate_rand i).desc with
| Integer {data} -> Z.to_int data
| _ -> todo "vector operations: %a" pp_llvalue llv ()
in
let rcd = xlate_rand 0 in
match opcode with
| ExtractElement -> Exp.select typ rcd (idx 1)
| InsertElement -> Exp.update typ ~rcd (idx 2) ~elt:(xlate_rand 1)
| _ -> assert false )
| ExtractValue | InsertValue ->
let agg = xlate_rand 0 in
let typ = xlate_type x (Llvm.type_of (Llvm.operand llv 0)) in
let indices = Llvm.indices llv in
let num = Array.length indices in
let rec xlate_indices i rcd typ =
let rcd_i, typ_i, upd =
match (typ : Typ.t) with
| Tuple {elts} | Struct {elts} ->
( Exp.select typ rcd indices.(i)
, Vector.get elts indices.(i)
, Exp.update typ ~rcd indices.(i) )
| Array {elt} ->
( Exp.select typ rcd indices.(i)
, elt
, Exp.update typ ~rcd indices.(i) )
| _ -> fail "xlate_value: %a" pp_llvalue llv ()
in
let update_or_return elt ret =
match[@warning "p"] opcode with
| InsertValue -> upd ~elt:(Lazy.force elt)
| ExtractValue -> ret
in
if i < num - 1 then
let elt = xlate_indices (i + 1) rcd_i typ_i in
update_or_return (lazy elt) elt
else
let elt = lazy (xlate_rand 1) in
update_or_return elt rcd_i
in
xlate_indices 0 agg typ
| GetElementPtr ->
if Poly.equal (Llvm.classify_type (Llvm.type_of llv)) Vector then
todo "vector operations: %a" pp_llvalue llv () ;
let len = Llvm.num_operands llv in
assert (len > 0 || invalid_llvm (Llvm.string_of_llvalue llv)) ;
if len = 1 then convert BitCast
else
let rec xlate_indices i =
[%Trace.call fun {pf} ->
pf "%i %a" i pp_llvalue (Llvm.operand llv i)]
;
let idx =
convert_to_siz
(xlate_type x (Llvm.type_of (Llvm.operand llv i)))
(xlate_rand i)
in
( if i = 1 then
let base = xlate_rand 0 in
let lltyp = Llvm.type_of (Llvm.operand llv 0) in
let llelt =
match Llvm.classify_type lltyp with
| Pointer -> Llvm.element_type lltyp
| _ -> fail "xlate_opcode: %i %a" i pp_llvalue llv ()
in
(* translate [gep t*, iN M] as [gep [1 x t]*, iN M] *)
(ptr_idx x ~ptr:base ~idx ~llelt, llelt)
else
let ptr, lltyp = xlate_indices (i - 1) in
match Llvm.classify_type lltyp with
| Array | Vector ->
let llelt = Llvm.element_type lltyp in
(ptr_idx x ~ptr ~idx ~llelt, llelt)
| Struct ->
let fld =
match
Option.bind ~f:Int64.to_int
(Llvm.int64_of_const (Llvm.operand llv i))
with
| Some n -> n
| None -> fail "xlate_opcode: %i %a" i pp_llvalue llv ()
in
let llelt = (Llvm.struct_element_types lltyp).(fld) in
(ptr_fld x ~ptr ~fld ~lltyp, llelt)
| _ -> fail "xlate_opcode: %i %a" i pp_llvalue llv () )
|>
[%Trace.retn fun {pf} (exp, llt) ->
pf "%a %a" Exp.pp exp pp_lltype llt]
in
fst (xlate_indices (len - 1))
| ShuffleVector -> (
(* translate shufflevector <N x t> %x, _, <N x i32> zeroinitializer to
%x *)
let exp = xlate_value x (Llvm.operand llv 0) in
let exp_typ = xlate_type x (Llvm.type_of (Llvm.operand llv 0)) in
let llmask = Llvm.operand llv 2 in
let mask_typ = xlate_type x (Llvm.type_of llmask) in
match (exp_typ, mask_typ) with
| Array {len= m}, Array {len= n} when m = n && Llvm.is_null llmask ->
exp
| _ -> todo "vector operations: %a" pp_llvalue llv () )
| Invalid | Ret | Br | Switch | IndirectBr | Invoke | Invalid2
|Unreachable | Alloca | Load | Store | PHI | Call | UserOp1 | UserOp2
|Fence | AtomicCmpXchg | AtomicRMW | Resume | LandingPad | CleanupRet
|CatchRet | CatchPad | CleanupPad | CatchSwitch | VAArg ->
fail "xlate_opcode: %a" pp_llvalue llv () )
|>
[%Trace.retn fun {pf} exp -> pf "%a" Exp.pp exp]
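(** Translate an LLVM global to an LLAIR global, memoized in [memo_global];
    initializers of global variables are also translated. *)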
and xlate_global : x -> Llvm.llvalue -> Global.t =
fun x llg ->
Hashtbl.find_or_add memo_global llg ~default:(fun () ->
[%Trace.call fun {pf} -> pf "%a" pp_llvalue llg]
;
let g = xlate_name x ~global:() llg in
let llt = Llvm.type_of llg in
let typ = xlate_type x llt in
let loc = find_loc llg in
(* add to tbl without initializer in case of recursive occurrences in
its own initializer *)
Hashtbl.set memo_global ~key:llg ~data:(Global.mk g typ loc) ;
let init =
match Llvm.classify_value llg with
| GlobalVariable ->
Option.map ~f:(xlate_value x) (Llvm.global_initializer llg)
| _ -> None
in
Global.mk ?init g typ loc
|>
[%Trace.retn fun {pf} -> pf "%a" Global.pp_defn] )
type pop_thunk = Loc.t -> Llair.inst list
let pop_stack_frame_of_function :
x -> Llvm.llvalue -> Llvm.llbasicblock -> pop_thunk =
fun x func entry_blk ->
let append_stack_regs blk regs =
Llvm.fold_right_instrs
(fun instr regs ->
match Llvm.instr_opcode instr with
| Alloca -> xlate_name x instr :: regs
| _ -> regs )
blk regs
in
let entry_regs = append_stack_regs entry_blk [] in
Llvm.iter_blocks
(fun blk ->
if not (Poly.equal entry_blk blk) then
Llvm.iter_instrs
(fun instr ->
match Llvm.instr_opcode instr with
| Alloca ->
warn "stack allocation after function entry:@ %a" Loc.pp
(find_loc instr) ()
| _ -> () )
blk )
func ;
let pop retn_loc =
List.map entry_regs ~f:(fun reg ->
Llair.Inst.free ~ptr:(Exp.reg reg) ~loc:retn_loc )
in
pop
(** construct the types involved in landingpads: i32, std::type_info*, and
    __cxa_exception *)
let landingpad_typs : x -> Llvm.llvalue -> Typ.t * Typ.t * Llvm.lltype =
fun x instr ->
let llt = Llvm.type_of instr in
let i32 = i32 x in
if
not
( Poly.(Llvm.classify_type llt = Struct)
&&
let llelts = Llvm.struct_element_types llt in
Array.length llelts = 2
&& Poly.(llelts.(0) = Llvm.pointer_type (Llvm.i8_type x.llcontext))
&& Poly.(llelts.(1) = Llvm.i32_type x.llcontext) )
then
todo "landingpad of type other than {i8*, i32}: %a" pp_llvalue instr () ;
let llcontext =
Llvm.(
module_context (global_parent (block_parent (instr_parent instr))))
in
let llpi8 = Llvm.(pointer_type (integer_type llcontext 8)) in
let ti = Llvm.(named_struct_type llcontext "class.std::type_info") in
let tip = Llvm.pointer_type ti in
let void = Llvm.void_type llcontext in
let dtor = Llvm.(pointer_type (function_type void [|llpi8|])) in
let cxa_exception = Llvm.struct_type llcontext [|tip; dtor|] in
(i32, xlate_type x tip, cxa_exception)
let exception_typs =
let pi8 = Typ.pointer ~elt:Typ.byt in
let i32 = Typ.integer ~bits:32 ~byts:4 in
let exc =
Typ.tuple ~packed:false (Vector.of_array [|pi8; i32|]) ~bits:96 ~byts:12
in
(pi8, i32, exc)
(** Translate a control transfer from instruction [instr] to block [dst] to
a jump, if necessary by extending [blocks] with a trampoline containing
the PHIs of [dst] translated to a move. *)
let xlate_jump :
x
-> ?reg_exps:(Reg.t * Exp.t) list
-> Llvm.llvalue
-> Llvm.llbasicblock
-> Loc.t
-> Llair.block list
-> Llair.jump * Llair.block list =
fun x ?(reg_exps = []) instr dst loc blocks ->
let src = Llvm.instr_parent instr in
let rec xlate_jump_ reg_exps (pos : _ Llvm.llpos) =
match pos with
| Before dst_instr -> (
match Llvm.instr_opcode dst_instr with
| PHI ->
let reg_exp =
List.find_map_exn (Llvm.incoming dst_instr)
~f:(fun (arg, pred) ->
if Poly.equal pred src then
Some (xlate_name x dst_instr, xlate_value x arg)
else None )
in
xlate_jump_ (reg_exp :: reg_exps) (Llvm.instr_succ dst_instr)
| _ -> reg_exps )
| At_end blk -> fail "xlate_jump: %a" pp_llblock blk ()
in
let dst_lbl = label_of_block dst in
let jmp = Llair.Jump.mk dst_lbl in
match xlate_jump_ reg_exps (Llvm.instr_begin dst) with
| [] -> (jmp, blocks)
| reg_exps ->
let mov =
Llair.Inst.move ~reg_exps:(Vector.of_list_rev reg_exps) ~loc
in
let lbl = find_name instr ^ ".jmp." ^ dst_lbl in
let blk =
Llair.Block.mk ~lbl
~cmnd:(Vector.of_array [|mov|])
~term:(Llair.Term.goto ~dst:jmp ~loc)
in
let blocks =
match List.find blocks ~f:(fun b -> String.equal lbl b.lbl) with
| None -> blk :: blocks
| Some blk0 ->
assert (Llair.Block.equal blk0 blk) ;
blocks
in
(Llair.Jump.mk lbl, blocks)
(** An LLVM instruction is translated to a sequence of LLAIR instructions
    and a terminator, plus some additional blocks to which it may refer
    (that is, essentially a function body). These are needed since LLVM and
    LLAIR blocks are not in 1:1 correspondence. *)
type code = Llair.inst list * Llair.term * Llair.block list
let pp_code fs (insts, term, blocks) =
Format.fprintf fs "@[<hv>@,@[%a%t@]%t@[<hv>%a@]@]"
(List.pp "@ " Llair.Inst.pp)
insts
(fun fs ->
match term with
| Llair.Unreachable -> ()
| _ ->
Format.fprintf fs "%t%a"
(fun fs ->
if List.is_empty insts then () else Format.fprintf fs "@ " )
Llair.Term.pp term )
(fun fs -> if List.is_empty blocks then () else Format.fprintf fs "@\n")
(List.pp "@ " Llair.Block.pp)
blocks
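(** Translate the callee operand of a call or invoke to the expression used
    as the call target. *)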
let rec xlate_func_name x llv =
match Llvm.classify_value llv with
| Function | GlobalVariable -> Exp.reg (xlate_name x ~global:() llv)
| ConstantExpr -> xlate_opcode x llv (Llvm.constexpr_opcode llv)
| Argument | Instruction _ -> xlate_value x llv
| GlobalAlias -> xlate_func_name x (Llvm.operand llv 0)
| GlobalIFunc -> todo "ifunc: %a" pp_llvalue llv ()
| InlineAsm -> todo "inline asm: %a" pp_llvalue llv ()
| ConstantPointerNull -> todo "call null: %a" pp_llvalue llv ()
| _ -> todo "function kind in %a" pp_llvalue llv ()
let ignored_callees = Hash_set.create (module String)
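(** Translate instruction [instr] to LLAIR. Non-terminator instructions
    pass [continue] a function from the translation of the rest of the
    block to the resulting code; terminators emit their code directly. *)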
let xlate_instr :
pop_thunk
-> x
-> Llvm.llvalue
-> ((Llair.inst list * Llair.term -> code) -> code)
-> code =
fun pop x instr continue ->
[%Trace.call fun {pf} -> pf "%a" pp_llvalue instr]
;
let continue insts_term_to_code =
[%Trace.retn
fun {pf} () ->
pf "%a" pp_code (insts_term_to_code ([], Llair.Term.unreachable))]
() ;
continue insts_term_to_code
in
let nop () = continue (fun (insts, term) -> (insts, term, [])) in
let emit_inst inst =
continue (fun (insts, term) -> (inst :: insts, term, []))
in
let emit_term ?(prefix = []) ?(blocks = []) term =
[%Trace.retn fun {pf} () -> pf "%a" pp_code (prefix, term, blocks)] () ;
(prefix, term, blocks)
in
let name = find_name instr in
let loc = find_loc instr in
let inline_or_move xlate =
if should_inline instr then nop ()
else
let reg = xlate_name x instr in
let exp = xlate instr in
let reg_exps = Vector.of_array [|(reg, exp)|] in
emit_inst (Llair.Inst.move ~reg_exps ~loc)
in
let opcode = Llvm.instr_opcode instr in
match opcode with
| Load ->
let reg = xlate_name x instr in
let len = Exp.size_of (Exp.reg reg) in
let ptr = xlate_value x (Llvm.operand instr 0) in
emit_inst (Llair.Inst.load ~reg ~ptr ~len ~loc)
| Store ->
let exp = xlate_value x (Llvm.operand instr 0) in
let len = Exp.size_of exp in
let ptr = xlate_value x (Llvm.operand instr 1) in
emit_inst (Llair.Inst.store ~ptr ~exp ~len ~loc)
| Alloca ->
let reg = xlate_name x instr in
let rand = Llvm.operand instr 0 in
let num =
convert_to_siz
(xlate_type x (Llvm.type_of rand))
(xlate_value x rand)
in
assert (Poly.(Llvm.classify_type (Llvm.type_of instr) = Pointer)) ;
let len = Exp.size_of (Exp.reg reg) in
emit_inst (Llair.Inst.alloc ~reg ~num ~len ~loc)
| Call -> (
let maybe_llfunc = Llvm.operand instr (Llvm.num_operands instr - 1) in
let lltyp = Llvm.type_of maybe_llfunc in
assert (Poly.(Llvm.classify_type lltyp = Pointer)) ;
let llfunc =
let llfunc_valuekind = Llvm.classify_value maybe_llfunc in
match llfunc_valuekind with
| Function | Instruction _ | InlineAsm | Argument -> maybe_llfunc
| ConstantExpr -> (
match Llvm.constexpr_opcode maybe_llfunc with
| BitCast -> Llvm.operand maybe_llfunc 0
| _ ->
todo "opcode kind in call instruction %a" pp_llvalue
maybe_llfunc () )
| _ ->
todo "operand kind in call instruction %a" pp_llvalue
maybe_llfunc ()
in
let fname = Llvm.value_name llfunc in
let skip msg =
( match Hash_set.strict_add ignored_callees fname with
| Ok () -> warn "ignoring uninterpreted %s %s" msg fname ()
| Error _ -> () ) ;
let reg = xlate_name_opt x instr in
emit_inst (Llair.Inst.nondet ~reg ~msg:fname ~loc)
in
(* intrinsics *)
match xlate_intrinsic_exp fname with
| Some intrinsic -> inline_or_move (intrinsic x)
| None -> (
match String.split fname ~on:'.' with
| ["__llair_throw"] ->
let exc = xlate_value x (Llvm.operand instr 0) in
emit_term ~prefix:(pop loc) (Llair.Term.throw ~exc ~loc)
| ["__llair_alloc" (* void* __llair_alloc(unsigned size) *)] ->
let reg = xlate_name x instr in
let num_operand = Llvm.operand instr 0 in
let num =
convert_to_siz
(xlate_type x (Llvm.type_of num_operand))
(xlate_value x num_operand)
in
let len = Exp.integer Typ.siz (Z.of_int 1) in
emit_inst (Llair.Inst.alloc ~reg ~num ~len ~loc)
| ["_Znwm" (* operator new(unsigned long num) *)]
|[ "_ZnwmSt11align_val_t"
(* operator new(unsigned long, std::align_val_t) *) ] ->
let reg = xlate_name x instr in
let num = xlate_value x (Llvm.operand instr 0) in
let len = Exp.size_of (Exp.reg reg) in
emit_inst (Llair.Inst.alloc ~reg ~num ~len ~loc)
| ["_ZdlPv" (* operator delete(void* ptr) *)]
|[ "_ZdlPvSt11align_val_t"
(* operator delete(void* ptr, std::align_val_t) *) ]
|[ "_ZdlPvmSt11align_val_t"
(* operator delete(void* ptr, unsigned long, std::align_val_t) *)
]
|["free" (* void free(void* ptr) *)] ->
let ptr = xlate_value x (Llvm.operand instr 0) in
emit_inst (Llair.Inst.free ~ptr ~loc)
| "llvm" :: "memset" :: _ ->
let dst = xlate_value x (Llvm.operand instr 0) in
let byt = xlate_value x (Llvm.operand instr 1) in
let len = xlate_value x (Llvm.operand instr 2) in
emit_inst (Llair.Inst.memset ~dst ~byt ~len ~loc)
| "llvm" :: "memcpy" :: _ ->
let dst = xlate_value x (Llvm.operand instr 0) in
let src = xlate_value x (Llvm.operand instr 1) in
let len = xlate_value x (Llvm.operand instr 2) in
emit_inst (Llair.Inst.memcpy ~dst ~src ~len ~loc)
| "llvm" :: "memmove" :: _ ->
let dst = xlate_value x (Llvm.operand instr 0) in
let src = xlate_value x (Llvm.operand instr 1) in
let len = xlate_value x (Llvm.operand instr 2) in
emit_inst (Llair.Inst.memmov ~dst ~src ~len ~loc)
| ["abort"] | ["llvm"; "trap"] -> emit_inst (Llair.Inst.abort ~loc)
(* dropped / handled elsewhere *)
| ["llvm"; "dbg"; ("declare" | "value")]
|"llvm" :: ("lifetime" | "invariant") :: ("start" | "end") :: _ ->
nop ()
(* unimplemented *)
| ["llvm"; ("stacksave" | "stackrestore")] ->
skip "dynamic stack deallocation"
| "llvm" :: "coro" :: _ ->
todo "coroutines:@ %a" pp_llvalue instr ()
| "llvm" :: "experimental" :: "gc" :: "statepoint" :: _ ->
todo "statepoints:@ %a" pp_llvalue instr ()
| ["llvm"; ("va_start" | "va_copy" | "va_end")] ->
skip "variadic function intrinsic"
| "llvm" :: _ -> skip "intrinsic"
| _ when Poly.equal (Llvm.classify_value llfunc) InlineAsm ->
skip "inline asm"
(* general function call that may not throw *)
| _ ->
let callee = xlate_func_name x llfunc in
let typ = xlate_type x lltyp in
let lbl = name ^ ".ret" in
let call =
let actuals =
let num_actuals =
if not (Llvm.is_var_arg (Llvm.element_type lltyp)) then
Llvm.num_arg_operands instr
else
let fname = Llvm.value_name llfunc in
( match Hash_set.strict_add ignored_callees fname with
| Ok () when not (Llvm.is_declaration llfunc) ->
warn
"ignoring variable arguments to variadic \
function: %a"
Exp.pp callee ()
| _ -> () ) ;
let llfty = Llvm.element_type lltyp in
( match Llvm.classify_type llfty with
| Function -> ()
| _ ->
fail "called function not of function type: %a"
pp_llvalue instr () ) ;
Array.length (Llvm.param_types llfty)
in
List.rev_init num_actuals ~f:(fun i ->
xlate_value x (Llvm.operand instr i) )
in
let areturn = xlate_name_opt x instr in
let return = Llair.Jump.mk lbl in
Llair.Term.call ~callee ~typ ~actuals ~areturn ~return
~throw:None ~loc
in
continue (fun (insts, term) ->
let cmnd = Vector.of_list insts in
([], call, [Llair.Block.mk ~lbl ~cmnd ~term]) ) ) )
| Invoke -> (
let llfunc = Llvm.operand instr (Llvm.num_operands instr - 3) in
let lltyp = Llvm.type_of llfunc in
assert (Poly.(Llvm.classify_type lltyp = Pointer)) ;
let fname = Llvm.value_name llfunc in
let return_blk = Llvm.get_normal_dest instr in
let unwind_blk = Llvm.get_unwind_dest instr in
let num_actuals =
if not (Llvm.is_var_arg (Llvm.element_type lltyp)) then
Llvm.num_arg_operands instr
else (
( match Hash_set.strict_add ignored_callees fname with
| Ok () when not (Llvm.is_declaration llfunc) ->
warn "ignoring variable arguments to variadic function: %a"
Global.pp (xlate_global x llfunc) ()
| _ -> () ) ;
assert (Poly.(Llvm.classify_type lltyp = Pointer)) ;
Array.length (Llvm.param_types (Llvm.element_type lltyp)) )
in
(* intrinsics *)
match String.split fname ~on:'.' with
| _ when Option.is_some (xlate_intrinsic_exp fname) ->
let dst, blocks = xlate_jump x instr return_blk loc [] in
emit_term (Llair.Term.goto ~dst ~loc) ~blocks
| ["__llair_throw"] ->
let dst, blocks = xlate_jump x instr unwind_blk loc [] in
emit_term (Llair.Term.goto ~dst ~loc) ~blocks
| ["abort"] ->
emit_term ~prefix:[Llair.Inst.abort ~loc] Llair.Term.unreachable
| ["_Znwm" (* operator new(unsigned long num) *)]
|[ "_ZnwmSt11align_val_t"
(* operator new(unsigned long num, std::align_val_t) *) ]
when num_actuals > 0 ->
let reg = xlate_name x instr in
let num = xlate_value x (Llvm.operand instr 0) in
let len = Exp.size_of (Exp.reg reg) in
let dst, blocks = xlate_jump x instr return_blk loc [] in
emit_term
~prefix:[Llair.Inst.alloc ~reg ~num ~len ~loc]
(Llair.Term.goto ~dst ~loc)
~blocks
(* unimplemented *)
| "llvm" :: "experimental" :: "gc" :: "statepoint" :: _ ->
todo "statepoints:@ %a" pp_llvalue instr ()
(* general function call that may throw *)
| _ ->
let callee = xlate_func_name x llfunc in
let typ = xlate_type x (Llvm.type_of llfunc) in
let actuals =
List.rev_init num_actuals ~f:(fun i ->
xlate_value x (Llvm.operand instr i) )
in
let areturn = xlate_name_opt x instr in
let return, blocks = xlate_jump x instr return_blk loc [] in
let throw, blocks = xlate_jump x instr unwind_blk loc blocks in
let throw = Some throw in
emit_term
(Llair.Term.call ~callee ~typ ~actuals ~areturn ~return ~throw
~loc)
~blocks )
| Ret ->
let exp =
if Llvm.num_operands instr = 0 then None
else Some (xlate_value x (Llvm.operand instr 0))
in
emit_term ~prefix:(pop loc) (Llair.Term.return ~exp ~loc)
| Br -> (
match Option.value_exn (Llvm.get_branch instr) with
| `Unconditional blk ->
let dst, blocks = xlate_jump x instr blk loc [] in
emit_term (Llair.Term.goto ~dst ~loc) ~blocks
| `Conditional (cnd, thn, els) ->
let key = xlate_value x cnd in
let thn, blocks = xlate_jump x instr thn loc [] in
let els, blocks = xlate_jump x instr els loc blocks in
emit_term (Llair.Term.branch ~key ~nzero:thn ~zero:els ~loc) ~blocks
)
| Switch ->
let key = xlate_value x (Llvm.operand instr 0) in
let cases, blocks =
let num_cases = (Llvm.num_operands instr / 2) - 1 in
let rec xlate_cases i blocks =
if i <= num_cases then
let idx = Llvm.operand instr (2 * i) in
let blk =
Llvm.block_of_value (Llvm.operand instr ((2 * i) + 1))
in
let num = xlate_value x idx in
let jmp, blocks = xlate_jump x instr blk loc blocks in
let rest, blocks = xlate_cases (i + 1) blocks in
((num, jmp) :: rest, blocks)
else ([], blocks)
in
xlate_cases 1 []
in
let tbl = Vector.of_list cases in
let blk = Llvm.block_of_value (Llvm.operand instr 1) in
let els, blocks = xlate_jump x instr blk loc blocks in
emit_term (Llair.Term.switch ~key ~tbl ~els ~loc) ~blocks
| IndirectBr ->
let ptr = xlate_value x (Llvm.operand instr 0) in
let num_dests = Llvm.num_operands instr - 1 in
let lldests, blocks =
let rec dests i blocks =
if i <= num_dests then
let v = Llvm.operand instr i in
let blk = Llvm.block_of_value v in
let jmp, blocks = xlate_jump x instr blk loc blocks in
let rest, blocks = dests (i + 1) blocks in
(jmp :: rest, blocks)
else ([], blocks)
in
dests 1 []
in
let tbl = Vector.of_list lldests in
emit_term (Llair.Term.iswitch ~ptr ~tbl ~loc) ~blocks
| LandingPad ->
(* Translate the clauses to code to load the type_info from
   the thrown exception, and test the type_info against the clauses,
   eventually jumping to the handler code following the landingpad,
   passing a value for the selector which the handler code tests to
   e.g. either cleanup or rethrow. *)
let i32, tip, cxa_exception = landingpad_typs x instr in
let pi8, _, exc_typ = exception_typs in
let exc = Exp.reg (Reg.program pi8 (find_name instr ^ ".exc")) in
let ti = Reg.program tip (name ^ ".ti") in
(* std::type_info* ti = ((__cxa_exception* )exc - 1)->exceptionType *)
let load_ti =
let typ = cxa_exception in
(* field number of the exceptionType member of __cxa_exception *)
let fld = 0 in
(* index from exc that points to header *)
let idx = Exp.integer Typ.siz Z.minus_one in
let ptr =
ptr_fld x
~ptr:(ptr_idx x ~ptr:exc ~idx ~llelt:typ)
~fld ~lltyp:typ
in
let len = Exp.integer Typ.siz (Z.of_int (size_of x typ)) in
Llair.Inst.load ~reg:ti ~ptr ~len ~loc
in
let ti = Exp.reg ti in
let typeid = xlate_llvm_eh_typeid_for x tip ti in
let lbl = name ^ ".unwind" in
let reg = xlate_name x instr in
let jump_unwind i sel rev_blocks =
let exp = Exp.record exc_typ (Vector.of_array [|exc; sel|]) in
let mov =
Llair.Inst.move ~reg_exps:(Vector.of_array [|(reg, exp)|]) ~loc
in
let lbl_i = lbl ^ "." ^ Int.to_string i in
let blk =
Llair.Block.mk ~lbl:lbl_i
~cmnd:(Vector.of_array [|mov|])
~term:(Llair.Term.goto ~dst:(Llair.Jump.mk lbl) ~loc)
in
(Llair.Jump.mk lbl_i, blk :: rev_blocks)
in
let goto_unwind i sel blocks =
let dst, blocks = jump_unwind i sel blocks in
(Llair.Term.goto ~dst ~loc, blocks)
in
let term_unwind, rev_blocks =
if Llvm.is_cleanup instr then
goto_unwind 0 (Exp.integer i32 Z.zero) []
else
let num_clauses = Llvm.num_operands instr in
let lbl i = name ^ "." ^ Int.to_string i in
let jump i = Llair.Jump.mk (lbl i) in
let block i term =
Llair.Block.mk ~lbl:(lbl i) ~cmnd:Vector.empty ~term
in
let match_filter i rev_blocks =
jump_unwind i
(Exp.sub ~typ:i32 (Exp.integer i32 Z.zero) typeid)
rev_blocks
in
let xlate_clause i rev_blocks =
let clause = Llvm.operand instr i in
let num_tis = Llvm.num_operands clause in
if num_tis = 0 then
let dst, rev_blocks = match_filter i rev_blocks in
(Llair.Term.goto ~dst ~loc, rev_blocks)
else
match Llvm.classify_type (Llvm.type_of clause) with
| Array (* filter *) -> (
match Llvm.classify_value clause with
| ConstantArray ->
let rec xlate_filter i =
let tiI = xlate_value x (Llvm.operand clause i) in
if i < num_tis - 1 then
Exp.and_ ~typ:Typ.bool (Exp.dq ~typ:tip tiI ti)
(xlate_filter (i + 1))
else Exp.dq ~typ:tip tiI ti
in
let key = xlate_filter 0 in
let nzero, rev_blocks = match_filter i rev_blocks in
( Llair.Term.branch ~loc ~key ~nzero ~zero:(jump (i + 1))
, rev_blocks )
| _ -> fail "xlate_instr: %a" pp_llvalue instr () )
| _ (* catch *) ->
let typ = xlate_type x (Llvm.type_of clause) in
let clause = xlate_value x clause in
let key =
Exp.or_ ~typ:Typ.bool
(Exp.eq ~typ clause Exp.null)
(Exp.eq ~typ clause ti)
in
let nzero, rev_blocks = jump_unwind i typeid rev_blocks in
( Llair.Term.branch ~loc ~key ~nzero ~zero:(jump (i + 1))
, rev_blocks )
in
let rec rev_blocks i z =
if i < num_clauses then
let term, z = xlate_clause i z in
rev_blocks (i + 1) (block i term :: z)
else block i Llair.Term.unreachable :: z
in
xlate_clause 0 (rev_blocks 1 [])
in
continue (fun (insts, term) ->
( [load_ti]
, term_unwind
, List.rev_append rev_blocks
[Llair.Block.mk ~lbl ~cmnd:(Vector.of_list insts) ~term] ) )
| Resume ->
let llrcd = Llvm.operand instr 0 in
let typ = xlate_type x (Llvm.type_of llrcd) in
let rcd = xlate_value x llrcd in
let exc = Exp.select typ rcd 0 in
emit_term ~prefix:(pop loc) (Llair.Term.throw ~exc ~loc)
| Unreachable -> emit_term Llair.Term.unreachable
| Trunc | ZExt | SExt | FPToUI | FPToSI | UIToFP | SIToFP | FPTrunc
|FPExt | PtrToInt | IntToPtr | BitCast | AddrSpaceCast | Add | FAdd
|Sub | FSub | Mul | FMul | UDiv | SDiv | FDiv | URem | SRem | FRem
|Shl | LShr | AShr | And | Or | Xor | ICmp | FCmp | Select
|GetElementPtr | ExtractElement | InsertElement | ShuffleVector
|ExtractValue | InsertValue ->
inline_or_move (xlate_value ~inline:true x)
| VAArg ->
let reg = xlate_name_opt x instr in
warn "variadic function argument: %a" Loc.pp loc () ;
emit_inst (Llair.Inst.nondet ~reg ~msg:"vaarg" ~loc)
| CleanupRet | CatchRet | CatchPad | CleanupPad | CatchSwitch ->
todo "windows exception handling: %a" pp_llvalue instr ()
| Fence | AtomicCmpXchg | AtomicRMW ->
fail "xlate_instr: %a" pp_llvalue instr ()
| PHI | Invalid | Invalid2 | UserOp1 | UserOp2 -> assert false
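(** Position of the first non-PHI instruction of [blk]. PHIs are translated
    by [xlate_jump] at the incoming edges. *)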
let skip_phis : Llvm.llbasicblock -> _ Llvm.llpos =
fun blk ->
let rec skip_phis_ (pos : _ Llvm.llpos) =
match pos with
| Before instr -> (
match Llvm.instr_opcode instr with
| PHI -> skip_phis_ (Llvm.instr_succ instr)
| _ -> pos )
| _ -> pos
in
skip_phis_ (Llvm.instr_begin blk)
let rec xlate_instrs : pop_thunk -> x -> _ Llvm.llpos -> code =
fun pop x -> function
| Before instrI ->
xlate_instr pop x instrI (fun xlate_instrI ->
let instrJ = Llvm.instr_succ instrI in
let instsJ, termJ, blocksJN = xlate_instrs pop x instrJ in
let instsI, termI, blocksI = xlate_instrI (instsJ, termJ) in
(instsI, termI, blocksI @ blocksJN) )
| At_end blk -> fail "xlate_instrs: %a" pp_llblock blk ()
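(** Translate the non-PHI instructions of [blk] to a block with the label
    of [blk], together with any additional blocks the translation creates. *)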
let xlate_block : pop_thunk -> x -> Llvm.llbasicblock -> Llair.block list =
fun pop x blk ->
[%Trace.call fun {pf} -> pf "%a" pp_llblock blk]
;
let lbl = label_of_block blk in
let pos = skip_phis blk in
let insts, term, blocks = xlate_instrs pop x pos in
Llair.Block.mk ~lbl ~cmnd:(Vector.of_list insts) ~term :: blocks
|>
[%Trace.retn fun {pf} blocks -> pf "%s" (List.hd_exn blocks).Llair.lbl]
let report_undefined func name =
if Option.is_some (Llvm.use_begin func) then
[%Trace.info "undefined function: %a" Global.pp name]
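(** Translate function [llf] to an LLAIR function: formal parameters,
    return/throw registers, entry block and CFG. Functions without a body
    are translated to undefined functions. *)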
let xlate_function : x -> Llvm.llvalue -> Llair.func =
fun x llf ->
[%Trace.call fun {pf} -> pf "%a" pp_llvalue llf]
;
let name = xlate_global x llf in
let formals =
Llvm.fold_left_params
(fun rev_args param -> xlate_name x param :: rev_args)
[] llf
in
let freturn =
match name.typ with
| Pointer {elt= Function {return= Some typ; _}} ->
Some (Reg.program typ "freturn")
| _ -> None
in
let _, _, exc_typ = exception_typs in
let fthrow = Reg.program exc_typ "fthrow" in
( match Llvm.block_begin llf with
| Before entry_blk ->
let pop = pop_stack_frame_of_function x llf entry_blk in
let[@warning "p"] (entry_block :: entry_blocks) =
xlate_block pop x entry_blk
in
let entry =
let {Llair.lbl; cmnd; term} = entry_block in
Llair.Block.mk ~lbl ~cmnd ~term
in
let cfg =
let rec trav_blocks rev_cfg prev =
match Llvm.block_succ prev with
| Before blk ->
trav_blocks
(List.rev_append (xlate_block pop x blk) rev_cfg)
blk
| At_end _ -> Vector.of_list_rev rev_cfg
in
trav_blocks (List.rev entry_blocks) entry_blk
in
Llair.Func.mk ~name ~formals ~freturn ~fthrow ~entry ~cfg
| At_end _ ->
report_undefined llf name ;
Llair.Func.mk_undefined ~name ~formals ~freturn ~fthrow )
|>
[%Trace.retn fun {pf} -> pf "@\n%a" Llair.Func.pp]
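(** Run LLVM optimization passes to simplify the module before translation;
    when [internalize] is set, symbols other than the configured entry
    points are internalized so that global DCE can remove unreachable
    definitions. *)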
let transform ~internalize : Llvm.llmodule -> unit =
fun llmodule ->
let pm = Llvm.PassManager.create () in
let entry_points = Config.find_list "entry-points" in
if internalize then
Llvm_ipo.add_internalize_predicate pm (fun fn ->
List.exists entry_points ~f:(String.equal fn) ) ;
Llvm_ipo.add_global_dce pm ;
Llvm_ipo.add_global_optimizer pm ;
Llvm_ipo.add_merge_functions pm ;
Llvm_ipo.add_constant_merge pm ;
Llvm_ipo.add_argument_promotion pm ;
Llvm_ipo.add_ipsccp pm ;
Llvm_scalar_opts.add_memory_to_register_promotion pm ;
Llvm_scalar_opts.add_dce pm ;
Llvm_ipo.add_global_dce pm ;
Llvm_ipo.add_dead_arg_elimination pm ;
Llvm_scalar_opts.add_lower_atomic pm ;
Llvm_scalar_opts.add_scalar_repl_aggregation pm ;
Llvm_scalar_opts.add_scalarizer pm ;
Llvm_scalar_opts.add_unify_function_exit_nodes pm ;
Llvm_scalar_opts.add_cfg_simplification pm ;
Llvm.PassManager.run_module llmodule pm |> (ignore : bool -> _) ;
Llvm.PassManager.dispose pm
let read_and_parse llcontext bc_file =
[%Trace.call fun {pf} -> pf "%s" bc_file]
;
let llmemorybuffer =
try Llvm.MemoryBuffer.of_file bc_file
with Llvm.IoError msg -> fail "%s: %s" bc_file msg ()
in
( try Llvm_irreader.parse_ir llcontext llmemorybuffer
with Llvm_irreader.Error msg -> invalid_llvm msg )
|>
[%Trace.retn fun {pf} _ -> pf ""]
let link_in : Llvm.llcontext -> Llvm.lllinker -> string -> unit =
fun llcontext link_ctx bc_file ->
Llvm_linker.link_in link_ctx (read_and_parse llcontext bc_file)
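(** Check that the sizes the LLVM data layout assigns to i1, i8, i32, i64
    and pointers agree with the corresponding LLAIR types. *)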
let check_datalayout llcontext lldatalayout =
let check_size llt typ =
let llsiz =
Int64.to_int_exn (Llvm_target.DataLayout.abi_size llt lldatalayout)
in
let siz = Typ.size_of typ in
if llsiz != siz then
todo "size_of %a = %i != %i" Typ.pp typ llsiz siz ()
in
check_size (Llvm.i1_type llcontext) Typ.bool ;
check_size (Llvm.i8_type llcontext) Typ.byt ;
check_size (Llvm.i32_type llcontext) Typ.int ;
check_size (Llvm.i64_type llcontext) Typ.siz ;
check_size
(Llvm_target.DataLayout.intptr_type llcontext lldatalayout)
Typ.ptr
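(** Parse and link the input bitcode files (and optional model files),
    verify and simplify the module, and translate its globals and functions
    to an LLAIR program. *)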
let translate ~models ~fuzzer ~internalize : string list -> Llair.t =
fun inputs ->
[%Trace.call fun {pf} ->
pf "%a" (List.pp "@ " Format.pp_print_string) inputs]
;
Llvm.install_fatal_error_handler invalid_llvm ;
let llcontext = Llvm.global_context () in
let input, inputs = List.pop_exn inputs in
let llmodule = read_and_parse llcontext input in
let link_ctx = Llvm_linker.get_linker llmodule in
List.iter ~f:(link_in llcontext link_ctx) inputs ;
let link_model_file name =
Llvm_linker.link_in link_ctx
(Llvm_irreader.parse_ir llcontext
(Llvm.MemoryBuffer.of_string (Option.value_exn (Model.read name))))
in
if models then link_model_file "/cxxabi.bc" ;
if fuzzer then link_model_file "/lib_fuzzer_main.bc" ;
Llvm_linker.linker_dispose link_ctx ;
assert (
Llvm_analysis.verify_module llmodule |> Option.for_all ~f:invalid_llvm
) ;
transform ~internalize llmodule ;
scan_names_and_locs llmodule ;
let lldatalayout =
Llvm_target.DataLayout.of_string (Llvm.data_layout llmodule)
in
check_datalayout llcontext lldatalayout ;
let x = {llcontext; llmodule; lldatalayout} in
let globals =
Llvm.fold_left_globals
(fun globals llg ->
if
Poly.equal (Llvm.linkage llg) Appending
&& Llvm.(array_length (element_type (type_of llg))) = 0
then globals
else xlate_global x llg :: globals )
[] llmodule
in
let functions =
Llvm.fold_left_functions
(fun functions llf ->
let name = Llvm.value_name llf in
if
String.is_prefix name ~prefix:"__llair_"
|| String.is_prefix name ~prefix:"llvm."
then functions
else xlate_function x llf :: functions )
[] llmodule
in
Hashtbl.clear sym_tbl ;
Hashtbl.clear scope_tbl ;
Hashtbl.clear anon_struct_name ;
Hashtbl.clear memo_type ;
Hashtbl.clear memo_global ;
Hashtbl.clear memo_value ;
Hash_set.clear ignored_callees ;
Llvm.dispose_module llmodule ;
Llair.mk ~globals ~functions
|>
[%Trace.retn fun {pf} _ ->
pf "number of globals %d, number of functions %d" (List.length globals)
(List.length functions)]
gather names and debug locations
escape to avoid clash with names of anonymous values
inline casts
translate shufflevector <N x t> %x, _, <N x i32> zeroinitializer to
%x
add to tbl without initializer in case of recursive occurrences in
its own initializer
* Translate a control transfer from instruction [instr] to block [dst] to
a jump, if necessary by extending [blocks] with a trampoline containing
the PHIs of [dst] translated to a move.
intrinsics
void* __llair_alloc(unsigned size)
operator new(unsigned long, std::align_val_t)
operator delete(void* ptr)
operator delete(void* ptr, std::align_val_t)
operator delete(void* ptr, unsigned long, std::align_val_t)
void free(void* ptr)
dropped / handled elsewhere
unimplemented
general function call that may not throw
intrinsics
operator new(unsigned long num, std::align_val_t)
unimplemented
general function call that may throw
std::type_info* ti = ((__cxa_exception* )exc - 1)->exceptionType
field number of the exceptionType member of __cxa_exception
filter
catch |
* Copyright ( c ) Facebook , Inc. and its affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
* Translate to LLAIR
let pp_lltype fs t = Format.pp_print_string fs (Llvm.string_of_lltype t)
let pp_llvalue fs t = Format.pp_print_string fs (Llvm.string_of_llvalue t)
let pp_llblock fs t =
Format.pp_print_string fs (Llvm.string_of_llvalue (Llvm.value_of_block t))
exception Invalid_llvm of string
let invalid_llvm : string -> 'a =
fun msg ->
let first_line =
Option.value_map ~default:msg ~f:(String.prefix msg)
(String.index msg '\n')
in
Format.printf "@\n%s@\n" msg ;
raise (Invalid_llvm first_line)
let sym_tbl : (Llvm.llvalue, string * Loc.t) Hashtbl.t =
Hashtbl.Poly.create ~size:4_194_304 ()
let scope_tbl :
( [`Fun of Llvm.llvalue | `Mod of Llvm.llmodule]
, int ref * (string, int) Hashtbl.t )
Hashtbl.t =
Hashtbl.Poly.create ~size:32_768 ()
open struct
open struct
let loc_of_global g =
Loc.mk
?dir:(Llvm.get_debug_loc_directory g)
?file:(Llvm.get_debug_loc_filename g)
~line:(Llvm.get_debug_loc_line g)
?col:None
let loc_of_function f =
Loc.mk
?dir:(Llvm.get_debug_loc_directory f)
?file:(Llvm.get_debug_loc_filename f)
~line:(Llvm.get_debug_loc_line f)
?col:None
let loc_of_instr i =
Loc.mk
?dir:(Llvm.get_debug_loc_directory i)
?file:(Llvm.get_debug_loc_filename i)
~line:(Llvm.get_debug_loc_line i)
~col:(Llvm.get_debug_loc_column i)
let add_sym llv loc =
let maybe_scope =
match Llvm.classify_value llv with
| Argument -> Some (`Fun (Llvm.param_parent llv))
| BasicBlock ->
Some (`Fun (Llvm.block_parent (Llvm.block_of_value llv)))
| Instruction _ ->
Some (`Fun (Llvm.block_parent (Llvm.instr_parent llv)))
| GlobalVariable | Function -> Some (`Mod (Llvm.global_parent llv))
| UndefValue -> None
| ConstantExpr -> None
| ConstantPointerNull -> None
| _ ->
warn "Unexpected type of llv, might crash: %a" pp_llvalue llv () ;
Some (`Mod (Llvm.global_parent llv))
in
match maybe_scope with
| None -> ()
| Some scope ->
let next, void_tbl =
Hashtbl.find_or_add scope_tbl scope ~default:(fun () ->
(ref 0, Hashtbl.Poly.create ()) )
in
let name =
match Llvm.classify_type (Llvm.type_of llv) with
| Void -> (
let fname =
match Llvm.classify_value llv with
| Instruction (Call | Invoke) -> (
match
Llvm.value_name
(Llvm.operand llv (Llvm.num_operands llv - 1))
with
| "" -> Int.to_string (!next - 1)
| s -> s )
| _ -> "void"
in
match Hashtbl.find void_tbl fname with
| None ->
Hashtbl.set void_tbl ~key:fname ~data:1 ;
fname ^ ".void"
| Some count ->
Hashtbl.set void_tbl ~key:fname ~data:(count + 1) ;
String.concat_array
[|fname; ".void."; Int.to_string count|] )
| _ -> (
match Llvm.value_name llv with
| "" ->
anonymous values take the next SSA name
let name = !next in
next := name + 1 ;
Int.to_string name
| name -> (
match Int.of_string name with
| _ ->
String.concat_array [|"\""; name; "\""|]
| exception _ -> name ) )
in
Hashtbl.set sym_tbl ~key:llv ~data:(name, loc)
end
let scan_names_and_locs : Llvm.llmodule -> unit =
fun m ->
let scan_global g = add_sym g (loc_of_global g) in
let scan_instr i =
let loc = loc_of_instr i in
add_sym i loc ;
match Llvm.instr_opcode i with
| Call -> (
match Llvm.(value_name (operand i (num_arg_operands i))) with
| "llvm.dbg.declare" ->
let md = Llvm.(get_mdnode_operands (operand i 0)) in
if not (Array.is_empty md) then add_sym md.(0) loc
else
warn
"could not find variable for debug info at %a with \
metadata %a"
Loc.pp loc (List.pp ", " pp_llvalue) (Array.to_list md) ()
| _ -> () )
| _ -> ()
in
let scan_block b =
add_sym (Llvm.value_of_block b) Loc.none ;
Llvm.iter_instrs scan_instr b
in
let scan_function f =
Llvm.iter_params (fun prm -> add_sym prm Loc.none) f ;
add_sym f (loc_of_function f) ;
Llvm.iter_blocks scan_block f
in
Llvm.iter_globals scan_global m ;
Llvm.iter_functions scan_function m
let find_name : Llvm.llvalue -> string =
fun v -> fst (Hashtbl.find_exn sym_tbl v)
let find_loc : Llvm.llvalue -> Loc.t =
fun v -> snd (Hashtbl.find_exn sym_tbl v)
end
let label_of_block : Llvm.llbasicblock -> string =
fun blk -> find_name (Llvm.value_of_block blk)
let anon_struct_name : (Llvm.lltype, string) Hashtbl.t =
Hashtbl.Poly.create ()
let struct_name : Llvm.lltype -> string =
fun llt ->
match Llvm.struct_name llt with
| Some name -> name
| None ->
Hashtbl.find_or_add anon_struct_name llt ~default:(fun () ->
Int.to_string (Hashtbl.length anon_struct_name) )
type x =
{ llcontext: Llvm.llcontext
; llmodule: Llvm.llmodule
; lldatalayout: Llvm_target.DataLayout.t }
let ptr_siz : x -> int =
fun x -> Llvm_target.DataLayout.pointer_size x.lldatalayout
let size_of, bit_size_of =
let size_to_int size_of x llt =
if Llvm.type_is_sized llt then
match Int64.to_int (size_of llt x.lldatalayout) with
| Some n -> n
| None -> fail "type size too large: %a" pp_lltype llt ()
else fail "types with undetermined size: %a" pp_lltype llt ()
in
( size_to_int Llvm_target.DataLayout.abi_size
, size_to_int Llvm_target.DataLayout.size_in_bits )
let memo_type : (Llvm.lltype, Typ.t) Hashtbl.t = Hashtbl.Poly.create ()
let rec xlate_type : x -> Llvm.lltype -> Typ.t =
fun x llt ->
let xlate_type_ llt =
if Llvm.type_is_sized llt then
let byts = size_of x llt in
let bits = bit_size_of x llt in
match Llvm.classify_type llt with
| Half | Float | Double | Fp128 -> Typ.float ~bits ~byts ~enc:`IEEE
| X86fp80 -> Typ.float ~bits ~byts ~enc:`Extended
| Ppc_fp128 -> Typ.float ~bits ~byts ~enc:`Pair
| Integer -> Typ.integer ~bits ~byts
| X86_mmx -> Typ.integer ~bits ~byts
| Pointer ->
if byts <> ptr_siz x then
todo "non-integral pointer types: %a" pp_lltype llt () ;
let elt = xlate_type x (Llvm.element_type llt) in
Typ.pointer ~elt
| Vector ->
let elt = xlate_type x (Llvm.element_type llt) in
let len = Llvm.vector_size llt in
Typ.array ~elt ~len ~bits ~byts
| Array ->
let elt = xlate_type x (Llvm.element_type llt) in
let len = Llvm.array_length llt in
Typ.array ~elt ~len ~bits ~byts
| Struct ->
let llelts = Llvm.struct_element_types llt in
let len = Array.length llelts in
let packed = Llvm.is_packed llt in
if Llvm.is_literal llt then
let elts =
Vector.map ~f:(xlate_type x) (Vector.of_array llelts)
in
Typ.tuple elts ~bits ~byts ~packed
else
let name = struct_name llt in
let elts =
Vector.init len ~f:(fun i -> lazy (xlate_type x llelts.(i)))
in
Typ.struct_ ~name elts ~bits ~byts ~packed
| Function -> fail "expected to be unsized: %a" pp_lltype llt ()
| Void | Label | Metadata | Token -> assert false
else
match Llvm.classify_type llt with
| Function ->
let return = xlate_type_opt x (Llvm.return_type llt) in
let llargs = Llvm.param_types llt in
let len = Array.length llargs in
let args =
Vector.init len ~f:(fun i -> xlate_type x llargs.(i))
in
Typ.function_ ~return ~args
| Struct when Llvm.is_opaque llt -> Typ.opaque ~name:(struct_name llt)
| Token -> Typ.opaque ~name:"token"
| Vector | Array | Struct ->
todo "unsized non-opaque aggregate types: %a" pp_lltype llt ()
| Half | Float | Double | X86fp80 | Fp128 | Ppc_fp128 | Integer
|X86_mmx | Pointer ->
fail "expected to be sized: %a" pp_lltype llt ()
| Void | Label | Metadata -> assert false
in
Hashtbl.find_or_add memo_type llt ~default:(fun () ->
[%Trace.call fun {pf} -> pf "%a" pp_lltype llt]
;
xlate_type_ llt
|>
[%Trace.retn fun {pf} -> pf "%a" Typ.pp_defn] )
and xlate_type_opt : x -> Llvm.lltype -> Typ.t option =
fun x llt ->
match Llvm.classify_type llt with
| Void -> None
| _ -> Some (xlate_type x llt)
let i32 x = xlate_type x (Llvm.i32_type x.llcontext)
let suffix_after_last_space : string -> string =
fun str -> String.drop_prefix str (String.rindex_exn str ' ' + 1)
let xlate_int : x -> Llvm.llvalue -> Exp.t =
fun x llv ->
let llt = Llvm.type_of llv in
let typ = xlate_type x llt in
let data =
match Llvm.int64_of_const llv with
| Some n -> Z.of_int64 n
| None ->
Z.of_string (suffix_after_last_space (Llvm.string_of_llvalue llv))
in
Exp.integer typ data
let xlate_float : x -> Llvm.llvalue -> Exp.t =
fun x llv ->
let llt = Llvm.type_of llv in
let typ = xlate_type x llt in
let data = suffix_after_last_space (Llvm.string_of_llvalue llv) in
Exp.float typ data
let xlate_name x ?global : Llvm.llvalue -> Reg.t =
fun llv ->
let typ = xlate_type x (Llvm.type_of llv) in
Reg.program ?global typ (find_name llv)
let xlate_name_opt : x -> Llvm.llvalue -> Reg.t option =
fun x instr ->
let llt = Llvm.type_of instr in
match Llvm.classify_type llt with
| Void -> None
| _ -> Some (xlate_name x instr)
let memo_value : (bool * Llvm.llvalue, Exp.t) Hashtbl.t =
Hashtbl.Poly.create ()
let memo_global : (Llvm.llvalue, Global.t) Hashtbl.t =
Hashtbl.Poly.create ()
let should_inline : Llvm.llvalue -> bool =
fun llv ->
match Llvm.use_begin llv with
| Some use -> (
match Llvm.use_succ use with
| Some _ -> (
match Llvm.classify_value llv with
| Instruction
( Trunc | ZExt | SExt | FPToUI | FPToSI | UIToFP | SIToFP
| FPTrunc | FPExt | PtrToInt | IntToPtr | BitCast | AddrSpaceCast
) ->
do not inline if > = 2 uses
| None -> true )
| None -> true
module Llvalue = struct
type t = Llvm.llvalue
let hash = Hashtbl.hash
let compare = Poly.compare
let sexp_of_t llv = Sexp.Atom (Llvm.string_of_llvalue llv)
end
let struct_rec = Staged.unstage (Exp.struct_rec (module Llvalue))
let ptr_fld x ~ptr ~fld ~lltyp =
let offset =
Llvm_target.DataLayout.offset_of_element lltyp fld x.lldatalayout
in
Exp.add ~typ:Typ.ptr ptr (Exp.integer Typ.siz (Z.of_int64 offset))
let ptr_idx x ~ptr ~idx ~llelt =
let stride = Llvm_target.DataLayout.abi_size llelt x.lldatalayout in
Exp.add ~typ:Typ.ptr ptr
(Exp.mul ~typ:Typ.siz (Exp.integer Typ.siz (Z.of_int64 stride)) idx)
let convert_to_siz =
let siz_bits = Typ.bit_size_of Typ.siz in
fun typ arg ->
match (typ : Typ.t) with
| Integer {bits} ->
if siz_bits < bits then Exp.signed siz_bits arg ~to_:Typ.siz
else if siz_bits > bits then Exp.signed bits arg ~to_:Typ.siz
else arg
| _ -> fail "convert_to_siz: %a" Typ.pp typ ()
let xlate_llvm_eh_typeid_for : x -> Typ.t -> Exp.t -> Exp.t =
fun x typ arg -> Exp.convert typ ~to_:(i32 x) arg
let rec xlate_intrinsic_exp : string -> (x -> Llvm.llvalue -> Exp.t) option
=
fun name ->
match name with
| "llvm.eh.typeid.for" ->
Some
(fun x llv ->
let rand = Llvm.operand llv 0 in
let arg = xlate_value x rand in
let src = xlate_type x (Llvm.type_of rand) in
xlate_llvm_eh_typeid_for x src arg )
| _ -> None
and xlate_value ?(inline = false) : x -> Llvm.llvalue -> Exp.t =
fun x llv ->
let xlate_value_ llv =
match Llvm.classify_value llv with
| Instruction Call -> (
let func = Llvm.operand llv (Llvm.num_arg_operands llv) in
let fname = Llvm.value_name func in
match xlate_intrinsic_exp fname with
| Some intrinsic when inline || should_inline llv -> intrinsic x llv
| _ -> Exp.reg (xlate_name x llv) )
| Instruction (Invoke | Alloca | Load | PHI | LandingPad | VAArg)
|Argument ->
Exp.reg (xlate_name x llv)
| Function | GlobalVariable -> Exp.reg (xlate_global x llv).reg
| GlobalAlias -> xlate_value x (Llvm.operand llv 0)
| ConstantInt -> xlate_int x llv
| ConstantFP -> xlate_float x llv
| ConstantPointerNull -> Exp.null
| ConstantAggregateZero -> (
let typ = xlate_type x (Llvm.type_of llv) in
match typ with
| Integer _ -> Exp.integer typ Z.zero
| Pointer _ -> Exp.null
| Array _ | Tuple _ | Struct _ ->
Exp.splat typ (Exp.integer Typ.byt Z.zero)
| _ -> fail "ConstantAggregateZero of type %a" Typ.pp typ () )
| ConstantVector | ConstantArray ->
let typ = xlate_type x (Llvm.type_of llv) in
let len = Llvm.num_operands llv in
let f i = xlate_value x (Llvm.operand llv i) in
Exp.record typ (Vector.init len ~f)
| ConstantDataVector ->
let typ = xlate_type x (Llvm.type_of llv) in
let len = Llvm.vector_size (Llvm.type_of llv) in
let f i = xlate_value x (Llvm.const_element llv i) in
Exp.record typ (Vector.init len ~f)
| ConstantDataArray ->
let typ = xlate_type x (Llvm.type_of llv) in
let len = Llvm.array_length (Llvm.type_of llv) in
let f i = xlate_value x (Llvm.const_element llv i) in
Exp.record typ (Vector.init len ~f)
| ConstantStruct ->
let typ = xlate_type x (Llvm.type_of llv) in
let is_recursive =
Llvm.fold_left_uses
(fun b use -> b || llv == Llvm.used_value use)
false llv
in
if is_recursive then
let elt_thks =
Vector.init (Llvm.num_operands llv) ~f:(fun i ->
lazy (xlate_value x (Llvm.operand llv i)) )
in
struct_rec ~id:llv typ elt_thks
else
Exp.record typ
(Vector.init (Llvm.num_operands llv) ~f:(fun i ->
xlate_value x (Llvm.operand llv i) ))
| BlockAddress ->
let parent = find_name (Llvm.operand llv 0) in
let name = find_name (Llvm.operand llv 1) in
Exp.label ~parent ~name
| UndefValue ->
let typ = xlate_type x (Llvm.type_of llv) in
Exp.nondet typ (Llvm.string_of_llvalue llv)
| Instruction
( ( Trunc | ZExt | SExt | FPToUI | FPToSI | UIToFP | SIToFP
| FPTrunc | FPExt | PtrToInt | IntToPtr | BitCast | AddrSpaceCast
| Add | FAdd | Sub | FSub | Mul | FMul | UDiv | SDiv | FDiv | URem
| SRem | FRem | Shl | LShr | AShr | And | Or | Xor | ICmp | FCmp
| Select | GetElementPtr | ExtractElement | InsertElement
| ShuffleVector | ExtractValue | InsertValue ) as opcode ) ->
if inline || should_inline llv then xlate_opcode x llv opcode
else Exp.reg (xlate_name x llv)
| ConstantExpr -> xlate_opcode x llv (Llvm.constexpr_opcode llv)
| GlobalIFunc -> todo "ifuncs: %a" pp_llvalue llv ()
| Instruction (CatchPad | CleanupPad | CatchSwitch) ->
todo "windows exception handling: %a" pp_llvalue llv ()
| Instruction
( Invalid | Ret | Br | Switch | IndirectBr | Invalid2 | Unreachable
| Store | UserOp1 | UserOp2 | Fence | AtomicCmpXchg | AtomicRMW
| Resume | CleanupRet | CatchRet )
|NullValue | BasicBlock | InlineAsm | MDNode | MDString ->
fail "xlate_value: %a" pp_llvalue llv ()
in
Hashtbl.find_or_add memo_value (inline, llv) ~default:(fun () ->
[%Trace.call fun {pf} -> pf "%a" pp_llvalue llv]
;
xlate_value_ llv
|>
[%Trace.retn fun {pf} exp -> pf "%a" Exp.pp exp] )
and xlate_opcode : x -> Llvm.llvalue -> Llvm.Opcode.t -> Exp.t =
fun x llv opcode ->
[%Trace.call fun {pf} -> pf "%a" pp_llvalue llv]
;
let xlate_rand i = xlate_value x (Llvm.operand llv i) in
let typ = lazy (xlate_type x (Llvm.type_of llv)) in
let check_vector =
lazy
( if Poly.equal (Llvm.classify_type (Llvm.type_of llv)) Vector then
todo "vector operations: %a" pp_llvalue llv () )
in
let convert opcode =
let dst = Lazy.force typ in
let rand = Llvm.operand llv 0 in
let src = xlate_type x (Llvm.type_of rand) in
let arg = xlate_value x rand in
match (opcode : Llvm.Opcode.t) with
| Trunc -> Exp.signed (Typ.bit_size_of dst) arg ~to_:dst
| SExt -> Exp.signed (Typ.bit_size_of src) arg ~to_:dst
| ZExt -> Exp.unsigned (Typ.bit_size_of src) arg ~to_:dst
| (BitCast | AddrSpaceCast) when Typ.equal dst src -> arg
| FPToUI | FPToSI | UIToFP | SIToFP | FPTrunc | FPExt | PtrToInt
|IntToPtr | BitCast | AddrSpaceCast ->
Exp.convert src ~to_:dst arg
| _ -> fail "convert: %a" pp_llvalue llv ()
in
let binary (mk : ?typ:_ -> _) =
Lazy.force check_vector ;
let typ = xlate_type x (Llvm.type_of (Llvm.operand llv 0)) in
mk ~typ (xlate_rand 0) (xlate_rand 1)
in
let unordered_or mk =
binary (fun ?typ e f ->
Exp.or_ ~typ:Typ.bool (Exp.uno ?typ e f) (mk ?typ e f) )
in
( match opcode with
| Trunc | ZExt | SExt | FPToUI | FPToSI | UIToFP | SIToFP | FPTrunc
|FPExt | PtrToInt | IntToPtr | BitCast | AddrSpaceCast ->
convert opcode
| ICmp -> (
match Option.value_exn (Llvm.icmp_predicate llv) with
| Eq -> binary Exp.eq
| Ne -> binary Exp.dq
| Sgt -> binary Exp.gt
| Sge -> binary Exp.ge
| Slt -> binary Exp.lt
| Sle -> binary Exp.le
| Ugt -> binary Exp.ugt
| Uge -> binary Exp.uge
| Ult -> binary Exp.ult
| Ule -> binary Exp.ule )
| FCmp -> (
match Llvm.fcmp_predicate llv with
| None | Some False -> binary (fun ?typ:_ _ _ -> Exp.false_)
| Some Oeq -> binary Exp.eq
| Some Ogt -> binary Exp.gt
| Some Oge -> binary Exp.ge
| Some Olt -> binary Exp.lt
| Some Ole -> binary Exp.le
| Some One -> binary Exp.dq
| Some Ord -> binary Exp.ord
| Some Uno -> binary Exp.uno
| Some Ueq -> unordered_or Exp.eq
| Some Ugt -> unordered_or Exp.gt
| Some Uge -> unordered_or Exp.ge
| Some Ult -> unordered_or Exp.lt
| Some Ule -> unordered_or Exp.le
| Some Une -> unordered_or Exp.dq
| Some True -> binary (fun ?typ:_ _ _ -> Exp.true_) )
| Add | FAdd -> binary Exp.add
| Sub | FSub -> binary Exp.sub
| Mul | FMul -> binary Exp.mul
| SDiv | FDiv -> binary Exp.div
| UDiv -> binary Exp.udiv
| SRem | FRem -> binary Exp.rem
| URem -> binary Exp.urem
| Shl -> binary Exp.shl
| LShr -> binary Exp.lshr
| AShr -> binary Exp.ashr
| And -> binary Exp.and_
| Or -> binary Exp.or_
| Xor -> binary Exp.xor
| Select ->
let typ = xlate_type x (Llvm.type_of (Llvm.operand llv 1)) in
Exp.conditional ~typ ~cnd:(xlate_rand 0) ~thn:(xlate_rand 1)
~els:(xlate_rand 2)
| ExtractElement | InsertElement -> (
let typ =
let lltyp = Llvm.type_of (Llvm.operand llv 0) in
let llelt = Llvm.element_type lltyp in
let elt = xlate_type x llelt in
let len = Llvm.vector_size llelt in
let byts = size_of x lltyp in
let bits = bit_size_of x lltyp in
Typ.array ~elt ~len ~bits ~byts
in
let idx i =
match (xlate_rand i).desc with
| Integer {data} -> Z.to_int data
| _ -> todo "vector operations: %a" pp_llvalue llv ()
in
let rcd = xlate_rand 0 in
match opcode with
| ExtractElement -> Exp.select typ rcd (idx 1)
| InsertElement -> Exp.update typ ~rcd (idx 2) ~elt:(xlate_rand 1)
| _ -> assert false )
| ExtractValue | InsertValue ->
let agg = xlate_rand 0 in
let typ = xlate_type x (Llvm.type_of (Llvm.operand llv 0)) in
let indices = Llvm.indices llv in
let num = Array.length indices in
let rec xlate_indices i rcd typ =
let rcd_i, typ_i, upd =
match (typ : Typ.t) with
| Tuple {elts} | Struct {elts} ->
( Exp.select typ rcd indices.(i)
, Vector.get elts indices.(i)
, Exp.update typ ~rcd indices.(i) )
| Array {elt} ->
( Exp.select typ rcd indices.(i)
, elt
, Exp.update typ ~rcd indices.(i) )
| _ -> fail "xlate_value: %a" pp_llvalue llv ()
in
let update_or_return elt ret =
match[@warning "p"] opcode with
| InsertValue -> upd ~elt:(Lazy.force elt)
| ExtractValue -> ret
in
if i < num - 1 then
let elt = xlate_indices (i + 1) rcd_i typ_i in
update_or_return (lazy elt) elt
else
let elt = lazy (xlate_rand 1) in
update_or_return elt rcd_i
in
xlate_indices 0 agg typ
| GetElementPtr ->
if Poly.equal (Llvm.classify_type (Llvm.type_of llv)) Vector then
todo "vector operations: %a" pp_llvalue llv () ;
let len = Llvm.num_operands llv in
assert (len > 0 || invalid_llvm (Llvm.string_of_llvalue llv)) ;
if len = 1 then convert BitCast
else
let rec xlate_indices i =
[%Trace.call fun {pf} ->
pf "%i %a" i pp_llvalue (Llvm.operand llv i)]
;
let idx =
convert_to_siz
(xlate_type x (Llvm.type_of (Llvm.operand llv i)))
(xlate_rand i)
in
( if i = 1 then
let base = xlate_rand 0 in
let lltyp = Llvm.type_of (Llvm.operand llv 0) in
let llelt =
match Llvm.classify_type lltyp with
| Pointer -> Llvm.element_type lltyp
| _ -> fail "xlate_opcode: %i %a" i pp_llvalue llv ()
in
translate [ gep t * , iN M ] as [ gep [ 1 x t ] * , iN M ]
(ptr_idx x ~ptr:base ~idx ~llelt, llelt)
else
let ptr, lltyp = xlate_indices (i - 1) in
match Llvm.classify_type lltyp with
| Array | Vector ->
let llelt = Llvm.element_type lltyp in
(ptr_idx x ~ptr ~idx ~llelt, llelt)
| Struct ->
let fld =
match
Option.bind ~f:Int64.to_int
(Llvm.int64_of_const (Llvm.operand llv i))
with
| Some n -> n
| None -> fail "xlate_opcode: %i %a" i pp_llvalue llv ()
in
let llelt = (Llvm.struct_element_types lltyp).(fld) in
(ptr_fld x ~ptr ~fld ~lltyp, llelt)
| _ -> fail "xlate_opcode: %i %a" i pp_llvalue llv () )
|>
[%Trace.retn fun {pf} (exp, llt) ->
pf "%a %a" Exp.pp exp pp_lltype llt]
in
fst (xlate_indices (len - 1))
| ShuffleVector -> (
let exp = xlate_value x (Llvm.operand llv 0) in
let exp_typ = xlate_type x (Llvm.type_of (Llvm.operand llv 0)) in
let llmask = Llvm.operand llv 2 in
let mask_typ = xlate_type x (Llvm.type_of llmask) in
match (exp_typ, mask_typ) with
| Array {len= m}, Array {len= n} when m = n && Llvm.is_null llmask ->
exp
| _ -> todo "vector operations: %a" pp_llvalue llv () )
| Invalid | Ret | Br | Switch | IndirectBr | Invoke | Invalid2
|Unreachable | Alloca | Load | Store | PHI | Call | UserOp1 | UserOp2
|Fence | AtomicCmpXchg | AtomicRMW | Resume | LandingPad | CleanupRet
|CatchRet | CatchPad | CleanupPad | CatchSwitch | VAArg ->
fail "xlate_opcode: %a" pp_llvalue llv () )
|>
[%Trace.retn fun {pf} exp -> pf "%a" Exp.pp exp]
and xlate_global : x -> Llvm.llvalue -> Global.t =
fun x llg ->
Hashtbl.find_or_add memo_global llg ~default:(fun () ->
[%Trace.call fun {pf} -> pf "%a" pp_llvalue llg]
;
let g = xlate_name x ~global:() llg in
let llt = Llvm.type_of llg in
let typ = xlate_type x llt in
let loc = find_loc llg in
Hashtbl.set memo_global ~key:llg ~data:(Global.mk g typ loc) ;
let init =
match Llvm.classify_value llg with
| GlobalVariable ->
Option.map ~f:(xlate_value x) (Llvm.global_initializer llg)
| _ -> None
in
Global.mk ?init g typ loc
|>
[%Trace.retn fun {pf} -> pf "%a" Global.pp_defn] )
type pop_thunk = Loc.t -> Llair.inst list
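(** [pop_stack_frame_of_function x func entry_blk] returns a [pop_thunk]:
    applied to the location of a return or throw, it frees each register
    bound by an [alloca] in [func]'s entry block; allocas outside the entry
    block only trigger a warning. *)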
let pop_stack_frame_of_function :
x -> Llvm.llvalue -> Llvm.llbasicblock -> pop_thunk =
fun x func entry_blk ->
let append_stack_regs blk regs =
Llvm.fold_right_instrs
(fun instr regs ->
match Llvm.instr_opcode instr with
| Alloca -> xlate_name x instr :: regs
| _ -> regs )
blk regs
in
let entry_regs = append_stack_regs entry_blk [] in
Llvm.iter_blocks
(fun blk ->
if not (Poly.equal entry_blk blk) then
Llvm.iter_instrs
(fun instr ->
match Llvm.instr_opcode instr with
| Alloca ->
warn "stack allocation after function entry:@ %a" Loc.pp
(find_loc instr) ()
| _ -> () )
blk )
func ;
let pop retn_loc =
List.map entry_regs ~f:(fun reg ->
Llair.Inst.free ~ptr:(Exp.reg reg) ~loc:retn_loc )
in
pop
(** construct the types involved in landingpads: i32, std::type_info*, and
    __cxa_exception *)
let landingpad_typs : x -> Llvm.llvalue -> Typ.t * Typ.t * Llvm.lltype =
fun x instr ->
let llt = Llvm.type_of instr in
let i32 = i32 x in
if
not
( Poly.(Llvm.classify_type llt = Struct)
&&
let llelts = Llvm.struct_element_types llt in
Array.length llelts = 2
&& Poly.(llelts.(0) = Llvm.pointer_type (Llvm.i8_type x.llcontext))
&& Poly.(llelts.(1) = Llvm.i32_type x.llcontext) )
then
todo "landingpad of type other than {i8*, i32}: %a" pp_llvalue instr () ;
let llcontext =
Llvm.(
module_context (global_parent (block_parent (instr_parent instr))))
in
let llpi8 = Llvm.(pointer_type (integer_type llcontext 8)) in
let ti = Llvm.(named_struct_type llcontext "class.std::type_info") in
let tip = Llvm.pointer_type ti in
let void = Llvm.void_type llcontext in
let dtor = Llvm.(pointer_type (function_type void [|llpi8|])) in
let cxa_exception = Llvm.struct_type llcontext [|tip; dtor|] in
(i32, xlate_type x tip, cxa_exception)
let exception_typs =
let pi8 = Typ.pointer ~elt:Typ.byt in
let i32 = Typ.integer ~bits:32 ~byts:4 in
let exc =
Typ.tuple ~packed:false (Vector.of_array [|pi8; i32|]) ~bits:96 ~byts:12
in
(pi8, i32, exc)
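(** [xlate_jump x ~reg_exps instr dst loc blocks] translates a jump from the
    block of [instr] to [dst]. Phi nodes at the head of [dst] are realized as
    a move instruction placed in a fresh intermediate block, which is added
    to [blocks]; otherwise the jump goes directly to [dst]. *)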
let xlate_jump :
x
-> ?reg_exps:(Reg.t * Exp.t) list
-> Llvm.llvalue
-> Llvm.llbasicblock
-> Loc.t
-> Llair.block list
-> Llair.jump * Llair.block list =
fun x ?(reg_exps = []) instr dst loc blocks ->
let src = Llvm.instr_parent instr in
let rec xlate_jump_ reg_exps (pos : _ Llvm.llpos) =
match pos with
| Before dst_instr -> (
match Llvm.instr_opcode dst_instr with
| PHI ->
let reg_exp =
List.find_map_exn (Llvm.incoming dst_instr)
~f:(fun (arg, pred) ->
if Poly.equal pred src then
Some (xlate_name x dst_instr, xlate_value x arg)
else None )
in
xlate_jump_ (reg_exp :: reg_exps) (Llvm.instr_succ dst_instr)
| _ -> reg_exps )
| At_end blk -> fail "xlate_jump: %a" pp_llblock blk ()
in
let dst_lbl = label_of_block dst in
let jmp = Llair.Jump.mk dst_lbl in
match xlate_jump_ reg_exps (Llvm.instr_begin dst) with
| [] -> (jmp, blocks)
| reg_exps ->
let mov =
Llair.Inst.move ~reg_exps:(Vector.of_list_rev reg_exps) ~loc
in
let lbl = find_name instr ^ ".jmp." ^ dst_lbl in
let blk =
Llair.Block.mk ~lbl
~cmnd:(Vector.of_array [|mov|])
~term:(Llair.Term.goto ~dst:jmp ~loc)
in
let blocks =
match List.find blocks ~f:(fun b -> String.equal lbl b.lbl) with
| None -> blk :: blocks
| Some blk0 ->
assert (Llair.Block.equal blk0 blk) ;
blocks
in
(Llair.Jump.mk lbl, blocks)
(** An LLVM instruction is translated to a sequence of LLAIR instructions
    and a terminator, plus some additional blocks to which it may refer
    (that is, essentially a function body). These are needed since LLVM and
    LLAIR blocks are not in 1:1 correspondence. *)
type code = Llair.inst list * Llair.term * Llair.block list
let pp_code fs (insts, term, blocks) =
Format.fprintf fs "@[<hv>@,@[%a%t@]%t@[<hv>%a@]@]"
(List.pp "@ " Llair.Inst.pp)
insts
(fun fs ->
match term with
| Llair.Unreachable -> ()
| _ ->
Format.fprintf fs "%t%a"
(fun fs ->
if List.is_empty insts then () else Format.fprintf fs "@ " )
Llair.Term.pp term )
(fun fs -> if List.is_empty blocks then () else Format.fprintf fs "@\n")
(List.pp "@ " Llair.Block.pp)
blocks
let rec xlate_func_name x llv =
match Llvm.classify_value llv with
| Function | GlobalVariable -> Exp.reg (xlate_name x ~global:() llv)
| ConstantExpr -> xlate_opcode x llv (Llvm.constexpr_opcode llv)
| Argument | Instruction _ -> xlate_value x llv
| GlobalAlias -> xlate_func_name x (Llvm.operand llv 0)
| GlobalIFunc -> todo "ifunc: %a" pp_llvalue llv ()
| InlineAsm -> todo "inline asm: %a" pp_llvalue llv ()
| ConstantPointerNull -> todo "call null: %a" pp_llvalue llv ()
| _ -> todo "function kind in %a" pp_llvalue llv ()
let ignored_callees = Hash_set.create (module String)
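(** Translate a single LLVM instruction to LLAIR. [continue] is applied to a
    function from the code for the rest of the block (its instructions and
    terminator) to the resulting [code]; call-like instructions use it to
    wrap the remainder of the block into their return block. *)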
let xlate_instr :
pop_thunk
-> x
-> Llvm.llvalue
-> ((Llair.inst list * Llair.term -> code) -> code)
-> code =
fun pop x instr continue ->
[%Trace.call fun {pf} -> pf "%a" pp_llvalue instr]
;
let continue insts_term_to_code =
[%Trace.retn
fun {pf} () ->
pf "%a" pp_code (insts_term_to_code ([], Llair.Term.unreachable))]
() ;
continue insts_term_to_code
in
let nop () = continue (fun (insts, term) -> (insts, term, [])) in
let emit_inst inst =
continue (fun (insts, term) -> (inst :: insts, term, []))
in
let emit_term ?(prefix = []) ?(blocks = []) term =
[%Trace.retn fun {pf} () -> pf "%a" pp_code (prefix, term, blocks)] () ;
(prefix, term, blocks)
in
let name = find_name instr in
let loc = find_loc instr in
let inline_or_move xlate =
if should_inline instr then nop ()
else
let reg = xlate_name x instr in
let exp = xlate instr in
let reg_exps = Vector.of_array [|(reg, exp)|] in
emit_inst (Llair.Inst.move ~reg_exps ~loc)
in
let opcode = Llvm.instr_opcode instr in
match opcode with
| Load ->
let reg = xlate_name x instr in
let len = Exp.size_of (Exp.reg reg) in
let ptr = xlate_value x (Llvm.operand instr 0) in
emit_inst (Llair.Inst.load ~reg ~ptr ~len ~loc)
| Store ->
let exp = xlate_value x (Llvm.operand instr 0) in
let len = Exp.size_of exp in
let ptr = xlate_value x (Llvm.operand instr 1) in
emit_inst (Llair.Inst.store ~ptr ~exp ~len ~loc)
| Alloca ->
let reg = xlate_name x instr in
let rand = Llvm.operand instr 0 in
let num =
convert_to_siz
(xlate_type x (Llvm.type_of rand))
(xlate_value x rand)
in
assert (Poly.(Llvm.classify_type (Llvm.type_of instr) = Pointer)) ;
let len = Exp.size_of (Exp.reg reg) in
emit_inst (Llair.Inst.alloc ~reg ~num ~len ~loc)
| Call -> (
let maybe_llfunc = Llvm.operand instr (Llvm.num_operands instr - 1) in
let lltyp = Llvm.type_of maybe_llfunc in
assert (Poly.(Llvm.classify_type lltyp = Pointer)) ;
let llfunc =
let llfunc_valuekind = Llvm.classify_value maybe_llfunc in
match llfunc_valuekind with
| Function | Instruction _ | InlineAsm | Argument -> maybe_llfunc
| ConstantExpr -> (
match Llvm.constexpr_opcode maybe_llfunc with
| BitCast -> Llvm.operand maybe_llfunc 0
| _ ->
todo "opcode kind in call instruction %a" pp_llvalue
maybe_llfunc () )
| _ ->
todo "operand kind in call instruction %a" pp_llvalue
maybe_llfunc ()
in
let fname = Llvm.value_name llfunc in
let skip msg =
( match Hash_set.strict_add ignored_callees fname with
| Ok () -> warn "ignoring uninterpreted %s %s" msg fname ()
| Error _ -> () ) ;
let reg = xlate_name_opt x instr in
emit_inst (Llair.Inst.nondet ~reg ~msg:fname ~loc)
in
match xlate_intrinsic_exp fname with
| Some intrinsic -> inline_or_move (intrinsic x)
| None -> (
match String.split fname ~on:'.' with
| ["__llair_throw"] ->
let exc = xlate_value x (Llvm.operand instr 0) in
emit_term ~prefix:(pop loc) (Llair.Term.throw ~exc ~loc)
| ["__llair_alloc" (* void* __llair_alloc(unsigned size) *)] ->
    let reg = xlate_name x instr in
    let num_operand = Llvm.operand instr 0 in
    let num =
      convert_to_siz
        (xlate_type x (Llvm.type_of num_operand))
        (xlate_value x num_operand)
    in
    let len = Exp.integer Typ.siz (Z.of_int 1) in
    emit_inst (Llair.Inst.alloc ~reg ~num ~len ~loc)
| ["_Znwm" (* operator new(size_t num) *)]
 |[ "_ZnwmSt11align_val_t"
    (* operator new(unsigned long num, std::align_val_t) *) ] ->
let reg = xlate_name x instr in
let num = xlate_value x (Llvm.operand instr 0) in
let len = Exp.size_of (Exp.reg reg) in
emit_inst (Llair.Inst.alloc ~reg ~num ~len ~loc)
|[ "_ZdlPvSt11align_val_t"
|[ "_ZdlPvmSt11align_val_t"
]
let ptr = xlate_value x (Llvm.operand instr 0) in
emit_inst (Llair.Inst.free ~ptr ~loc)
| "llvm" :: "memset" :: _ ->
let dst = xlate_value x (Llvm.operand instr 0) in
let byt = xlate_value x (Llvm.operand instr 1) in
let len = xlate_value x (Llvm.operand instr 2) in
emit_inst (Llair.Inst.memset ~dst ~byt ~len ~loc)
| "llvm" :: "memcpy" :: _ ->
let dst = xlate_value x (Llvm.operand instr 0) in
let src = xlate_value x (Llvm.operand instr 1) in
let len = xlate_value x (Llvm.operand instr 2) in
emit_inst (Llair.Inst.memcpy ~dst ~src ~len ~loc)
| "llvm" :: "memmove" :: _ ->
let dst = xlate_value x (Llvm.operand instr 0) in
let src = xlate_value x (Llvm.operand instr 1) in
let len = xlate_value x (Llvm.operand instr 2) in
emit_inst (Llair.Inst.memmov ~dst ~src ~len ~loc)
| ["abort"] | ["llvm"; "trap"] -> emit_inst (Llair.Inst.abort ~loc)
| ["llvm"; "dbg"; ("declare" | "value")]
|"llvm" :: ("lifetime" | "invariant") :: ("start" | "end") :: _ ->
nop ()
| ["llvm"; ("stacksave" | "stackrestore")] ->
skip "dynamic stack deallocation"
| "llvm" :: "coro" :: _ ->
todo "coroutines:@ %a" pp_llvalue instr ()
| "llvm" :: "experimental" :: "gc" :: "statepoint" :: _ ->
todo "statepoints:@ %a" pp_llvalue instr ()
| ["llvm"; ("va_start" | "va_copy" | "va_end")] ->
skip "variadic function intrinsic"
| "llvm" :: _ -> skip "intrinsic"
| _ when Poly.equal (Llvm.classify_value llfunc) InlineAsm ->
skip "inline asm"
| _ ->
let callee = xlate_func_name x llfunc in
let typ = xlate_type x lltyp in
let lbl = name ^ ".ret" in
let call =
let actuals =
let num_actuals =
if not (Llvm.is_var_arg (Llvm.element_type lltyp)) then
Llvm.num_arg_operands instr
else
let fname = Llvm.value_name llfunc in
( match Hash_set.strict_add ignored_callees fname with
| Ok () when not (Llvm.is_declaration llfunc) ->
warn
"ignoring variable arguments to variadic \
function: %a"
Exp.pp callee ()
| _ -> () ) ;
let llfty = Llvm.element_type lltyp in
( match Llvm.classify_type llfty with
| Function -> ()
| _ ->
fail "called function not of function type: %a"
pp_llvalue instr () ) ;
Array.length (Llvm.param_types llfty)
in
List.rev_init num_actuals ~f:(fun i ->
xlate_value x (Llvm.operand instr i) )
in
let areturn = xlate_name_opt x instr in
let return = Llair.Jump.mk lbl in
Llair.Term.call ~callee ~typ ~actuals ~areturn ~return
~throw:None ~loc
in
continue (fun (insts, term) ->
let cmnd = Vector.of_list insts in
([], call, [Llair.Block.mk ~lbl ~cmnd ~term]) ) ) )
| Invoke -> (
let llfunc = Llvm.operand instr (Llvm.num_operands instr - 3) in
let lltyp = Llvm.type_of llfunc in
assert (Poly.(Llvm.classify_type lltyp = Pointer)) ;
let fname = Llvm.value_name llfunc in
let return_blk = Llvm.get_normal_dest instr in
let unwind_blk = Llvm.get_unwind_dest instr in
let num_actuals =
if not (Llvm.is_var_arg (Llvm.element_type lltyp)) then
Llvm.num_arg_operands instr
else (
( match Hash_set.strict_add ignored_callees fname with
| Ok () when not (Llvm.is_declaration llfunc) ->
warn "ignoring variable arguments to variadic function: %a"
Global.pp (xlate_global x llfunc) ()
| _ -> () ) ;
assert (Poly.(Llvm.classify_type lltyp = Pointer)) ;
Array.length (Llvm.param_types (Llvm.element_type lltyp)) )
in
match String.split fname ~on:'.' with
| _ when Option.is_some (xlate_intrinsic_exp fname) ->
let dst, blocks = xlate_jump x instr return_blk loc [] in
emit_term (Llair.Term.goto ~dst ~loc) ~blocks
| ["__llair_throw"] ->
let dst, blocks = xlate_jump x instr unwind_blk loc [] in
emit_term (Llair.Term.goto ~dst ~loc) ~blocks
| ["abort"] ->
emit_term ~prefix:[Llair.Inst.abort ~loc] Llair.Term.unreachable
| ["_Znwm" (* operator new(size_t num) *)]
 |[ "_ZnwmSt11align_val_t"
    (* operator new(unsigned long num, std::align_val_t) *) ]
when num_actuals > 0 ->
let reg = xlate_name x instr in
let num = xlate_value x (Llvm.operand instr 0) in
let len = Exp.size_of (Exp.reg reg) in
let dst, blocks = xlate_jump x instr return_blk loc [] in
emit_term
~prefix:[Llair.Inst.alloc ~reg ~num ~len ~loc]
(Llair.Term.goto ~dst ~loc)
~blocks
| "llvm" :: "experimental" :: "gc" :: "statepoint" :: _ ->
todo "statepoints:@ %a" pp_llvalue instr ()
| _ ->
let callee = xlate_func_name x llfunc in
let typ = xlate_type x (Llvm.type_of llfunc) in
let actuals =
List.rev_init num_actuals ~f:(fun i ->
xlate_value x (Llvm.operand instr i) )
in
let areturn = xlate_name_opt x instr in
let return, blocks = xlate_jump x instr return_blk loc [] in
let throw, blocks = xlate_jump x instr unwind_blk loc blocks in
let throw = Some throw in
emit_term
(Llair.Term.call ~callee ~typ ~actuals ~areturn ~return ~throw
~loc)
~blocks )
| Ret ->
let exp =
if Llvm.num_operands instr = 0 then None
else Some (xlate_value x (Llvm.operand instr 0))
in
emit_term ~prefix:(pop loc) (Llair.Term.return ~exp ~loc)
| Br -> (
match Option.value_exn (Llvm.get_branch instr) with
| `Unconditional blk ->
let dst, blocks = xlate_jump x instr blk loc [] in
emit_term (Llair.Term.goto ~dst ~loc) ~blocks
| `Conditional (cnd, thn, els) ->
let key = xlate_value x cnd in
let thn, blocks = xlate_jump x instr thn loc [] in
let els, blocks = xlate_jump x instr els loc blocks in
emit_term (Llair.Term.branch ~key ~nzero:thn ~zero:els ~loc) ~blocks
)
| Switch ->
let key = xlate_value x (Llvm.operand instr 0) in
let cases, blocks =
let num_cases = (Llvm.num_operands instr / 2) - 1 in
let rec xlate_cases i blocks =
if i <= num_cases then
let idx = Llvm.operand instr (2 * i) in
let blk =
Llvm.block_of_value (Llvm.operand instr ((2 * i) + 1))
in
let num = xlate_value x idx in
let jmp, blocks = xlate_jump x instr blk loc blocks in
let rest, blocks = xlate_cases (i + 1) blocks in
((num, jmp) :: rest, blocks)
else ([], blocks)
in
xlate_cases 1 []
in
let tbl = Vector.of_list cases in
let blk = Llvm.block_of_value (Llvm.operand instr 1) in
let els, blocks = xlate_jump x instr blk loc blocks in
emit_term (Llair.Term.switch ~key ~tbl ~els ~loc) ~blocks
| IndirectBr ->
let ptr = xlate_value x (Llvm.operand instr 0) in
let num_dests = Llvm.num_operands instr - 1 in
let lldests, blocks =
let rec dests i blocks =
if i <= num_dests then
let v = Llvm.operand instr i in
let blk = Llvm.block_of_value v in
let jmp, blocks = xlate_jump x instr blk loc blocks in
let rest, blocks = dests (i + 1) blocks in
(jmp :: rest, blocks)
else ([], blocks)
in
dests 1 []
in
let tbl = Vector.of_list lldests in
emit_term (Llair.Term.iswitch ~ptr ~tbl ~loc) ~blocks
| LandingPad ->
(* Translate the clauses to code to load the type_info from
   the thrown exception, and test the type_info against the clauses,
   eventually jumping to the handler code following the landingpad,
   passing a value for the selector which the handler code tests to
   e.g. either cleanup or rethrow. *)
let i32, tip, cxa_exception = landingpad_typs x instr in
let pi8, _, exc_typ = exception_typs in
let exc = Exp.reg (Reg.program pi8 (find_name instr ^ ".exc")) in
let ti = Reg.program tip (name ^ ".ti") in
let load_ti =
let typ = cxa_exception in
let fld = 0 in
(* index from exc that points to header *)
let idx = Exp.integer Typ.siz Z.minus_one in
let ptr =
ptr_fld x
~ptr:(ptr_idx x ~ptr:exc ~idx ~llelt:typ)
~fld ~lltyp:typ
in
let len = Exp.integer Typ.siz (Z.of_int (size_of x typ)) in
Llair.Inst.load ~reg:ti ~ptr ~len ~loc
in
let ti = Exp.reg ti in
let typeid = xlate_llvm_eh_typeid_for x tip ti in
let lbl = name ^ ".unwind" in
let reg = xlate_name x instr in
let jump_unwind i sel rev_blocks =
let exp = Exp.record exc_typ (Vector.of_array [|exc; sel|]) in
let mov =
Llair.Inst.move ~reg_exps:(Vector.of_array [|(reg, exp)|]) ~loc
in
let lbl_i = lbl ^ "." ^ Int.to_string i in
let blk =
Llair.Block.mk ~lbl:lbl_i
~cmnd:(Vector.of_array [|mov|])
~term:(Llair.Term.goto ~dst:(Llair.Jump.mk lbl) ~loc)
in
(Llair.Jump.mk lbl_i, blk :: rev_blocks)
in
let goto_unwind i sel blocks =
let dst, blocks = jump_unwind i sel blocks in
(Llair.Term.goto ~dst ~loc, blocks)
in
let term_unwind, rev_blocks =
if Llvm.is_cleanup instr then
goto_unwind 0 (Exp.integer i32 Z.zero) []
else
let num_clauses = Llvm.num_operands instr in
let lbl i = name ^ "." ^ Int.to_string i in
let jump i = Llair.Jump.mk (lbl i) in
let block i term =
Llair.Block.mk ~lbl:(lbl i) ~cmnd:Vector.empty ~term
in
let match_filter i rev_blocks =
jump_unwind i
(Exp.sub ~typ:i32 (Exp.integer i32 Z.zero) typeid)
rev_blocks
in
let xlate_clause i rev_blocks =
let clause = Llvm.operand instr i in
let num_tis = Llvm.num_operands clause in
if num_tis = 0 then
let dst, rev_blocks = match_filter i rev_blocks in
(Llair.Term.goto ~dst ~loc, rev_blocks)
else
match Llvm.classify_type (Llvm.type_of clause) with
| Array (* filter *) -> (
  match Llvm.classify_value clause with
| ConstantArray ->
let rec xlate_filter i =
let tiI = xlate_value x (Llvm.operand clause i) in
if i < num_tis - 1 then
Exp.and_ ~typ:Typ.bool (Exp.dq ~typ:tip tiI ti)
(xlate_filter (i + 1))
else Exp.dq ~typ:tip tiI ti
in
let key = xlate_filter 0 in
let nzero, rev_blocks = match_filter i rev_blocks in
( Llair.Term.branch ~loc ~key ~nzero ~zero:(jump (i + 1))
, rev_blocks )
| _ -> fail "xlate_instr: %a" pp_llvalue instr () )
let typ = xlate_type x (Llvm.type_of clause) in
let clause = xlate_value x clause in
let key =
Exp.or_ ~typ:Typ.bool
(Exp.eq ~typ clause Exp.null)
(Exp.eq ~typ clause ti)
in
let nzero, rev_blocks = jump_unwind i typeid rev_blocks in
( Llair.Term.branch ~loc ~key ~nzero ~zero:(jump (i + 1))
, rev_blocks )
in
let rec rev_blocks i z =
if i < num_clauses then
let term, z = xlate_clause i z in
rev_blocks (i + 1) (block i term :: z)
else block i Llair.Term.unreachable :: z
in
xlate_clause 0 (rev_blocks 1 [])
in
continue (fun (insts, term) ->
( [load_ti]
, term_unwind
, List.rev_append rev_blocks
[Llair.Block.mk ~lbl ~cmnd:(Vector.of_list insts) ~term] ) )
| Resume ->
let llrcd = Llvm.operand instr 0 in
let typ = xlate_type x (Llvm.type_of llrcd) in
let rcd = xlate_value x llrcd in
let exc = Exp.select typ rcd 0 in
emit_term ~prefix:(pop loc) (Llair.Term.throw ~exc ~loc)
| Unreachable -> emit_term Llair.Term.unreachable
| Trunc | ZExt | SExt | FPToUI | FPToSI | UIToFP | SIToFP | FPTrunc
|FPExt | PtrToInt | IntToPtr | BitCast | AddrSpaceCast | Add | FAdd
|Sub | FSub | Mul | FMul | UDiv | SDiv | FDiv | URem | SRem | FRem
|Shl | LShr | AShr | And | Or | Xor | ICmp | FCmp | Select
|GetElementPtr | ExtractElement | InsertElement | ShuffleVector
|ExtractValue | InsertValue ->
inline_or_move (xlate_value ~inline:true x)
| VAArg ->
let reg = xlate_name_opt x instr in
warn "variadic function argument: %a" Loc.pp loc () ;
emit_inst (Llair.Inst.nondet ~reg ~msg:"vaarg" ~loc)
| CleanupRet | CatchRet | CatchPad | CleanupPad | CatchSwitch ->
todo "windows exception handling: %a" pp_llvalue instr ()
| Fence | AtomicCmpXchg | AtomicRMW ->
fail "xlate_instr: %a" pp_llvalue instr ()
| PHI | Invalid | Invalid2 | UserOp1 | UserOp2 -> assert false
let skip_phis : Llvm.llbasicblock -> _ Llvm.llpos =
fun blk ->
let rec skip_phis_ (pos : _ Llvm.llpos) =
match pos with
| Before instr -> (
match Llvm.instr_opcode instr with
| PHI -> skip_phis_ (Llvm.instr_succ instr)
| _ -> pos )
| _ -> pos
in
skip_phis_ (Llvm.instr_begin blk)
let rec xlate_instrs : pop_thunk -> x -> _ Llvm.llpos -> code =
fun pop x -> function
| Before instrI ->
xlate_instr pop x instrI (fun xlate_instrI ->
let instrJ = Llvm.instr_succ instrI in
let instsJ, termJ, blocksJN = xlate_instrs pop x instrJ in
let instsI, termI, blocksI = xlate_instrI (instsJ, termJ) in
(instsI, termI, blocksI @ blocksJN) )
| At_end blk -> fail "xlate_instrs: %a" pp_llblock blk ()
let xlate_block : pop_thunk -> x -> Llvm.llbasicblock -> Llair.block list =
fun pop x blk ->
[%Trace.call fun {pf} -> pf "%a" pp_llblock blk]
;
let lbl = label_of_block blk in
let pos = skip_phis blk in
let insts, term, blocks = xlate_instrs pop x pos in
Llair.Block.mk ~lbl ~cmnd:(Vector.of_list insts) ~term :: blocks
|>
[%Trace.retn fun {pf} blocks -> pf "%s" (List.hd_exn blocks).Llair.lbl]
let report_undefined func name =
if Option.is_some (Llvm.use_begin func) then
[%Trace.info "undefined function: %a" Global.pp name]
let xlate_function : x -> Llvm.llvalue -> Llair.func =
fun x llf ->
[%Trace.call fun {pf} -> pf "%a" pp_llvalue llf]
;
let name = xlate_global x llf in
let formals =
Llvm.fold_left_params
(fun rev_args param -> xlate_name x param :: rev_args)
[] llf
in
let freturn =
match name.typ with
| Pointer {elt= Function {return= Some typ; _}} ->
Some (Reg.program typ "freturn")
| _ -> None
in
let _, _, exc_typ = exception_typs in
let fthrow = Reg.program exc_typ "fthrow" in
( match Llvm.block_begin llf with
| Before entry_blk ->
let pop = pop_stack_frame_of_function x llf entry_blk in
let[@warning "p"] (entry_block :: entry_blocks) =
xlate_block pop x entry_blk
in
let entry =
let {Llair.lbl; cmnd; term} = entry_block in
Llair.Block.mk ~lbl ~cmnd ~term
in
let cfg =
let rec trav_blocks rev_cfg prev =
match Llvm.block_succ prev with
| Before blk ->
trav_blocks
(List.rev_append (xlate_block pop x blk) rev_cfg)
blk
| At_end _ -> Vector.of_list_rev rev_cfg
in
trav_blocks (List.rev entry_blocks) entry_blk
in
Llair.Func.mk ~name ~formals ~freturn ~fthrow ~entry ~cfg
| At_end _ ->
report_undefined llf name ;
Llair.Func.mk_undefined ~name ~formals ~freturn ~fthrow )
|>
[%Trace.retn fun {pf} -> pf "@\n%a" Llair.Func.pp]
let transform ~internalize : Llvm.llmodule -> unit =
fun llmodule ->
let pm = Llvm.PassManager.create () in
let entry_points = Config.find_list "entry-points" in
if internalize then
Llvm_ipo.add_internalize_predicate pm (fun fn ->
List.exists entry_points ~f:(String.equal fn) ) ;
Llvm_ipo.add_global_dce pm ;
Llvm_ipo.add_global_optimizer pm ;
Llvm_ipo.add_merge_functions pm ;
Llvm_ipo.add_constant_merge pm ;
Llvm_ipo.add_argument_promotion pm ;
Llvm_ipo.add_ipsccp pm ;
Llvm_scalar_opts.add_memory_to_register_promotion pm ;
Llvm_scalar_opts.add_dce pm ;
Llvm_ipo.add_global_dce pm ;
Llvm_ipo.add_dead_arg_elimination pm ;
Llvm_scalar_opts.add_lower_atomic pm ;
Llvm_scalar_opts.add_scalar_repl_aggregation pm ;
Llvm_scalar_opts.add_scalarizer pm ;
Llvm_scalar_opts.add_unify_function_exit_nodes pm ;
Llvm_scalar_opts.add_cfg_simplification pm ;
Llvm.PassManager.run_module llmodule pm |> (ignore : bool -> _) ;
Llvm.PassManager.dispose pm
let read_and_parse llcontext bc_file =
[%Trace.call fun {pf} -> pf "%s" bc_file]
;
let llmemorybuffer =
try Llvm.MemoryBuffer.of_file bc_file
with Llvm.IoError msg -> fail "%s: %s" bc_file msg ()
in
( try Llvm_irreader.parse_ir llcontext llmemorybuffer
with Llvm_irreader.Error msg -> invalid_llvm msg )
|>
[%Trace.retn fun {pf} _ -> pf ""]
let link_in : Llvm.llcontext -> Llvm.lllinker -> string -> unit =
fun llcontext link_ctx bc_file ->
Llvm_linker.link_in link_ctx (read_and_parse llcontext bc_file)
let check_datalayout llcontext lldatalayout =
let check_size llt typ =
let llsiz =
Int64.to_int_exn (Llvm_target.DataLayout.abi_size llt lldatalayout)
in
let siz = Typ.size_of typ in
if llsiz != siz then
todo "size_of %a = %i != %i" Typ.pp typ llsiz siz ()
in
check_size (Llvm.i1_type llcontext) Typ.bool ;
check_size (Llvm.i8_type llcontext) Typ.byt ;
check_size (Llvm.i32_type llcontext) Typ.int ;
check_size (Llvm.i64_type llcontext) Typ.siz ;
check_size
(Llvm_target.DataLayout.intptr_type llcontext lldatalayout)
Typ.ptr
let translate ~models ~fuzzer ~internalize : string list -> Llair.t =
fun inputs ->
[%Trace.call fun {pf} ->
pf "%a" (List.pp "@ " Format.pp_print_string) inputs]
;
Llvm.install_fatal_error_handler invalid_llvm ;
let llcontext = Llvm.global_context () in
let input, inputs = List.pop_exn inputs in
let llmodule = read_and_parse llcontext input in
let link_ctx = Llvm_linker.get_linker llmodule in
List.iter ~f:(link_in llcontext link_ctx) inputs ;
let link_model_file name =
Llvm_linker.link_in link_ctx
(Llvm_irreader.parse_ir llcontext
(Llvm.MemoryBuffer.of_string (Option.value_exn (Model.read name))))
in
if models then link_model_file "/cxxabi.bc" ;
if fuzzer then link_model_file "/lib_fuzzer_main.bc" ;
Llvm_linker.linker_dispose link_ctx ;
assert (
Llvm_analysis.verify_module llmodule |> Option.for_all ~f:invalid_llvm
) ;
transform ~internalize llmodule ;
scan_names_and_locs llmodule ;
let lldatalayout =
Llvm_target.DataLayout.of_string (Llvm.data_layout llmodule)
in
check_datalayout llcontext lldatalayout ;
let x = {llcontext; llmodule; lldatalayout} in
let globals =
Llvm.fold_left_globals
(fun globals llg ->
if
Poly.equal (Llvm.linkage llg) Appending
&& Llvm.(array_length (element_type (type_of llg))) = 0
then globals
else xlate_global x llg :: globals )
[] llmodule
in
let functions =
Llvm.fold_left_functions
(fun functions llf ->
let name = Llvm.value_name llf in
if
String.is_prefix name ~prefix:"__llair_"
|| String.is_prefix name ~prefix:"llvm."
then functions
else xlate_function x llf :: functions )
[] llmodule
in
Hashtbl.clear sym_tbl ;
Hashtbl.clear scope_tbl ;
Hashtbl.clear anon_struct_name ;
Hashtbl.clear memo_type ;
Hashtbl.clear memo_global ;
Hashtbl.clear memo_value ;
Hash_set.clear ignored_callees ;
Llvm.dispose_module llmodule ;
Llair.mk ~globals ~functions
|>
[%Trace.retn fun {pf} _ ->
pf "number of globals %d, number of functions %d" (List.length globals)
(List.length functions)]
|
be540e88e4b6ae7c72b6d4ab0d6c8e4108a90c83ad45fd5f4c53075ea335c113 | gowthamk/ocaml-irmin | icounter.ml | open Lwt.Infix
open Irmin_unix
(* Config module has three functions: root, shared and init. *)
module type Config = sig
val root: string
val shared: string
val init: unit -> unit
end
(* MakeVersioned is a functor which takes Config and Atom as arguments *)
module MakeVersioned (Config: Config) = struct
module OM = Counter.Make
module K = Irmin.Hash.SHA1
let from_just = function (Some x) -> x
| None -> failwith "Expected Some. Got None."
type vt = int64
module M = struct
module AO_value = struct
type t = int64
let t = Irmin.Type.int64
let pp = Irmin.Type.dump t
let of_string s =
match Int64.of_string_opt s with
| Some i -> Ok i
| None -> Error (`Msg "invalid counter value")
end
(* storage backend: Append-only store *)
module AO_store = struct
(* Immutable collection of all versionedt *)
module S = Irmin_git.AO(Git_unix.FS)(AO_value)
include S
let create config =
let level = Irmin.Private.Conf.key ~doc:"The Zlib compression level."
"level" Irmin.Private.Conf.(some int) None
in
let root = Irmin.Private.Conf.get config Irmin.Private.Conf.root in
let level = Irmin.Private.Conf.get config level in
Git_unix.FS.create ?root ?level ()
(* Somehow pulls the config set by Store.init *)
(* And creates a Git backend *)
let create () = create @@ Irmin_git.config Config.shared
end
type t = K.t
let of_string s =
match Int64.of_string_opt s with
| Some i -> Ok i
| None -> Error (`Msg "invalid counter value")
let of_adt (a:Counter.Make.t) : t Lwt.t =
let aostore = AO_store.create () in
let aostore_add value =
aostore >>= (fun ao_store -> AO_store.add ao_store value) in
aostore_add =<< Lwt.return (Int64.of_int a)
let to_adt (k:t) : Counter.Make.t Lwt.t =
AO_store.create () >>= fun ao_store ->
AO_store.find ao_store k >>= fun t ->
let t = from_just t in
Lwt.return (Int64.to_int t)
let t = K.t
let pp = K.pp
let of_string = K.of_string
let rec merge ~(old:t Irmin.Merge.promise) v1_k v2_k =
let open Irmin.Merge.Infix in
old () >>=* fun old_k ->
let old_k = from_just old_k in
to_adt old_k >>= fun oldv ->
to_adt v1_k >>= fun v1 ->
to_adt v2_k >>= fun v2 ->
let v = OM.merge oldv v1 v2 in
of_adt v >>= fun merged_k ->
Irmin.Merge.ok merged_k
let merge = Irmin.Merge.(option (v t merge))
end
(* Store is defined as follows which is a module *)
module BC_store = struct
module Store = Irmin_unix.Git.FS.KV(M)
module Sync = Irmin.Sync(Store)
type t = Store.t
let init ?root:_ ?bare:_ () =
let config = Irmin_git.config Config.root in
Store.Repo.v config
let master (repo:Store.repo) = Store.master repo
let clone t name = Store.clone t name
let get_branch r ~branch_name = Store.of_branch r branch_name
let merge s ~into = Store.merge s ~into
let update t k v = Store.set t k v
let read t k = Store.find t k
end
(* Vpst is a module which consists of type store, st and 'a t *)
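(* Illustrative usage sketch only (the initial value and the printing step
   are arbitrary choices, not part of this file):
     let incr_once =
       Vpst.bind (Vpst.get_latest_version ()) (fun c ->
         Vpst.bind (Vpst.sync_next_version ~v:(c + 1)) (fun c' ->
           Vpst.liftLwt (Lwt_io.printlf "counter = %d" c')))
     in
     Vpst.with_init_version_do 0 incr_once *)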
module Vpst : sig
type 'a t
val return : 'a -> 'a t
val bind : 'a t -> ('a -> 'b t) -> 'b t
val with_init_version_do: OM.t -> 'a t -> 'a
val fork_version : 'a t -> unit t
val get_latest_version: unit -> OM.t t
val sync_next_version: ?v:OM.t -> OM.t t
val liftLwt : 'a Lwt.t -> 'a t
end = struct
(* store is a type which is basically of type BC_store.t *)
type store = BC_store.t
(* st is a record type with fields as master, local, name and next_id *)
type st = {master : store;
local : store;
name : string;
next_id : int}
type 'a t = st -> ('a * st) Lwt.t
let info s = Irmin_unix.info "[repo %s] %s" Config.root s
let path = ["state"]
let return (x : 'a) : 'a t = fun st -> Lwt.return (x,st)
let bind (m1: 'a t) (f: 'a -> 'b t) : 'b t =
fun st -> (m1 st >>= fun (a,st') -> f a st')
let with_init_version_do (v: OM.t) (m: 'a t) =
Lwt_main.run
begin
BC_store.init () >>= fun repo ->
BC_store.master repo >>= fun m_br ->
M.of_adt v >>= fun k ->
let cinfo = info "creating state of master" in
BC_store.update m_br path k ~info:cinfo >>= fun () ->
BC_store.clone m_br "1_local" >>= fun t_br ->
let st = {master=m_br; local=t_br; name="1"; next_id=1} in
m st >>= fun (a,_) -> Lwt.return a
end
let with_init_forked_do (m: 'a t) =
BC_store.init () >>= fun repo ->
BC_store.master repo >>= fun m_br ->
BC_store.clone m_br "1_local" >>= fun t_br ->
let st = {master=m_br; local=t_br; name="1"; next_id=1} in
m st >>= fun (a, _) -> Lwt.return a
let fork_version (m: 'a t) : unit t = fun (st: st) ->
let thread_f () =
let child_name = st.name^"_"^(string_of_int st.next_id) in
let parent_m_br = st.master in
(* Ideally, the following has to happen: *)
(* BC_store.clone_force parent_m_br m_name >>= fun m_br -> *)
(* But, we currently default to an SC mode. Master is global. *)
let m_br = parent_m_br in
BC_store.clone m_br (child_name^"_local") >>= fun t_br ->
let new_st = {master = m_br; local = t_br; name = child_name; next_id = 1} in
m new_st in
begin
Lwt.async thread_f;
Lwt.return ((), {st with next_id=st.next_id+1})
end
let get_latest_version () : OM.t t = fun (st: st) ->
BC_store.read st.local path >>= fun k ->
M.to_adt @@ from_just k >>= fun td ->
Lwt.return (td,st)
let sync_remote_version remote_uri ?v : OM.t t = fun (st: st) ->
(* How do you commit the next version? Simply update path? *)
(* 1. Commit to the local branch *)
let cinfo = info "committing local state" in
(match v with
| None -> Lwt.return ()
| Some v ->
M.of_adt v >>= fun k ->
BC_store.update st.local path k cinfo) >>= fun () ->
(* 2. Pull from remote to master *)
let cinfo = info (Printf.sprintf "Merging remote: %s" remote_uri) in
BC_store.Sync.pull st.master (Irmin.remote_uri remote_uri) (`Merge cinfo) >>= fun _ ->
(* 2. Merge local master to the local branch *)
let cinfo = info "Merging master into local" in
BC_store.merge st.master ~into:st.local ~info:cinfo >>= fun _ ->
(* 3. Merge local branch to the local master *)
let cinfo = info "Merging local into master" in
BC_store.merge st.local ~into:st.master ~info:cinfo >>= fun _ ->
get_latest_version () st
let sync_next_version ?v : OM.t t = fun (st: st) ->
(* How do you commit the next version? Simply update path? *)
(* 1. Commit to the local branch *)
let cinfo = info "committing local state" in
(match v with
| None -> Lwt.return ()
| Some v ->
M.of_adt v >>= fun k ->
BC_store.update st.local path k cinfo) >>= fun () ->
(* 2. Merge local master to the local branch *)
let cinfo = info "Merging master into local" in
BC_store.merge st.master ~into:st.local ~info:cinfo >>= fun _ ->
(* 3. Merge local branch to the local master *)
let cinfo = info "Merging local into master" in
BC_store.merge st.local ~into:st.master ~info:cinfo >>= fun _ ->
get_latest_version () st
let liftLwt (m: 'a Lwt.t) : 'a t = fun st ->
m >>= fun a -> Lwt.return (a,st)
end
end
| null | https://raw.githubusercontent.com/gowthamk/ocaml-irmin/54775f6c3012e87d2d0308f37a2ec7b27477e887/counter/icounter.ml | ocaml | storage backhend: Append-only store
Immutable collection of all versionedt
And creates a Git backend
Store is defined as follows which is a module
store is a type which is basically of type BC_store.t
st is a record type with fields as master, local, name and next_id
Ideally, the following has to happen:
BC_store.clone_force parent_m_br m_name >>= fun m_br ->
How do you commit the next version? Simply update path?
How do you commit the next version? Simply update path? | open Lwt.Infix
open Irmin_unix
(* Config module has three functions: root, shared and init. *)
module type Config = sig
val root: string
val shared: string
val init: unit -> unit
end
(* MakeVersioned is a functor which takes Config and Atom as arguments *)
module MakeVersioned (Config: Config) = struct
module OM = Counter.Make
module K = Irmin.Hash.SHA1
let from_just = function (Some x) -> x
| None -> failwith "Expected Some. Got None."
type vt = int64
module M = struct
module AO_value = struct
type t = int64
let t = Irmin.Type.int64
let pp = Irmin.Type.dump t
let of_string s =
match Int64.of_string_opt s with
| Some i -> Ok i
| None -> Error (`Msg "invalid counter value")
end
module AO_store = struct
module S = Irmin_git.AO(Git_unix.FS)(AO_value)
include S
let create config =
let level = Irmin.Private.Conf.key ~doc:"The Zlib compression level."
"level" Irmin.Private.Conf.(some int) None
in
let root = Irmin.Private.Conf.get config Irmin.Private.Conf.root in
let level = Irmin.Private.Conf.get config level in
Git_unix.FS.create ?root ?level ()
(* Somehow pulls the config set by Store.init *)
let create () = create @@ Irmin_git.config Config.shared
end
type t = K.t
let of_string s =
match Int64.of_string_opt s with
| Some i -> Ok i
| None -> Error (`Msg "invalid counter value")
let of_adt (a:Counter.Make.t) : t Lwt.t =
let aostore = AO_store.create () in
let aostore_add value =
aostore >>= (fun ao_store -> AO_store.add ao_store value) in
aostore_add =<< Lwt.return (Int64.of_int a)
let to_adt (k:t) : Counter.Make.t Lwt.t =
AO_store.create () >>= fun ao_store ->
AO_store.find ao_store k >>= fun t ->
let t = from_just t in
Lwt.return (Int64.to_int t)
let t = K.t
let pp = K.pp
let of_string = K.of_string
let rec merge ~(old:t Irmin.Merge.promise) v1_k v2_k =
let open Irmin.Merge.Infix in
old () >>=* fun old_k ->
let old_k = from_just old_k in
to_adt old_k >>= fun oldv ->
to_adt v1_k >>= fun v1 ->
to_adt v2_k >>= fun v2 ->
let v = OM.merge oldv v1 v2 in
of_adt v >>= fun merged_k ->
Irmin.Merge.ok merged_k
let merge = Irmin.Merge.(option (v t merge))
end
module BC_store = struct
module Store = Irmin_unix.Git.FS.KV(M)
module Sync = Irmin.Sync(Store)
type t = Store.t
let init ?root:_ ?bare:_ () =
let config = Irmin_git.config Config.root in
Store.Repo.v config
let master (repo:Store.repo) = Store.master repo
let clone t name = Store.clone t name
let get_branch r ~branch_name = Store.of_branch r branch_name
let merge s ~into = Store.merge s ~into
let update t k v = Store.set t k v
let read t k = Store.find t k
end
(* Vpst is a module which consists of type store, st and 'a t *)
module Vpst : sig
type 'a t
val return : 'a -> 'a t
val bind : 'a t -> ('a -> 'b t) -> 'b t
val with_init_version_do: OM.t -> 'a t -> 'a
val fork_version : 'a t -> unit t
val get_latest_version: unit -> OM.t t
val sync_next_version: ?v:OM.t -> OM.t t
val liftLwt : 'a Lwt.t -> 'a t
end = struct
type store = BC_store.t
type st = {master : store;
local : store;
name : string;
next_id : int}
type 'a t = st -> ('a * st) Lwt.t
let info s = Irmin_unix.info "[repo %s] %s" Config.root s
let path = ["state"]
let return (x : 'a) : 'a t = fun st -> Lwt.return (x,st)
let bind (m1: 'a t) (f: 'a -> 'b t) : 'b t =
fun st -> (m1 st >>= fun (a,st') -> f a st')
let with_init_version_do (v: OM.t) (m: 'a t) =
Lwt_main.run
begin
BC_store.init () >>= fun repo ->
BC_store.master repo >>= fun m_br ->
M.of_adt v >>= fun k ->
let cinfo = info "creating state of master" in
BC_store.update m_br path k ~info:cinfo >>= fun () ->
BC_store.clone m_br "1_local" >>= fun t_br ->
let st = {master=m_br; local=t_br; name="1"; next_id=1} in
m st >>= fun (a,_) -> Lwt.return a
end
let with_init_forked_do (m: 'a t) =
BC_store.init () >>= fun repo ->
BC_store.master repo >>= fun m_br ->
BC_store.clone m_br "1_local" >>= fun t_br ->
let st = {master=m_br; local=t_br; name="1"; next_id=1} in
m st >>= fun (a, _) -> Lwt.return a
let fork_version (m: 'a t) : unit t = fun (st: st) ->
let thread_f () =
let child_name = st.name^"_"^(string_of_int st.next_id) in
let parent_m_br = st.master in
(* But, we currently default to an SC mode. Master is global. *)
let m_br = parent_m_br in
BC_store.clone m_br (child_name^"_local") >>= fun t_br ->
let new_st = {master = m_br; local = t_br; name = child_name; next_id = 1} in
m new_st in
begin
Lwt.async thread_f;
Lwt.return ((), {st with next_id=st.next_id+1})
end
let get_latest_version () : OM.t t = fun (st: st) ->
BC_store.read st.local path >>= fun k ->
M.to_adt @@ from_just k >>= fun td ->
Lwt.return (td,st)
let sync_remote_version remote_uri ?v : OM.t t = fun (st: st) ->
(* 1. Commit to the local branch *)
let cinfo = info "committing local state" in
(match v with
| None -> Lwt.return ()
| Some v ->
M.of_adt v >>= fun k ->
BC_store.update st.local path k cinfo) >>= fun () ->
(* 2. Pull from remote to master *)
let cinfo = info (Printf.sprintf "Merging remote: %s" remote_uri) in
BC_store.Sync.pull st.master (Irmin.remote_uri remote_uri) (`Merge cinfo) >>= fun _ ->
(* 2. Merge local master to the local branch *)
let cinfo = info "Merging master into local" in
BC_store.merge st.master ~into:st.local ~info:cinfo >>= fun _ ->
(* 3. Merge local branch to the local master *)
let cinfo = info "Merging local into master" in
BC_store.merge st.local ~into:st.master ~info:cinfo >>= fun _ ->
get_latest_version () st
let sync_next_version ?v : OM.t t = fun (st: st) ->
(* 1. Commit to the local branch *)
let cinfo = info "committing local state" in
(match v with
| None -> Lwt.return ()
| Some v ->
M.of_adt v >>= fun k ->
BC_store.update st.local path k cinfo) >>= fun () ->
(* 2. Merge local master to the local branch *)
let cinfo = info "Merging master into local" in
BC_store.merge st.master ~into:st.local ~info:cinfo >>= fun _ ->
(* 3. Merge local branch to the local master *)
let cinfo = info "Merging local into master" in
BC_store.merge st.local ~into:st.master ~info:cinfo >>= fun _ ->
get_latest_version () st
let liftLwt (m: 'a Lwt.t) : 'a t = fun st ->
m >>= fun a -> Lwt.return (a,st)
end
end
|
2996ff4673a0034f7fd6d1162ce83e271ccc501d5f9a124d4d2b0c87d2de2b90 | drewr/copycat | Opts.hs | module Copycat.Opts ( CommandLine(..)
, Opts(..)
, Verbosity(..)
, Columns
, parseArgs
) where
import Options.Applicative
type Url = String
type Command = String
type Columns = String
data Verbosity = Normal
| Verbose
deriving (Show, Read)
data Opts = Opts
{ url :: Url
, columns :: Columns
, verbose :: Verbosity
}
data CommandLine = CommandLine Command Opts
opts :: Parser Opts
opts = Opts
<$> strOption ( long "url"
<> short 'u'
<> value ":9200"
<> metavar "URL"
<> help "Instance URL" )
<*> strOption ( long "columns"
<> short 'c'
<> value "default"
<> metavar "COLUMNS"
<> help "What columns to return" )
<*> flag Normal Verbose ( long "verbose"
<> short 'v'
<> help "Show column headers?" )
args :: Parser Command
args = argument str ( metavar "API" <> help "cat API to call" )
parseCommandLine :: Parser CommandLine
parseCommandLine = CommandLine <$> args <*> opts
parseArgs :: IO (CommandLine)
parseArgs = execParser p
where
p = info (helper <*> parseCommandLine)
( fullDesc <> progDesc "copycat!" <> header "the _cat companion" )
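-- Example invocation (illustrative only; the executable name is assumed):
--
--   $ copycat indices --url http://localhost:9200 -c "index,docs.count" -v
--
-- parseArgs then yields, in record terms:
--   CommandLine "indices"
--     (Opts { url = "http://localhost:9200"
--           , columns = "index,docs.count"
--           , verbose = Verbose })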
| null | https://raw.githubusercontent.com/drewr/copycat/18193200e9ff7b10b24dfb23c9da6a443da8f19d/src/Copycat/Opts.hs | haskell | module Copycat.Opts ( CommandLine(..)
, Opts(..)
, Verbosity(..)
, Columns
, parseArgs
) where
import Options.Applicative
type Url = String
type Command = String
type Columns = String
data Verbosity = Normal
| Verbose
deriving (Show, Read)
data Opts = Opts
{ url :: Url
, columns :: Columns
, verbose :: Verbosity
}
data CommandLine = CommandLine Command Opts
opts :: Parser Opts
opts = Opts
<$> strOption ( long "url"
<> short 'u'
<> value ":9200"
<> metavar "URL"
<> help "Instance URL" )
<*> strOption ( long "columns"
<> short 'c'
<> value "default"
<> metavar "COLUMNS"
<> help "What columns to return" )
<*> flag Normal Verbose ( long "verbose"
<> short 'v'
<> help "Show column headers?" )
args :: Parser Command
args = argument str ( metavar "API" <> help "cat API to call" )
parseCommandLine :: Parser CommandLine
parseCommandLine = CommandLine <$> args <*> opts
parseArgs :: IO (CommandLine)
parseArgs = execParser p
where
p = info (helper <*> parseCommandLine)
( fullDesc <> progDesc "copycat!" <> header "the _cat companion" )
|
|
bcb4d395a170b1dd0cb361b87a1de7dcd166528474575826681e54ebe3e0a351 | argp/bap | batInt64.ml |
(*
* BatInt64 - Extended 64-bit integers
* Copyright (C) 2007 Bluestorm <bluestorm dot dylc on-the-server gmail dot com>
* 2008 David Teller
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version,
* with the special exception on linking described in file LICENSE.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*)
module BaseInt64 = struct
include Int64
let modulo = rem
let pow = BatNumber.generic_pow ~zero ~one ~div_two:(fun n -> shift_right n 1) ~mod_two:(logand one) ~mul
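(* [pow] is BatNumber.generic_pow specialised to Int64 (exponentiation by
   squaring); e.g. [pow 2L 10L] is expected to yield [1024L]. *)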
end
include BatNumber.MakeNumeric(BaseInt64)
let min_int = Int64.min_int
let max_int = Int64.max_int
let minus_one = Int64.minus_one
let lognot = Int64.lognot
external neg : int64 -> int64 = "%int64_neg"
external add : int64 -> int64 -> int64 = "%int64_add"
external sub : int64 -> int64 -> int64 = "%int64_sub"
external mul : int64 -> int64 -> int64 = "%int64_mul"
external div : int64 -> int64 -> int64 = "%int64_div"
external rem : int64 -> int64 -> int64 = "%int64_mod"
external logand : int64 -> int64 -> int64 = "%int64_and"
external logor : int64 -> int64 -> int64 = "%int64_or"
external logxor : int64 -> int64 -> int64 = "%int64_xor"
external shift_left : int64 -> int -> int64 = "%int64_lsl"
external shift_right : int64 -> int -> int64 = "%int64_asr"
external shift_right_logical : int64 -> int -> int64 = "%int64_lsr"
external of_int : int -> int64 = "%int64_of_int"
external to_int : int64 -> int = "%int64_to_int"
external of_float : float -> int64 = "caml_int64_of_float"
external to_float : int64 -> float = "caml_int64_to_float"
external of_int32 : int32 -> int64 = "%int64_of_int32"
external to_int32 : int64 -> int32 = "%int64_to_int32"
external of_nativeint : nativeint -> int64 = "%int64_of_nativeint"
external to_nativeint : int64 -> nativeint = "%int64_to_nativeint"
external of_string : string -> int64 = "caml_int64_of_string"
external bits_of_float : float -> int64 = "caml_int64_bits_of_float"
external float_of_bits : int64 -> float = "caml_int64_float_of_bits"
external format : string -> int64 -> string = "caml_int64_format"
let print out t = BatInnerIO.nwrite out (to_string t)
let print_hex out t = BatPrintf.fprintf out "%Lx" t
| null | https://raw.githubusercontent.com/argp/bap/2f60a35e822200a1ec50eea3a947a322b45da363/batteries/src/batInt64.ml | ocaml |
* BatInt64 - Extended 64-bit integers
* Copyright (C) 2007 Bluestorm <bluestorm dot dylc on-the-server gmail dot com>
* 2008 David Teller
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version,
* with the special exception on linking described in file LICENSE.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*)
module BaseInt64 = struct
include Int64
let modulo = rem
let pow = BatNumber.generic_pow ~zero ~one ~div_two:(fun n -> shift_right n 1) ~mod_two:(logand one) ~mul
end
include BatNumber.MakeNumeric(BaseInt64)
let min_int = Int64.min_int
let max_int = Int64.max_int
let minus_one = Int64.minus_one
let lognot = Int64.lognot
external neg : int64 -> int64 = "%int64_neg"
external add : int64 -> int64 -> int64 = "%int64_add"
external sub : int64 -> int64 -> int64 = "%int64_sub"
external mul : int64 -> int64 -> int64 = "%int64_mul"
external div : int64 -> int64 -> int64 = "%int64_div"
external rem : int64 -> int64 -> int64 = "%int64_mod"
external logand : int64 -> int64 -> int64 = "%int64_and"
external logor : int64 -> int64 -> int64 = "%int64_or"
external logxor : int64 -> int64 -> int64 = "%int64_xor"
external shift_left : int64 -> int -> int64 = "%int64_lsl"
external shift_right : int64 -> int -> int64 = "%int64_asr"
external shift_right_logical : int64 -> int -> int64 = "%int64_lsr"
external of_int : int -> int64 = "%int64_of_int"
external to_int : int64 -> int = "%int64_to_int"
external of_float : float -> int64 = "caml_int64_of_float"
external to_float : int64 -> float = "caml_int64_to_float"
external of_int32 : int32 -> int64 = "%int64_of_int32"
external to_int32 : int64 -> int32 = "%int64_to_int32"
external of_nativeint : nativeint -> int64 = "%int64_of_nativeint"
external to_nativeint : int64 -> nativeint = "%int64_to_nativeint"
external of_string : string -> int64 = "caml_int64_of_string"
external bits_of_float : float -> int64 = "caml_int64_bits_of_float"
external float_of_bits : int64 -> float = "caml_int64_float_of_bits"
external format : string -> int64 -> string = "caml_int64_format"
let print out t = BatInnerIO.nwrite out (to_string t)
let print_hex out t = BatPrintf.fprintf out "%Lx" t
|
|
c10fab19c53088def394b4cd204140ff575a4c620ad6b82d9cfe0b7571c7fb1b | grin-compiler/ghc-whole-program-compiler-project | IO.hs | module Stg.IO
( -- * Convenient Modpak IO
readModpakS
, readModpakL
, doesModpakEntryExist
-- * Convenient Decoding
, decodeStgbin
, decodeStgbin'
, decodeStgbinInfo
, decodeStgbinStubs
, decodeStgbinModuleName
-- * .fullpak and .modpak content structure
, fullpakAppInfoPath
, modpakHaskellSourcePath
, modpakStgbinPath
) where
import Prelude hiding (readFile)
import Control.Monad.IO.Class
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as BS8
import qualified Data.ByteString.Lazy as BSL
import Data.Binary
import Data.Binary.Get
import Codec.Archive.Zip
import System.FilePath
import Stg.Syntax
import Stg.Reconstruct
-- from .modpak file
readModpakS :: FilePath -> String -> (BS.ByteString -> a) -> IO a
readModpakS modpakPath fname f = do
s <- mkEntrySelector fname
f <$> withArchive modpakPath (getEntry s)
readModpakL :: FilePath -> String -> (BSL.ByteString -> a) -> IO a
readModpakL modpakPath fname f = do
s <- mkEntrySelector fname
f . BSL.fromStrict <$> withArchive modpakPath (getEntry s)
doesModpakEntryExist :: FilePath -> String -> IO Bool
doesModpakEntryExist modpakPath fname = do
s <- mkEntrySelector fname
withArchive modpakPath $ doesEntryExist s
-- from bytestring
decodeStgbin' :: BSL.ByteString -> SModule
decodeStgbin' = decode
decodeStgbin :: BSL.ByteString -> Module
decodeStgbin = reconModule . decodeStgbin'
decodeStgbinInfo :: BSL.ByteString -> (Name, UnitId, ModuleName, Maybe Name, SForeignStubs, Bool, [(UnitId, [ModuleName])])
decodeStgbinInfo = decode
decodeStgbinStubs :: BSL.ByteString -> (Name, UnitId, ModuleName, Maybe Name, SForeignStubs)
decodeStgbinStubs = decode
decodeStgbinModuleName :: BSL.ByteString -> (Name, UnitId, ModuleName, Maybe Name)
decodeStgbinModuleName = decode
-- .modpak and .fullpak structure
modpakStgbinPath :: FilePath
modpakStgbinPath = "module.stgbin"
modpakHaskellSourcePath :: FilePath
modpakHaskellSourcePath = "module.hs"
fullpakAppInfoPath :: FilePath
fullpakAppInfoPath = "app.info"
| null | https://raw.githubusercontent.com/grin-compiler/ghc-whole-program-compiler-project/fee2d7b513f1201c17309f9a928c9da17dfe8ff7/external-stg/lib/Stg/IO.hs | haskell | * Convenient Modpak IO
* Convenient Decoding | module Stg.IO
readModpakS
, readModpakL
, doesModpakEntryExist
, decodeStgbin
, decodeStgbin'
, decodeStgbinInfo
, decodeStgbinStubs
, decodeStgbinModuleName
-- * .fullpak and .modpak content structure
, fullpakAppInfoPath
, modpakHaskellSourcePath
, modpakStgbinPath
) where
import Prelude hiding (readFile)
import Control.Monad.IO.Class
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as BS8
import qualified Data.ByteString.Lazy as BSL
import Data.Binary
import Data.Binary.Get
import Codec.Archive.Zip
import System.FilePath
import Stg.Syntax
import Stg.Reconstruct
-- from .modpak file
readModpakS :: FilePath -> String -> (BS.ByteString -> a) -> IO a
readModpakS modpakPath fname f = do
s <- mkEntrySelector fname
f <$> withArchive modpakPath (getEntry s)
readModpakL :: FilePath -> String -> (BSL.ByteString -> a) -> IO a
readModpakL modpakPath fname f = do
s <- mkEntrySelector fname
f . BSL.fromStrict <$> withArchive modpakPath (getEntry s)
doesModpakEntryExist :: FilePath -> String -> IO Bool
doesModpakEntryExist modpakPath fname = do
s <- mkEntrySelector fname
withArchive modpakPath $ doesEntryExist s
-- from bytestring
decodeStgbin' :: BSL.ByteString -> SModule
decodeStgbin' = decode
decodeStgbin :: BSL.ByteString -> Module
decodeStgbin = reconModule . decodeStgbin'
decodeStgbinInfo :: BSL.ByteString -> (Name, UnitId, ModuleName, Maybe Name, SForeignStubs, Bool, [(UnitId, [ModuleName])])
decodeStgbinInfo = decode
decodeStgbinStubs :: BSL.ByteString -> (Name, UnitId, ModuleName, Maybe Name, SForeignStubs)
decodeStgbinStubs = decode
decodeStgbinModuleName :: BSL.ByteString -> (Name, UnitId, ModuleName, Maybe Name)
decodeStgbinModuleName = decode
-- .modpak and .fullpak structure
modpakStgbinPath :: FilePath
modpakStgbinPath = "module.stgbin"
modpakHaskellSourcePath :: FilePath
modpakHaskellSourcePath = "module.hs"
fullpakAppInfoPath :: FilePath
fullpakAppInfoPath = "app.info"
|
dc211fbb30fe313c3fd21810b419ceefe6ed4ec85202d8f3ec1c0f384531773d | weavejester/integrant | core_test.cljc | (ns integrant.core-test
(:require [clojure.spec.alpha :as s]
#?(:clj [clojure.test :refer :all]
:cljs [cljs.test :refer-macros [are deftest is testing]])
[integrant.core :as ig]
[weavejester.dependency :as dep]))
(def log (atom []))
(defmethod ig/prep-key ::p [_ v]
(merge {:a (ig/ref ::a)} v))
(defmethod ig/init-key :default [k v]
(swap! log conj [:init k v])
[v])
(defmethod ig/init-key ::x [k v]
(swap! log conj [:init k v])
:x)
(defmethod ig/init-key ::error-init [_ _]
(throw (ex-info "Testing" {:reason ::test})))
(defmethod ig/init-key ::k [_ v] v)
(defmethod ig/init-key ::n [_ v] (inc v))
(defmethod ig/pre-init-spec ::n [_] nat-int?)
(defmethod ig/init-key ::r [_ v] {:v v})
(defmethod ig/resolve-key ::r [_ {:keys [v]}] v)
(defmethod ig/resume-key ::r [k v _ _] (ig/init-key k v))
(defmethod ig/halt-key! :default [k v]
(swap! log conj [:halt k v]))
(defmethod ig/halt-key! ::error-halt [_ _]
(throw (ex-info "Testing" {:reason ::test})))
(defmethod ig/resume-key :default [k cfg cfg' sys]
(swap! log conj [:resume k cfg cfg' sys])
[cfg])
(defmethod ig/resume-key ::x [k cfg cfg' sys]
(swap! log conj [:resume k cfg cfg' sys])
:rx)
(defmethod ig/suspend-key! :default [k v]
(swap! log conj [:suspend k v]))
(derive ::p ::pp)
(derive ::pp ::ppp)
(derive ::ap ::a)
(derive ::ap ::p)
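;; Keyword hierarchy exercised by the tests below: ::p isa ::pp, ::pp isa
;; ::ppp, and ::ap isa both ::a and ::p.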
(deftest ref-test
(is (ig/ref? (ig/ref ::foo)))
(is (ig/ref? (ig/ref [::foo ::bar])))
(is (ig/reflike? (ig/ref ::foo)))
(is (ig/reflike? (ig/ref [::foo ::bar]))))
(deftest refset-test
(is (ig/refset? (ig/refset ::foo)))
(is (ig/refset? (ig/refset [::foo ::bar])))
(is (ig/reflike? (ig/refset ::foo)))
(is (ig/reflike? (ig/refset [::foo ::bar]))))
(deftest composite-keyword-test
(let [k (ig/composite-keyword [::a ::b])]
(is (isa? k ::a))
(is (isa? k ::b))
(is (identical? k (ig/composite-keyword [::a ::b])))
(is (not= k (ig/composite-keyword [::a ::c])))))
(deftest valid-config-key-test
(is (ig/valid-config-key? ::a))
(is (not (ig/valid-config-key? :a))))
(deftest expand-test
(is (= (ig/expand {::a (ig/ref ::b), ::b 1})
{::a 1, ::b 1}))
(is (= (ig/expand {::a (ig/ref ::b), ::b (ig/ref ::c), ::c 2})
{::a 2, ::b 2, ::c 2}))
(is (= (ig/expand {::a (ig/ref ::pp), ::p 1})
{::a 1, ::p 1}))
(is (= (ig/expand {::a (ig/refset ::ppp), ::p 1, ::pp 2})
{::a #{1 2}, ::p 1, ::pp 2}))
(is (= (ig/expand {::a (ig/refset ::ppp)})
{::a #{}})))
#?(:clj
(deftest read-string-test
(is (= (ig/read-string "{:foo/a #ig/ref :foo/b, :foo/b 1}")
{:foo/a (ig/ref :foo/b), :foo/b 1}))
(is (= (ig/read-string "{:foo/a #ig/refset :foo/b, :foo/b 1}")
{:foo/a (ig/refset :foo/b), :foo/b 1}))
(is (= (ig/read-string {:readers {'test/var find-var}}
"{:foo/a #test/var clojure.core/+}")
{:foo/a #'+}))))
#?(:clj
(defn- remove-lib [lib]
(remove-ns lib)
(dosync (alter @#'clojure.core/*loaded-libs* disj lib))))
(derive :integrant.test-child/foo :integrant.test/foo)
#?(:clj
(deftest load-namespaces-test
(testing "all namespaces"
(remove-lib 'integrant.test.foo)
(remove-lib 'integrant.test.bar)
(remove-lib 'integrant.test.baz)
(remove-lib 'integrant.test.quz)
(is (= (set (ig/load-namespaces {:integrant.test/foo 1
:integrant.test.bar/wuz 2
[:integrant.test/baz :integrant.test/x] 3
[:integrant.test/y :integrant.test/quz] 4}))
'#{integrant.test.foo
integrant.test.bar
integrant.test.baz
integrant.test.quz}))
(is (some? (find-ns 'integrant.test.foo)))
(is (some? (find-ns 'integrant.test.bar)))
(is (some? (find-ns 'integrant.test.baz)))
(is (some? (find-ns 'integrant.test.quz)))
(is (= (some-> 'integrant.test.foo/message find-var var-get) "foo"))
(is (= (some-> 'integrant.test.bar/message find-var var-get) "bar"))
(is (= (some-> 'integrant.test.baz/message find-var var-get) "baz"))
(is (= (some-> 'integrant.test.quz/message find-var var-get) "quz")))
(testing "some namespaces"
(remove-lib 'integrant.test.foo)
(remove-lib 'integrant.test.bar)
(remove-lib 'integrant.test.baz)
(remove-lib 'integrant.test.quz)
(is (= (set (ig/load-namespaces
{:integrant.test/foo 1
:integrant.test/bar (ig/ref :integrant.test/foo)
:integrant.test/baz 3}
[:integrant.test/bar]))
'#{integrant.test.foo
integrant.test.bar}))
(is (some? (find-ns 'integrant.test.foo)))
(is (some? (find-ns 'integrant.test.bar)))
(is (nil? (find-ns 'integrant.test.baz))))
(testing "load namespaces of ancestors"
(remove-lib 'integrant.test.foo)
(is (= (set (ig/load-namespaces
{:integrant.test-child/foo 1}))
'#{integrant.test.foo}))
(is (some? (find-ns 'integrant.test.foo))))))
(deftest dependency-graph-test
(let [m {::a (ig/ref ::p), ::b (ig/refset ::ppp) ::p 1, ::pp 2}]
(testing "graph with refsets"
(let [g (ig/dependency-graph m)]
(is (dep/depends? g ::a ::p))
(is (dep/depends? g ::b ::p))
(is (dep/depends? g ::b ::pp))))
(testing "graph without refsets"
(let [g (ig/dependency-graph m {:include-refsets? false})]
(is (dep/depends? g ::a ::p))
(is (not (dep/depends? g ::b ::p)))
(is (not (dep/depends? g ::b ::pp)))))))
(deftest key-comparator-test
(let [graph (ig/dependency-graph {::a (ig/ref ::ppp) ::p 1, ::b 2})]
(is (= (sort (ig/key-comparator graph) [::b ::a ::p])
[::p ::a ::b]))))
(deftest derived-from?-test
(are [a b] (ig/derived-from? a b)
::p ::p
::p ::pp
::p ::ppp
::ap [::a ::p]
::ap [::a ::pp]
[::a ::p] [::a ::pp]
[::a ::b ::p] [::a ::ppp]))
(deftest find-derived-1-test
(testing "missing key"
(is (nil? (ig/find-derived-1 {} ::p))))
(testing "derived key"
(is (= (ig/find-derived-1 {::a "x" ::p "y"} ::pp)
[::p "y"])))
(testing "ambiguous key"
(is (thrown-with-msg?
#?(:clj clojure.lang.ExceptionInfo :cljs cljs.core.ExceptionInfo)
(re-pattern (str "Ambiguous key: " ::pp "\\. "
"Found multiple candidates: " ::p ", " ::pp))
(ig/find-derived-1 {::a "x" ::p "y", ::pp "z"} ::pp))))
(testing "composite key"
(is (= (ig/find-derived-1 {::a "x" [::b ::x] "y"} ::x)
[[::b ::x] "y"]))))
(deftest find-derived-test
(testing "missing key"
(is (nil? (ig/find-derived {} ::p))))
(testing "derived key"
(is (= (ig/find-derived {::a "x" ::p "y" ::pp "z"} ::pp)
[[::p "y"] [::pp "z"]])))
(testing "ambiguous key"
(is (= (ig/find-derived {::a "x" ::p "y" ::pp "z"} ::ppp)
[[::p "y"] [::pp "z"]])))
(testing "composite key"
(is (= (ig/find-derived {::a "x" [::b ::x] "y", [::b ::y] "z"} ::b)
[[[::b ::x] "y"] [[::b ::y] "z"]]))))
(deftest prep-test
(testing "default"
(is (= (ig/prep {::q {:b 2}, ::a 1})
{::q {:b 2}, ::a 1})))
(testing "custom prep-key"
(is (= (ig/prep {::p {:b 2}, ::a 1})
{::p {:a (ig/ref ::a), :b 2}, ::a 1})))
(testing "prep then init"
(is (= (ig/init (ig/prep {::p {:b 2}, ::a 1}))
{::p [{:a [1], :b 2}], ::a [1]}))))
(deftest init-test
(testing "without keys"
(reset! log [])
(let [m (ig/init {::a (ig/ref ::b), ::b 1})]
(is (= m {::a [[1]], ::b [1]}))
(is (= @log [[:init ::b 1]
[:init ::a [1]]]))))
(testing "with keys"
(reset! log [])
(let [m (ig/init {::a (ig/ref ::b), ::b 1, ::c 2} [::a])]
(is (= m {::a [[1]], ::b [1]}))
(is (= @log [[:init ::b 1]
[:init ::a [1]]]))))
(testing "with inherited keys"
(reset! log [])
(let [m (ig/init {::p (ig/ref ::a), ::a 1} [::pp])]
(is (= m {::p [[1]], ::a [1]}))
(is (= @log [[:init ::a 1]
[:init ::p [1]]]))))
(testing "with composite keys"
(reset! log [])
(let [m (ig/init {::a (ig/ref ::b), [::x ::b] 1})]
(is (= m {::a [:x], [::x ::b] :x}))
(is (= @log [[:init [::x ::b] 1]
[:init ::a :x]]))))
(testing "with composite refs"
(reset! log [])
(let [m (ig/init {::a (ig/ref [::b ::c]), [::b ::c ::e] 1, [::b ::d] 2})]
(is (= m {::a [[1]], [::b ::c ::e] [1], [::b ::d] [2]}))
(is (or (= @log [[:init [::b ::c ::e] 1]
[:init ::a [1]]
[:init [::b ::d] 2]])
(= @log [[:init [::b ::d] 2]
[:init [::b ::c ::e] 1]
[:init ::a [1]]])))))
(testing "with failing composite refs"
(reset! log [])
(is (thrown-with-msg?
#?(:clj clojure.lang.ExceptionInfo :cljs cljs.core.ExceptionInfo)
#"^Invalid composite key: \[:integrant.core-test/a :b\]. Every keyword must be namespaced.$"
(ig/init {[::a :b] :anything}))))
(testing "with custom resolve-key"
(let [m (ig/init {::a (ig/ref ::r), ::r 1})]
(is (= m {::a [1], ::r {:v 1}}))))
(testing "with refsets"
(reset! log [])
(let [m (ig/init {::a (ig/refset ::ppp), ::p 1, ::pp 2})]
(is (= m {::a [#{[1] [2]}], ::p [1], ::pp [2]}))
(is (= @log [[:init ::p 1]
[:init ::pp 2]
[:init ::a #{[1] [2]}]]))))
(testing "with refsets and keys"
(reset! log [])
(let [m {::a (ig/refset ::ppp), ::p 1, ::pp 2}]
(is (= (ig/init m [::a]) {::a [#{}]}))
(is (= (ig/init m [::a ::p]) {::a [#{[1]}] ::p [1]}))
(is (= (ig/init m [::a ::pp]) {::a [#{[1] [2]}] ::p [1] ::pp [2]}))))
(testing "large config"
(is (= (ig/init {:a/a1 {} :a/a2 {:_ (ig/ref :a/a1)}
:a/a3 {} :a/a4 {} :a/a5 {}
:a/a6 {} :a/a7 {} :a/a8 {}
:a/a9 {} :a/a10 {}})
{:a/a1 [{}] :a/a2 [{:_ [{}]}]
:a/a3 [{}] :a/a4 [{}] :a/a5 [{}]
:a/a6 [{}] :a/a7 [{}] :a/a8 [{}]
:a/a9 [{}] :a/a10 [{}]})))
(testing "with passing specs"
(let [m (ig/init {::n (ig/ref ::k), ::k 1})]
(is (= m {::n 2, ::k 1}))))
(testing "with failing specs"
(is (thrown-with-msg?
#?(:clj clojure.lang.ExceptionInfo :cljs cljs.core.ExceptionInfo)
(re-pattern (str "Spec failed on key " ::n " when building system"))
(ig/init {::n (ig/ref ::k), ::k 1.1}))))
(testing "with failing composite specs"
(is (thrown-with-msg?
#?(:clj clojure.lang.ExceptionInfo :cljs cljs.core.ExceptionInfo)
(re-pattern (str "Spec failed on key \\[" ::n " " ::nnn "\\] when building system"))
(ig/init {[::n ::nnn] 1.1})))))
(deftest halt-test
(testing "without keys"
(reset! log [])
(let [m (ig/init {::a (ig/ref ::b), ::b 1})]
(ig/halt! m)
(is (= @log [[:init ::b 1]
[:init ::a [1]]
[:halt ::a [[1]]]
[:halt ::b [1]]]))))
(testing "with keys"
(reset! log [])
(let [m (ig/init {::a (ig/ref ::b), ::b (ig/ref ::c), ::c 1})]
(ig/halt! m [::a])
(is (= @log [[:init ::c 1]
[:init ::b [1]]
[:init ::a [[1]]]
[:halt ::a [[[1]]]]]))
(reset! log [])
(ig/halt! m [::c])
(is (= @log [[:halt ::a [[[1]]]]
[:halt ::b [[1]]]
[:halt ::c [1]]]))))
(testing "with partial system"
(reset! log [])
(let [m (ig/init {::a 1, ::b (ig/ref ::a)} [::a])]
(ig/halt! m)
(is (= @log [[:init ::a 1]
[:halt ::a [1]]]))))
(testing "with inherited keys"
(reset! log [])
(let [m (ig/init {::a (ig/ref ::p), ::p 1} [::a])]
(ig/halt! m [::pp])
(is (= @log [[:init ::p 1]
[:init ::a [1]]
[:halt ::a [[1]]]
[:halt ::p [1]]]))))
(testing "with composite keys"
(reset! log [])
(let [m (ig/init {::a (ig/ref ::b), [::x ::b] 1})]
(ig/halt! m)
(is (= @log [[:init [::x ::b] 1]
[:init ::a :x]
[:halt ::a [:x]]
[:halt [::x ::b] :x]])))))
(deftest suspend-resume-test
(testing "same configuration"
(reset! log [])
(let [c {::a (ig/ref ::b), ::b 1}
m (ig/init c)
_ (ig/suspend! m)
m' (ig/resume c m)]
(is (= @log [[:init ::b 1]
[:init ::a [1]]
[:suspend ::a [[1]]]
[:suspend ::b [1]]
[:resume ::b 1 1 [1]]
[:resume ::a [1] [1] [[1]]]]))))
(testing "missing keys"
(reset! log [])
(let [c {::a (ig/ref ::b), ::b 1}
m (ig/init c)
_ (ig/suspend! m)
m' (ig/resume (dissoc c ::a) m)]
(is (= @log [[:init ::b 1]
[:init ::a [1]]
[:suspend ::a [[1]]]
[:suspend ::b [1]]
[:halt ::a [[1]]]
[:resume ::b 1 1 [1]]]))))
(testing "missing refs"
(reset! log [])
(let [c {::a {:b (ig/ref ::b)}, ::b 1}
m (ig/init c)
_ (ig/suspend! m)
m' (ig/resume {::a []} m)]
(is (= @log [[:init ::b 1]
[:init ::a {:b [1]}]
[:suspend ::a [{:b [1]}]]
[:suspend ::b [1]]
[:halt ::b [1]]
[:resume ::a [] {:b [1]} [{:b [1]}]]]))))
(testing "with custom resolve-key"
(let [c {::a (ig/ref ::r), ::r 1}
m (ig/init c)
_ (ig/suspend! m)
m' (ig/resume c m)]
(is (= m m'))))
(testing "composite keys"
(reset! log [])
(let [c {::a (ig/ref ::x), [::b ::x] 1}
m (ig/init c)
_ (ig/suspend! m)
m' (ig/resume c m)]
(is (= @log [[:init [::b ::x] 1]
[:init ::a :x]
[:suspend ::a [:x]]
[:suspend [::b ::x] :x]
[:resume [::b ::x] 1 1 :x]
[:resume ::a :rx :x [:x]]]))))
(testing "resume key with dependencies"
(reset! log [])
(let [c {::a {:b (ig/ref ::b)}, ::b 1}
m (ig/init c [::a])
_ (ig/suspend! m)
m' (ig/resume c m [::a])]
(is (= @log
[[:init ::b 1]
[:init ::a {:b [1]}]
[:suspend ::a [{:b [1]}]]
[:suspend ::b [1]]
[:resume ::b 1 1 [1]]
[:resume ::a {:b [1]} {:b [1]} [{:b [1]}]]])))))
(deftest invalid-configs-test
(testing "ambiguous refs"
(is (thrown-with-msg?
#?(:clj clojure.lang.ExceptionInfo :cljs cljs.core.ExceptionInfo)
(re-pattern (str "Ambiguous key: " ::ppp "\\. "
"Found multiple candidates: "
"(" ::p ", " ::pp "|" ::pp ", " ::p ")"))
(ig/init {::a (ig/ref ::ppp), ::p 1, ::pp 2}))))
(testing "missing refs"
(is (thrown-with-msg?
#?(:clj clojure.lang.ExceptionInfo :cljs cljs.core.ExceptionInfo)
(re-pattern (str "Missing definitions for refs: " ::b))
(ig/init {::a (ig/ref ::b)}))))
(testing "missing refs with explicit keys"
(is (= (ig/init {::a (ig/ref ::ppp), ::p 1, ::pp 2} [::p ::pp])
{::p [1], ::pp [2]})))
(testing "missing refs with explicit keys"
(is (= (ig/init {::a 1, ::b (ig/ref ::c)} [::a])
{::a [1]}))))
(defn build-log [config]
(let [log (atom [])]
[(ig/build config (keys config) (fn [k v] (last (swap! log conj [:build k v]))))
@log]))
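;; Test helper: builds the whole config with ig/build while recording the
;; order in which keys are built.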
(deftest build-test
(is (= [{::a [:build ::a [:build ::b 1]]
::b [:build ::b 1]}
[[:build ::b 1]
[:build ::a [:build ::b 1]]]]
(build-log {::a (ig/ref ::b)
::b 1}))))
(defn test-log [f m]
(let [log (atom [])]
[(f m (keys m) (fn [k v] (last (swap! log conj [:test k v]))))
@log]))
(deftest run-test
(let [config {::a (ig/ref ::b), ::b 1}
[system _] (build-log config)]
(is (= [nil
[[:test ::b [:build ::b 1]]
[:test ::a [:build ::a [:build ::b 1]]]]]
(test-log ig/run! system)))
(is (= [nil
[[:test ::a [:build ::a [:build ::b 1]]]
[:test ::b [:build ::b 1]]]]
(test-log ig/reverse-run! system)))))
(deftest fold-test
(let [config {::a (ig/ref ::ppp), ::b (ig/ref ::pp), ::p 1, ::c 2}
system (ig/init config)]
(is (= (ig/fold system #(conj %1 [%2 %3]) [])
[[::p [1]] [::a [[1]]] [::b [[1]]] [::c [2]]]))))
(deftest wrapped-exception-test
(testing "exception when building"
(let [ex (try (ig/init {::a 1, ::error-init (ig/ref ::a)}) nil
(catch #?(:clj Throwable :cljs :default) t t))]
(is (some? ex))
(is (= (#?(:clj .getMessage :cljs ex-message) ex)
(str "Error on key " ::error-init " when building system")))
(is (= (ex-data ex)
{:reason ::ig/build-threw-exception
:system {::a [1]}
:function ig/init-key
:key ::error-init
:value [1]}))
(let [cause (#?(:clj .getCause :cljs ex-cause) ex)]
(is (some? cause))
(is (= (#?(:clj .getMessage :cljs ex-message) cause) "Testing"))
(is (= (ex-data cause) {:reason ::test})))))
(testing "exception when running"
(let [system (ig/init {::a 1
::error-halt (ig/ref ::a)
::b (ig/ref ::error-halt)
::c (ig/ref ::b)})
ex (try (ig/halt! system)
(catch #?(:clj Throwable :cljs :default) t t))]
(is (some? ex))
(is (= (#?(:clj .getMessage :cljs ex-message) ex)
(str "Error on key " ::error-halt " when running system")))
(is (= (ex-data ex)
{:reason ::ig/run-threw-exception
:system {::a [1], ::error-halt [[1]], ::b [[[1]]], ::c [[[[1]]]]}
:completed-keys '(::c ::b)
:remaining-keys '(::a)
:function ig/halt-key!
:key ::error-halt
:value [[1]]}))
(let [cause (#?(:clj .getCause :cljs ex-cause) ex)]
(is (some? cause))
(is (= (#?(:clj .getMessage :cljs ex-message) cause) "Testing"))
(is (= (ex-data cause) {:reason ::test}))))))
| null | https://raw.githubusercontent.com/weavejester/integrant/32a46f5dca8a6b563a6dddf88bec887be3201b08/test/integrant/core_test.cljc | clojure | (ns integrant.core-test
(:require [clojure.spec.alpha :as s]
#?(:clj [clojure.test :refer :all]
:cljs [cljs.test :refer-macros [are deftest is testing]])
[integrant.core :as ig]
[weavejester.dependency :as dep]))
(def log (atom []))
(defmethod ig/prep-key ::p [_ v]
(merge {:a (ig/ref ::a)} v))
(defmethod ig/init-key :default [k v]
(swap! log conj [:init k v])
[v])
(defmethod ig/init-key ::x [k v]
(swap! log conj [:init k v])
:x)
(defmethod ig/init-key ::error-init [_ _]
(throw (ex-info "Testing" {:reason ::test})))
(defmethod ig/init-key ::k [_ v] v)
(defmethod ig/init-key ::n [_ v] (inc v))
(defmethod ig/pre-init-spec ::n [_] nat-int?)
(defmethod ig/init-key ::r [_ v] {:v v})
(defmethod ig/resolve-key ::r [_ {:keys [v]}] v)
(defmethod ig/resume-key ::r [k v _ _] (ig/init-key k v))
(defmethod ig/halt-key! :default [k v]
(swap! log conj [:halt k v]))
(defmethod ig/halt-key! ::error-halt [_ _]
(throw (ex-info "Testing" {:reason ::test})))
(defmethod ig/resume-key :default [k cfg cfg' sys]
(swap! log conj [:resume k cfg cfg' sys])
[cfg])
(defmethod ig/resume-key ::x [k cfg cfg' sys]
(swap! log conj [:resume k cfg cfg' sys])
:rx)
(defmethod ig/suspend-key! :default [k v]
(swap! log conj [:suspend k v]))
(derive ::p ::pp)
(derive ::pp ::ppp)
(derive ::ap ::a)
(derive ::ap ::p)
(deftest ref-test
(is (ig/ref? (ig/ref ::foo)))
(is (ig/ref? (ig/ref [::foo ::bar])))
(is (ig/reflike? (ig/ref ::foo)))
(is (ig/reflike? (ig/ref [::foo ::bar]))))
(deftest refset-test
(is (ig/refset? (ig/refset ::foo)))
(is (ig/refset? (ig/refset [::foo ::bar])))
(is (ig/reflike? (ig/refset ::foo)))
(is (ig/reflike? (ig/refset [::foo ::bar]))))
(deftest composite-keyword-test
(let [k (ig/composite-keyword [::a ::b])]
(is (isa? k ::a))
(is (isa? k ::b))
(is (identical? k (ig/composite-keyword [::a ::b])))
(is (not= k (ig/composite-keyword [::a ::c])))))
(deftest valid-config-key-test
(is (ig/valid-config-key? ::a))
(is (not (ig/valid-config-key? :a))))
(deftest expand-test
(is (= (ig/expand {::a (ig/ref ::b), ::b 1})
{::a 1, ::b 1}))
(is (= (ig/expand {::a (ig/ref ::b), ::b (ig/ref ::c), ::c 2})
{::a 2, ::b 2, ::c 2}))
(is (= (ig/expand {::a (ig/ref ::pp), ::p 1})
{::a 1, ::p 1}))
(is (= (ig/expand {::a (ig/refset ::ppp), ::p 1, ::pp 2})
{::a #{1 2}, ::p 1, ::pp 2}))
(is (= (ig/expand {::a (ig/refset ::ppp)})
{::a #{}})))
#?(:clj
(deftest read-string-test
(is (= (ig/read-string "{:foo/a #ig/ref :foo/b, :foo/b 1}")
{:foo/a (ig/ref :foo/b), :foo/b 1}))
(is (= (ig/read-string "{:foo/a #ig/refset :foo/b, :foo/b 1}")
{:foo/a (ig/refset :foo/b), :foo/b 1}))
(is (= (ig/read-string {:readers {'test/var find-var}}
"{:foo/a #test/var clojure.core/+}")
{:foo/a #'+}))))
#?(:clj
(defn- remove-lib [lib]
(remove-ns lib)
(dosync (alter @#'clojure.core/*loaded-libs* disj lib))))
(derive :integrant.test-child/foo :integrant.test/foo)
#?(:clj
(deftest load-namespaces-test
(testing "all namespaces"
(remove-lib 'integrant.test.foo)
(remove-lib 'integrant.test.bar)
(remove-lib 'integrant.test.baz)
(remove-lib 'integrant.test.quz)
(is (= (set (ig/load-namespaces {:integrant.test/foo 1
:integrant.test.bar/wuz 2
[:integrant.test/baz :integrant.test/x] 3
[:integrant.test/y :integrant.test/quz] 4}))
'#{integrant.test.foo
integrant.test.bar
integrant.test.baz
integrant.test.quz}))
(is (some? (find-ns 'integrant.test.foo)))
(is (some? (find-ns 'integrant.test.bar)))
(is (some? (find-ns 'integrant.test.baz)))
(is (some? (find-ns 'integrant.test.quz)))
(is (= (some-> 'integrant.test.foo/message find-var var-get) "foo"))
(is (= (some-> 'integrant.test.bar/message find-var var-get) "bar"))
(is (= (some-> 'integrant.test.baz/message find-var var-get) "baz"))
(is (= (some-> 'integrant.test.quz/message find-var var-get) "quz")))
(testing "some namespaces"
(remove-lib 'integrant.test.foo)
(remove-lib 'integrant.test.bar)
(remove-lib 'integrant.test.baz)
(remove-lib 'integrant.test.quz)
(is (= (set (ig/load-namespaces
{:integrant.test/foo 1
:integrant.test/bar (ig/ref :integrant.test/foo)
:integrant.test/baz 3}
[:integrant.test/bar]))
'#{integrant.test.foo
integrant.test.bar}))
(is (some? (find-ns 'integrant.test.foo)))
(is (some? (find-ns 'integrant.test.bar)))
(is (nil? (find-ns 'integrant.test.baz))))
(testing "load namespaces of ancestors"
(remove-lib 'integrant.test.foo)
(is (= (set (ig/load-namespaces
{:integrant.test-child/foo 1}))
'#{integrant.test.foo}))
(is (some? (find-ns 'integrant.test.foo))))))
(deftest dependency-graph-test
(let [m {::a (ig/ref ::p), ::b (ig/refset ::ppp) ::p 1, ::pp 2}]
(testing "graph with refsets"
(let [g (ig/dependency-graph m)]
(is (dep/depends? g ::a ::p))
(is (dep/depends? g ::b ::p))
(is (dep/depends? g ::b ::pp))))
(testing "graph without refsets"
(let [g (ig/dependency-graph m {:include-refsets? false})]
(is (dep/depends? g ::a ::p))
(is (not (dep/depends? g ::b ::p)))
(is (not (dep/depends? g ::b ::pp)))))))
(deftest key-comparator-test
(let [graph (ig/dependency-graph {::a (ig/ref ::ppp) ::p 1, ::b 2})]
(is (= (sort (ig/key-comparator graph) [::b ::a ::p])
[::p ::a ::b]))))
(deftest derived-from?-test
(are [a b] (ig/derived-from? a b)
::p ::p
::p ::pp
::p ::ppp
::ap [::a ::p]
::ap [::a ::pp]
[::a ::p] [::a ::pp]
[::a ::b ::p] [::a ::ppp]))
(deftest find-derived-1-test
(testing "missing key"
(is (nil? (ig/find-derived-1 {} ::p))))
(testing "derived key"
(is (= (ig/find-derived-1 {::a "x" ::p "y"} ::pp)
[::p "y"])))
(testing "ambiguous key"
(is (thrown-with-msg?
#?(:clj clojure.lang.ExceptionInfo :cljs cljs.core.ExceptionInfo)
(re-pattern (str "Ambiguous key: " ::pp "\\. "
"Found multiple candidates: " ::p ", " ::pp))
(ig/find-derived-1 {::a "x" ::p "y", ::pp "z"} ::pp))))
(testing "composite key"
(is (= (ig/find-derived-1 {::a "x" [::b ::x] "y"} ::x)
[[::b ::x] "y"]))))
(deftest find-derived-test
(testing "missing key"
(is (nil? (ig/find-derived {} ::p))))
(testing "derived key"
(is (= (ig/find-derived {::a "x" ::p "y" ::pp "z"} ::pp)
[[::p "y"] [::pp "z"]])))
(testing "ambiguous key"
(is (= (ig/find-derived {::a "x" ::p "y" ::pp "z"} ::ppp)
[[::p "y"] [::pp "z"]])))
(testing "composite key"
(is (= (ig/find-derived {::a "x" [::b ::x] "y", [::b ::y] "z"} ::b)
[[[::b ::x] "y"] [[::b ::y] "z"]]))))
(deftest prep-test
(testing "default"
(is (= (ig/prep {::q {:b 2}, ::a 1})
{::q {:b 2}, ::a 1})))
(testing "custom prep-key"
(is (= (ig/prep {::p {:b 2}, ::a 1})
{::p {:a (ig/ref ::a), :b 2}, ::a 1})))
(testing "prep then init"
(is (= (ig/init (ig/prep {::p {:b 2}, ::a 1}))
{::p [{:a [1], :b 2}], ::a [1]}))))
(deftest init-test
(testing "without keys"
(reset! log [])
(let [m (ig/init {::a (ig/ref ::b), ::b 1})]
(is (= m {::a [[1]], ::b [1]}))
(is (= @log [[:init ::b 1]
[:init ::a [1]]]))))
(testing "with keys"
(reset! log [])
(let [m (ig/init {::a (ig/ref ::b), ::b 1, ::c 2} [::a])]
(is (= m {::a [[1]], ::b [1]}))
(is (= @log [[:init ::b 1]
[:init ::a [1]]]))))
(testing "with inherited keys"
(reset! log [])
(let [m (ig/init {::p (ig/ref ::a), ::a 1} [::pp])]
(is (= m {::p [[1]], ::a [1]}))
(is (= @log [[:init ::a 1]
[:init ::p [1]]]))))
(testing "with composite keys"
(reset! log [])
(let [m (ig/init {::a (ig/ref ::b), [::x ::b] 1})]
(is (= m {::a [:x], [::x ::b] :x}))
(is (= @log [[:init [::x ::b] 1]
[:init ::a :x]]))))
(testing "with composite refs"
(reset! log [])
(let [m (ig/init {::a (ig/ref [::b ::c]), [::b ::c ::e] 1, [::b ::d] 2})]
(is (= m {::a [[1]], [::b ::c ::e] [1], [::b ::d] [2]}))
(is (or (= @log [[:init [::b ::c ::e] 1]
[:init ::a [1]]
[:init [::b ::d] 2]])
(= @log [[:init [::b ::d] 2]
[:init [::b ::c ::e] 1]
[:init ::a [1]]])))))
(testing "with failing composite refs"
(reset! log [])
(is (thrown-with-msg?
#?(:clj clojure.lang.ExceptionInfo :cljs cljs.core.ExceptionInfo)
#"^Invalid composite key: \[:integrant.core-test/a :b\]. Every keyword must be namespaced.$"
(ig/init {[::a :b] :anything}))))
(testing "with custom resolve-key"
(let [m (ig/init {::a (ig/ref ::r), ::r 1})]
(is (= m {::a [1], ::r {:v 1}}))))
(testing "with refsets"
(reset! log [])
(let [m (ig/init {::a (ig/refset ::ppp), ::p 1, ::pp 2})]
(is (= m {::a [#{[1] [2]}], ::p [1], ::pp [2]}))
(is (= @log [[:init ::p 1]
[:init ::pp 2]
[:init ::a #{[1] [2]}]]))))
(testing "with refsets and keys"
(reset! log [])
(let [m {::a (ig/refset ::ppp), ::p 1, ::pp 2}]
(is (= (ig/init m [::a]) {::a [#{}]}))
(is (= (ig/init m [::a ::p]) {::a [#{[1]}] ::p [1]}))
(is (= (ig/init m [::a ::pp]) {::a [#{[1] [2]}] ::p [1] ::pp [2]}))))
(testing "large config"
(is (= (ig/init {:a/a1 {} :a/a2 {:_ (ig/ref :a/a1)}
:a/a3 {} :a/a4 {} :a/a5 {}
:a/a6 {} :a/a7 {} :a/a8 {}
:a/a9 {} :a/a10 {}})
{:a/a1 [{}] :a/a2 [{:_ [{}]}]
:a/a3 [{}] :a/a4 [{}] :a/a5 [{}]
:a/a6 [{}] :a/a7 [{}] :a/a8 [{}]
:a/a9 [{}] :a/a10 [{}]})))
(testing "with passing specs"
(let [m (ig/init {::n (ig/ref ::k), ::k 1})]
(is (= m {::n 2, ::k 1}))))
(testing "with failing specs"
(is (thrown-with-msg?
#?(:clj clojure.lang.ExceptionInfo :cljs cljs.core.ExceptionInfo)
(re-pattern (str "Spec failed on key " ::n " when building system"))
(ig/init {::n (ig/ref ::k), ::k 1.1}))))
(testing "with failing composite specs"
(is (thrown-with-msg?
#?(:clj clojure.lang.ExceptionInfo :cljs cljs.core.ExceptionInfo)
(re-pattern (str "Spec failed on key \\[" ::n " " ::nnn "\\] when building system"))
(ig/init {[::n ::nnn] 1.1})))))
(deftest halt-test
(testing "without keys"
(reset! log [])
(let [m (ig/init {::a (ig/ref ::b), ::b 1})]
(ig/halt! m)
(is (= @log [[:init ::b 1]
[:init ::a [1]]
[:halt ::a [[1]]]
[:halt ::b [1]]]))))
(testing "with keys"
(reset! log [])
(let [m (ig/init {::a (ig/ref ::b), ::b (ig/ref ::c), ::c 1})]
(ig/halt! m [::a])
(is (= @log [[:init ::c 1]
[:init ::b [1]]
[:init ::a [[1]]]
[:halt ::a [[[1]]]]]))
(reset! log [])
(ig/halt! m [::c])
(is (= @log [[:halt ::a [[[1]]]]
[:halt ::b [[1]]]
[:halt ::c [1]]]))))
(testing "with partial system"
(reset! log [])
(let [m (ig/init {::a 1, ::b (ig/ref ::a)} [::a])]
(ig/halt! m)
(is (= @log [[:init ::a 1]
[:halt ::a [1]]]))))
(testing "with inherited keys"
(reset! log [])
(let [m (ig/init {::a (ig/ref ::p), ::p 1} [::a])]
(ig/halt! m [::pp])
(is (= @log [[:init ::p 1]
[:init ::a [1]]
[:halt ::a [[1]]]
[:halt ::p [1]]]))))
(testing "with composite keys"
(reset! log [])
(let [m (ig/init {::a (ig/ref ::b), [::x ::b] 1})]
(ig/halt! m)
(is (= @log [[:init [::x ::b] 1]
[:init ::a :x]
[:halt ::a [:x]]
[:halt [::x ::b] :x]])))))
(deftest suspend-resume-test
(testing "same configuration"
(reset! log [])
(let [c {::a (ig/ref ::b), ::b 1}
m (ig/init c)
_ (ig/suspend! m)
m' (ig/resume c m)]
(is (= @log [[:init ::b 1]
[:init ::a [1]]
[:suspend ::a [[1]]]
[:suspend ::b [1]]
[:resume ::b 1 1 [1]]
[:resume ::a [1] [1] [[1]]]]))))
(testing "missing keys"
(reset! log [])
(let [c {::a (ig/ref ::b), ::b 1}
m (ig/init c)
_ (ig/suspend! m)
m' (ig/resume (dissoc c ::a) m)]
(is (= @log [[:init ::b 1]
[:init ::a [1]]
[:suspend ::a [[1]]]
[:suspend ::b [1]]
[:halt ::a [[1]]]
[:resume ::b 1 1 [1]]]))))
(testing "missing refs"
(reset! log [])
(let [c {::a {:b (ig/ref ::b)}, ::b 1}
m (ig/init c)
_ (ig/suspend! m)
m' (ig/resume {::a []} m)]
(is (= @log [[:init ::b 1]
[:init ::a {:b [1]}]
[:suspend ::a [{:b [1]}]]
[:suspend ::b [1]]
[:halt ::b [1]]
[:resume ::a [] {:b [1]} [{:b [1]}]]]))))
(testing "with custom resolve-key"
(let [c {::a (ig/ref ::r), ::r 1}
m (ig/init c)
_ (ig/suspend! m)
m' (ig/resume c m)]
(is (= m m'))))
(testing "composite keys"
(reset! log [])
(let [c {::a (ig/ref ::x), [::b ::x] 1}
m (ig/init c)
_ (ig/suspend! m)
m' (ig/resume c m)]
(is (= @log [[:init [::b ::x] 1]
[:init ::a :x]
[:suspend ::a [:x]]
[:suspend [::b ::x] :x]
[:resume [::b ::x] 1 1 :x]
[:resume ::a :rx :x [:x]]]))))
(testing "resume key with dependencies"
(reset! log [])
(let [c {::a {:b (ig/ref ::b)}, ::b 1}
m (ig/init c [::a])
_ (ig/suspend! m)
m' (ig/resume c m [::a])]
(is (= @log
[[:init ::b 1]
[:init ::a {:b [1]}]
[:suspend ::a [{:b [1]}]]
[:suspend ::b [1]]
[:resume ::b 1 1 [1]]
[:resume ::a {:b [1]} {:b [1]} [{:b [1]}]]])))))
(deftest invalid-configs-test
(testing "ambiguous refs"
(is (thrown-with-msg?
#?(:clj clojure.lang.ExceptionInfo :cljs cljs.core.ExceptionInfo)
(re-pattern (str "Ambiguous key: " ::ppp "\\. "
"Found multiple candidates: "
"(" ::p ", " ::pp "|" ::pp ", " ::p ")"))
(ig/init {::a (ig/ref ::ppp), ::p 1, ::pp 2}))))
(testing "missing refs"
(is (thrown-with-msg?
#?(:clj clojure.lang.ExceptionInfo :cljs cljs.core.ExceptionInfo)
(re-pattern (str "Missing definitions for refs: " ::b))
(ig/init {::a (ig/ref ::b)}))))
(testing "missing refs with explicit keys"
(is (= (ig/init {::a (ig/ref ::ppp), ::p 1, ::pp 2} [::p ::pp])
{::p [1], ::pp [2]})))
(testing "missing refs with explicit keys"
(is (= (ig/init {::a 1, ::b (ig/ref ::c)} [::a])
{::a [1]}))))
(defn build-log [config]
(let [log (atom [])]
[(ig/build config (keys config) (fn [k v] (last (swap! log conj [:build k v]))))
@log]))
(deftest build-test
(is (= [{::a [:build ::a [:build ::b 1]]
::b [:build ::b 1]}
[[:build ::b 1]
[:build ::a [:build ::b 1]]]]
(build-log {::a (ig/ref ::b)
::b 1}))))
(defn test-log [f m]
(let [log (atom [])]
[(f m (keys m) (fn [k v] (last (swap! log conj [:test k v]))))
@log]))
(deftest run-test
(let [config {::a (ig/ref ::b), ::b 1}
[system _] (build-log config)]
(is (= [nil
[[:test ::b [:build ::b 1]]
[:test ::a [:build ::a [:build ::b 1]]]]]
(test-log ig/run! system)))
(is (= [nil
[[:test ::a [:build ::a [:build ::b 1]]]
[:test ::b [:build ::b 1]]]]
(test-log ig/reverse-run! system)))))
(deftest fold-test
(let [config {::a (ig/ref ::ppp), ::b (ig/ref ::pp), ::p 1, ::c 2}
system (ig/init config)]
(is (= (ig/fold system #(conj %1 [%2 %3]) [])
[[::p [1]] [::a [[1]]] [::b [[1]]] [::c [2]]]))))
(deftest wrapped-exception-test
(testing "exception when building"
(let [ex (try (ig/init {::a 1, ::error-init (ig/ref ::a)}) nil
(catch #?(:clj Throwable :cljs :default) t t))]
(is (some? ex))
(is (= (#?(:clj .getMessage :cljs ex-message) ex)
(str "Error on key " ::error-init " when building system")))
(is (= (ex-data ex)
{:reason ::ig/build-threw-exception
:system {::a [1]}
:function ig/init-key
:key ::error-init
:value [1]}))
(let [cause (#?(:clj .getCause :cljs ex-cause) ex)]
(is (some? cause))
(is (= (#?(:clj .getMessage :cljs ex-message) cause) "Testing"))
(is (= (ex-data cause) {:reason ::test})))))
(testing "exception when running"
(let [system (ig/init {::a 1
::error-halt (ig/ref ::a)
::b (ig/ref ::error-halt)
::c (ig/ref ::b)})
ex (try (ig/halt! system)
(catch #?(:clj Throwable :cljs :default) t t))]
(is (some? ex))
(is (= (#?(:clj .getMessage :cljs ex-message) ex)
(str "Error on key " ::error-halt " when running system")))
(is (= (ex-data ex)
{:reason ::ig/run-threw-exception
:system {::a [1], ::error-halt [[1]], ::b [[[1]]], ::c [[[[1]]]]}
:completed-keys '(::c ::b)
:remaining-keys '(::a)
:function ig/halt-key!
:key ::error-halt
:value [[1]]}))
(let [cause (#?(:clj .getCause :cljs ex-cause) ex)]
(is (some? cause))
(is (= (#?(:clj .getMessage :cljs ex-message) cause) "Testing"))
(is (= (ex-data cause) {:reason ::test}))))))
|
|
2ed85a2eb4368009ce5b64ce8cb71cc13a63e5211f19fab116d499e2fa773063 | fgalassi/cs61a-sp11 | 3.40.scm | (define x 10)
(parallel-execute (lambda () (set! x (* x x)))
(lambda () (set! x (* x x x))))
; a1 => x; a2 => x; a3 => * a1 a2; a4 => !set x a3
; b1 => x; b2 => x; b3 => x; b4 => * b1 b2 b3; b5 => set! x b4
;
the minimum is 100
; a1 a2 a3 b1...b5 a4
;
the maximum is 1000000
; a1...a4 b1...b5
;
a result x must be x = 10^n with 2 < = n < = 5
;
1000
; b1 b2 b3 b4 a1...a4 b5
;
10000
; b1 b2 a1...a4 b3...b5
;
100000
; b1 a1..a4 b2..b5
;
; so 100 1000 10000 1000000
(define x 10)
(parallel-execute (s lambda () (set! x (* x x)))
(s lambda () (set! x (* x x x))))
result can be 100 ^ 3 or 1000 ^ 2 which is always 1000000
| null | https://raw.githubusercontent.com/fgalassi/cs61a-sp11/66df3b54b03ee27f368c716ae314fd7ed85c4dba/homework/3.40.scm | scheme | a1 => x; a2 => x; a3 => * a1 a2; a4 => !set x a3
b1 => x; b2 => x; b3 => x; b4 => * b1 b2 b3; b5 => set! x b4
a1 a2 a3 b1...b5 a4
a1...a4 b1...b5
b1 b2 b3 b4 a1...a4 b5
b1 b2 a1...a4 b3...b5
b1 a1..a4 b2..b5
so 100 1000 10000 1000000 | (define x 10)
(parallel-execute (lambda () (set! x (* x x)))
(lambda () (set! x (* x x x))))
the minimum is 100
the maximum is 1000000
a result x must be x = 10^n with 2 < = n < = 5
1000
10000
100000
(define x 10)
(parallel-execute (s lambda () (set! x (* x x)))
(s lambda () (set! x (* x x x))))
result can be 100 ^ 3 or 1000 ^ 2 which is always 1000000
|
c9cc88094bc332688a7b18ce92ca9d68a9eab9c91492f5b21b9ceb1988c5f981 | manuel-serrano/bigloo | http.scm | ;*=====================================================================*/
* serrano / prgm / project / bigloo / runtime / Unsafe / http.scm * /
;* ------------------------------------------------------------- */
* Author : * /
* Creation : Thu Aug 9 15:02:05 2007 * /
* Last change : Thu Oct 13 11:56:13 2016 ( serrano ) * /
* Copyright : 2007 - 16 * /
;* ------------------------------------------------------------- */
;* Dealing with HTTP requests */
;*=====================================================================*/
;*---------------------------------------------------------------------*/
;* The module */
;*---------------------------------------------------------------------*/
(module __http
(use __type
__bigloo
__tvector
__bexit
__object
__thread
__bit
__bignum
__r4_numbers_6_5
__r4_numbers_6_5_fixnum
__r4_numbers_6_5_flonum
__r4_numbers_6_5_flonum_dtoa
__r4_booleans_6_1
__r4_symbols_6_4
__r4_vectors_6_8
__r4_control_features_6_9
__r4_pairs_and_lists_6_3
__r4_characters_6_6
__r4_equivalence_6_2
__r4_strings_6_7
__r4_ports_6_10_1
__r4_input_6_10_2
__r4_output_6_10_3
__r5_control_features_6_4
__foreign
__error
__evenv
__os
__structure
__param
__reader)
(import __url
__rgc
__base64
__socket)
(export (class &http-error::&error)
(class &http-redirection-error::&http-error)
(class &http-status-error::&http-error
(status::int read-only))
(class &http-redirection::&exception
(port::input-port read-only)
(url::bstring read-only))
(http #!key
(in #f) (out #f) (socket #f)
(protocol 'http)
(method 'get)
(timeout 0)
(proxy #f)
(host "localhost") (port 80)
(path "/")
(login #f) (authorization #f) (username #f) (password #f)
(http-version "HTTP/1.1")
(content-type #f)
(connection #unspecified)
(header '((user-agent: "Mozilla/5.0")))
(args '())
(body #f))
(http-read-line ::input-port)
(http-read-crlf ::input-port)
(http-parse-status-line ::input-port)
(http-parse-header ::input-port ::obj)
(http-parse-response ::input-port ::obj ::procedure)
(http-response-body->port::input-port ::input-port ::output-port)
(http-chunks->procedure::procedure ::input-port)
(http-chunks->port::input-port ::input-port)
(http-send-chunks ::input-port ::output-port ::bool)))
;*---------------------------------------------------------------------*/
;* display-line ... */
;*---------------------------------------------------------------------*/
(define-macro (display-line . args)
(let* ((sgra (reverse args))
(port (car sgra))
(vals (reverse (cdr sgra))))
`(begin
,@(map (lambda (a) `(display ,a ,port)) vals)
(display "\r\n" ,port))))
;*---------------------------------------------------------------------*/
;* http ... */
;* ------------------------------------------------------------- */
;* Establishes a HTTP connection with a remote host */
;*---------------------------------------------------------------------*/
(define (http #!key
(in #f)
(out #f)
(socket #f)
(protocol 'http)
(method 'get)
(timeout 0)
(proxy #f)
(host "localhost") (port 80)
(path "/")
(login #f) (authorization #f) (username #f) (password #f)
(http-version "HTTP/1.1")
(content-type #f)
(connection #unspecified)
(header '((user-agent: "Mozilla/5.0")))
(args '())
(body #f))
;; preliminary checks
(cond
(socket
(set! in (socket-input socket))
(set! out (socket-output socket)))
((and (not in) (not out))
(unless (and host port)
(error "http" "Missing either \"host\" or \"port\" argument" host))
(set! socket (make-http-socket host port proxy timeout))
(set! in (socket-input socket))
(set! out (socket-output socket)))
((not in)
(error "http" "Missing either \"in\" or \"socket\" argument" in))
((not out)
(error "http" "Missing \"out\" argument" out)))
;; header line
(if (string? proxy)
(display-proxy-method method host port path http-version out)
(display-direct-method method host port path http-version out))
;; host and port
(if (or (and (=fx port 80) (eq? protocol 'http))
(and (=fx port 443) (eq? protocol 'https)))
(display-line "Host: " host out)
(display-line "Host: " host ":" port out))
;; user additional header
(for-each (lambda (h)
(display-line (keyword->string (car h)) ": "
(if (pair? (cdr h)) (cadr h) (cdr h))
out))
header)
;; authentication
(cond
((string? login)
(display-authentication login out))
((string? authorization)
(display-line "Authorization: " authorization out))
((and (string? username) (string? password))
(display-authentication (string-append username ":" password) out)))
;; connection keep-alive
(when (string? connection)
(display-line "Connection: " connection out))
(cond
((and (or (eq? method 'post) (eq? method 'POST))
(or (eq? content-type 'multipart/form-data)
(pair? args)))
;; post method
(cond
((eq? content-type 'multipart/form-data)
(let* ((boundary (generate-http-boundary))
(content (make-http-post-body boundary args))
(content-length (apply + (map string-length content))))
(display-line "Content-Length: " content-length out)
(display-line "Content-Type: multipart/form-data; boundary="
(substring boundary 2 (string-length boundary)) out)
(display-line out)
(for-each (lambda (o) (display-string o out)) content)))
(else
(let ((content (x-www-form-urlencode args))
(ct (or content-type "application/x-www-form-urlencoded")))
(display-line "Content-Type: " ct out)
(display-line "Content-Length: " (string-length content) out)
(display-line out)
(display content out)
(display-line out)))))
((string? body)
;; a request with a fixed length body
(display-line "Content-Length: " (string-length body) out)
(display-line out)
(display body out))
((input-port? body)
;; a request with a variable length body
(display-line out)
(send-chars body out))
((procedure? body)
(display-line out)
(body out))
(else
;; a request without a body
(display-line out)))
(flush-output-port out)
socket)
;*---------------------------------------------------------------------*/
;* make-http-socket ... */
;*---------------------------------------------------------------------*/
(define (make-http-socket host port proxy timeout)
(when (string? proxy)
(let ((i (string-index proxy #\:)))
(if i
(begin
(set! host (substring proxy 0 i))
(set! port (string->integer
(substring proxy (+fx i 1) (string-length proxy)))))
(begin
(set! host proxy)
(set! port 80)))))
(cond
((not (string? host))
(bigloo-type-error 'http "string" host))
((not (integer? port))
(bigloo-type-error 'http "integer" port))
(else
(let ((s (make-client-socket host port :timeout timeout)))
;(socket-option-set! s :SO_RCVTIMEO timeout)
s))))
;*---------------------------------------------------------------------*/
;* generate-http-post-body ... */
;*---------------------------------------------------------------------*/
(define (generate-http-post-body boundary args)
(let ((port (open-output-string)))
(if (null? args)
(begin
(display-line port)
(close-output-port port))
(let loop ((args args))
(if (null? args)
(begin
(display-line boundary "--" port)
(close-output-port port))
(let ((a (car args)))
(display-line boundary port)
(if (pair? (car a))
(display-line "Content-Disposition: form-data; name=\""
" ( cadar a ) port )
(display-line "Content-Disposition: form-data; name=\""
(car a) "\"" port))
(when (pair? (cddr a)) (display-line (caddr a) port))
(display-line port)
(display-line (cadr a) port)
(loop (cdr args))))))))
;*---------------------------------------------------------------------*/
;* make-http-post-body ... */
;*---------------------------------------------------------------------*/
(define (make-http-post-body boundary args)
(define (->string a)
(if (string? a)
a
(call-with-output-string (lambda (p) (display a p)))))
(if (null? args)
'("\r\n")
(let loop ((args args))
(if (null? args)
(list boundary "--" "\r\n")
(let* ((a (car args))
(body (cons* (->string (cadr a)) "\r\n"
(loop (cdr args))))
(hd (if (pair? (cddr a))
(cons* (caddr a) "\r\n\r\n" body)
(cons "\r\n" body)))
(disp (if (pair? (car a))
(cons* "Content-Disposition: form-data; name=\""
" ( cadar a ) " \r\n "
hd)
(cons* "Content-Disposition: form-data; name=\""
(car a) "\"\r\n"
hd))))
(cons* boundary "\r\n" disp))))))
;*---------------------------------------------------------------------*/
;* generate-http-boundary ... */
;*---------------------------------------------------------------------*/
(define (generate-http-boundary)
(let ((s (make-string 22 #\-))
(chars "0123456789abcdef"))
(let loop ((i 2))
(when (<fx i 22)
(let ((num (random 16)))
(string-set! s i (string-ref chars num))
(loop (+fx i 1)))))
s))
;*---------------------------------------------------------------------*/
;* display-proxy-method ... */
;*---------------------------------------------------------------------*/
(define (display-proxy-method method server port path http-version out)
(display (string-upcase (symbol->string! method)) out)
(display-line " http://" server ":" port path " " http-version out))
;*---------------------------------------------------------------------*/
;* display-direct-method ... */
;*---------------------------------------------------------------------*/
(define (display-direct-method method server port path http-version out)
(display (string-upcase (symbol->string! method)) out)
(display-line " " path " " http-version out))
;*---------------------------------------------------------------------*/
;* display-authentication ... */
;*---------------------------------------------------------------------*/
(define (display-authentication login out)
(let ((uinfo (base64-encode login -1)))
(display-line "Authorization: Basic " uinfo out)))
;*---------------------------------------------------------------------*/
;* http-parse-error-msg ... */
;*---------------------------------------------------------------------*/
(define (http-parse-error-msg c port)
(if (char? c)
(let ((line (http-read-line port)))
(string-for-read
(string-append "{" (string c) "}" (if (string? line) line ""))))
c))
;*---------------------------------------------------------------------*/
;* status-line-grammar ... */
;*---------------------------------------------------------------------*/
(define status-line-grammar
(regular-grammar ((SP #\Space)
(HTTP (: (+ (in "httpsHTTPS"))
#\/ (+ digit) #\. (+ digit)))
(ICY "ICY")
(CODE (+ (in digit)))
(line (or (: (+ all) "\r\n") (: (+ all) "\n") (+ all))))
((: (or HTTP ICY) SP)
(let ((http (the-substring 0 (-fx (the-length) 1))))
(let ((code (http-read-fixnum (the-port))))
(http-skip-blank (the-port))
(values http code (http-read-line (the-port))))))
(else
(let ((c (the-failure)))
(raise
(if (eof-object? c)
(instantiate::&io-parse-error
(obj (the-port))
(proc "http-parse-status-line")
(msg "Illegal status line, premature end of input"))
(instantiate::&io-parse-error
(obj (http-parse-error-msg c (the-port)))
(proc "http-parse-status-line")
(msg "Illegal status line"))))))))
;*---------------------------------------------------------------------*/
;* http-parse-status-line ... */
;* ------------------------------------------------------------- */
* The syntax of the status ( section 6.1 of http/1.1 ) is defined * /
;* as follows: */
;* Status-Line = HTTP-Version SP Status-Code SP Reason-Phrase CRLF */
;*---------------------------------------------------------------------*/
(define (http-parse-status-line ip)
(read/rp status-line-grammar ip))
;*---------------------------------------------------------------------*/
;* http-read-line ... */
;*---------------------------------------------------------------------*/
(define (http-read-line p)
(read/rp (regular-grammar ()
((or (: (+ all) "\r\n") (: (+ all) "\n") (+ all))
(the-string))
(else
(let ((c (the-failure)))
(if (eof-object? c)
c
(the-string)))))
p))
;*---------------------------------------------------------------------*/
;* http-skip-blank ... */
;*---------------------------------------------------------------------*/
(define (http-skip-blank p)
(read/rp (regular-grammar ()
((+ (in " \t")) #unspecified)
(else
(raise
(instantiate::&io-parse-error
(obj (http-parse-error-msg (the-failure) (the-port)))
(proc "http")
(msg "Illegal separator")))))
p))
;*---------------------------------------------------------------------*/
;* http-skip-line ... */
;*---------------------------------------------------------------------*/
(define (http-skip-line p)
(read/rp (regular-grammar ()
((or (: (+ all) "\r\n") (: (+ all) "\n") (+ all))
#f)
(else
(let ((c (the-failure)))
(when (eof-object? c) c))))
p))
;*---------------------------------------------------------------------*/
;* http-read-fixnum ... */
;*---------------------------------------------------------------------*/
(define (http-read-fixnum p)
(read/rp (regular-grammar ((DIGIT (in ("09"))))
((+ DIGIT) (the-fixnum))
((+ (in " \t")) (ignore))
(else
(raise
(instantiate::&io-parse-error
(obj (http-parse-error-msg (the-failure) (the-port)))
(proc "http")
(msg "Illegal integer")))))
p))
;*---------------------------------------------------------------------*/
;* http-parse-header ... */
;*---------------------------------------------------------------------*/
(define (http-parse-header p po)
(define value-grammar
(regular-grammar ()
((+ (in " \t"))
(ignore))
((: (out " \t\r\n") (* (or (out "\r\n") (: "\r" (out "\n")))) "\r\n")
(the-substring 0 -2))
((: (out " \t\r\n") (* (or (out "\r\n") (: "\r" (out "\n")))) "\n")
(the-substring 0 -1))
((: (? #\Return) #\Newline)
"")
(else
"")))
(define blank-grammar
(regular-grammar ()
((+ (in " \t")) (ignore))))
(define hostname-grammar
(regular-grammar ()
((: (+ (out ":\n\r\t ")) #\:)
(let* ((h (the-substring 0 -1))
(p (http-read-fixnum (the-port))))
(values h p)))
((+ (out ":\n\r\t "))
(values (the-string) #f))
((+ (in " \t"))
(ignore))))
(define name-grammar
(regular-grammar ()
((+ (out "\n\r\t ")) (the-string))
((+ (in " \t")) (ignore))))
(define elong-grammar
(regular-grammar ((DIGIT (in ("09"))))
((+ DIGIT) (fixnum->elong (the-fixnum)))
((+ (in " \t")) (ignore))))
(define symbol-grammar
(regular-grammar ()
((+ (or alpha #\-)) (the-downcase-symbol))
((+ (in " \t")) (ignore))))
(define symbol+-grammar
(regular-grammar ()
((: (+ (or alpha #\-)) "\r\n")
(the-downcase-subsymbol 0 -2))
((: (+ (or alpha #\-))
(* (: "," (* (in " \t")) (+ (or alpha #\-)))) "\r\n")
(the-downcase-subsymbol 0 -2))
((+ (in " \t"))
(ignore))
(else
'||)))
(define auth-grammar
(regular-grammar ()
((: (+ (in #\Space #\Tab)))
(ignore))
((: (out #\Space #\Tab) (* (out "\n\r")))
(the-string))
(else
#f)))
(define crlf-grammar
(regular-grammar ()
((: (* (in #\space #\tab)) (? #\Return) #\Newline)
#unspecified)
(else
#f)))
(define upto-crlf-grammar
(regular-grammar ()
((: (* (out #\Return #\Newline)) (? #\Return) #\Newline)
#unspecified)
(else
#f)))
(define header-grammar
(regular-grammar (po
header
hostname port content-length transfer-encoding
authorization proxy-authorization connection)
((: (+ (or (out " :\r\n\t") (: #\space (out #\:)))) #\:)
(let* ((k (the-downcase-keyword)))
(case k
((host:)
(multiple-value-bind (h p)
(read/rp hostname-grammar (the-port))
(set! hostname h)
(set! port p)
(read/rp crlf-grammar (the-port))
(let ((host (if (fixnum? p)
(string-append h ":" (integer->string p))
h)))
(set! header (cons (cons k host) header))
(ignore))))
((content-length:)
;; Some web server uses extra characters after the length
;; in bytes (wakka uses things such as 12345bytes). This
;; is incorrect with respect to HTTP/1.1 but it seems that
;; regular web crawlers accept this extension...
(set! content-length (read/rp elong-grammar (the-port)))
(read/rp upto-crlf-grammar (the-port))
(set! header (cons (cons k content-length) header))
(ignore))
((transfer-encoding:)
(set! transfer-encoding (read/rp symbol-grammar (the-port)))
(read/rp crlf-grammar (the-port))
(set! header (cons (cons k transfer-encoding) header))
(ignore))
((authorization:)
(set! authorization (read/rp auth-grammar (the-port)))
(read/rp crlf-grammar (the-port))
(set! header (cons (cons k authorization) header))
(ignore))
((connection:)
(set! connection (read/rp symbol+-grammar (the-port)))
(set! header (cons (cons k connection) header))
(ignore))
((proxy-authorization:)
(set! proxy-authorization (read/rp auth-grammar (the-port)))
(read/rp crlf-grammar (the-port))
;; don't store the proxy-authorization in the header
(ignore))
((expect:)
(let ((e (read/rp value-grammar (the-port))))
(cond
((not (output-port? po))
(error "expect-header"
"Cannot honnor message because output-port is #f"
po))
((string=? e "100-continue")
(fprint po "HTTP/1.1 100 Continue\r\n\r\n")
(flush-output-port po)
(ignore))
(else
(fprint po "HTTP/1.1 417 Expectation Failed\r\n\r\n")
(flush-output-port po)
(raise
(instantiate::&io-parse-error
(obj (the-port))
(proc "expect-header")
(msg (format "Expectation failed (~a)" e))))))))
(else
(let ((v (read/rp value-grammar (the-port))))
(set! header (cons (cons k v) header))
(ignore))))))
((: (* (in #\space #\tab)) (? #\Return) #\Newline)
(values (reverse! header)
hostname
port
content-length
transfer-encoding
authorization
proxy-authorization
connection))
(else
(let ((c (the-failure)))
(if (eof-object? c)
;; some (bugous?) HTTP server don't send the appropriate
;; CRLF when the body is empty
(values (reverse! header)
hostname
port
content-length
transfer-encoding
authorization
proxy-authorization
connection)
(raise (instantiate::&io-parse-error
(obj (list (reverse! header) hostname
port content-length
transfer-encoding authorization
proxy-authorization connection))
(proc "http-parse-header")
(msg (format "Illegal characters: ~a"
(http-parse-error-msg
(the-failure) (the-port)))))))))))
(read/rp header-grammar p
po ;; output port
'() ;; header
#f ;; hostname
#f ;; port
#e-1 ;; content-length
#f ;; transfer-encoding
#f ;; authorization
#f ;; proxy-authorization
#f)) ;; connection
;*---------------------------------------------------------------------*/
;* http-parse-response ... */
;*---------------------------------------------------------------------*/
(define (http-parse-response ip op proc)
(multiple-value-bind (_1 status _2)
(http-parse-status-line ip)
(multiple-value-bind (header _host _port clen tenc _aut _paut _conn)
(http-parse-header ip op)
(case status
((200 207)
;; ok
(cond
((eq? tenc 'chunked)
(proc (http-chunks->port ip) status header clen tenc))
(else
(proc ip status header clen tenc))))
((201 204 304)
;; no message body
(proc #f status header clen tenc))
((301 302 303 307)
;; redirection
(let ((loc (assq location: header)))
(if (not (pair? loc))
(raise (instantiate::&http-redirection-error
(proc 'http-parse-body)
(msg "No URL for redirection!")
(obj ip)))
(raise (instantiate::&http-redirection
(port ip)
(url (cdr loc)))))))
(else
(or (proc ip status header clen tenc)
(raise (instantiate::&http-status-error
(proc 'http-parse-response)
(msg (format "Bad status code: ~a" status))
(obj ip)
(status status)))))))))
;*---------------------------------------------------------------------*/
;* http-response-body->port ... */
;*---------------------------------------------------------------------*/
(define (http-response-body->port ip op)
(define (parse-body ip status-code header clen tenc)
(cond
((not (input-port? ip))
(open-input-string ""))
(clen
(let ((p (open-input-procedure (barrier-port ip clen))))
(input-port-close-hook-set! p (lambda (in) (close-input-port ip)))
p))
(else
ip)))
(http-parse-response ip op parse-body))
;*---------------------------------------------------------------------*/
;* http-read-crlf ... */
;*---------------------------------------------------------------------*/
(define (http-read-crlf p)
(define crlf-grammar
(regular-grammar ()
((: (* (in #\space #\tab)) (? #\Return) #\Newline)
"\r\n")
(else
(raise (instantiate::&io-parse-error
(proc 'http-read-crlf)
(msg "Illegal character")
(obj (http-parse-error-msg (the-failure) (the-port))))))))
(read/rp crlf-grammar p))
;*---------------------------------------------------------------------*/
;* *chunk-size-grammar* ... */
;*---------------------------------------------------------------------*/
(define *chunk-size-grammar*
(regular-grammar ((SZ (+ xdigit))
(BLANK (in " \t"))
(CRLF "\r\n")
op)
((: SZ (* BLANK) #\;)
(when op (display (the-string) op))
(let ((sz (string->integer
(the-substring 0 (-fx (the-length) 1))
16)))
(read/rp (regular-grammar ((CRLF "\r\n"))
((: (+ (or (+ (out "\r")) (+ (: "\r" (out "\n"))))) CRLF)
(when op (display (the-string) op)))
(else
(raise (instantiate::&io-parse-error
(proc 'chunks)
(msg "Illegal character")
(obj (http-parse-error-msg
(the-failure) (the-port)))))))
(the-port))
sz))
((: SZ (* BLANK) CRLF)
(when op (display (the-string) op))
(let ((l (the-length)))
(string->integer (the-substring 0 (-fx l 2)) 16)))
(else
(let* ((c1 (the-failure))
(c2 (read-char (the-port)))
(c3 (read-char (the-port)))
(c4 (read-char (the-port)))
(c5 (read-char (the-port))))
(raise (instantiate::&io-parse-error
(proc 'chunks)
(msg "Illegal chunk size")
(obj (if (or (eof-object? c1)
(eof-object? c2)
(eof-object? c3)
(eof-object? c4)
(eof-object? c5))
"#<eof-object>"
(string-for-read (string c1 c2 c3 c4 c5))))))))))
;*---------------------------------------------------------------------*/
;* *buffer-length* ... */
;*---------------------------------------------------------------------*/
(define *buffer-length* 8192)
;*---------------------------------------------------------------------*/
;* barrier-port ... */
;*---------------------------------------------------------------------*/
(define (barrier-port port content-length)
(let ((buf (make-string *buffer-length*)))
(lambda ()
(when (>elong content-length #e0)
(let* ((n (minfx *buffer-length* (elong->fixnum content-length)))
(m (read-chars! buf n port)))
(set! content-length (-elong content-length (fixnum->elong m)))
(if (<fx m *buffer-length*)
(substring buf 0 m)
buf))))))
;*---------------------------------------------------------------------*/
;* http-chunks->procedure ... */
;*---------------------------------------------------------------------*/
(define (http-chunks->procedure ip::input-port)
(let* ((state 'size)
(sz 0)
(bufsz 512)
(buffer (make-string bufsz #a000)))
(lambda ()
(let loop ()
(case state
((eof)
#f)
((trailer)
(let ((l (http-read-line ip)))
(cond
((eof-object? l)
(set! state 'eof)
"")
((or (string=? l "\r\n") (string=? l "\n"))
(set! state 'eof)
l)
(else
l))))
((chunk)
(cond
((=fx sz 0)
(http-read-crlf ip)
(set! state 'size)
(loop))
((<fx sz bufsz)
(let ((s (read-chars sz ip)))
(set! sz (-fx sz (string-length s)))
s))
(else
(let ((s (read-chars! buffer bufsz ip)))
(set! sz (-fx sz s))
(if (=fx s bufsz)
buffer
(substring buffer 0 s))))))
(else
(set! sz (read/rp *chunk-size-grammar* ip #f))
(if (>fx sz 0)
;; a regular chunk
(begin
(set! state 'chunk)
(loop))
;; the last chunk starting with an optional trailer
(begin
(set! state 'trailer)
(loop)))))))))
;*---------------------------------------------------------------------*/
;* http-chunks->port ... */
;*---------------------------------------------------------------------*/
(define (http-chunks->port ip)
(let ((ip2 (open-input-procedure (http-chunks->procedure ip))))
(input-port-close-hook-set! ip (lambda (in) (close-input-port ip)))
ip2))
;*---------------------------------------------------------------------*/
;* http-send-chunks ... */
;*---------------------------------------------------------------------*/
(define (http-send-chunks ip::input-port op::output-port trailer::bool)
(let loop ()
(let ((sz (read/rp *chunk-size-grammar* ip op)))
(if (>fx sz 0)
;; a regular chunk
(begin
(let loop ((sz sz))
(when (>fx sz 0)
(let ((s (send-chars ip op sz)))
(when (>fx s 0)
(loop (-fx sz s))))))
(flush-output-port op)
(let ((s (http-read-crlf ip)))
(display s op)
(loop)))
;; the last chunk starting with an optional trailer
(if trailer
(let loop ()
(let ((l (http-read-line ip)))
(if (eof-object? l)
(flush-output-port op)
(begin
(display l op)
(if (>fx (string-length l) 2)
(loop)
(flush-output-port op))))))
(begin
(display (http-read-line ip) op)
(flush-output-port op)))))))
| null | https://raw.githubusercontent.com/manuel-serrano/bigloo/eb650ed4429155f795a32465e009706bbf1b8d74/runtime/Unsafe/http.scm | scheme | *=====================================================================*/
* ------------------------------------------------------------- */
* ------------------------------------------------------------- */
* Dealing with HTTP requests */
*=====================================================================*/
*---------------------------------------------------------------------*/
;;; serrano/prgm/project/bigloo/runtime/Unsafe/http.scm
;;; Author      :
;;; Creation    : Thu Aug 9 15:02:05 2007
;;; Last change : Thu Oct 13 11:56:13 2016 (serrano)
;;; Copyright   : 2007-16
(module __http
(use __type
__bigloo
__tvector
__bexit
__object
__thread
__bit
__bignum
__r4_numbers_6_5
__r4_numbers_6_5_fixnum
__r4_numbers_6_5_flonum
__r4_numbers_6_5_flonum_dtoa
__r4_booleans_6_1
__r4_symbols_6_4
__r4_vectors_6_8
__r4_control_features_6_9
__r4_pairs_and_lists_6_3
__r4_characters_6_6
__r4_equivalence_6_2
__r4_strings_6_7
__r4_ports_6_10_1
__r4_input_6_10_2
__r4_output_6_10_3
__r5_control_features_6_4
__foreign
__error
__evenv
__os
__structure
__param
__reader)
(import __url
__rgc
__base64
__socket)
(export (class &http-error::&error)
(class &http-redirection-error::&http-error)
(class &http-status-error::&http-error
(status::int read-only))
(class &http-redirection::&exception
(port::input-port read-only)
(url::bstring read-only))
(http #!key
(in #f) (out #f) (socket #f)
(protocol 'http)
(method 'get)
(timeout 0)
(proxy #f)
(host "localhost") (port 80)
(path "/")
(login #f) (authorization #f) (username #f) (password #f)
(http-version "HTTP/1.1")
(content-type #f)
(connection #unspecified)
(header '((user-agent: "Mozilla/5.0")))
(args '())
(body #f))
(http-read-line ::input-port)
(http-read-crlf ::input-port)
(http-parse-status-line ::input-port)
(http-parse-header ::input-port ::obj)
(http-parse-response ::input-port ::obj ::procedure)
(http-response-body->port::input-port ::input-port ::output-port)
(http-chunks->procedure::procedure ::input-port)
(http-chunks->port::input-port ::input-port)
(http-send-chunks ::input-port ::output-port ::bool)))
(define-macro (display-line . args)
(let* ((sgra (reverse args))
(port (car sgra))
(vals (reverse (cdr sgra))))
`(begin
,@(map (lambda (a) `(display ,a ,port)) vals)
(display "\r\n" ,port))))
(define (http #!key
(in #f)
(out #f)
(socket #f)
(protocol 'http)
(method 'get)
(timeout 0)
(proxy #f)
(host "localhost") (port 80)
(path "/")
(login #f) (authorization #f) (username #f) (password #f)
(http-version "HTTP/1.1")
(content-type #f)
(connection #unspecified)
(header '((user-agent: "Mozilla/5.0")))
(args '())
(body #f))
   ;; preliminary checks: resolve the input/output ports
   (cond
(socket
(set! in (socket-input socket))
(set! out (socket-output socket)))
((and (not in) (not out))
(unless (and host port)
(error "http" "Missing either \"host\" or \"port\" argument" host))
(set! socket (make-http-socket host port proxy timeout))
(set! in (socket-input socket))
(set! out (socket-output socket)))
((not in)
(error "http" "Missing either \"in\" or \"socket\" argument" in))
((not out)
(error "http" "Missing \"out\" argument" out)))
(if (string? proxy)
(display-proxy-method method host port path http-version out)
(display-direct-method method host port path http-version out))
(if (or (and (=fx port 80) (eq? protocol 'http))
(and (=fx port 443) (eq? protocol 'https)))
(display-line "Host: " host out)
(display-line "Host: " host ":" port out))
(for-each (lambda (h)
(display-line (keyword->string (car h)) ": "
(if (pair? (cdr h)) (cadr h) (cdr h))
out))
header)
   ;; authentication
   (cond
((string? login)
(display-authentication login out))
((string? authorization)
(display-line "Authorization: " authorization out))
((and (string? username) (string? password))
(display-authentication (string-append username ":" password) out)))
   ;; connection keep-alive
   (when (string? connection)
(display-line "Connection: " connection out))
   ;; post method
   (cond
((and (or (eq? method 'post) (eq? method 'POST))
(or (eq? content-type 'multipart/form-data)
(pair? args)))
(cond
((eq? content-type 'multipart/form-data)
(let* ((boundary (generate-http-boundary))
(content (make-http-post-body boundary args))
(content-length (apply + (map string-length content))))
(display-line "Content-Length: " content-length out)
(display-line "Content-Type: multipart/form-data; boundary="
(substring boundary 2 (string-length boundary)) out)
(display-line out)
(for-each (lambda (o) (display-string o out)) content)))
(else
(let ((content (x-www-form-urlencode args))
(ct (or content-type "application/x-www-form-urlencoded")))
(display-line "Content-Type: " ct out)
(display-line "Content-Length: " (string-length content) out)
(display-line out)
(display content out)
(display-line out)))))
      ;; a request with a fixed length body
      ((string? body)
(display-line "Content-Length: " (string-length body) out)
(display-line out)
(display body out))
      ;; a request with a variable length body
      ((input-port? body)
(display-line out)
(send-chars body out))
((procedure? body)
(display-line out)
(body out))
      ;; a request without a body
      (else
(display-line out)))
(flush-output-port out)
socket)
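;; Minimal usage sketch (illustrative only: "example.org", the path and the
;; user-agent value are hypothetical, and error handling is omitted):
;; (let ((s (http :host "example.org" :port 80 :path "/index.html"
;;                :header '((user-agent: "bigloo-http")))))
;;    (http-parse-status-line (socket-input s)))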
(define (make-http-socket host port proxy timeout)
(when (string? proxy)
(let ((i (string-index proxy #\:)))
(if i
(begin
(set! host (substring proxy 0 i))
(set! port (string->integer
(substring proxy (+fx i 1) (string-length proxy)))))
(begin
(set! host proxy)
(set! port 80)))))
(cond
((not (string? host))
(bigloo-type-error 'http "string" host))
((not (integer? port))
(bigloo-type-error 'http "integer" port))
(else
(let ((s (make-client-socket host port :timeout timeout)))
s))))
(define (generate-http-post-body boundary args)
(let ((port (open-output-string)))
(if (null? args)
(begin
(display-line port)
(close-output-port port))
(let loop ((args args))
(if (null? args)
(begin
(display-line boundary "--" port)
(close-output-port port))
(let ((a (car args)))
(display-line boundary port)
(if (pair? (car a))
(display-line "Content-Disposition: form-data; name=\""
" ( cadar a ) port )
(display-line "Content-Disposition: form-data; name=\""
(car a) "\"" port))
(when (pair? (cddr a)) (display-line (caddr a) port))
(display-line port)
(display-line (cadr a) port)
(loop (cdr args))))))))
(define (make-http-post-body boundary args)
(define (->string a)
(if (string? a)
a
(call-with-output-string (lambda (p) (display a p)))))
(if (null? args)
'("\r\n")
(let loop ((args args))
(if (null? args)
(list boundary "--" "\r\n")
(let* ((a (car args))
(body (cons* (->string (cadr a)) "\r\n"
(loop (cdr args))))
(hd (if (pair? (cddr a))
(cons* (caddr a) "\r\n\r\n" body)
(cons "\r\n" body)))
(disp (if (pair? (car a))
(cons* "Content-Disposition: form-data; name=\""
" ( cadar a ) " \r\n "
hd)
(cons* "Content-Disposition: form-data; name=\""
(car a) "\"\r\n"
hd))))
(cons* boundary "\r\n" disp))))))
(define (generate-http-boundary)
(let ((s (make-string 22 #\-))
(chars "0123456789abcdef"))
(let loop ((i 2))
(when (<fx i 22)
(let ((num (random 16)))
(string-set! s i (string-ref chars num))
(loop (+fx i 1)))))
s))
(define (display-proxy-method method server port path http-version out)
(display (string-upcase (symbol->string! method)) out)
(display-line " http://" server ":" port path " " http-version out))
(define (display-direct-method method server port path http-version out)
(display (string-upcase (symbol->string! method)) out)
(display-line " " path " " http-version out))
(define (display-authentication login out)
(let ((uinfo (base64-encode login -1)))
(display-line "Authorization: Basic " uinfo out)))
(define (http-parse-error-msg c port)
(if (char? c)
(let ((line (http-read-line port)))
(string-for-read
(string-append "{" (string c) "}" (if (string? line) line ""))))
c))
(define status-line-grammar
(regular-grammar ((SP #\Space)
(HTTP (: (+ (in "httpsHTTPS"))
#\/ (+ digit) #\. (+ digit)))
(ICY "ICY")
(CODE (+ (in digit)))
(line (or (: (+ all) "\r\n") (: (+ all) "\n") (+ all))))
((: (or HTTP ICY) SP)
(let ((http (the-substring 0 (-fx (the-length) 1))))
(let ((code (http-read-fixnum (the-port))))
(http-skip-blank (the-port))
(values http code (http-read-line (the-port))))))
(else
(let ((c (the-failure)))
(raise
(if (eof-object? c)
(instantiate::&io-parse-error
(obj (the-port))
(proc "http-parse-status-line")
(msg "Illegal status line, premature end of input"))
(instantiate::&io-parse-error
(obj (http-parse-error-msg c (the-port)))
(proc "http-parse-status-line")
(msg "Illegal status line"))))))))
;; The syntax of the status line (section 6.1 of HTTP/1.1) is defined as follows:
;;    Status-Line = HTTP-Version SP Status-Code SP Reason-Phrase CRLF
(define (http-parse-status-line ip)
(read/rp status-line-grammar ip))
(define (http-read-line p)
(read/rp (regular-grammar ()
((or (: (+ all) "\r\n") (: (+ all) "\n") (+ all))
(the-string))
(else
(let ((c (the-failure)))
(if (eof-object? c)
c
(the-string)))))
p))
(define (http-skip-blank p)
(read/rp (regular-grammar ()
((+ (in " \t")) #unspecified)
(else
(raise
(instantiate::&io-parse-error
(obj (http-parse-error-msg (the-failure) (the-port)))
(proc "http")
(msg "Illegal separator")))))
p))
(define (http-skip-line p)
(read/rp (regular-grammar ()
((or (: (+ all) "\r\n") (: (+ all) "\n") (+ all))
#f)
(else
(let ((c (the-failure)))
(when (eof-object? c) c))))
p))
(define (http-read-fixnum p)
(read/rp (regular-grammar ((DIGIT (in ("09"))))
((+ DIGIT) (the-fixnum))
((+ (in " \t")) (ignore))
(else
(raise
(instantiate::&io-parse-error
(obj (http-parse-error-msg (the-failure) (the-port)))
(proc "http")
(msg "Illegal integer")))))
p))
(define (http-parse-header p po)
(define value-grammar
(regular-grammar ()
((+ (in " \t"))
(ignore))
((: (out " \t\r\n") (* (or (out "\r\n") (: "\r" (out "\n")))) "\r\n")
(the-substring 0 -2))
((: (out " \t\r\n") (* (or (out "\r\n") (: "\r" (out "\n")))) "\n")
(the-substring 0 -1))
((: (? #\Return) #\Newline)
"")
(else
"")))
(define blank-grammar
(regular-grammar ()
((+ (in " \t")) (ignore))))
(define hostname-grammar
(regular-grammar ()
((: (+ (out ":\n\r\t ")) #\:)
(let* ((h (the-substring 0 -1))
(p (http-read-fixnum (the-port))))
(values h p)))
((+ (out ":\n\r\t "))
(values (the-string) #f))
((+ (in " \t"))
(ignore))))
(define name-grammar
(regular-grammar ()
((+ (out "\n\r\t ")) (the-string))
((+ (in " \t")) (ignore))))
(define elong-grammar
(regular-grammar ((DIGIT (in ("09"))))
((+ DIGIT) (fixnum->elong (the-fixnum)))
((+ (in " \t")) (ignore))))
(define symbol-grammar
(regular-grammar ()
((+ (or alpha #\-)) (the-downcase-symbol))
((+ (in " \t")) (ignore))))
(define symbol+-grammar
(regular-grammar ()
((: (+ (or alpha #\-)) "\r\n")
(the-downcase-subsymbol 0 -2))
((: (+ (or alpha #\-))
(* (: "," (* (in " \t")) (+ (or alpha #\-)))) "\r\n")
(the-downcase-subsymbol 0 -2))
((+ (in " \t"))
(ignore))
(else
'||)))
(define auth-grammar
(regular-grammar ()
((: (+ (in #\Space #\Tab)))
(ignore))
((: (out #\Space #\Tab) (* (out "\n\r")))
(the-string))
(else
#f)))
(define crlf-grammar
(regular-grammar ()
((: (* (in #\space #\tab)) (? #\Return) #\Newline)
#unspecified)
(else
#f)))
(define upto-crlf-grammar
(regular-grammar ()
((: (* (out #\Return #\Newline)) (? #\Return) #\Newline)
#unspecified)
(else
#f)))
(define header-grammar
(regular-grammar (po
header
hostname port content-length transfer-encoding
authorization proxy-authorization connection)
((: (+ (or (out " :\r\n\t") (: #\space (out #\:)))) #\:)
(let* ((k (the-downcase-keyword)))
(case k
((host:)
(multiple-value-bind (h p)
(read/rp hostname-grammar (the-port))
(set! hostname h)
(set! port p)
(read/rp crlf-grammar (the-port))
(let ((host (if (fixnum? p)
(string-append h ":" (integer->string p))
h)))
(set! header (cons (cons k host) header))
(ignore))))
	     ;; Some web servers use extra characters after the length in
	     ;; bytes (wakka uses things such as 12345bytes).  This is
	     ;; incorrect with respect to HTTP/1.1 but it seems that regular
	     ;; web crawlers accept this extension...
	     ((content-length:)
(set! content-length (read/rp elong-grammar (the-port)))
(read/rp upto-crlf-grammar (the-port))
(set! header (cons (cons k content-length) header))
(ignore))
((transfer-encoding:)
(set! transfer-encoding (read/rp symbol-grammar (the-port)))
(read/rp crlf-grammar (the-port))
(set! header (cons (cons k transfer-encoding) header))
(ignore))
((authorization:)
(set! authorization (read/rp auth-grammar (the-port)))
(read/rp crlf-grammar (the-port))
(set! header (cons (cons k authorization) header))
(ignore))
((connection:)
(set! connection (read/rp symbol+-grammar (the-port)))
(set! header (cons (cons k connection) header))
(ignore))
	     ;; don't store the proxy-authorization in the header
	     ((proxy-authorization:)
(set! proxy-authorization (read/rp auth-grammar (the-port)))
(read/rp crlf-grammar (the-port))
(ignore))
((expect:)
(let ((e (read/rp value-grammar (the-port))))
(cond
((not (output-port? po))
(error "expect-header"
"Cannot honnor message because output-port is #f"
po))
((string=? e "100-continue")
(fprint po "HTTP/1.1 100 Continue\r\n\r\n")
(flush-output-port po)
(ignore))
(else
(fprint po "HTTP/1.1 417 Expectation Failed\r\n\r\n")
(flush-output-port po)
(raise
(instantiate::&io-parse-error
(obj (the-port))
(proc "expect-header")
(msg (format "Expectation failed (~a)" e))))))))
(else
(let ((v (read/rp value-grammar (the-port))))
(set! header (cons (cons k v) header))
(ignore))))))
((: (* (in #\space #\tab)) (? #\Return) #\Newline)
(values (reverse! header)
hostname
port
content-length
transfer-encoding
authorization
proxy-authorization
connection))
(else
(let ((c (the-failure)))
(if (eof-object? c)
(values (reverse! header)
hostname
port
content-length
transfer-encoding
authorization
proxy-authorization
connection)
(raise (instantiate::&io-parse-error
(obj (list (reverse! header) hostname
port content-length
transfer-encoding authorization
proxy-authorization connection))
(proc "http-parse-header")
(msg (format "Illegal characters: ~a"
(http-parse-error-msg
(the-failure) (the-port)))))))))))
   ;; seed the grammar parameters (header hostname port content-length
   ;; transfer-encoding authorization proxy-authorization connection);
   ;; these initial values are assumed defaults
   (read/rp header-grammar p po '() #f #f #e-1 #f #f #f #unspecified))
(define (http-parse-response ip op proc)
(multiple-value-bind (_1 status _2)
(http-parse-status-line ip)
(multiple-value-bind (header _host _port clen tenc _aut _paut _conn)
(http-parse-header ip op)
(case status
	 ;; ok
	 ((200 207)
(cond
((eq? tenc 'chunked)
(proc (http-chunks->port ip) status header clen tenc))
(else
(proc ip status header clen tenc))))
	 ;; no message body
	 ((201 204 304)
(proc #f status header clen tenc))
	 ;; redirection
	 ((301 302 303 307)
(let ((loc (assq location: header)))
(if (not (pair? loc))
(raise (instantiate::&http-redirection-error
(proc 'http-parse-body)
(msg "No URL for redirection!")
(obj ip)))
(raise (instantiate::&http-redirection
(port ip)
(url (cdr loc)))))))
(else
(or (proc ip status header clen tenc)
(raise (instantiate::&http-status-error
(proc 'http-parse-response)
(msg (format "Bad status code: ~a" status))
(obj ip)
(status status)))))))))
(define (http-response-body->port ip op)
(define (parse-body ip status-code header clen tenc)
(cond
((not (input-port? ip))
(open-input-string ""))
(clen
(let ((p (open-input-procedure (barrier-port ip clen))))
(input-port-close-hook-set! p (lambda (in) (close-input-port ip)))
p))
(else
ip)))
(http-parse-response ip op parse-body))
(define (http-read-crlf p)
(define crlf-grammar
(regular-grammar ()
((: (* (in #\space #\tab)) (? #\Return) #\Newline)
"\r\n")
(else
(raise (instantiate::&io-parse-error
(proc 'http-read-crlf)
(msg "Illegal character")
(obj (http-parse-error-msg (the-failure) (the-port))))))))
(read/rp crlf-grammar p))
(define *chunk-size-grammar*
(regular-grammar ((SZ (+ xdigit))
(BLANK (in " \t"))
(CRLF "\r\n")
op)
      ;; a chunk size followed by a chunk extension (";" then extension text);
      ;; the handler strips the trailing ";" and the inner grammar below
      ;; consumes the extension up to CRLF
      ((: SZ (* BLANK) #\;)
       (when op (display (the-string) op))
(let ((sz (string->integer
(the-substring 0 (-fx (the-length) 1))
16)))
(read/rp (regular-grammar ((CRLF "\r\n"))
((: (+ (or (+ (out "\r")) (+ (: "\r" (out "\n"))))) CRLF)
(when op (display (the-string) op)))
(else
(raise (instantiate::&io-parse-error
(proc 'chunks)
(msg "Illegal character")
(obj (http-parse-error-msg
(the-failure) (the-port)))))))
(the-port))
sz))
((: SZ (* BLANK) CRLF)
(when op (display (the-string) op))
(let ((l (the-length)))
(string->integer (the-substring 0 (-fx l 2)) 16)))
(else
(let* ((c1 (the-failure))
(c2 (read-char (the-port)))
(c3 (read-char (the-port)))
(c4 (read-char (the-port)))
(c5 (read-char (the-port))))
(raise (instantiate::&io-parse-error
(proc 'chunks)
(msg "Illegal chunk size")
(obj (if (or (eof-object? c1)
(eof-object? c2)
(eof-object? c3)
(eof-object? c4)
(eof-object? c5))
"#<eof-object>"
(string-for-read (string c1 c2 c3 c4 c5))))))))))
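;; For reference, the chunked transfer-encoding wire format parsed above looks
;; like (illustrative bytes only):
;;    "5\r\nhello\r\n0\r\n\r\n"
;; i.e. a hexadecimal chunk size (optionally followed by an extension after a
;; semicolon), CRLF, the chunk payload, CRLF, and finally a zero-size chunk
;; with an optional trailer.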
(define *buffer-length* 8192)
(define (barrier-port port content-length)
(let ((buf (make-string *buffer-length*)))
(lambda ()
(when (>elong content-length #e0)
(let* ((n (minfx *buffer-length* (elong->fixnum content-length)))
(m (read-chars! buf n port)))
(set! content-length (-elong content-length (fixnum->elong m)))
(if (<fx m *buffer-length*)
(substring buf 0 m)
buf))))))
(define (http-chunks->procedure ip::input-port)
(let* ((state 'size)
(sz 0)
(bufsz 512)
(buffer (make-string bufsz #a000)))
(lambda ()
(let loop ()
(case state
((eof)
#f)
	   ;; the last chunk, starting with an optional trailer
	   ((trailer)
(let ((l (http-read-line ip)))
(cond
((eof-object? l)
(set! state 'eof)
"")
((or (string=? l "\r\n") (string=? l "\n"))
(set! state 'eof)
l)
(else
l))))
	   ;; a regular chunk
	   ((chunk)
(cond
((=fx sz 0)
(http-read-crlf ip)
(set! state 'size)
(loop))
((<fx sz bufsz)
(let ((s (read-chars sz ip)))
(set! sz (-fx sz (string-length s)))
s))
(else
(let ((s (read-chars! buffer bufsz ip)))
(set! sz (-fx sz s))
(if (=fx s bufsz)
buffer
(substring buffer 0 s))))))
(else
(set! sz (read/rp *chunk-size-grammar* ip #f))
(if (>fx sz 0)
(begin
(set! state 'chunk)
(loop))
(begin
(set! state 'trailer)
(loop)))))))))
(define (http-chunks->port ip)
(let ((ip2 (open-input-procedure (http-chunks->procedure ip))))
(input-port-close-hook-set! ip (lambda (in) (close-input-port ip)))
ip2))
(define (http-send-chunks ip::input-port op::output-port trailer::bool)
(let loop ()
(let ((sz (read/rp *chunk-size-grammar* ip op)))
(if (>fx sz 0)
(begin
(let loop ((sz sz))
(when (>fx sz 0)
(let ((s (send-chars ip op sz)))
(when (>fx s 0)
(loop (-fx sz s))))))
(flush-output-port op)
(let ((s (http-read-crlf ip)))
(display s op)
(loop)))
(if trailer
(let loop ()
(let ((l (http-read-line ip)))
(if (eof-object? l)
(flush-output-port op)
(begin
(display l op)
(if (>fx (string-length l) 2)
(loop)
(flush-output-port op))))))
(begin
(display (http-read-line ip) op)
(flush-output-port op)))))))
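;; Minimal end-to-end sketch (hypothetical host and path; no error handling):
;; (let* ((s    (http :host "example.org" :path "/data"))
;;        (ip   (socket-input s))
;;        (op   (socket-output s))
;;        (body (http-response-body->port ip op)))
;;    ;; `body' transparently covers both Content-Length and chunked replies
;;    (read-char body))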
|
2c6c80ee56aa4933c70ff46db421227491390dabd09f499665d78cc2cca89f01 | bhauman/figwheel-template | figwheel.clj | (ns leiningen.new.figwheel
(:require [leiningen.new.templates :refer [renderer name-to-path ->files]]
[leiningen.core.main :as main]
[clojure.string :as string]))
(def render (renderer "figwheel"))
(defn os? []
(let [os-name
(-> (System/getProperty "os.name" "generic")
(.toLowerCase java.util.Locale/ENGLISH))
has? #(>= (.indexOf %1 %2) 0)]
(cond
(or (has? os-name "mac")
(has? os-name "darwin")) :macos
(has? os-name "win") :windows
(has? os-name "nux") :linux
:else :other)))
;; Check if om or reagent are in the options
;; Copied from:
;; I copy this levenshtein impl everywhere
(defn- next-row
[previous current other-seq]
(reduce
(fn [row [diagonal above other]]
(let [update-val (if (= other current)
diagonal
(inc (min diagonal above (peek row))))]
(conj row update-val)))
[(inc (first previous))]
(map vector previous (next previous) other-seq)))
(defn- levenshtein
[sequence1 sequence2]
(peek
(reduce (fn [previous current] (next-row previous current sequence2))
(map #(identity %2) (cons nil sequence2) (range))
sequence1)))
(defn- similar [ky ky2]
(let [dist (levenshtein (str ky) (str ky2))]
(when (<= dist 2) dist)))
(def supported-frameworks #{"reagent" "rum" "react"})
(def framework-opts (set (map #(str "--" %) supported-frameworks)))
(def supported-attributes #{"bundle"})
(def attribute-opts (set (map #(str "+" %) supported-attributes)))
(defn similar-options [opt]
(second (first (sort-by first
(filter first (map (juxt (partial similar opt) identity)
(concat framework-opts attribute-opts)))))))
(defn parse-opts [opts]
(reduce (fn [accum opt]
(cond
(framework-opts opt) (assoc accum :framework (keyword (subs opt 2)))
(attribute-opts opt) (update accum :attributes
(fnil conj #{})
(keyword (subs opt 1)))
:else
(let [suggestion (similar-options opt)]
(throw
(ex-info (format "Unknown option '%s' %s"
opt
(str
(when suggestion
(format "\n --> Perhaps you intended to use the '%s' option?" suggestion))))
{:opts opts
::error true})))))
{} opts))
#_ (parse-opts ["--om" "+no-bundle"])
(defn figwheel
"Takes a name and options with the form --option and produces an interactive
ClojureScript + Figwheel template.
The valid options are:
  --react   which adds a minimal React application in core.cljs
--reagent which adds a minimal Reagent application in core.cljs
--rum which adds a minimal Rum application in core.cljs
+bundle include npm bundle support
Only one option can be specified at a time. If no option is specified,
  nothing but a print statement is added in core.cljs"
[name & opts]
(do
(when (= name "figwheel")
(main/abort
(str "Cannot name a figwheel project \"figwheel\" the namespace will clash.\n"
"Please choose a different name, maybe \"tryfig\"?")))
(let [{:keys [framework attributes]} (parse-opts opts)
bundle? (get attributes :bundle false)
data {:name name
:sanitized (name-to-path name)
:react? (= :react framework)
:reagent? (= :reagent framework)
:rum? (= :rum framework)
:npx-command (if (= :windows (os?)) "npx.cmd" "npx")
:bundle? bundle?
:reactdep? (boolean (#{:om :react :reagent} framework))}]
(main/info (str "Generating fresh 'lein new' figwheel project.\n\n"
"Change into your '" name "' directory\n\n"
(when bundle?
"Install npm dependencies via 'npm install'\n")
"Then run 'lein figwheel'\n"
"Wait for it to finish compiling\n"
"A browser window should open to the demo application, if not\n"
"then open ':3449/index.html' in your browser"))
(apply ->files data
(cond-> [["README.md" (render "README.md" data)]
["project.clj" (render "project.clj" data)]
["dev/user.clj" (render "user.clj" data)]
["src/{{sanitized}}/core.cljs" (render "core.cljs" data)]
["resources/public/index.html" (render "index.html" data)]
["resources/public/css/style.css" (render "style.css" data)]
[".gitignore" (render "gitignore" data)]]
bundle?
(concat [["package.json" (render "package.json" data)]
["webpack.config.js" (render "webpack.config.js" data)]]))))))
| null | https://raw.githubusercontent.com/bhauman/figwheel-template/04b527a5c03e1ef6bd0f399c6b634d0adc231fae/src/leiningen/new/figwheel.clj | clojure |
e09026090da46200dc4e208ebc92fec90391facaf5187485a002be90506aef1e | WhatsApp/eqwalizer | wip_maps.erl | Copyright ( c ) Meta Platforms , Inc. and affiliates . All rights reserved .
%%%
%%% This source code is licensed under the Apache 2.0 license found in
%%% the LICENSE file in the root directory of this source tree.
-module(wip_maps).
-compile([export_all, nowarn_export_all]).
% only atom keys can be updated
% unconditionally
-spec update_req_non_atom_neg
(map()) -> map().
update_req_non_atom_neg(M) ->
M#{1 := 1}.
% "mixed" updates are not supported
( they are not used in WA codebase )
-spec bad_mixed_update1
(#{a := term()}) -> #{a := a, b := b}.
bad_mixed_update1(M) ->
M#{a := a, b => b}.
-spec bad_mixed_update2
(#{a := term()}) -> #{a := a, b := b}.
bad_mixed_update2(M) ->
M#{b => b, a := a}.
-spec bad_mixed_update3
(any()) -> term().
bad_mixed_update3(M)
when is_map(M#{a := a, b => b}) -> M.
-spec bad_mixed_update4
(any()) -> term().
bad_mixed_update4(M)
when is_map(M#{b => b, a := a}) -> M.
| null | https://raw.githubusercontent.com/WhatsApp/eqwalizer/8017d486c025eaa5c35ced1481ad5bad0f665efa/eqwalizer/test_projects/debug/src/wip_maps.erl | erlang |
the LICENSE file in the root directory of this source tree.
only atom keys can be updated
unconditionally
"mixed" updates are not supported | Copyright ( c ) Meta Platforms , Inc. and affiliates . All rights reserved .
This source code is licensed under the Apache 2.0 license found in
-module(wip_maps).
-compile([export_all, nowarn_export_all]).
-spec update_req_non_atom_neg
(map()) -> map().
update_req_non_atom_neg(M) ->
M#{1 := 1}.
( they are not used in WA codebase )
-spec bad_mixed_update1
(#{a := term()}) -> #{a := a, b := b}.
bad_mixed_update1(M) ->
M#{a := a, b => b}.
-spec bad_mixed_update2
(#{a := term()}) -> #{a := a, b := b}.
bad_mixed_update2(M) ->
M#{b => b, a := a}.
-spec bad_mixed_update3
(any()) -> term().
bad_mixed_update3(M)
when is_map(M#{a := a, b => b}) -> M.
-spec bad_mixed_update4
(any()) -> term().
bad_mixed_update4(M)
when is_map(M#{b => b, a := a}) -> M.
|
45ec882d3f9dd19109110dda74716b4497fc1c0e639a5a82c3d1cbfefe61b617 | kwantam/lviv | lviv-funcalls.scm | ;
;Copyright (c) 2011 <>
;
;Permission is hereby granted, free of charge, to any person obtaining a copy
;of this software and associated documentation files (the "Software"), to deal
;in the Software without restriction, including without limitation the rights
;to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software , and to permit persons to whom the Software is
;furnished to do so, subject to the following conditions:
;
;The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software .
;
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
;FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
;OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
;THE SOFTWARE.
;
;
# # # # # # # # # # # # # # # # # # # # # # # #
# # # # FUNCTION CALLS # # # #
# # # # # # # # # # # # # # # # # # # # # # # #
; all the function calling machinery
; some of the symbol defs in here should
; probably move to lviv-symbols.scm
(define (stLambdaCall state binding)
(if (null? (lambda-code binding)) (eRight '())
(let* ((rfunc (if (lambda-reverse? binding) reverse values))
(fnArgNames (lambda-args binding))
(fnNArgs (length fnArgNames))
(fnArgs (delay (stStackNPop state fnNArgs)))
(lambdaCodeParts ; tail call optimization
(splitAt (- (length (lambda-code binding)) 1) ; take the last call in the lambda
(lambda-code binding))) ; apply it in tail position
(lambdaState ; state during the lambda
(delay
(cons (stGetStackBox state)
(cons (zip fnArgNames (rfunc (fromLeftRight (force fnArgs))))
(lambda-env binding)))))
(fnCompResult ; apply all but last piece of code
(lambda ()
(eRight (applyMap (force lambdaState)
(car lambdaCodeParts)))))
(fnResult (delay (with-exception-catcher ; catch errors in above
exceptionHandlerQuiet
fnCompResult)))
(fnFinalEval ; eval last piece in lambda env
(delay (lviv-eval (force lambdaState) (cadr lambdaCodeParts)))))
(cond ((eLeft? (force fnArgs)) (force fnArgs))
((eLeft? (force fnResult))
(rewind state
(reverse (fromLeftRight (force fnArgs)))
(fromLeftRight (force fnResult))))
((eLeft? (force fnFinalEval)) ; make sure final eval works
(rewind state ; otherwise rewind and throw err
(reverse (fromLeftRight (force fnArgs)))
(fromLeftRight (force fnFinalEval))))
; this is like an "else", since stUpdateStack always returns a true value
so we first update the stack , then we call the already evaluated call
; from the lambda in the original state, which gets the last call into
; tail position
(else
(lviv-apply (force lambdaState) (force fnFinalEval))))))) ; tail call
; primitive call
; no tail call optimization necessary here; Scheme will do it
for calls that require it , and to us it 's just one monolithic
; call
(define (stPrimCall state binding)
(let* ((rfunc (if (primitive-reverse? binding) reverse values))
(fnNArgs (delay (primitive-arity binding)))
(fnArgs (delay (stStackNPop state (force fnNArgs))))
(fnCompResult
(lambda ()
(eRight (apply (eval (primitive-id binding))
(rfunc (fromLeftRight (force fnArgs)))))))
(fnResult (delay (with-exception-catcher
exceptionHandlerQuiet
fnCompResult))))
(cond ((eLeft? (force fnArgs)) (force fnArgs))
; if there aren't enough args, the procedure fails
; and the stack doesn't get rewound any further
; note that if stStackNPop fails, it will rewind what
; it did
((eLeft? (force fnResult))
(rewind state
(reverse (fromLeftRight (force fnArgs)))
(fromLeftRight (force fnResult))))
; if the primitive application fails, put the args
; back on the stack
(else (stStackPush state (fromLeftRight (force fnResult)))))))
; else push the new value onto the stack
| null | https://raw.githubusercontent.com/kwantam/lviv/bbfda50a4801f92b79631f77e7fa997dc10f0516/src/lviv-funcalls.scm | scheme |
38f5b6cd4f2297cf297f487c244da3781badda92c0a76bd105860649fb39d71d | replete-repl/replete-shared | repl_resources.cljs | (ns replete.repl-resources)
(def special-doc-map
'{. {:forms [(.instanceMethod instance args*)
(.-instanceField instance)]
:doc "The instance member form works for methods and fields.
They all expand into calls to the dot operator at macroexpansion time."}
ns {:forms [(name docstring? attr-map? references*)]
:doc "You must currently use the ns form only with the following caveats
* You must use the :only form of :use
* :require supports :as and :refer
- both options can be skipped
- in this case a symbol can be used as a libspec directly
- that is, (:require lib.foo) and (:require [lib.foo]) are both
supported and mean the same thing
- prefix lists are not supported
* The only option for :refer-clojure is :exclude
* :import is available for importing Google Closure classes
- ClojureScript types and records should be brought in with :use
or :require :refer, not :import ed
* Macros are written in Clojure, and are referenced via the new
:require-macros / :use-macros options to ns
- :require-macros and :use-macros support the same forms that
:require and :use do
Implicit macro loading: If a namespace is required or used, and that
namespace itself requires or uses macros from its own namespace, then
the macros will be implicitly required or used using the same
specifications. This oftentimes leads to simplified library usage,
such that the consuming namespace need not be concerned about
explicitly distinguishing between whether certain vars are functions
or macros.
Inline macro specification: As a convenience, :require can be given
either :include-macros true or :refer-macros [syms...]. Both desugar
into forms which explicitly load the matching Clojure file containing
macros. (This works independently of whether the namespace being
required internally requires or uses its own macros.) For example:
(ns testme.core
(:require [foo.core :as foo :refer [foo-fn] :include-macros true]
[woz.core :as woz :refer [woz-fn] :refer-macros [app jx]]))
is sugar for
(ns testme.core
(:require [foo.core :as foo :refer [foo-fn]]
[woz.core :as woz :refer [woz-fn]])
(:require-macros [foo.core :as foo]
[woz.core :as woz :refer [app jx]]))"}
def {:forms [(def symbol doc-string? init?)]
:doc "Creates and interns a global var with the name
of symbol in the current namespace (*ns*) or locates such a var if
it already exists. If init is supplied, it is evaluated, and the
root binding of the var is set to the resulting value. If init is
not supplied, the root binding of the var is unaffected."}
do {:forms [(do exprs*)]
:doc "Evaluates the expressions in order and returns the value of
the last. If no expressions are supplied, returns nil."}
if {:forms [(if test then else?)]
:doc "Evaluates test. If not the singular values nil or false,
evaluates and yields then, otherwise, evaluates and yields else. If
else is not supplied it defaults to nil."}
new {:forms [(Constructor. args*) (new Constructor args*)]
:url "java_interop#new"
:doc "The args, if any, are evaluated from left to right, and
passed to the JavaScript constructor. The constructed object is
returned."}
quote {:forms [(quote form)]
:doc "Yields the unevaluated form."}
recur {:forms [(recur exprs*)]
:doc "Evaluates the exprs in order, then, in parallel, rebinds
the bindings of the recursion point to the values of the exprs.
Execution then jumps back to the recursion point, a loop or fn method."}
set! {:forms [(set! var-symbol expr)
(set! (.- instance-expr instanceFieldName-symbol) expr)]
:url "vars#set"
:doc "Used to set vars and JavaScript object fields"}
throw {:forms [(throw expr)]
:doc "The expr is evaluated and thrown."}
try {:forms [(try expr* catch-clause* finally-clause?)]
:doc "catch-clause => (catch classname name expr*)
finally-clause => (finally expr*)
Catches and handles JavaScript exceptions."}
var {:forms [(var symbol)]
:doc "The symbol must resolve to a var, and the Var object
itself (not its value) is returned. The reader macro #'x expands to (var x)."}})
(def repl-special-doc-map
'{in-ns {:arglists ([name])
:doc "Sets *cljs-ns* to the namespace named by the symbol, creating it if needed."}
dir {:arglists ([nsname])
:doc "Prints a sorted directory of public vars in a namespace"}
apropos {:arglists ([str-or-pattern])
:doc "Given a regular expression or stringable thing, return a seq of all
public definitions in all currently-loaded namespaces that match the
str-or-pattern."}
doc {:arglists ([sym])
:doc "Prints documentation for a var or special form given its name"}
find-doc {:arglists ([str-or-pattern])
:doc "Prints documentation for any var whose documentation or name
contains a match for re-string-or-pattern"}
source {:arglists ([sym])
:doc "Prints the source code for the given symbol, if it can find it.
This requires that the symbol resolve to a Var defined in a
namespace for which the source is available.
Example: (source filter)"}
pst {:arglists ([] [e])
:doc "Prints a stack trace of the exception.
If none supplied, uses the root cause of the most recent repl exception (*e)"}})
| null | https://raw.githubusercontent.com/replete-repl/replete-shared/8c289b6c8be4eb7ffabcd92a712155b03d8dc665/src/replete/repl_resources.cljs | clojure |
b6b14a43b23c8c13e11bf9775b03aa7592d75d059951094197f37d51e9afca6a | metametadata/carry | core.cljs | (ns carry-history.core
(:require [cljs.core.match :refer-macros [match]]
[goog.events]
[goog.history.EventType :as EventType]
[clojure.string]
[clojure.set])
(:import goog.history.Html5History
[goog History]))
;;;;;;;;;;;;;;;;;;;;;;;;; History
(defprotocol HistoryProtocol
"Protocol for objects managing browser history."
(listen [this callback]
"Starts calling back on history events.
Callback function signature: `[token browser-event? event-data]`, where:
* `token` - new token
* `browser-event?` - `true` if event was initiated by action in browser, e.g. clicking Back button
* `event-data` - data which was passed from `replace-token`/`push-token`
Returns a function which stops listening.")
(replace-token [this token] [this token event-data]
"Replace token and fire an event with additional data passed (data is `nil` if not specified);
do nothing if current token is already equal to the specified one.")
(push-token [this token] [this token event-data]
"Push token and fire an event with additional data passed (data is `nil` if not specified);
do nothing if current token is already equal to the specified one.")
(token [this] "Return current token.")
(token->href [this token] "Returns the href for the specified token to be used in HTML links."))
Implementation of HistoryProtocol using Closure API
(def ^:dynamic ^:no-doc *-history-event-data* nil)
(defrecord ^:no-doc -History [-goog-history]
HistoryProtocol
(listen
[_this callback]
(let [key (goog.events/listen -goog-history EventType/NAVIGATE #(callback (.-token %)
(.-isNavigation %)
*-history-event-data*))]
#(goog.events/unlistenByKey key)))
(replace-token [this new-token] (replace-token this new-token nil))
(replace-token
[this new-token event-data]
(binding [*-history-event-data* event-data]
(when (not= (token this) new-token) ; prevent firing an event if token is going to stay the same
(.replaceToken -goog-history new-token))))
(push-token [this token] (push-token this token nil))
(push-token
[_this token event-data]
(binding [*-history-event-data* event-data]
(.setToken -goog-history token)))
(token
[_this]
(.getToken -goog-history))
(token->href
[_this token]
(.getUrl_ -goog-history token)))
(defn new-legacy-hash-history
"For history management using hashes. Should work in Opera Mini."
[]
(let [history (History.)]
(.setEnabled history true)
(->-History history)))
(defn new-hash-history
"For history management using hashes based on onhashchange event. Will not correctly work in Opera Mini: /#search=hash"
[]
(let [history (Html5History.)]
(.setUseFragment history true)
(.setEnabled history true)
(->-History history)))
(defn new-history
"For history management using pushState. Supported browsers: /#search=pushstate"
[]
(let [history (Html5History.)]
gets rid of " Uncaught SecurityError : Failed to execute ' pushState ' on ' History ' : A history state object with URL
' / ' can not be created in a document with origin ' :3449 ' and URL ' :3449/ ' "
(.setPathPrefix history "")
(.setUseFragment history false)
(.setEnabled history true)
(->-History history)))
Middleware
(defn ^:no-doc -wrap-initial-model
[app-initial-model]
(merge {::token "/"} app-initial-model))
(defn ^:no-doc -wrap-on-signal
[app-on-signal history]
(let [unlisten (atom nil)]
(fn on-signal
[model signal dispatch-signal dispatch-action]
(match signal
:on-start
(let [original-signal-result (app-on-signal model signal dispatch-signal dispatch-action)]
(add-watch model ::token-watch
(fn [_key _ref old-state new-state]
(when (not= (::token old-state) (::token new-state))
(replace-token history (::token new-state)))))
(reset! unlisten
(listen history #(dispatch-signal [::on-history-event {:token %1 :browser-event? %2 :event-data %3}])))
; initial signal
(when (not (-> @model :carry-debugger.core/debugger :replay-mode?))
(dispatch-signal [::on-history-event {:token (token history) :browser-event? true :event-data nil}]))
original-signal-result)
:on-stop
(do
(when (ifn? @unlisten)
(@unlisten))
(app-on-signal model signal dispatch-signal dispatch-action))
[::on-history-event {:token token :browser-event? browser-event? :event-data event-data}]
(do
(dispatch-action [::set-token token])
(when (or browser-event? (:treat-as-browser-event? event-data))
(dispatch-signal [::on-enter token])))
:else
(app-on-signal model signal dispatch-signal dispatch-action)))))
(defn ^:no-doc -wrap-on-action
"Updates the token."
[app-on-action]
(fn on-action
[model action]
(match action
[::set-token token]
(assoc model ::token token)
:else
(app-on-action model action))))
Middleware
(defn add
"Applies middleware which syncs app model with browser history.
After start it begins catching history events and updates `::token` in model accordingly.
If `::token` changes in model (e.g. by toggling action in debugger), then current url is replaced using new token.
Initial `::token` value will be applied on clicking debugger's Reset.
Sends `[::on-enter token]` signal to app after handling token change event initiated from browser (e.g. on clicking Back button).
So using [[HistoryProtocol]]'s `replace-token`/`push-token` would not trigger this signal.
You can still force sending this signal by passing `{:treat-as-browser-event? true}` event-data to these functions.
Middleware is friendly to `carry-debugger`: it won't automatically dispatch initial signal on app start if debugger's replay mode is on."
[blueprint history]
(-> blueprint
(update :initial-model -wrap-initial-model)
(update :on-signal -wrap-on-signal history)
(update :on-action -wrap-on-action)))
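;; Minimal usage sketch (`app-blueprint` is a hypothetical Carry blueprint map
;; with :initial-model/:on-signal/:on-action keys):
#_ (def blueprint-with-history
     (add app-blueprint (new-history)))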
;;; Link
(defn ^:no-doc -pure-click?
"Returns false if the user did a middle-click, right-click, or used a modifier."
[e]
(not (or (.-altKey e)
(.-ctrlKey e)
(.-metaKey e)
(.-shiftKey e)
(not (zero? (.-button e))))))
(defn ^:no-doc -on-click
[e history token replace?]
(when (-pure-click? e)
(.preventDefault e)
(if replace?
(replace-token history token {:treat-as-browser-event? true})
(push-token history token {:treat-as-browser-event? true}))))
(defn link
"Link Reagent component which changes current URL without sending request to server.
Will replace current token instead of pushing if `:replace?` attribute is `true` (attribute is `false` by default).
If history middleware is added then clicking the link will produce `:on-enter` signal."
[history token {:keys [replace?] :as attrs} & body]
(into [:a (merge attrs {:href (token->href history token)
:on-click #(-on-click % history token replace?)})]
body)) | null | https://raw.githubusercontent.com/metametadata/carry/fa5c7cd0d8f1b71edca70330acc97c6245638efb/contrib/history/src/carry_history/core.cljs | clojure | History
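;; Usage sketch inside a Reagent view (the "/about" token and the CSS class
;; are hypothetical):
#_ [link history "/about" {:replace? false :class "nav-link"} "About"]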
prevent firing an event if token is going to stay the same
initial signal
Link | (ns carry-history.core
(:require [cljs.core.match :refer-macros [match]]
[goog.events]
[goog.history.EventType :as EventType]
[clojure.string]
[clojure.set])
(:import goog.history.Html5History
[goog History]))
(defprotocol HistoryProtocol
"Protocol for objects managing browser history."
(listen [this callback]
"Starts calling back on history events.
Callback function signature: `[token browser-event? event-data]`, where:
* `token` - new token
* `browser-event?` - `true` if event was initiated by action in browser, e.g. clicking Back button
* `event-data` - data which was passed from `replace-token`/`push-token`
Returns a function which stops listening.")
(replace-token [this token] [this token event-data]
do nothing if current token is already equal to the specified one.")
(push-token [this token] [this token event-data]
do nothing if current token is already equal to the specified one.")
(token [this] "Return current token.")
(token->href [this token] "Returns the href for the specified token to be used in HTML links."))
Implementation of HistoryProtocol using Closure API
(def ^:dynamic ^:no-doc *-history-event-data* nil)
(defrecord ^:no-doc -History [-goog-history]
HistoryProtocol
(listen
[_this callback]
(let [key (goog.events/listen -goog-history EventType/NAVIGATE #(callback (.-token %)
(.-isNavigation %)
*-history-event-data*))]
#(goog.events/unlistenByKey key)))
(replace-token [this new-token] (replace-token this new-token nil))
(replace-token
[this new-token event-data]
(binding [*-history-event-data* event-data]
(.replaceToken -goog-history new-token))))
(push-token [this token] (push-token this token nil))
(push-token
[_this token event-data]
(binding [*-history-event-data* event-data]
(.setToken -goog-history token)))
(token
[_this]
(.getToken -goog-history))
(token->href
[_this token]
(.getUrl_ -goog-history token)))
(defn new-legacy-hash-history
"For history management using hashes. Should work in Opera Mini."
[]
(let [history (History.)]
(.setEnabled history true)
(->-History history)))
(defn new-hash-history
"For history management using hashes based on onhashchange event. Will not correctly work in Opera Mini: /#search=hash"
[]
(let [history (Html5History.)]
(.setUseFragment history true)
(.setEnabled history true)
(->-History history)))
(defn new-history
"For history management using pushState. Supported browsers: /#search=pushstate"
[]
(let [history (Html5History.)]
;; gets rid of "Uncaught SecurityError: Failed to execute 'pushState' on 'History': A history state object with URL
;; ' / ' cannot be created in a document with origin ' :3449 ' and URL ' :3449/ '"
(.setPathPrefix history "")
(.setUseFragment history false)
(.setEnabled history true)
(->-History history)))
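;; For illustration only (not part of the original file): pick one of the
;; constructors above and read the current token; the returned value depends
;; on the URL of the page the code runs in.
;;   (def history (new-history))
;;   (token history) ;; => e.g. "/some/path"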
;;; Middleware
(defn ^:no-doc -wrap-initial-model
[app-initial-model]
(merge {::token "/"} app-initial-model))
(defn ^:no-doc -wrap-on-signal
[app-on-signal history]
(let [unlisten (atom nil)]
(fn on-signal
[model signal dispatch-signal dispatch-action]
(match signal
:on-start
(let [original-signal-result (app-on-signal model signal dispatch-signal dispatch-action)]
(add-watch model ::token-watch
(fn [_key _ref old-state new-state]
(when (not= (::token old-state) (::token new-state))
(replace-token history (::token new-state)))))
(reset! unlisten
(listen history #(dispatch-signal [::on-history-event {:token %1 :browser-event? %2 :event-data %3}])))
(when (not (-> @model :carry-debugger.core/debugger :replay-mode?))
(dispatch-signal [::on-history-event {:token (token history) :browser-event? true :event-data nil}]))
original-signal-result)
:on-stop
(do
(when (ifn? @unlisten)
(@unlisten))
(app-on-signal model signal dispatch-signal dispatch-action))
[::on-history-event {:token token :browser-event? browser-event? :event-data event-data}]
(do
(dispatch-action [::set-token token])
(when (or browser-event? (:treat-as-browser-event? event-data))
(dispatch-signal [::on-enter token])))
:else
(app-on-signal model signal dispatch-signal dispatch-action)))))
(defn ^:no-doc -wrap-on-action
"Updates the token."
[app-on-action]
(fn on-action
[model action]
(match action
[::set-token token]
(assoc model ::token token)
:else
(app-on-action model action))))
;;; Middleware
(defn add
"Applies middleware which syncs app model with browser history.
After start it begins catching history events and updates `::token` in model accordingly.
If `::token` changes in model (e.g. by toggling action in debugger), then current url is replaced using new token.
Initial `::token` value will be applied on clicking debugger's Reset.
Sends `[::on-enter token]` signal to app after handling token change event initiated from browser (e.g. on clicking Back button).
So using [[HistoryProtocol]]'s `replace-token`/`push-token` would not trigger this signal.
You can still force sending this signal by passing `{:treat-as-browser-event? true}` event-data to these functions.
Middleware is friendly to `carry-debugger`: it won't automatically dispatch initial signal on app start if debugger's replay mode is on."
[blueprint history]
(-> blueprint
(update :initial-model -wrap-initial-model)
(update :on-signal -wrap-on-signal history)
(update :on-action -wrap-on-action)))
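;; A minimal wiring sketch, assuming a Carry-style blueprint map with
;; :initial-model/:on-signal/:on-action keys (the `blueprint` name here is a
;; placeholder, not part of this file):
;;
;;   (def history (new-history))
;;   (def blueprint-with-history (add blueprint history))
;;   ;; the resulting blueprint keeps ::token in the model in sync with the URL
;;   ;; and emits [::on-enter token] signals on browser-initiated navigation.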
(defn ^:no-doc -pure-click?
"Returns false if the user did a middle-click, right-click, or used a modifier."
[e]
(not (or (.-altKey e)
(.-ctrlKey e)
(.-metaKey e)
(.-shiftKey e)
(not (zero? (.-button e))))))
(defn ^:no-doc -on-click
[e history token replace?]
(when (-pure-click? e)
(.preventDefault e)
(if replace?
(replace-token history token {:treat-as-browser-event? true})
(push-token history token {:treat-as-browser-event? true}))))
(defn link
"Link Reagent component which changes current URL without sending request to server.
Will replace current token instead of pushing if `:replace?` attribute is `true` (attribute is `false` by default).
If history middleware is added then clicking the link will produce `:on-enter` signal."
[history token {:keys [replace?] :as attrs} & body]
(into [:a (merge attrs {:href (token->href history token)
:on-click #(-on-click % history token replace?)})]
body)) |
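;; A hypothetical Reagent view using `link` (the `view` name and the "/about"
;; token are placeholders): clicking it pushes "/about" via the history object
;; instead of triggering a full page load.
;;   (defn view [history]
;;     [link history "/about" {:replace? false} "About"])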
1e813e6cf3d9c60aa7d50b81cb6044b365638e1c3c0c430a57dd9d6c7dbe1d6c | danieljharvey/mimsa | Wasm.hs | {-# LANGUAGE OverloadedStrings #-}
module Test.Backend.Wasm
( spec,
)
where
import Data.Bifunctor
import Data.Text (Text)
import Language.Mimsa.Backend.Wasm.Compile
import Language.Mimsa.Core
import Language.Mimsa.Typechecker.NumberVars
import Language.Mimsa.Typechecker.Typecheck
import qualified Language.Wasm as Wasm
import qualified Language.Wasm.Interpreter as Wasm
import Test.Hspec
import Test.Utils.Helpers
runWasm :: Wasm.Module -> IO (Maybe [Wasm.Value])
runWasm wasmModule = do
case Wasm.validate wasmModule of
Right validModule -> do
(result, store) <- Wasm.instantiate Wasm.emptyStore mempty validModule
case result of
Right moduleInstance ->
Wasm.invokeExport store moduleInstance "test" mempty
Left e -> error e
Left e -> do
print wasmModule
error $ "invalid module: " <> show e
typecheck' ::
(Monoid ann) =>
Expr Name Annotation ->
Expr Name (Type ann)
typecheck' expr = do
let numberedExpr = fromRight (addNumbersToStoreExpression expr mempty)
let result =
fmap (\(_, _, a, _) -> first fst a)
. typecheck mempty mempty
$ numberedExpr
(fmap . fmap) (const mempty) (fromRight result)
wasmTest :: Text -> IO (Maybe [Wasm.Value])
wasmTest input =
let expr = typecheck' $ unsafeParseExpr' input
in runWasm (compileRaw expr)
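-- For reference, a hypothetical GHCi session with the helper above (mirroring
-- one of the specs below):
--
-- >>> wasmTest "1 + 1"
-- Just [VI32 2]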
spec :: Spec
spec = do
describe "Wasm" $ do
describe "Number literals" $ do
it "int literal 1" $ do
result <- wasmTest "1"
result `shouldBe` Just [Wasm.VI32 1]
it "int literal 42" $ do
result <- wasmTest "42"
result `shouldBe` Just [Wasm.VI32 42]
describe "Boolean literals" $ do
it "true" $ do
result <- wasmTest "True"
result `shouldBe` Just [Wasm.VI32 1]
it "false" $ do
result <- wasmTest "False"
result `shouldBe` Just [Wasm.VI32 0]
describe "If expression" $ do
it "true branch" $ do
result <- wasmTest "if True then 42 else 5"
result `shouldBe` Just [Wasm.VI32 42]
it "false branch" $ do
result <- wasmTest "if False then 42 else 5"
result `shouldBe` Just [Wasm.VI32 5]
it "using infix op" $ do
result <- wasmTest "if 4 == 5 then 42 else 5"
result `shouldBe` Just [Wasm.VI32 5]
describe "Infix ops" $ do
it "1 + 1 == 2" $ do
result <- wasmTest "1 + 1"
result `shouldBe` Just [Wasm.VI32 2]
it "10 - 9" $ do
result <- wasmTest "10 - 9"
result `shouldBe` Just [Wasm.VI32 1]
it "1 == 1" $ do
result <- wasmTest "1 == 1"
result `shouldBe` Just [Wasm.VI32 1]
it "1 == 2" $ do
result <- wasmTest "1 == 2"
result `shouldBe` Just [Wasm.VI32 0]
it "1 < 2" $ do
result <- wasmTest "1 < 2"
result `shouldBe` Just [Wasm.VI32 1]
it "1 > 2" $ do
result <- wasmTest "1 > 2"
result `shouldBe` Just [Wasm.VI32 0]
it "1 >= 1" $ do
result <- wasmTest "1 >= 1"
result `shouldBe` Just [Wasm.VI32 1]
it "1 <= 1" $ do
result <- wasmTest "1 <= 1"
result `shouldBe` Just [Wasm.VI32 1]
it "1 + 2 + 3 + 4 + 5" $ do
result <- wasmTest "1 + 2 + 3 + 4 + 5"
result `shouldBe` Just [Wasm.VI32 15]
describe "Function" $ do
xit "let inc = \\a -> a + 1; inc 1" $ do
result <- wasmTest "let inc = \\a -> a + 1; inc 1"
result `shouldBe` Just [Wasm.VI32 2]
describe "Variables" $ do
it "let a = 1 in a + 1" $ do
result <- wasmTest "let a = 1 in a + 1"
result `shouldBe` Just [Wasm.VI32 2]
it "let a = 1; let b = 2; a + b" $ do
result <- wasmTest "let a = 1; let b = 2; a + b"
result `shouldBe` Just [Wasm.VI32 3]
it "let a = 1; let b = 2; let c = 3; a + b - c" $ do
result <- wasmTest "let a = 1; let b = 2; let c = 3; a + b - c"
result `shouldBe` Just [Wasm.VI32 0]
| null | https://raw.githubusercontent.com/danieljharvey/mimsa/e6b177dd2c38e8a67d6e27063ca600406b3e6b56/compiler/test/Test/Backend/Wasm.hs | haskell | # LANGUAGE OverloadedStrings # |
{-# LANGUAGE OverloadedStrings #-}
module Test.Backend.Wasm
( spec,
)
where
import Data.Bifunctor
import Data.Text (Text)
import Language.Mimsa.Backend.Wasm.Compile
import Language.Mimsa.Core
import Language.Mimsa.Typechecker.NumberVars
import Language.Mimsa.Typechecker.Typecheck
import qualified Language.Wasm as Wasm
import qualified Language.Wasm.Interpreter as Wasm
import Test.Hspec
import Test.Utils.Helpers
runWasm :: Wasm.Module -> IO (Maybe [Wasm.Value])
runWasm wasmModule = do
case Wasm.validate wasmModule of
Right validModule -> do
(result, store) <- Wasm.instantiate Wasm.emptyStore mempty validModule
case result of
Right moduleInstance ->
Wasm.invokeExport store moduleInstance "test" mempty
Left e -> error e
Left e -> do
print wasmModule
error $ "invalid module: " <> show e
typecheck' ::
(Monoid ann) =>
Expr Name Annotation ->
Expr Name (Type ann)
typecheck' expr = do
let numberedExpr = fromRight (addNumbersToStoreExpression expr mempty)
let result =
fmap (\(_, _, a, _) -> first fst a)
. typecheck mempty mempty
$ numberedExpr
(fmap . fmap) (const mempty) (fromRight result)
wasmTest :: Text -> IO (Maybe [Wasm.Value])
wasmTest input =
let expr = typecheck' $ unsafeParseExpr' input
in runWasm (compileRaw expr)
spec :: Spec
spec = do
describe "Wasm" $ do
describe "Number literals" $ do
it "int literal 1" $ do
result <- wasmTest "1"
result `shouldBe` Just [Wasm.VI32 1]
it "int literal 42" $ do
result <- wasmTest "42"
result `shouldBe` Just [Wasm.VI32 42]
describe "Boolean literals" $ do
it "true" $ do
result <- wasmTest "True"
result `shouldBe` Just [Wasm.VI32 1]
it "false" $ do
result <- wasmTest "False"
result `shouldBe` Just [Wasm.VI32 0]
describe "If expression" $ do
it "true branch" $ do
result <- wasmTest "if True then 42 else 5"
result `shouldBe` Just [Wasm.VI32 42]
it "false branch" $ do
result <- wasmTest "if False then 42 else 5"
result `shouldBe` Just [Wasm.VI32 5]
it "using infix op" $ do
result <- wasmTest "if 4 == 5 then 42 else 5"
result `shouldBe` Just [Wasm.VI32 5]
describe "Infix ops" $ do
it "1 + 1 == 2" $ do
result <- wasmTest "1 + 1"
result `shouldBe` Just [Wasm.VI32 2]
it "10 - 9" $ do
result <- wasmTest "10 - 9"
result `shouldBe` Just [Wasm.VI32 1]
it "1 == 1" $ do
result <- wasmTest "1 == 1"
result `shouldBe` Just [Wasm.VI32 1]
it "1 == 2" $ do
result <- wasmTest "1 == 2"
result `shouldBe` Just [Wasm.VI32 0]
it "1 < 2" $ do
result <- wasmTest "1 < 2"
result `shouldBe` Just [Wasm.VI32 1]
it "1 > 2" $ do
result <- wasmTest "1 > 2"
result `shouldBe` Just [Wasm.VI32 0]
it "1 >= 1" $ do
result <- wasmTest "1 >= 1"
result `shouldBe` Just [Wasm.VI32 1]
it "1 <= 1" $ do
result <- wasmTest "1 <= 1"
result `shouldBe` Just [Wasm.VI32 1]
it "1 + 2 + 3 + 4 + 5" $ do
result <- wasmTest "1 + 2 + 3 + 4 + 5"
result `shouldBe` Just [Wasm.VI32 15]
describe "Function" $ do
xit "let inc = \\a -> a + 1; inc 1" $ do
result <- wasmTest "let inc = \\a -> a + 1; inc 1"
result `shouldBe` Just [Wasm.VI32 2]
describe "Variables" $ do
it "let a = 1 in a + 1" $ do
result <- wasmTest "let a = 1 in a + 1"
result `shouldBe` Just [Wasm.VI32 2]
it "let a = 1; let b = 2; a + b" $ do
result <- wasmTest "let a = 1; let b = 2; a + b"
result `shouldBe` Just [Wasm.VI32 3]
it "let a = 1; let b = 2; let c = 3; a + b - c" $ do
result <- wasmTest "let a = 1; let b = 2; let c = 3; a + b - c"
result `shouldBe` Just [Wasm.VI32 0]
|
4e86c1a4f7076ecbe619d0b4f3f6f3f3f90e26162c06afbbed420fe9d2a89d55 | rd--/hsc3 | diskIn.help.hs | diskIn ; requires = dsk ; c.f . sndfileIn
let (buf, nc) = (control kr "dsk" 0, 2) in diskIn nc buf Loop
---- ; setup & cleanup
{fn = sfResolve "20.2-LW+RD.flac";nc = 2;dsk = 0}
withSc3 (mapM_ async [b_alloc dsk 65536 2,b_read dsk fn 0 (-1) 0 True])
withSc3 (mapM_ async [b_close dsk,b_free dsk])
| null | https://raw.githubusercontent.com/rd--/hsc3/024d45b6b5166e5cd3f0142fbf65aeb6ef642d46/Help/Ugen/diskIn.help.hs | haskell | -- ; setup & cleanup | diskIn ; requires = dsk ; c.f . sndfileIn
let (buf, nc) = (control kr "dsk" 0, 2) in diskIn nc buf Loop
{fn = sfResolve "20.2-LW+RD.flac";nc = 2;dsk = 0}
withSc3 (mapM_ async [b_alloc dsk 65536 2,b_read dsk fn 0 (-1) 0 True])
withSc3 (mapM_ async [b_close dsk,b_free dsk])
|
cc351c525a157d712edf894bdf21300b3fb20590b490ba1a5be4e70772b80733 | hidaris/thinking-dumps | chap4.rkt | #lang racket/base
;; Load the J-Bob language:
(require "j-bob/j-bob-lang.rkt")
;; Load J-Bob, our little proof assistant:
(require "j-bob/j-bob.rkt")
;; to part of this total breakfast.
(defun my-list0? (x)
(if (equal x 'oatmeal)
'nil
(if (equal x '())
't
(if (equal x '(toast))
'nil
'nil))))
(defun my-list0?₂ (x)
(equal x '()))
(defun list1? (x)
(if (atom x)
'nil
(list0? (cdr x))))
(defun list2?₁ (x)
(if (atom x)
'nil
(list1? (cdr x))))
;;; The Law of Defun (final)
;;; Given the total function (defun name (x1 ... xn) body),
;;; (name e1 ... en) = body where x1 is e1, ..., xn is en.
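;;; For example, by this law (my-list0?₂ 'oatmeal) = (equal 'oatmeal '()),
;;; since my-list0?₂ above is total and its body is (equal x '()).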
;;; Here is a partial function.
(defun partial (x)
(if (partial x)
'nil
't))
(dethm contradiction ()
'nil)
(defun list? (x)
(if (atom x)
(equal x '())
(list? (cdr x))))
;;; A measure is an expression that is
;;; included with a function definition. It
;;; may only refer to previously defined,
;;; total functions and to the function
;;; definition's formal arguments. The measure must
;;; produce a natural number that decreases for every
;;; recursive call to the function.
;;; The Axioms of Size
(dethm natp/size (x)
(equal (natp (size x)) 't))
(dethm size/car (x)
(if (atom x) 't (equal (< (size (car x)) (size x)) 't)))
(dethm size/cdr (x)
(if (atom x) 't (equal (< (size (cdr x)) (size x)) 't)))
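;;; For instance, a measure for list? above could be (size x): every recursive
;;; call is on (cdr x), and size/cdr says (size (cdr x)) < (size x) whenever
;;; x is not an atom. (Illustration only, not part of the original file.)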
(defun sub (x y)
(if (atom y)
(if (equal y '?)
x
y)
(cons (sub x (car y))
(sub x (cdr y)))))
| null | https://raw.githubusercontent.com/hidaris/thinking-dumps/3fceaf9e6195ab99c8315749814a7377ef8baf86/the-little-series/the-little-prover/chap4.rkt | racket | Load the J-Bob language:
Load J-Bob, our little proof assistant:
to part of this total breakfast.
Given the total function (defun name (x1 ... xn) body),
(name e1 ... en) = body where x1 is e1, ..., xn is en.
Here is a partial functions.
A measure is an expression that is
included with a function definition. It
may only refer to previously defined,
total functions and to the function
definition's formal arguments. The measure must
produce a natural number that decreases for every
recursive call to the function. | #lang racket/base
(require "j-bob/j-bob-lang.rkt")
(require "j-bob/j-bob.rkt")
(defun my-list0? (x)
(if (equal x 'oatmeal)
'nil
(if (equal x '())
't
(if (equal x '(toast))
'nil
'nil))))
(defun my-list0?₂ (x)
(equal x '()))
(defun list1? (x)
(if (atom x)
'nil
(list0? (cdr x))))
(defun list2?₁ (x)
(if (atom x)
'nil
(list1? (cdr x))))
;;; The Law of Defun (final)
(defun partial (x)
(if (partial x)
'nil
't))
(dethm contradiction ()
'nil)
(defun list? (x)
(if (atom x)
(equal x '())
(list? (cdr x))))
;;; The Axioms of Size
(dethm natp/size (x)
(equal (natp (size x)) 't))
(dethm size/car (x)
(if (atom x) 't (equal (< (size (car x)) (size x)) 't)))
(dethm size/cdr (x)
(if (atom x) 't (equal (< (size (cdr x)) (size x)) 't)))
(defun sub (x y)
(if (atom y)
(if (equal y '?)
x
y)
(cons (sub x (car y))
(sub x (cdr y)))))
|
778e3e9943a49c3cce9eda8b4948accc62ecadbeda6975123018c95aecbe8b98 | fission-codes/fission | ServerError.hs | {-# OPTIONS_GHC -fno-warn-orphans #-}
module Fission.Web.Server.Internal.Orphanage.ServerError () where
import RIO
import qualified RIO.Text as Text
import Servant.Server
instance Display ServerError where
display = displayShow
instance Display [ServerError] where
textDisplay errs = Text.intercalate ", " $ fmap textDisplay errs
| null | https://raw.githubusercontent.com/fission-codes/fission/ae177407dccc20be67948a901956b99f40d37ac8/fission-web-server/library/Fission/Web/Server/Internal/Orphanage/ServerError.hs | haskell | # OPTIONS_GHC -fno - warn - orphans #
module Fission.Web.Server.Internal.Orphanage.ServerError () where
import RIO
import qualified RIO.Text as Text
import Servant.Server
instance Display ServerError where
display = displayShow
instance Display [ServerError] where
textDisplay errs = Text.intercalate ", " $ fmap textDisplay errs
|
|
ccdb8c83642bce5078cc3fcbedd0c27a84a847ce34b5349f313d544377730a5c | jabber-at/ejabberd | mod_adhoc.erl | %%%----------------------------------------------------------------------
%%% File    : mod_adhoc.erl
%%% Author  : < >
%%% Purpose : Handle incoming ad-hoc requests (XEP-0050)
%%% Created : 15 Nov 2005 by < >
%%%
%%%
%%% ejabberd, Copyright (C) 2002-2018   ProcessOne
%%%
%%% This program is free software; you can redistribute it and/or
%%% modify it under the terms of the GNU General Public License as
%%% published by the Free Software Foundation; either version 2 of the
%%% License, or (at your option) any later version.
%%%
%%% This program is distributed in the hope that it will be useful,
%%% but WITHOUT ANY WARRANTY; without even the implied warranty of
%%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
%%% General Public License for more details.
%%%
%%% You should have received a copy of the GNU General Public License along
%%% with this program; if not, write to the Free Software Foundation, Inc.,
%%% 51 Franklin Street, Fifth Floor, Boston, USA.
%%%
%%%----------------------------------------------------------------------
-module(mod_adhoc).
-author('').
-protocol({xep, 50, '1.2'}).
-behaviour(gen_mod).
-export([start/2, stop/1, reload/3, process_local_iq/1,
process_sm_iq/1, get_local_commands/5,
get_local_identity/5, get_local_features/5,
get_sm_commands/5, get_sm_identity/5, get_sm_features/5,
ping_item/4, ping_command/4, mod_opt_type/1, depends/2,
mod_options/1]).
-include("logger.hrl").
-include("xmpp.hrl").
start(Host, _Opts) ->
gen_iq_handler:add_iq_handler(ejabberd_local, Host,
?NS_COMMANDS, ?MODULE, process_local_iq),
gen_iq_handler:add_iq_handler(ejabberd_sm, Host,
?NS_COMMANDS, ?MODULE, process_sm_iq),
ejabberd_hooks:add(disco_local_identity, Host, ?MODULE,
get_local_identity, 99),
ejabberd_hooks:add(disco_local_features, Host, ?MODULE,
get_local_features, 99),
ejabberd_hooks:add(disco_local_items, Host, ?MODULE,
get_local_commands, 99),
ejabberd_hooks:add(disco_sm_identity, Host, ?MODULE,
get_sm_identity, 99),
ejabberd_hooks:add(disco_sm_features, Host, ?MODULE,
get_sm_features, 99),
ejabberd_hooks:add(disco_sm_items, Host, ?MODULE,
get_sm_commands, 99),
ejabberd_hooks:add(adhoc_local_items, Host, ?MODULE,
ping_item, 100),
ejabberd_hooks:add(adhoc_local_commands, Host, ?MODULE,
ping_command, 100).
stop(Host) ->
ejabberd_hooks:delete(adhoc_local_commands, Host,
?MODULE, ping_command, 100),
ejabberd_hooks:delete(adhoc_local_items, Host, ?MODULE,
ping_item, 100),
ejabberd_hooks:delete(disco_sm_items, Host, ?MODULE,
get_sm_commands, 99),
ejabberd_hooks:delete(disco_sm_features, Host, ?MODULE,
get_sm_features, 99),
ejabberd_hooks:delete(disco_sm_identity, Host, ?MODULE,
get_sm_identity, 99),
ejabberd_hooks:delete(disco_local_items, Host, ?MODULE,
get_local_commands, 99),
ejabberd_hooks:delete(disco_local_features, Host,
?MODULE, get_local_features, 99),
ejabberd_hooks:delete(disco_local_identity, Host,
?MODULE, get_local_identity, 99),
gen_iq_handler:remove_iq_handler(ejabberd_sm, Host,
?NS_COMMANDS),
gen_iq_handler:remove_iq_handler(ejabberd_local, Host,
?NS_COMMANDS).
reload(_Host, _NewOpts, _OldOpts) ->
ok.
%-------------------------------------------------------------------------
get_local_commands(Acc, _From,
#jid{server = Server, lserver = LServer} = _To, <<"">>,
Lang) ->
Display = gen_mod:get_module_opt(LServer, ?MODULE,
report_commands_node),
case Display of
false -> Acc;
_ ->
Items = case Acc of
{result, I} -> I;
_ -> []
end,
Nodes = [#disco_item{jid = jid:make(Server),
node = ?NS_COMMANDS,
name = translate:translate(Lang, <<"Commands">>)}],
{result, Items ++ Nodes}
end;
get_local_commands(_Acc, From,
#jid{lserver = LServer} = To, ?NS_COMMANDS, Lang) ->
ejabberd_hooks:run_fold(adhoc_local_items, LServer,
{result, []}, [From, To, Lang]);
get_local_commands(_Acc, _From, _To, <<"ping">>,
_Lang) ->
{result, []};
get_local_commands(Acc, _From, _To, _Node, _Lang) ->
Acc.
%-------------------------------------------------------------------------
get_sm_commands(Acc, _From,
#jid{lserver = LServer} = To, <<"">>, Lang) ->
Display = gen_mod:get_module_opt(LServer, ?MODULE,
report_commands_node),
case Display of
false -> Acc;
_ ->
Items = case Acc of
{result, I} -> I;
_ -> []
end,
Nodes = [#disco_item{jid = To,
node = ?NS_COMMANDS,
name = translate:translate(Lang, <<"Commands">>)}],
{result, Items ++ Nodes}
end;
get_sm_commands(_Acc, From,
#jid{lserver = LServer} = To, ?NS_COMMANDS, Lang) ->
ejabberd_hooks:run_fold(adhoc_sm_items, LServer,
{result, []}, [From, To, Lang]);
get_sm_commands(Acc, _From, _To, _Node, _Lang) -> Acc.
%-------------------------------------------------------------------------
%% On disco info request to the ad-hoc node, return automation/command-list.
get_local_identity(Acc, _From, _To, ?NS_COMMANDS,
Lang) ->
[#identity{category = <<"automation">>,
type = <<"command-list">>,
name = translate:translate(Lang, <<"Commands">>)}
| Acc];
get_local_identity(Acc, _From, _To, <<"ping">>, Lang) ->
[#identity{category = <<"automation">>,
type = <<"command-node">>,
name = translate:translate(Lang, <<"Ping">>)}
| Acc];
get_local_identity(Acc, _From, _To, _Node, _Lang) ->
Acc.
%-------------------------------------------------------------------------
%% On disco info request to the ad-hoc node, return automation/command-list.
get_sm_identity(Acc, _From, _To, ?NS_COMMANDS, Lang) ->
[#identity{category = <<"automation">>,
type = <<"command-list">>,
name = translate:translate(Lang, <<"Commands">>)}
| Acc];
get_sm_identity(Acc, _From, _To, _Node, _Lang) -> Acc.
%-------------------------------------------------------------------------
-spec get_local_features({error, stanza_error()} | {result, [binary()]} | empty,
jid(), jid(), binary(), binary()) ->
{error, stanza_error()} | {result, [binary()]} | empty.
get_local_features(Acc, _From, _To, <<"">>, _Lang) ->
Feats = case Acc of
{result, I} -> I;
_ -> []
end,
{result, Feats ++ [?NS_COMMANDS]};
get_local_features(_Acc, _From, _To, ?NS_COMMANDS,
_Lang) ->
{result, []};
get_local_features(_Acc, _From, _To, <<"ping">>,
_Lang) ->
{result, [?NS_COMMANDS]};
get_local_features(Acc, _From, _To, _Node, _Lang) ->
Acc.
%-------------------------------------------------------------------------
get_sm_features(Acc, _From, _To, <<"">>, _Lang) ->
Feats = case Acc of
{result, I} -> I;
_ -> []
end,
{result, Feats ++ [?NS_COMMANDS]};
get_sm_features(_Acc, _From, _To, ?NS_COMMANDS,
_Lang) ->
{result, []};
get_sm_features(Acc, _From, _To, _Node, _Lang) -> Acc.
%-------------------------------------------------------------------------
process_local_iq(IQ) ->
process_adhoc_request(IQ, local).
process_sm_iq(IQ) ->
process_adhoc_request(IQ, sm).
process_adhoc_request(#iq{from = From, to = To,
type = set, lang = Lang,
sub_els = [#adhoc_command{} = SubEl]} = IQ, Type) ->
Host = To#jid.lserver,
Res = case Type of
local ->
ejabberd_hooks:run_fold(adhoc_local_commands, Host, empty,
[From, To, SubEl]);
sm ->
ejabberd_hooks:run_fold(adhoc_sm_commands, Host, empty,
[From, To, SubEl])
end,
case Res of
ignore ->
ignore;
empty ->
Txt = <<"No hook has processed this command">>,
xmpp:make_error(IQ, xmpp:err_item_not_found(Txt, Lang));
{error, Error} ->
xmpp:make_error(IQ, Error);
Command ->
xmpp:make_iq_result(IQ, Command)
end;
process_adhoc_request(#iq{} = IQ, _Hooks) ->
xmpp:make_error(IQ, xmpp:err_bad_request()).
-spec ping_item(empty | {error, stanza_error()} | {result, [disco_item()]},
jid(), jid(), binary()) -> {result, [disco_item()]}.
ping_item(Acc, _From, #jid{server = Server} = _To,
Lang) ->
Items = case Acc of
{result, I} -> I;
_ -> []
end,
Nodes = [#disco_item{jid = jid:make(Server),
node = <<"ping">>,
name = translate:translate(Lang, <<"Ping">>)}],
{result, Items ++ Nodes}.
-spec ping_command(adhoc_command(), jid(), jid(), adhoc_command()) ->
adhoc_command() | {error, stanza_error()}.
ping_command(_Acc, _From, _To,
#adhoc_command{lang = Lang, node = <<"ping">>,
action = Action} = Request) ->
if Action == execute ->
xmpp_util:make_adhoc_response(
Request,
#adhoc_command{
status = completed,
notes = [#adhoc_note{
type = info,
data = translate:translate(Lang, <<"Pong">>)}]});
true ->
Txt = <<"Incorrect value of 'action' attribute">>,
{error, xmpp:err_bad_request(Txt, Lang)}
end;
ping_command(Acc, _From, _To, _Request) -> Acc.
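%% Illustration (not part of the original module): executing the "ping" node
%% over XEP-0050 roughly corresponds to an IQ such as
%%   <iq type='set' to='example.org'>
%%     <command xmlns='http://jabber.org/protocol/commands'
%%              node='ping' action='execute'/>
%%   </iq>
%% which ping_command/4 above answers with a completed command carrying a
%% "Pong" note (the 'example.org' address is a placeholder).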
depends(_Host, _Opts) ->
[].
mod_opt_type(report_commands_node) ->
fun (B) when is_boolean(B) -> B end.
mod_options(_Host) ->
[{report_commands_node, false}].
| null | https://raw.githubusercontent.com/jabber-at/ejabberd/7bfec36856eaa4df21b26e879d3ba90285bad1aa/src/mod_adhoc.erl | erlang | ----------------------------------------------------------------------
This program is free software; you can redistribute it and/or
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
----------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
On disco info request to the ad-hoc node, return automation/command-list.
-------------------------------------------------------------------------
On disco info request to the ad-hoc node, return automation/command-list.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
------------------------------------------------------------------------- | File :
Author : < >
Purpose : Handle incoming ad - doc requests ( XEP-0050 )
Created : 15 Nov 2005 by < >
ejabberd , Copyright ( C ) 2002 - 2018 ProcessOne
modify it under the terms of the GNU General Public License as
published by the Free Software Foundation ; either version 2 of the
You should have received a copy of the GNU General Public License along
with this program ; if not , write to the Free Software Foundation , Inc. ,
51 Franklin Street , Fifth Floor , Boston , USA .
-module(mod_adhoc).
-author('').
-protocol({xep, 50, '1.2'}).
-behaviour(gen_mod).
-export([start/2, stop/1, reload/3, process_local_iq/1,
process_sm_iq/1, get_local_commands/5,
get_local_identity/5, get_local_features/5,
get_sm_commands/5, get_sm_identity/5, get_sm_features/5,
ping_item/4, ping_command/4, mod_opt_type/1, depends/2,
mod_options/1]).
-include("logger.hrl").
-include("xmpp.hrl").
start(Host, _Opts) ->
gen_iq_handler:add_iq_handler(ejabberd_local, Host,
?NS_COMMANDS, ?MODULE, process_local_iq),
gen_iq_handler:add_iq_handler(ejabberd_sm, Host,
?NS_COMMANDS, ?MODULE, process_sm_iq),
ejabberd_hooks:add(disco_local_identity, Host, ?MODULE,
get_local_identity, 99),
ejabberd_hooks:add(disco_local_features, Host, ?MODULE,
get_local_features, 99),
ejabberd_hooks:add(disco_local_items, Host, ?MODULE,
get_local_commands, 99),
ejabberd_hooks:add(disco_sm_identity, Host, ?MODULE,
get_sm_identity, 99),
ejabberd_hooks:add(disco_sm_features, Host, ?MODULE,
get_sm_features, 99),
ejabberd_hooks:add(disco_sm_items, Host, ?MODULE,
get_sm_commands, 99),
ejabberd_hooks:add(adhoc_local_items, Host, ?MODULE,
ping_item, 100),
ejabberd_hooks:add(adhoc_local_commands, Host, ?MODULE,
ping_command, 100).
stop(Host) ->
ejabberd_hooks:delete(adhoc_local_commands, Host,
?MODULE, ping_command, 100),
ejabberd_hooks:delete(adhoc_local_items, Host, ?MODULE,
ping_item, 100),
ejabberd_hooks:delete(disco_sm_items, Host, ?MODULE,
get_sm_commands, 99),
ejabberd_hooks:delete(disco_sm_features, Host, ?MODULE,
get_sm_features, 99),
ejabberd_hooks:delete(disco_sm_identity, Host, ?MODULE,
get_sm_identity, 99),
ejabberd_hooks:delete(disco_local_items, Host, ?MODULE,
get_local_commands, 99),
ejabberd_hooks:delete(disco_local_features, Host,
?MODULE, get_local_features, 99),
ejabberd_hooks:delete(disco_local_identity, Host,
?MODULE, get_local_identity, 99),
gen_iq_handler:remove_iq_handler(ejabberd_sm, Host,
?NS_COMMANDS),
gen_iq_handler:remove_iq_handler(ejabberd_local, Host,
?NS_COMMANDS).
reload(_Host, _NewOpts, _OldOpts) ->
ok.
get_local_commands(Acc, _From,
#jid{server = Server, lserver = LServer} = _To, <<"">>,
Lang) ->
Display = gen_mod:get_module_opt(LServer, ?MODULE,
report_commands_node),
case Display of
false -> Acc;
_ ->
Items = case Acc of
{result, I} -> I;
_ -> []
end,
Nodes = [#disco_item{jid = jid:make(Server),
node = ?NS_COMMANDS,
name = translate:translate(Lang, <<"Commands">>)}],
{result, Items ++ Nodes}
end;
get_local_commands(_Acc, From,
#jid{lserver = LServer} = To, ?NS_COMMANDS, Lang) ->
ejabberd_hooks:run_fold(adhoc_local_items, LServer,
{result, []}, [From, To, Lang]);
get_local_commands(_Acc, _From, _To, <<"ping">>,
_Lang) ->
{result, []};
get_local_commands(Acc, _From, _To, _Node, _Lang) ->
Acc.
get_sm_commands(Acc, _From,
#jid{lserver = LServer} = To, <<"">>, Lang) ->
Display = gen_mod:get_module_opt(LServer, ?MODULE,
report_commands_node),
case Display of
false -> Acc;
_ ->
Items = case Acc of
{result, I} -> I;
_ -> []
end,
Nodes = [#disco_item{jid = To,
node = ?NS_COMMANDS,
name = translate:translate(Lang, <<"Commands">>)}],
{result, Items ++ Nodes}
end;
get_sm_commands(_Acc, From,
#jid{lserver = LServer} = To, ?NS_COMMANDS, Lang) ->
ejabberd_hooks:run_fold(adhoc_sm_items, LServer,
{result, []}, [From, To, Lang]);
get_sm_commands(Acc, _From, _To, _Node, _Lang) -> Acc.
get_local_identity(Acc, _From, _To, ?NS_COMMANDS,
Lang) ->
[#identity{category = <<"automation">>,
type = <<"command-list">>,
name = translate:translate(Lang, <<"Commands">>)}
| Acc];
get_local_identity(Acc, _From, _To, <<"ping">>, Lang) ->
[#identity{category = <<"automation">>,
type = <<"command-node">>,
name = translate:translate(Lang, <<"Ping">>)}
| Acc];
get_local_identity(Acc, _From, _To, _Node, _Lang) ->
Acc.
get_sm_identity(Acc, _From, _To, ?NS_COMMANDS, Lang) ->
[#identity{category = <<"automation">>,
type = <<"command-list">>,
name = translate:translate(Lang, <<"Commands">>)}
| Acc];
get_sm_identity(Acc, _From, _To, _Node, _Lang) -> Acc.
-spec get_local_features({error, stanza_error()} | {result, [binary()]} | empty,
jid(), jid(), binary(), binary()) ->
{error, stanza_error()} | {result, [binary()]} | empty.
get_local_features(Acc, _From, _To, <<"">>, _Lang) ->
Feats = case Acc of
{result, I} -> I;
_ -> []
end,
{result, Feats ++ [?NS_COMMANDS]};
get_local_features(_Acc, _From, _To, ?NS_COMMANDS,
_Lang) ->
{result, []};
get_local_features(_Acc, _From, _To, <<"ping">>,
_Lang) ->
{result, [?NS_COMMANDS]};
get_local_features(Acc, _From, _To, _Node, _Lang) ->
Acc.
get_sm_features(Acc, _From, _To, <<"">>, _Lang) ->
Feats = case Acc of
{result, I} -> I;
_ -> []
end,
{result, Feats ++ [?NS_COMMANDS]};
get_sm_features(_Acc, _From, _To, ?NS_COMMANDS,
_Lang) ->
{result, []};
get_sm_features(Acc, _From, _To, _Node, _Lang) -> Acc.
process_local_iq(IQ) ->
process_adhoc_request(IQ, local).
process_sm_iq(IQ) ->
process_adhoc_request(IQ, sm).
process_adhoc_request(#iq{from = From, to = To,
type = set, lang = Lang,
sub_els = [#adhoc_command{} = SubEl]} = IQ, Type) ->
Host = To#jid.lserver,
Res = case Type of
local ->
ejabberd_hooks:run_fold(adhoc_local_commands, Host, empty,
[From, To, SubEl]);
sm ->
ejabberd_hooks:run_fold(adhoc_sm_commands, Host, empty,
[From, To, SubEl])
end,
case Res of
ignore ->
ignore;
empty ->
Txt = <<"No hook has processed this command">>,
xmpp:make_error(IQ, xmpp:err_item_not_found(Txt, Lang));
{error, Error} ->
xmpp:make_error(IQ, Error);
Command ->
xmpp:make_iq_result(IQ, Command)
end;
process_adhoc_request(#iq{} = IQ, _Hooks) ->
xmpp:make_error(IQ, xmpp:err_bad_request()).
-spec ping_item(empty | {error, stanza_error()} | {result, [disco_item()]},
jid(), jid(), binary()) -> {result, [disco_item()]}.
ping_item(Acc, _From, #jid{server = Server} = _To,
Lang) ->
Items = case Acc of
{result, I} -> I;
_ -> []
end,
Nodes = [#disco_item{jid = jid:make(Server),
node = <<"ping">>,
name = translate:translate(Lang, <<"Ping">>)}],
{result, Items ++ Nodes}.
-spec ping_command(adhoc_command(), jid(), jid(), adhoc_command()) ->
adhoc_command() | {error, stanza_error()}.
ping_command(_Acc, _From, _To,
#adhoc_command{lang = Lang, node = <<"ping">>,
action = Action} = Request) ->
if Action == execute ->
xmpp_util:make_adhoc_response(
Request,
#adhoc_command{
status = completed,
notes = [#adhoc_note{
type = info,
data = translate:translate(Lang, <<"Pong">>)}]});
true ->
Txt = <<"Incorrect value of 'action' attribute">>,
{error, xmpp:err_bad_request(Txt, Lang)}
end;
ping_command(Acc, _From, _To, _Request) -> Acc.
depends(_Host, _Opts) ->
[].
mod_opt_type(report_commands_node) ->
fun (B) when is_boolean(B) -> B end.
mod_options(_Host) ->
[{report_commands_node, false}].
|
6ddb3b83bfc3af985451307c653dd0550408046de6ab3a2606adfc890d98ff0b | philc/clj-maxmind-geoip | core.clj | ; A thin clojure wrapper around MaxMind's java APIs which accesses its geoip database.
; Currently this handles only countries.
(ns clj-maxmind-geoip.core
(:import com.maxmind.geoip.LookupService
com.maxmind.geoip.regionName
java.io.File
java.util.Locale))
(set! *warn-on-reflection* true)
(def ^:private geoip-access-modes {:memory 1 :check 2 :index 4})
(def ^:private lookup-service (atom nil))
; NOTE: We define init-geoip as a multimethod so that we can provide the contract of accepting either a
; string or file while still preventing reflection.
(defmulti init-geoip
"- database: the country edition of the maxmind geoip database. Can be either a File or a String."
class)
(defmethod init-geoip String
[^String database-path]
(let [service (LookupService. database-path ^int (:memory geoip-access-modes))]
(swap! lookup-service (constantly service))))
(defmethod init-geoip File
[^File database-file]
(let [service (LookupService. database-file ^int (:memory geoip-access-modes))]
(swap! lookup-service (constantly service))))
(defn lookup-country
"Lookup country information for an IP address. Only available when querying a GeoIP Country database.
Returns a map of the following country info, or nil if none found:
{:name, :code}"
[^String ip]
(when-let [^com.maxmind.geoip.Country result
(.getCountry ^com.maxmind.geoip.LookupService @lookup-service ip)]
{:code (.getCode result) :name (.getName result)}))
(defn lookup-location
"Lookup location information for an IP address. Only available when querying a GeoIP City database.
Returns a map of the following location info, or nil if none found:
{:country-code, :country-name, :region-code, :region-name, :city, :postal-code, :latitude, :longitude,
:dma-code}"
[^String ip]
(when-let [^com.maxmind.geoip.Location result
(.getLocation ^com.maxmind.geoip.LookupService @lookup-service ip)]
{:country-code (.countryCode result)
:country-name (.countryName result)
:region-code (.region result)
:region-name (regionName/regionNameByCode (.countryCode result) (.region result))
:city (.city result)
:postal-code (.postalCode result)
:latitude (.latitude result)
:longitude (.longitude result)
:dma-code (.dma_code result)}))
| null | https://raw.githubusercontent.com/philc/clj-maxmind-geoip/065b2d7410fd0981b3261e7aadd49580ed44c526/src/clj_maxmind_geoip/core.clj | clojure | Currently this handles only countries.
NOTE: We define init-geoip as a multimethod so that we can provide the contract of accepting either a
string or file while still preventing reflection. | A thin clojure wrapper around 's java APIs which access its geoip database .
(ns clj-maxmind-geoip.core
(:import com.maxmind.geoip.LookupService
com.maxmind.geoip.regionName
java.io.File
java.util.Locale))
(set! *warn-on-reflection* true)
(def ^:private geoip-access-modes {:memory 1 :check 2 :index 4})
(def ^:private lookup-service (atom nil))
(defmulti init-geoip
"- database: the country edition of the maxmind geoip database. Can be either a File or a String."
class)
(defmethod init-geoip String
[^String database-path]
(let [service (LookupService. database-path ^int (:memory geoip-access-modes))]
(swap! lookup-service (constantly service))))
(defmethod init-geoip File
[^File database-file]
(let [service (LookupService. database-file ^int (:memory geoip-access-modes))]
(swap! lookup-service (constantly service))))
(defn lookup-country
"Lookup country information for an IP address. Only available when querying a GeoIP Country database.
Returns a map of the following country info, or nil if none found:
{:name, :code}"
[^String ip]
(when-let [^com.maxmind.geoip.Country result
(.getCountry ^com.maxmind.geoip.LookupService @lookup-service ip)]
{:code (.getCode result) :name (.getName result)}))
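;; A hypothetical REPL session (the database path, IP and result values are
;; placeholders; actual output depends on the GeoIP country database you load):
;;   (init-geoip "/path/to/GeoIP.dat")
;;   (lookup-country "8.8.8.8")
;;   ;; => {:code "US" :name "United States"}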
(defn lookup-location
"Lookup location information for an IP address. Only available when querying a GeoIP City database.
Returns a map of the following location info, or nil if none found:
{:country-code, :country-name, :region-code, :region-name, :city, :postal-code, :latitude, :longitude,
:dma-code}"
[^String ip]
(when-let [^com.maxmind.geoip.Location result
(.getLocation ^com.maxmind.geoip.LookupService @lookup-service ip)]
{:country-code (.countryCode result)
:country-name (.countryName result)
:region-code (.region result)
:region-name (regionName/regionNameByCode (.countryCode result) (.region result))
:city (.city result)
:postal-code (.postalCode result)
:latitude (.latitude result)
:longitude (.longitude result)
:dma-code (.dma_code result)}))
|
06a08e7d745d5b74ffdabaaceacc891a40ecaa42b2af79146fc02b726751b7a2 | broom-lang/broom | Untuple.mli | val untuple : Cps.Program.t -> Cps.Program.t
| null | https://raw.githubusercontent.com/broom-lang/broom/2b9869c79c52f5879db7ac27d4c61fff042b4ca4/compiler/lib/Optimizer/Untuple.mli | ocaml | val untuple : Cps.Program.t -> Cps.Program.t
|
|
0e05e59a41bcf295a4f7a9304c66a136800c4343c160df596dc26ef35f29c29d | nunchaku-inria/nunchaku | Tip_ast.ml |
(* This file is free software. See file "license" for more details. *)
(** {1 Trivial AST for parsing} *)
open Nunchaku_core
let pp_str = Format.pp_print_string
let pp_to_string pp x =
let buf = Buffer.create 64 in
let fmt = Format.formatter_of_buffer buf in
pp fmt x;
Format.pp_print_flush fmt ();
Buffer.contents buf
module Loc = Location
type var = string
type ty_var = string
(** Polymorphic types *)
type ty = ty_view Loc.with_loc
and ty_view =
| Ty_bool
| Ty_app of ty_var * ty list
| Ty_arrow of ty list * ty
let ty_view : ty -> ty_view = Loc.get
let ty_loc = Loc.get_loc
type typed_var = var * ty
(** {2 AST: S-expressions with locations} *)
type term = term_view Loc.with_loc
and term_view =
| True
| False
| Const of string
| App of string * term list
| HO_app of term * term (* higher-order application *)
| Match of term * match_branch list
| If of term * term * term
| Let of (var * term) list * term
| Fun of typed_var * term
| Eq of term * term
| Imply of term * term
| And of term list
| Or of term list
| Not of term
| Distinct of term list
| Cast of term * ty (* type cast *)
| Forall of (var * ty) list * term
| Exists of (var * ty) list * term
and match_branch =
| Match_default of term
| Match_case of string * var list * term
type cstor = {
cstor_name: string;
cstor_args: (string * ty) list; (* selector+type *)
}
type 'arg fun_decl = {
fun_ty_vars: ty_var list;
fun_name: string;
fun_args: 'arg list;
fun_ret: ty;
fun_loc: Loc.t;
}
type fun_def = {
fr_decl: typed_var fun_decl;
fr_body: term;
}
type funs_rec_def = {
fsr_decls: typed_var fun_decl list;
fsr_bodies: term list;
}
type statement = {
stmt: stmt;
loc: Loc.t;
}
and stmt =
| Stmt_decl_sort of string * int (* arity *)
| Stmt_decl of ty fun_decl
| Stmt_fun_def of fun_def
| Stmt_fun_rec of fun_def
| Stmt_funs_rec of funs_rec_def
| Stmt_data of ty_var list * (string * cstor list) list
| Stmt_assert of term
| Stmt_assert_not of ty_var list * term
| Stmt_check_sat
let ty_bool ~loc : ty = Loc.with_loc ~loc @@ Ty_bool
let ty_app ~loc s l = Loc.with_loc ~loc @@ Ty_app (s,l)
let ty_const ~loc s = ty_app ~loc s []
let ty_arrow_l ~loc args ret = if args=[] then ret else Loc.with_loc ~loc (Ty_arrow (args, ret))
let ty_arrow ~loc a b = ty_arrow_l ~loc [a] b
let t_loc t = Loc.get_loc t
let t_view (t:term) : term_view = Loc.get t
let mk_ ~loc x : term = Loc.with_loc ~loc x
let true_ ~loc : term = mk_ ~loc @@ True
let false_ ~loc = mk_ ~loc @@ False
let const ~loc s = mk_ ~loc @@ Const s
let app ~loc f l = mk_ ~loc @@ App (f,l)
let ho_app ~loc a b = mk_ ~loc @@ HO_app (a,b)
let match_ ~loc u l = mk_ ~loc @@ Match (u,l)
let if_ ~loc a b c = mk_ ~loc @@ If(a,b,c)
let fun_ ~loc v t = mk_ ~loc @@ Fun (v,t)
let fun_l ~loc = List.fold_right @@ fun_ ~loc
let let_ ~loc l t = mk_ ~loc @@ Let (l,t)
let eq ~loc a b = mk_ ~loc @@ Eq (a,b)
let imply ~loc a b = mk_ ~loc @@ Imply(a,b)
let and_ ~loc l = mk_ ~loc @@ And l
let or_ ~loc l = mk_ ~loc @@ Or l
let distinct ~loc l = mk_ ~loc @@ Distinct l
let cast ~loc t ~ty = mk_ ~loc @@ Cast (t, ty)
let forall ~loc vars f = match vars with [] -> f | _ -> mk_ ~loc @@ Forall (vars, f)
let exists ~loc vars f = match vars with [] -> f | _ -> mk_ ~loc @@ Exists (vars, f)
let rec not_ ~loc t = match t_view t with
| Forall (vars,u) -> exists ~loc vars (not_ ~loc u)
| Exists (vars,u) -> forall ~loc vars (not_ ~loc u)
| _ -> mk_ ~loc @@ Not t
let mk_st_ ~loc stmt = { loc; stmt }
let mk_cstor name l : cstor = { cstor_name=name; cstor_args=l }
let mk_fun_decl ~loc ~ty_vars f args ret =
{ fun_ty_vars=ty_vars; fun_name=f;
fun_args=args; fun_ret=ret; fun_loc=loc; }
let mk_fun_rec ~loc ~ty_vars f args ret body =
{ fr_decl=mk_fun_decl ~loc ~ty_vars f args ret; fr_body=body; }
let decl_sort ~loc s ~arity = mk_st_ ~loc (Stmt_decl_sort (s, arity))
let decl_fun ~loc ~ty_vars f ty_args ty_ret =
let d = mk_fun_decl ~loc ~ty_vars f ty_args ty_ret in
mk_st_ ~loc (Stmt_decl d)
let fun_def ~loc fr = mk_st_ ~loc (Stmt_fun_def fr)
let fun_rec ~loc fr = mk_st_ ~loc (Stmt_fun_rec fr)
let funs_rec ~loc decls bodies = mk_st_ ~loc (Stmt_funs_rec {fsr_decls=decls; fsr_bodies=bodies})
let data ~loc tyvars l = mk_st_ ~loc (Stmt_data (tyvars,l))
let assert_ ~loc t = mk_st_ ~loc (Stmt_assert t)
let assert_not ~loc ~ty_vars t = mk_st_ ~loc (Stmt_assert_not (ty_vars, t))
let check_sat ~loc () = mk_st_ ~loc Stmt_check_sat
let loc t = t.loc
let view t = t.stmt
let fpf = Format.fprintf
let pp_list ?(start="") ?(stop="") ?(sep=" ") pp out l =
let rec pp_list l = match l with
| x::((_::_) as l) ->
pp out x;
Format.pp_print_string out sep;
Format.pp_print_cut out ();
pp_list l
| x::[] -> pp out x
| [] -> ()
in
Format.pp_print_string out start;
pp_list l;
Format.pp_print_string out stop
let pp_tyvar = pp_str
let rec pp_ty out (ty:ty) = match Loc.get ty with
| Ty_bool -> pp_str out "Bool"
| Ty_app (s,[]) -> pp_str out s
| Ty_app (s,l) -> Format.fprintf out "(@[<hv1>%s@ %a@])" s (pp_list pp_ty) l
| Ty_arrow (args,ret) ->
fpf out "(@[=>@ %a@ %a@])" (pp_list pp_ty) args pp_ty ret
let rec pp_term out (t:term) = match t_view t with
| True -> pp_str out "true"
| False -> pp_str out "false"
| Const s -> pp_str out s
| App (f,l) -> fpf out "(@[<1>%s@ %a@])" f (pp_list pp_term) l
| HO_app (a,b) -> fpf out "(@[<1>@@@ %a@ %a@])" pp_term a pp_term b
| Match (lhs,cases) ->
let pp_case out = function
| Match_default rhs -> fpf out "(@[<2>case default@ %a@])" pp_term rhs
| Match_case (c,[],rhs) ->
fpf out "(@[<2>case %s@ %a@])" c pp_term rhs
| Match_case (c,vars,rhs) ->
fpf out "(@[<2>case@ (@[%s@ %a@])@ %a@])" c (pp_list pp_str) vars pp_term rhs
in
fpf out "(@[<1>match@ %a@ @[<v>%a@]@])" pp_term lhs
(pp_list pp_case) cases
| If (a,b,c) -> fpf out "(@[<hv1>ite %a@ %a@ %a@])" pp_term a pp_term b pp_term c
| Fun (v,body) -> fpf out "(@[<1>lambda @ (%a)@ %a@])" pp_typed_var v pp_term body
| Let (l,t) ->
let pp_binding out (v,t) = fpf out "(@[%s@ %a@])" v pp_term t in
fpf out "(@[<2>let@ (@[%a@])@ %a@])" (pp_list pp_binding) l pp_term t
| Eq (a,b) -> fpf out "(@[=@ %a@ %a@])" pp_term a pp_term b
| Imply (a,b) -> fpf out "(@[=>@ %a@ %a@])" pp_term a pp_term b
| And l -> fpf out "(@[<hv>and@ %a@])" (pp_list pp_term) l
| Or l -> fpf out "(@[<hv>or@ %a@])" (pp_list pp_term) l
| Not t -> fpf out "(not %a)" pp_term t
| Distinct l -> fpf out "(@[distinct@ %a@])" (pp_list pp_term) l
| Cast (t, ty) -> fpf out "(@[<hv2>as@ @[%a@]@ @[%a@]@])" pp_term t pp_ty ty
| Forall (vars,f) ->
fpf out "(@[<hv2>forall@ (@[%a@])@ %a@])" (pp_list pp_typed_var) vars pp_term f
| Exists (vars,f) ->
fpf out "(@[<hv2>exists@ (@[%a@])@ %a@])" (pp_list pp_typed_var) vars pp_term f
and pp_typed_var out (v,ty) =
fpf out "(@[%s@ %a@])" v pp_ty ty
let pp_par pp_x out (ty_vars,x) = match ty_vars with
| [] -> pp_x out x
| _ ->
fpf out "(@[<2>par (@[%a@])@ (%a)@])" (pp_list pp_tyvar) ty_vars pp_x x
let pp_fun_decl pp_arg out fd =
fpf out "%s@ (@[%a@])@ %a"
fd.fun_name (pp_list pp_arg) fd.fun_args pp_ty fd.fun_ret
let pp_fr out fr =
fpf out "@[<2>%a@ %a@]" (pp_fun_decl pp_typed_var) fr.fr_decl pp_term fr.fr_body
let pp_stmt out (st:statement) = match view st with
| Stmt_decl_sort (s,n) -> fpf out "(@[declare-sort@ %s %d@])" s n
| Stmt_assert t -> fpf out "(@[assert@ %a@])" pp_term t
| Stmt_assert_not (ty_vars,t) ->
fpf out "(@[assert-not@ %a@])" (pp_par pp_term) (ty_vars,t)
| Stmt_decl d ->
fpf out "(@[declare-fun@ %a@])"
(pp_par (pp_fun_decl pp_ty)) (d.fun_ty_vars,d)
| Stmt_fun_def fr ->
fpf out "(@[<2>define-fun@ %a@])"
(pp_par pp_fr) (fr.fr_decl.fun_ty_vars, fr)
| Stmt_fun_rec fr ->
fpf out "(@[<2>define-fun-rec@ %a@])"
(pp_par pp_fr) (fr.fr_decl.fun_ty_vars, fr)
| Stmt_funs_rec fsr ->
let pp_decl' out d = fpf out "(@[<2>%a@])" (pp_fun_decl pp_typed_var) d in
fpf out "(@[<hv2>define-funs-rec@ (@[<v>%a@])@ (@[<v>%a@])@])"
(pp_list pp_decl') fsr.fsr_decls (pp_list pp_term) fsr.fsr_bodies
| Stmt_data (tyvars,l) ->
let pp_cstor_arg out (sel,ty) = fpf out "(@[%s %a@])" sel pp_ty ty in
let pp_cstor out c =
if c.cstor_args = []
then fpf out "(%s)" c.cstor_name
else fpf out "(@[<1>%s@ %a@])" c.cstor_name (pp_list pp_cstor_arg) c.cstor_args
in
let pp_data out (s,cstors) =
fpf out "(@[<2>%s@ @[<v>%a@]@])" s (pp_list pp_cstor) cstors
in
fpf out "(@[<hv2>declare-datatypes@ (@[%a@])@ (@[<v>%a@])@])"
(pp_list pp_tyvar) tyvars (pp_list pp_data) l
| Stmt_check_sat -> pp_str out "(check-sat)"
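(* A small usage sketch (not part of the original file): assuming some [loc] is
   in scope, one can build and print the statement [(assert (= x x))] with the
   smart constructors above:
     let t = eq ~loc (const ~loc "x") (const ~loc "x") in
     Format.printf "%a@." pp_stmt (assert_ ~loc t) *)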
(** {2 Result from SMBC} *)
module Smbc_res = struct
type entry =
| Val of term * term
| Ty of ty * var list
type model = entry list
type t =
| Sat of model
| Unsat
| Unknown of string
| Timeout
let pp_entry out = function
| Ty (ty,dom) ->
Format.fprintf out "(@[type %a@ (@[%a@])@])" pp_ty ty
(Utils.pp_list ~sep:"" CCFormat.string) dom
| Val (a,b) ->
Format.fprintf out "(@[val %a@ %a@])" pp_term a pp_term b
let pp_model : model CCFormat.printer =
fun out m ->
Format.fprintf out "(@[<hv>%a@])" (Utils.pp_list ~sep:" " pp_entry) m
let pp : t CCFormat.printer = fun out r -> match r with
| Unsat -> CCFormat.string out "UNSAT"
| Unknown _ -> CCFormat.string out "UNKNOWN"
| Timeout -> CCFormat.string out "TIMEOUT"
| Sat m -> Format.fprintf out "(@SAT :model %a@])" pp_model m
end
(** {2 Errors} *)
let parse_errorf ~loc msg = Parsing_utils.parse_error_ ~loc msg
| null | https://raw.githubusercontent.com/nunchaku-inria/nunchaku/16f33db3f5e92beecfb679a13329063b194f753d/src/parsers/Tip_ast.ml | ocaml | This file is free software. See file "license" for more details.
* Polymorphic types
higher-order application
type cast
selector+type
arity
* {2 Errors} |
* { 1 Trivial AST for parsing }
open Nunchaku_core
let pp_str = Format.pp_print_string
let pp_to_string pp x =
let buf = Buffer.create 64 in
let fmt = Format.formatter_of_buffer buf in
pp fmt x;
Format.pp_print_flush fmt ();
Buffer.contents buf
module Loc = Location
type var = string
type ty_var = string
type ty = ty_view Loc.with_loc
and ty_view =
| Ty_bool
| Ty_app of ty_var * ty list
| Ty_arrow of ty list * ty
let ty_view : ty -> ty_view = Loc.get
let ty_loc = Loc.get_loc
type typed_var = var * ty
(** {2 AST: S-expressions with locations} *)
type term = term_view Loc.with_loc
and term_view =
| True
| False
| Const of string
| App of string * term list
| HO_app of term * term
| Match of term * match_branch list
| If of term * term * term
| Let of (var * term) list * term
| Fun of typed_var * term
| Eq of term * term
| Imply of term * term
| And of term list
| Or of term list
| Not of term
| Distinct of term list
| Cast of term * ty
| Forall of (var * ty) list * term
| Exists of (var * ty) list * term
and match_branch =
| Match_default of term
| Match_case of string * var list * term
type cstor = {
cstor_name: string;
cstor_args: (string * ty) list;
}
type 'arg fun_decl = {
fun_ty_vars: ty_var list;
fun_name: string;
fun_args: 'arg list;
fun_ret: ty;
fun_loc: Loc.t;
}
type fun_def = {
fr_decl: typed_var fun_decl;
fr_body: term;
}
type funs_rec_def = {
fsr_decls: typed_var fun_decl list;
fsr_bodies: term list;
}
type statement = {
stmt: stmt;
loc: Loc.t;
}
and stmt =
| Stmt_decl_sort of string * int
| Stmt_decl of ty fun_decl
| Stmt_fun_def of fun_def
| Stmt_fun_rec of fun_def
| Stmt_funs_rec of funs_rec_def
| Stmt_data of ty_var list * (string * cstor list) list
| Stmt_assert of term
| Stmt_assert_not of ty_var list * term
| Stmt_check_sat
let ty_bool ~loc : ty = Loc.with_loc ~loc @@ Ty_bool
let ty_app ~loc s l = Loc.with_loc ~loc @@ Ty_app (s,l)
let ty_const ~loc s = ty_app ~loc s []
let ty_arrow_l ~loc args ret = if args=[] then ret else Loc.with_loc ~loc (Ty_arrow (args, ret))
let ty_arrow ~loc a b = ty_arrow_l ~loc [a] b
let t_loc t = Loc.get_loc t
let t_view (t:term) : term_view = Loc.get t
let mk_ ~loc x : term = Loc.with_loc ~loc x
let true_ ~loc : term = mk_ ~loc @@ True
let false_ ~loc = mk_ ~loc @@ False
let const ~loc s = mk_ ~loc @@ Const s
let app ~loc f l = mk_ ~loc @@ App (f,l)
let ho_app ~loc a b = mk_ ~loc @@ HO_app (a,b)
let match_ ~loc u l = mk_ ~loc @@ Match (u,l)
let if_ ~loc a b c = mk_ ~loc @@ If(a,b,c)
let fun_ ~loc v t = mk_ ~loc @@ Fun (v,t)
let fun_l ~loc = List.fold_right @@ fun_ ~loc
let let_ ~loc l t = mk_ ~loc @@ Let (l,t)
let eq ~loc a b = mk_ ~loc @@ Eq (a,b)
let imply ~loc a b = mk_ ~loc @@ Imply(a,b)
let and_ ~loc l = mk_ ~loc @@ And l
let or_ ~loc l = mk_ ~loc @@ Or l
let distinct ~loc l = mk_ ~loc @@ Distinct l
let cast ~loc t ~ty = mk_ ~loc @@ Cast (t, ty)
let forall ~loc vars f = match vars with [] -> f | _ -> mk_ ~loc @@ Forall (vars, f)
let exists ~loc vars f = match vars with [] -> f | _ -> mk_ ~loc @@ Exists (vars, f)
let rec not_ ~loc t = match t_view t with
| Forall (vars,u) -> exists ~loc vars (not_ ~loc u)
| Exists (vars,u) -> forall ~loc vars (not_ ~loc u)
| _ -> mk_ ~loc @@ Not t
let mk_st_ ~loc stmt = { loc; stmt }
let mk_cstor name l : cstor = { cstor_name=name; cstor_args=l }
let mk_fun_decl ~loc ~ty_vars f args ret =
{ fun_ty_vars=ty_vars; fun_name=f;
fun_args=args; fun_ret=ret; fun_loc=loc; }
let mk_fun_rec ~loc ~ty_vars f args ret body =
{ fr_decl=mk_fun_decl ~loc ~ty_vars f args ret; fr_body=body; }
let decl_sort ~loc s ~arity = mk_st_ ~loc (Stmt_decl_sort (s, arity))
let decl_fun ~loc ~ty_vars f ty_args ty_ret =
let d = mk_fun_decl ~loc ~ty_vars f ty_args ty_ret in
mk_st_ ~loc (Stmt_decl d)
let fun_def ~loc fr = mk_st_ ~loc (Stmt_fun_def fr)
let fun_rec ~loc fr = mk_st_ ~loc (Stmt_fun_rec fr)
let funs_rec ~loc decls bodies = mk_st_ ~loc (Stmt_funs_rec {fsr_decls=decls; fsr_bodies=bodies})
let data ~loc tyvars l = mk_st_ ~loc (Stmt_data (tyvars,l))
let assert_ ~loc t = mk_st_ ~loc (Stmt_assert t)
let assert_not ~loc ~ty_vars t = mk_st_ ~loc (Stmt_assert_not (ty_vars, t))
let check_sat ~loc () = mk_st_ ~loc Stmt_check_sat
let loc t = t.loc
let view t = t.stmt
let fpf = Format.fprintf
let pp_list ?(start="") ?(stop="") ?(sep=" ") pp out l =
let rec pp_list l = match l with
| x::((_::_) as l) ->
pp out x;
Format.pp_print_string out sep;
Format.pp_print_cut out ();
pp_list l
| x::[] -> pp out x
| [] -> ()
in
Format.pp_print_string out start;
pp_list l;
Format.pp_print_string out stop
let pp_tyvar = pp_str
let rec pp_ty out (ty:ty) = match Loc.get ty with
| Ty_bool -> pp_str out "Bool"
| Ty_app (s,[]) -> pp_str out s
| Ty_app (s,l) -> Format.fprintf out "(@[<hv1>%s@ %a@])" s (pp_list pp_ty) l
| Ty_arrow (args,ret) ->
fpf out "(@[=>@ %a@ %a@])" (pp_list pp_ty) args pp_ty ret
let rec pp_term out (t:term) = match t_view t with
| True -> pp_str out "true"
| False -> pp_str out "false"
| Const s -> pp_str out s
| App (f,l) -> fpf out "(@[<1>%s@ %a@])" f (pp_list pp_term) l
| HO_app (a,b) -> fpf out "(@[<1>@@@ %a@ %a@])" pp_term a pp_term b
| Match (lhs,cases) ->
let pp_case out = function
| Match_default rhs -> fpf out "(@[<2>case default@ %a@])" pp_term rhs
| Match_case (c,[],rhs) ->
fpf out "(@[<2>case %s@ %a@])" c pp_term rhs
| Match_case (c,vars,rhs) ->
fpf out "(@[<2>case@ (@[%s@ %a@])@ %a@])" c (pp_list pp_str) vars pp_term rhs
in
fpf out "(@[<1>match@ %a@ @[<v>%a@]@])" pp_term lhs
(pp_list pp_case) cases
| If (a,b,c) -> fpf out "(@[<hv1>ite %a@ %a@ %a@])" pp_term a pp_term b pp_term c
| Fun (v,body) -> fpf out "(@[<1>lambda @ (%a)@ %a@])" pp_typed_var v pp_term body
| Let (l,t) ->
let pp_binding out (v,t) = fpf out "(@[%s@ %a@])" v pp_term t in
fpf out "(@[<2>let@ (@[%a@])@ %a@])" (pp_list pp_binding) l pp_term t
| Eq (a,b) -> fpf out "(@[=@ %a@ %a@])" pp_term a pp_term b
| Imply (a,b) -> fpf out "(@[=>@ %a@ %a@])" pp_term a pp_term b
| And l -> fpf out "(@[<hv>and@ %a@])" (pp_list pp_term) l
| Or l -> fpf out "(@[<hv>or@ %a@])" (pp_list pp_term) l
| Not t -> fpf out "(not %a)" pp_term t
| Distinct l -> fpf out "(@[distinct@ %a@])" (pp_list pp_term) l
| Cast (t, ty) -> fpf out "(@[<hv2>as@ @[%a@]@ @[%a@]@])" pp_term t pp_ty ty
| Forall (vars,f) ->
fpf out "(@[<hv2>forall@ (@[%a@])@ %a@])" (pp_list pp_typed_var) vars pp_term f
| Exists (vars,f) ->
fpf out "(@[<hv2>exists@ (@[%a@])@ %a@])" (pp_list pp_typed_var) vars pp_term f
and pp_typed_var out (v,ty) =
fpf out "(@[%s@ %a@])" v pp_ty ty
let pp_par pp_x out (ty_vars,x) = match ty_vars with
| [] -> pp_x out x
| _ ->
fpf out "(@[<2>par (@[%a@])@ (%a)@])" (pp_list pp_tyvar) ty_vars pp_x x
let pp_fun_decl pp_arg out fd =
fpf out "%s@ (@[%a@])@ %a"
fd.fun_name (pp_list pp_arg) fd.fun_args pp_ty fd.fun_ret
let pp_fr out fr =
fpf out "@[<2>%a@ %a@]" (pp_fun_decl pp_typed_var) fr.fr_decl pp_term fr.fr_body
let pp_stmt out (st:statement) = match view st with
| Stmt_decl_sort (s,n) -> fpf out "(@[declare-sort@ %s %d@])" s n
| Stmt_assert t -> fpf out "(@[assert@ %a@])" pp_term t
| Stmt_assert_not (ty_vars,t) ->
fpf out "(@[assert-not@ %a@])" (pp_par pp_term) (ty_vars,t)
| Stmt_decl d ->
fpf out "(@[declare-fun@ %a@])"
(pp_par (pp_fun_decl pp_ty)) (d.fun_ty_vars,d)
| Stmt_fun_def fr ->
fpf out "(@[<2>define-fun@ %a@])"
(pp_par pp_fr) (fr.fr_decl.fun_ty_vars, fr)
| Stmt_fun_rec fr ->
fpf out "(@[<2>define-fun-rec@ %a@])"
(pp_par pp_fr) (fr.fr_decl.fun_ty_vars, fr)
| Stmt_funs_rec fsr ->
let pp_decl' out d = fpf out "(@[<2>%a@])" (pp_fun_decl pp_typed_var) d in
fpf out "(@[<hv2>define-funs-rec@ (@[<v>%a@])@ (@[<v>%a@])@])"
(pp_list pp_decl') fsr.fsr_decls (pp_list pp_term) fsr.fsr_bodies
| Stmt_data (tyvars,l) ->
let pp_cstor_arg out (sel,ty) = fpf out "(@[%s %a@])" sel pp_ty ty in
let pp_cstor out c =
if c.cstor_args = []
then fpf out "(%s)" c.cstor_name
else fpf out "(@[<1>%s@ %a@])" c.cstor_name (pp_list pp_cstor_arg) c.cstor_args
in
let pp_data out (s,cstors) =
fpf out "(@[<2>%s@ @[<v>%a@]@])" s (pp_list pp_cstor) cstors
in
fpf out "(@[<hv2>declare-datatypes@ (@[%a@])@ (@[<v>%a@])@])"
(pp_list pp_tyvar) tyvars (pp_list pp_data) l
| Stmt_check_sat -> pp_str out "(check-sat)"
(** {2 Result from SMBC} *)
module Smbc_res = struct
type entry =
| Val of term * term
| Ty of ty * var list
type model = entry list
type t =
| Sat of model
| Unsat
| Unknown of string
| Timeout
let pp_entry out = function
| Ty (ty,dom) ->
Format.fprintf out "(@[type %a@ (@[%a@])@])" pp_ty ty
(Utils.pp_list ~sep:"" CCFormat.string) dom
| Val (a,b) ->
Format.fprintf out "(@[val %a@ %a@])" pp_term a pp_term b
let pp_model : model CCFormat.printer =
fun out m ->
Format.fprintf out "(@[<hv>%a@])" (Utils.pp_list ~sep:" " pp_entry) m
let pp : t CCFormat.printer = fun out r -> match r with
| Unsat -> CCFormat.string out "UNSAT"
| Unknown _ -> CCFormat.string out "UNKNOWN"
| Timeout -> CCFormat.string out "TIMEOUT"
| Sat m -> Format.fprintf out "(@SAT :model %a@])" pp_model m
end
let parse_errorf ~loc msg = Parsing_utils.parse_error_ ~loc msg
|
4aa382c2eae594c98f201cc16c68f0689a26fb06f58efa1ae8d8054e7fdde9d7 | openmusic-project/openmusic | ominterface.lisp | ;=========================================================================
; OpenMusic: Visual Programming Language for Music Composition
;
; Copyright (c) 1997- ... IRCAM - Centre , Paris, France.
;
; This file is part of the OpenMusic environment sources
;
; OpenMusic is free software: you can redistribute it and/or modify
; it under the terms of the GNU General Public License as published by
; the Free Software Foundation, either version 3 of the License, or
; (at your option) any later version.
;
; OpenMusic is distributed in the hope that it will be useful,
; but WITHOUT ANY WARRANTY; without even the implied warranty of
; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
; GNU General Public License for more details.
;
; You should have received a copy of the GNU General Public License
; along with OpenMusic. If not, see < / >.
;
; Authors: , ,
;=========================================================================
; MATHTOOLS by , et al.
(in-package :om)
;;; -----------------------------------------------------------------------------
;;; pcs type fn-name
;;; -----------------------------------------------------------------------------
(defmethod! pc-set ((type symbol) (fn-name t))
:icon 240
:menuins '( (0 (("integer" :integer) ("vector" :vector) ("pitch" :pitch))))
:initvals '(:integer '6-Z10)
:doc "|Pitch-class, one of the 12 pitch-classes designated by integers 0 11.
Pitch-class 0 refers to all
notated pitches C, B-sharp, D-double-flat. Pitch-class 1 refers to all notated pitches C-sharp,
D-flat, B-double-sharp , and so on.
type - :integer :vector :pitch
The name of pcs consisting of two numerals separated by a hyphen. The numeral to the left of the
hyphen is the cardinal-number of the set; the numeral to the right of the hyphen is the ordinal
number of the set, its position on the list of prime-forms.
(pcs :integer '6-Z10)
=> (0 1 3 4 5 7)
(pcs :vector '9-7)
=> (6 7 7 6 7 3)
(pcs :pitch '6-Z10)
=> (c c# d# e f g)
The final parameter is an fname-list and expects the composer to make explicit reference to the
Forte Notation name for a particular pitch-class set. Thus, the name 3-1 denotes a pitch class
set of 0,1,2 and the interval vector, 210000, an ordered array of numerals representing the
interval content of the pitch class set.
(pcs :integer '(3-1 3-8))
=> ((0 1 2) (0 2 6))
(pcs :vector '(3-1 3-8))
=> ((2 1 0 0 0 0) (0 1 0 1 0 1))
(pcs :pitch '(3-1 3-8))
=> ((c c# d) (c d f#))
"
(pcs type fn-name))
;;; -----------------------------------------------------------------------------
;;; pcs-cardinal number
;;; -----------------------------------------------------------------------------
(defmethod! Dn-orbites ((self integer))
:icon 240
:initvals '(3)
:doc "This function displays all the sets of the chosen cardinal-number (the number of element in a set).
(dn-orbites 3)
=> (3-1 3-2 3-3 3-4 3-5 3-6 3-7 3-8 3-9 3-10 3-11 3-12)
(dn-orbites 5)
=> (5-1 5-2 5-3 5-4 5-5 5-6 5-7 5-8 5-9 5-10 5-11 5-z12 5-13 5-14 5-15
5-16 5-z17 5-z18 5-19 5-20 5-21 5-22 5-23 5-24 5-25 5-26 5-27 5-28
5-29 5-30 5-31 5-32 5-33 5-34 5-35 5-z36 5-z37 5-z38)"
(pcs-cardinal self))
;;; -----------------------------------------------------------------------------
;;; pcs-complement type low high fn-name
;;; -----------------------------------------------------------------------------
(defmethod! comp ((type symbol) (from integer) (to integer) (list list))
:icon 240
:menuins '( (0 (("integer" :integer) ("pitch" :pitch))))
:initvals '(:integer 4 5 '(0 1 2 7))
:doc "This function is able to produce an output of 'complementary' intervals to that of the pcs generation.
If for example, A is a pitch-class-set containing 4 elements (pc integers),
then the complement of A is the set of 8 elements not contained in A.
type - :integer :pitch
(pcs :integer '4-6)
=> (0 1 2 7)
(pcs-complement :integer 4 5 (pcs :integer '4-6))
=> (3 4 7 8 9 10 11)
(pcs-complement :pitch 0 5 (pcs :integer '5-6))
=> (d# e g g# a a# b)
(pcs-complement :integer 0 6 (pcs :integer '(6-7 5-7 4-5 2-1)))
=> ((3 4 5 9 10 11) (3 4 5 8 9 10 11)
(3 4 5 7 8 9 10 11) (2 3 4 5 6 7 8 9 10 11))"
(pcs-complement type from to list))
;;; -----------------------------------------------------------------------------
;;; pcs-invert type fn-set
;;; -----------------------------------------------------------------------------
(defmethod! inv ((type symbol) (set list))
:icon 240
:menuins '( (0 (("integer" :integer) ("pitch" :pitch))))
:initvals '(:integer '(0 1 2 7))
:doc "The function pcs-invert enables an intervallic statement in symbols or integers to be inverted.
type - :integer :pitch
(pcs-invert :integer (pcs :integer '6-Z10))
=> (0 11 9 8 7 5)
(pcs-invert :pitch (pcs :integer '6-Z10))
=> (c b a g# g f)
(pcs-invert :integer (pcs :integer '9-7))
=> (0 11 10 9 8 7 5 4 2)
(pcs-invert :integer '((6 7 9 11 0 2 3) (1 3 6 9 0 11 4 7)))
=> ((6 5 3 1 0 10 9) (11 9 6 3 0 1 8 5))
(pcs-invert :pitch (pcs :integer '(5-1 5-8)))
=> ((c b a# a g#) (c a# a g# f#))
(pcs-invert :integer (pcs :integer '(3-1 3-8)))
=> ((0 11 10) (0 10 6))"
(pcs-invert type set))
;;; -----------------------------------------------------------------------------
;;; pcs-normal-order type integer-list
;;; -----------------------------------------------------------------------------
(defmethod! n-ord ((type symbol) (set list))
:icon 240
:menuins '( (0 (("integer" :integer) ("pitch" :pitch))))
:initvals '(:integer '(3 9 6 5 0))
:doc "Normal order.
type - :integer :pitch
(pcs-normal-order :integer '(3 8 9 6 1 2 0 10))
=> (6 8 9 10 0 1 2 3)
(pcs-normal-order :integer '(3 9 6 5 0))
=> (3 5 6 9 0)
(pcs-normal-order :integer '(0 3 4 5 10))
=> (10 0 3 4 5)
(pcs-normal-order :pitch '((3 9 6 5 0) (0 3 4 5 10)))
=> ((d# f f# a c) (a# c d# e f))"
(pcs-normal-order type set))
;;; -----------------------------------------------------------------------------
;;; pcs-prime-form type integer-set
;;; -----------------------------------------------------------------------------
(defmethod! p-form ((type symbol) (set list))
:icon 240
:menuins '( (0 (("fn" :fn) ("integer" :integer) ("pitch" :pitch))))
:initvals '(:integer '(3 9 6 5 0))
:doc "Set in prime form.
type - :fn :integer :pitch
(pcs-prime-form :integer '(11 0 5 6))
=> (0 1 6 7)
(pcs-prime-form :fn '(11 0 5 6))
=> 4-9
(pcs-prime-form :fn '(10 1 3 4))
=> 4-13
(pcs-prime-form :fn '((10 11 1 3 4) (10 0 1 3 4)))
=> (5-z12 5-10)"
(pcs-prime-form type set))
;;; -----------------------------------------------------------------------------
;;; pcs-sub-power type set-low set-high set
;;; -----------------------------------------------------------------------------
(defmethod! sub-power ((type symbol) (low integer) (high integer) (set list))
:icon 240
:menuins '( (0 (("fn" :fn) ("integer" :integer) ("pitch" :pitch))))
:initvals '(:integer 3 3 '(11 0 5 6))
:doc "type - :fn :integer :pitch
(pcs-sub-power :integer 3 3 '(11 0 5 6))
=> ((11 0 5) (11 0 6) (11 5 6) (0 5 6))
(pcs-sub-power :fn 2 3 '(11 0 5 6))
=> (2-5 2-6 2-1 3-5)
(pcs-sub-power :integer 3 3 '(10 0 1 3 4))
=> ((10 0 1) (10 0 3) (10 0 4) (10 1 3) (10 1 4)
(10 3 4) (0 1 3) (0 1 4) (0 3 4) (1 3 4))
(pcs-sub-power :fn 3 4 '(10 0 1 3 4))
=> (3-8 3-7 3-10 3-5 3-3 3-2 4-10 4-12 4-z15 4-13 4-3)
(pcs-sub-power :integer 4 4 '((10 11 1 3 4) (10 0 1 3 4)))
=> (((10 11 1 3) (10 11 1 4) (10 11 3 4) (10 1 3 4) (11 1 3 4))
((10 0 1 3) (10 0 1 4) (10 0 3 4) (10 1 3 4) (0 1 3 4)))"
(pcs-sub-power type low high set))
;;; -----------------------------------------------------------------------------
;;; pcs-sub-prime-form type set-low set-high set
;;; -----------------------------------------------------------------------------
(defmethod! sub-p-form ((type symbol) (low integer) (high integer) (set list))
:icon 240
:menuins '( (0 (("fn" :fn) ("integer" :integer) ("vector" :vector) ("pitch" :pitch))))
:initvals '(:integer 3 3 '(11 0 5 6))
:doc "type - :fn :integer :vector :pitch
(pcs-sub-prime-form :integer 3 3 '(11 0 5 6))
=> ((0 1 6) (0 5 6))
(pcs-sub-prime-form :fn 3 3 '(11 0 5 6))
=> 3-5
(pcs-sub-prime-form :fn 3 3 '(10 1 3 4))
=> (3-7 3-10 3-5 3-2)
(pcs-sub-prime-form :vector 3 3 '(10 1 3 4))
=> ((0 1 1 0 1 0) (0 0 2 0 0 1) (1 0 0 0 1 1) (1 1 1 0 0 0))
(pcs-sub-prime-form :fn 4 4 '((10 11 1 3 4) (10 0 1 3 4)))
=> ((4-8 4-13 4-11) (4-10 4-12 4-z15 4-13 4-3))"
(pcs-sub-prime-form type low high set))
;;; -----------------------------------------------------------------------------
;;; pcs-sub-relation type low high sets
;;; -----------------------------------------------------------------------------
(defmethod! sub-rel ((type symbol) (low integer) (high integer) (set list))
:icon 240
:menuins '( (0 (("fn" :fn) ("integer" :integer) ("vector" :vector) ("pitch" :pitch))))
:initvals '(:integer 3 3 '((10 11 1 3 4) (10 0 1 3 4)))
:doc "type - :fn :integer :vector :pitch
(pcs-sub-relation :fn 3 4 '((10 11 1 3 4) (10 0 1 3 4)))
=> (3-7 3-10 3-5 3-2 4-13)
(pcs-sub-relation :integer 3 4 '((10 11 1 3 4) (10 0 1 3 4)))
=> ((0 2 5) (0 3 6) (0 1 6) (0 1 3) (0 1 3 6))
(pcs-sub-relation :fn 3 3 '(5-10 7-3))
=> (3-5 3-2 3-8 3-10 3-7 3-3)"
(pcs-sub-relation type low high set))
;;; -----------------------------------------------------------------------------
;;; pcs-subcomplex type low high fn-name
;;; -----------------------------------------------------------------------------
(defmethod! sub-complex ((type symbol) (low integer) (high integer) (fn-name symbol))
:icon 240
:menuins '( (0 (("fn" :fn) ("integer" :integer) ("pitch" :pitch))))
:initvals '(:fn 3 3 '4-6)
:doc "type - :fn :integer :pitch
(pcs-subcomplex :fn 3 3 '4-6)
=> (3-1 3-5 3-9)
(pcs-subcomplex :integer 3 3 '4-6)
=> ((0 1 2) (0 1 6) (0 2 7))
(pcs-subcomplex :pitch 3 3 '4-6)
=> ((c c# d) (c c# f#) (c d g))
(pcs-subcomplex :fn 3 4 '4-25)
=> (3-8)
(pcs-subcomplex :fn 3 6 '4-25)
=> (3-8 5-15 5-28 5-33 6-7 6-21 6-22 6-30 6-34 6-35)
(pcs-subcomplex :vector 3 3 '6-z3)
=> ((2 1 0 0 0 0) (1 1 1 0 0 0)
(1 0 1 1 0 0) (1 0 0 1 1 0)
(1 0 0 0 1 1) (0 2 0 1 0 0)
(0 1 1 0 1 0) (0 1 0 1 0 1)
(0 0 2 0 0 1))"
(pcs-subcomplex type low high fn-name))
;;; -----------------------------------------------------------------------------
;;; pcs-transpose type transp-value set
;;; -----------------------------------------------------------------------------
(defmethod! transp ((type symbol) (value integer) (set list))
:icon 420
:menuins '( (0 ( ("integer" :integer) ("pitch" :pitch))))
:initvals '(:integer 3 '(0 1 3 4 5 7))
:doc "The function pcs-transpose enables an intervallic statement in symbols or integers to be
transposed in accordance with the protocol attached to the transposition of pitch class sets.
type - :integer :pitch
(pcs-transpose :integer 3 (pcs :integer '6-Z10))
=> (3 4 6 7 8 10)
(pcs-transpose :pitch 3 (pcs :integer '6-Z10))
=> (d# e f# g g# a#)
Multiple lists may be processed by writing the transp-value for each list in its own list, thus:
(pcs-transpose :integer '(3 11) (pcs :integer '(5-1 5-8)))
=> ((3 4 5 6 7) (11 1 2 3 5))
(pcs-transpose :integer 4 '(0 1 2 4 5 7 8))
=> (4 5 6 8 9 11 0)
(pcs-transpose :integer '(0 6 11) (pcs :integer '(5-1 5-8 6-Z10)))
=> ((0 1 2 3 4) (6 8 9 10 0) (11 0 2 3 4 6))"
(pcs-transpose type value set))
;;; -----------------------------------------------------------------------------
;;; End
;;; -----------------------------------------------------------------------------
;===================
(defun summation (n k)
(let ((pgcd (pgcd n k)))
(loop for j from 1 to pgcd
when (integerp (/ pgcd j)) sum (* (euler j) (binomial (/ n j) (/ k j))))))
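;; Worked example (added for illustration; this assumes EULER is Euler's totient
;; and BINOMIAL the usual binomial coefficient, as the counting formula requires):
;; (summation 12 6) sums over the divisors j of (pgcd 12 6) = 6, i.e. j = 1, 2, 3, 6:
;;   1*C(12,6) + 1*C(6,3) + 2*C(4,2) + 2*C(2,1) = 924 + 20 + 12 + 4 = 960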
(defmethod! Dn-card ((n integer) (k integer))
  :initvals '(12 6) :indoc '("Zn" "elements")
  :doc "Number of pitch-class sets with k elements, up to transposition and inversion.
For example, there are 38 seven-note scales within the chromatic total, up to transposition and/or inversion."
:icon 240
(cond
((oddp n)
(* (/ 1 (* 2 n)) (+ (summation n k)
(* n (binomial (/ (- n 1) 2) (floor (/ k 2)))))))
((and (evenp n) (evenp k))
(* (/ 1 (* 2 n)) (+ (summation n k)
(* n (binomial (/ n 2) (/ k 2))))))
((and (evenp n) (oddp k))
(* (/ 1 (* 2 n)) (+ (summation n k)
(* n (binomial (- (/ n 2) 1) (floor (/ k 2)))))))))
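;; Illustration (added): with the formula above,
;;   (dn-card 12 7) => 38   ; the 38 seven-note scales cited in the docstring
;;   (dn-card 12 6) => 50   ; (/ (+ 960 (* 12 (binomial 6 3))) 24) = 1200/24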
| null | https://raw.githubusercontent.com/openmusic-project/openmusic/9560c064512a1598cd57bcc9f0151c0815178e6f/OPENMUSIC/code/projects/mathtools/groups/dn/ominterface.lisp | lisp | =========================================================================
(at your option) any later version.
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
=========================================================================
-----------------------------------------------------------------------------
pcs type fn-name
-----------------------------------------------------------------------------
the numeral to the right of the hyphen is the ordinal
-----------------------------------------------------------------------------
pcs-cardinal number
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
pcs-complement type low high fn-name
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
pcs-invert type fn-set
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
pcs-normal-order type integer-list
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
pcs-prime-form type integer-set
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
pcs-sub-power type set-low set-high set
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
pcs-sub-prime-form type set-low set-high set
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
pcs-sub-relation type low high sets
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
pcs-subcomplex type low high fn-name
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
pcs-transpose type transp-value set
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
End
-----------------------------------------------------------------------------
===================
| ;    OpenMusic: Visual Programming Language for Music Composition
;    Copyright (c) 1997-... IRCAM - Centre, Paris, France.
;    This file is part of the OpenMusic environment sources
;    OpenMusic is free software: you can redistribute it and/or modify
;    it under the terms of the GNU General Public License as published by
;    the Free Software Foundation, either version 3 of the License, or
;    OpenMusic is distributed in the hope that it will be useful,
;    You should have received a copy of the GNU General Public License
;    along with OpenMusic. If not, see <http://www.gnu.org/licenses/>.
;    Authors: , ,
; MATHTOOLS by , et al.
(in-package :om)
(defmethod! pc-set ((type symbol) (fn-name t))
:icon 240
:menuins '( (0 (("integer" :integer) ("vector" :vector) ("pitch" :pitch))))
:initvals '(:integer '6-Z10)
:doc "|Pitch-class, one of the 12 pitch-classes designated by integers 0 11.
Pitch-class 0 refers to all
notated pitches C, B-sharp, D-double-flat. Pitch-class 1 refers to all notated pitches C-sharp,
D-flat, B-double-sharp , and so on.
type - :integer :vector :pitch
The name of pcs consisting of two numerals separated by a hyphen. The numeral to the left of the
number of the set, its position on the list of prime-forms.
(pcs :integer '6-Z10)
=> (0 1 3 4 5 7)
(pcs :vector '9-7)
=> (6 7 7 6 7 3)
(pcs :pitch '6-Z10)
=> (c c# d# e f g)
The final parameter is an fname-list and expects the composer to make explicit reference to the
Forte Notation name for a particular pitch-class set. Thus, the name 3-1 denotes a pitch class
set of 0,1,2 and the interval vector, 210000, an ordered array of numerals representing the
interval content of the pitch class set.
(pcs :integer '(3-1 3-8))
=> ((0 1 2) (0 2 6))
(pcs :vector '(3-1 3-8))
=> ((2 1 0 0 0 0) (0 1 0 1 0 1))
(pcs :pitch '(3-1 3-8))
=> ((c c# d) (c d f#))
"
(pcs type fn-name))
(defmethod! Dn-orbites ((self integer))
:icon 240
:initvals '(3)
:doc "This function displays all the sets of the chosen cardinal-number (the number of element in a set).
(dn-orbites 3)
=> (3-1 3-2 3-3 3-4 3-5 3-6 3-7 3-8 3-9 3-10 3-11 3-12)
(dn-orbites 5)
=> (5-1 5-2 5-3 5-4 5-5 5-6 5-7 5-8 5-9 5-10 5-11 5-z12 5-13 5-14 5-15
5-16 5-z17 5-z18 5-19 5-20 5-21 5-22 5-23 5-24 5-25 5-26 5-27 5-28
5-29 5-30 5-31 5-32 5-33 5-34 5-35 5-z36 5-z37 5-z38)"
(pcs-cardinal self))
(defmethod! comp ((type symbol) (from integer) (to integer) (list list))
:icon 240
:menuins '( (0 (("integer" :integer) ("pitch" :pitch))))
:initvals '(:integer 4 5 '(0 1 2 7))
:doc "This function is able to produce an output of 'complementary' intervals to that of the pcs generation.
If for example, A is a pitch-class-set containing 4 elements (pc integers),
then the complement of A is the set of 8 elements not contained in A.
type - :integer :pitch
(pcs :integer '4-6)
=> (0 1 2 7)
(pcs-complement :integer 4 5 (pcs :integer '4-6))
=> (3 4 7 8 9 10 11)
(pcs-complement :pitch 0 5 (pcs :integer '5-6))
=> (d# e g g# a a# b)
(pcs-complement :integer 0 6 (pcs :integer '(6-7 5-7 4-5 2-1)))
=> ((3 4 5 9 10 11) (3 4 5 8 9 10 11)
(3 4 5 7 8 9 10 11) (2 3 4 5 6 7 8 9 10 11))"
(pcs-complement type from to list))
(defmethod! inv ((type symbol) (set list))
:icon 240
:menuins '( (0 (("integer" :integer) ("pitch" :pitch))))
:initvals '(:integer '(0 1 2 7))
:doc "The function pcs-invert enables an intervallic statement in symbols or integers to be inverted.
type - :integer :pitch
(pcs-invert :integer (pcs :integer '6-Z10))
=> (0 11 9 8 7 5)
(pcs-invert :pitch (pcs :integer '6-Z10))
=> (c b a g# g f)
(pcs-invert :integer (pcs :integer '9-7))
=> (0 11 10 9 8 7 5 4 2)
(pcs-invert :integer '((6 7 9 11 0 2 3) (1 3 6 9 0 11 4 7)))
=> ((6 5 3 1 0 10 9) (11 9 6 3 0 1 8 5))
(pcs-invert :pitch (pcs :integer '(5-1 5-8)))
=> ((c b a# a g#) (c a# a g# f#))
(pcs-invert :integer (pcs :integer '(3-1 3-8)))
=> ((0 11 10) (0 10 6))"
(pcs-invert type set))
(defmethod! n-ord ((type symbol) (set list))
:icon 240
:menuins '( (0 (("integer" :integer) ("pitch" :pitch))))
:initvals '(:integer '(3 9 6 5 0))
:doc "Normal order.
type - :integer :pitch
(pcs-normal-order :integer '(3 8 9 6 1 2 0 10))
=> (6 8 9 10 0 1 2 3)
(pcs-normal-order :integer '(3 9 6 5 0))
=> (3 5 6 9 0)
(pcs-normal-order :integer '(0 3 4 5 10))
=> (10 0 3 4 5)
(pcs-normal-order :pitch '((3 9 6 5 0) (0 3 4 5 10)))
=> ((d# f f# a c) (a# c d# e f))"
(pcs-normal-order type set))
(defmethod! p-form ((type symbol) (set list))
:icon 240
:menuins '( (0 (("fn" :fn) ("integer" :integer) ("pitch" :pitch))))
:initvals '(:integer '(3 9 6 5 0))
:doc "Set in prime form.
type - :fn :integer :pitch
(pcs-prime-form :integer '(11 0 5 6))
=> (0 1 6 7)
(pcs-prime-form :fn '(11 0 5 6))
=> 4-9
(pcs-prime-form :fn '(10 1 3 4))
=> 4-13
(pcs-prime-form :fn '((10 11 1 3 4) (10 0 1 3 4)))
=> (5-z12 5-10)"
(pcs-prime-form type set))
(defmethod! sub-power ((type symbol) (low integer) (high integer) (set list))
:icon 240
:menuins '( (0 (("fn" :fn) ("integer" :integer) ("pitch" :pitch))))
:initvals '(:integer 3 3 '(11 0 5 6))
:doc "type - :fn :integer :pitch
(pcs-sub-power :integer 3 3 '(11 0 5 6))
=> ((11 0 5) (11 0 6) (11 5 6) (0 5 6))
(pcs-sub-power :fn 2 3 '(11 0 5 6))
=> (2-5 2-6 2-1 3-5)
(pcs-sub-power :integer 3 3 '(10 0 1 3 4))
=> ((10 0 1) (10 0 3) (10 0 4) (10 1 3) (10 1 4)
(10 3 4) (0 1 3) (0 1 4) (0 3 4) (1 3 4))
(pcs-sub-power :fn 3 4 '(10 0 1 3 4))
=> (3-8 3-7 3-10 3-5 3-3 3-2 4-10 4-12 4-z15 4-13 4-3)
(pcs-sub-power :integer 4 4 '((10 11 1 3 4) (10 0 1 3 4)))
=> (((10 11 1 3) (10 11 1 4) (10 11 3 4) (10 1 3 4) (11 1 3 4))
((10 0 1 3) (10 0 1 4) (10 0 3 4) (10 1 3 4) (0 1 3 4)))"
(pcs-sub-power type low high set))
(defmethod! sub-p-form ((type symbol) (low integer) (high integer) (set list))
:icon 240
:menuins '( (0 (("fn" :fn) ("integer" :integer) ("vector" :vector) ("pitch" :pitch))))
:initvals '(:integer 3 3 '(11 0 5 6))
:doc "type - :fn :integer :vector :pitch
(pcs-sub-prime-form :integer 3 3 '(11 0 5 6))
=> ((0 1 6) (0 5 6))
(pcs-sub-prime-form :fn 3 3 '(11 0 5 6))
=> 3-5
(pcs-sub-prime-form :fn 3 3 '(10 1 3 4))
=> (3-7 3-10 3-5 3-2)
(pcs-sub-prime-form :vector 3 3 '(10 1 3 4))
=> ((0 1 1 0 1 0) (0 0 2 0 0 1) (1 0 0 0 1 1) (1 1 1 0 0 0))
(pcs-sub-prime-form :fn 4 4 '((10 11 1 3 4) (10 0 1 3 4)))
=> ((4-8 4-13 4-11) (4-10 4-12 4-z15 4-13 4-3))"
(pcs-sub-prime-form type low high set))
(defmethod! sub-rel ((type symbol) (low integer) (high integer) (set list))
:icon 240
:menuins '( (0 (("fn" :fn) ("integer" :integer) ("vector" :vector) ("pitch" :pitch))))
:initvals '(:integer 3 3 '((10 11 1 3 4) (10 0 1 3 4)))
:doc "type - :fn :integer :vector :pitch
(pcs-sub-relation :fn 3 4 '((10 11 1 3 4) (10 0 1 3 4)))
=> (3-7 3-10 3-5 3-2 4-13)
(pcs-sub-relation :integer 3 4 '((10 11 1 3 4) (10 0 1 3 4)))
=> ((0 2 5) (0 3 6) (0 1 6) (0 1 3) (0 1 3 6))
(pcs-sub-relation :fn 3 3 '(5-10 7-3))
=> (3-5 3-2 3-8 3-10 3-7 3-3)"
(pcs-sub-relation type low high set))
(defmethod! sub-complex ((type symbol) (low integer) (high integer) (fn-name symbol))
:icon 240
:menuins '( (0 (("fn" :fn) ("integer" :integer) ("pitch" :pitch))))
:initvals '(:fn 3 3 '4-6)
:doc "type - :fn :integer :pitch
(pcs-subcomplex :fn 3 3 '4-6)
=> (3-1 3-5 3-9)
(pcs-subcomplex :integer 3 3 '4-6)
=> ((0 1 2) (0 1 6) (0 2 7))
(pcs-subcomplex :pitch 3 3 '4-6)
=> ((c c# d) (c c# f#) (c d g))
(pcs-subcomplex :fn 3 4 '4-25)
=> (3-8)
(pcs-subcomplex :fn 3 6 '4-25)
=> (3-8 5-15 5-28 5-33 6-7 6-21 6-22 6-30 6-34 6-35)
(pcs-subcomplex :vector 3 3 '6-z3)
=> ((2 1 0 0 0 0) (1 1 1 0 0 0)
(1 0 1 1 0 0) (1 0 0 1 1 0)
(1 0 0 0 1 1) (0 2 0 1 0 0)
(0 1 1 0 1 0) (0 1 0 1 0 1)
(0 0 2 0 0 1))"
(pcs-subcomplex type low high fn-name))
(defmethod! transp ((type symbol) (value integer) (set list))
:icon 420
:menuins '( (0 ( ("integer" :integer) ("pitch" :pitch))))
:initvals '(:integer 3 '(0 1 3 4 5 7))
:doc "The function pcs-transpose enables an intervallic statement in symbols or integers to be
transposed in accordance with the protocol attached to the transposition of pitch class sets.
type - :integer :pitch
(pcs-transpose :integer 3 (pcs :integer '6-Z10))
=> (3 4 6 7 8 10)
(pcs-transpose :pitch 3 (pcs :integer '6-Z10))
=> (d# e f# g g# a#)
Multiple lists may be processed by writing the transp-value for each list in its own list, thus:
(pcs-transpose :integer '(3 11) (pcs :integer '(5-1 5-8)))
=> ((3 4 5 6 7) (11 1 2 3 5))
(pcs-transpose :integer 4 '(0 1 2 4 5 7 8))
=> (4 5 6 8 9 11 0)
(pcs-transpose :integer '(0 6 11) (pcs :integer '(5-1 5-8 6-Z10)))
=> ((0 1 2 3 4) (6 8 9 10 0) (11 0 2 3 4 6))"
(pcs-transpose type value set))
(defun summation (n k)
(let ((pgcd (pgcd n k)))
(loop for j from 1 to pgcd
when (integerp (/ pgcd j)) sum (* (euler j) (binomial (/ n j) (/ k j))))))
(defmethod! Dn-card ((n integer) (k integer))
  :initvals '(12 6) :indoc '("Zn" "elements")
  :doc "Number of pitch-class sets with k elements, up to transposition and inversion.
For example, there are 38 seven-note scales within the chromatic total, up to transposition and/or inversion."
:icon 240
(cond
((oddp n)
(* (/ 1 (* 2 n)) (+ (summation n k)
(* n (binomial (/ (- n 1) 2) (floor (/ k 2)))))))
((and (evenp n) (evenp k))
(* (/ 1 (* 2 n)) (+ (summation n k)
(* n (binomial (/ n 2) (/ k 2))))))
((and (evenp n) (oddp k))
(* (/ 1 (* 2 n)) (+ (summation n k)
(* n (binomial (- (/ n 2) 1) (floor (/ k 2)))))))))
|
9e5a0e1db29e6ed27507204293d131d7bb6d1f6b5688203551eee62ea1a28673 | ocaml-flambda/ocaml-jst | deprecated_module.ml | (* TEST
flags = "-w +A"
* bytecode
*)
module M = struct
type t = int
let x = 10
end
[@@ocaml.deprecated]
let _ = M.x
include M
| null | https://raw.githubusercontent.com/ocaml-flambda/ocaml-jst/5bf2820278c58f6715dcfaf6fa61e09a9b0d8db3/testsuite/tests/warnings/deprecated_module.ml | ocaml | TEST
flags = "-w +A"
* bytecode
|
module M = struct
type t = int
let x = 10
end
[@@ocaml.deprecated]
let _ = M.x
include M
|
60071fbb11d208f510355448cef5dd0cfaba07fa2cfbbde93efda97709d6f4af | ghc/packages-dph | Options.hs |
module DPH.War.Options
( MainArg(..)
, mainArgs)
where
import System.Console.ParseArgs
data MainArg
= ArgHelp
| ArgVerbose
| ArgTestDir
| ArgJobs
| ArgClean
deriving (Show, Eq, Ord)
mainArgs :: [Arg MainArg]
mainArgs
= [ Arg { argIndex = ArgHelp
, argAbbr = Just 'h'
, argName = Just "help"
, argData = Nothing
, argDesc = "Print this usage help." }
, Arg { argIndex = ArgVerbose
, argAbbr = Just 'v'
, argName = Just "verbose"
, argData = Nothing
, argDesc = "Emit debugging info for the test driver." }
, Arg { argIndex = ArgTestDir
, argAbbr = Just 'd'
, argName = Just "dir"
, argData = argDataDefaulted "dir" ArgtypeString "test"
, argDesc = "Test directories" }
, Arg { argIndex = ArgJobs
, argAbbr = Just 'j'
, argName = Just "jobs"
, argData = argDataDefaulted "Int" ArgtypeInt 1
, argDesc = "Number of parallel jobs to use" }
, Arg { argIndex = ArgClean
, argAbbr = Just 'c'
, argName = Just "clean"
, argData = Nothing
, argDesc = "Cleanup after each test" }
]
| null | https://raw.githubusercontent.com/ghc/packages-dph/64eca669f13f4d216af9024474a3fc73ce101793/dph-test/framework/DPH/War/Options.hs | haskell |
module DPH.War.Options
( MainArg(..)
, mainArgs)
where
import System.Console.ParseArgs
data MainArg
= ArgHelp
| ArgVerbose
| ArgTestDir
| ArgJobs
| ArgClean
deriving (Show, Eq, Ord)
mainArgs :: [Arg MainArg]
mainArgs
= [ Arg { argIndex = ArgHelp
, argAbbr = Just 'h'
, argName = Just "help"
, argData = Nothing
, argDesc = "Print this usage help." }
, Arg { argIndex = ArgVerbose
, argAbbr = Just 'v'
, argName = Just "verbose"
, argData = Nothing
, argDesc = "Emit debugging info for the test driver." }
, Arg { argIndex = ArgTestDir
, argAbbr = Just 'd'
, argName = Just "dir"
, argData = argDataDefaulted "dir" ArgtypeString "test"
, argDesc = "Test directories" }
, Arg { argIndex = ArgJobs
, argAbbr = Just 'j'
, argName = Just "jobs"
, argData = argDataDefaulted "Int" ArgtypeInt 1
, argDesc = "Number of parallel jobs to use" }
, Arg { argIndex = ArgClean
, argAbbr = Just 'c'
, argName = Just "clean"
, argData = Nothing
, argDesc = "Cleanup after each test" }
]
|
|
e926aa94757a09d814180c435acb7b670891f27b7a826a608e95e027041a1b8d | dmitryvk/sbcl-win32-threads | early-impl.lisp | ;;;; This software is part of the SBCL system. See the README file for
;;;; more information.
;;;;
;;;; This software is derived from the CMU CL system, which was
;;;; written at Carnegie Mellon University and released into the
;;;; public domain. The software is in the public domain and is
;;;; provided with absolutely no warranty. See the COPYING and CREDITS
;;;; files for more information.
(in-package "SB!IMPL")
;;; entries in STATIC-SYMBOLS table, references to which can be compiled
;;; as though they're special variables
;;;
;;; FIXME: These should be listed once and only once, instead of
;;; listed here and then listed separately (and by now, 2001-06-06,
;;; slightly differently) elsewhere.
(declaim (special *posix-argv*
*core-string*
*read-only-space-free-pointer*
sb!vm:*static-space-free-pointer*
sb!vm::*current-catch-block*
sb!vm::*current-unwind-protect-block*
sb!vm::*alien-stack*
sb!vm::*control-stack-start*
sb!vm::*control-stack-end*
sb!vm::*binding-stack-start*
;; FIXME: The pseudo-atomic variable stuff should be
conditional on : SB - PSEUDO - ATOMIC - SYMBOLS , which
;; should be conditional on :X86, instead of the
;; pseudo-atomic stuff being directly conditional on
: X86 . ( Note that non - X86 ports mention
;; pseudo-atomicity too, but they handle it without
;; messing with special variables.)
#!+(or x86 x86-64) *pseudo-atomic-bits*
#!+(or hpux) sb!vm::*c-lra*
*allow-with-interrupts*
sb!unix::*unblock-deferrables-on-enabling-interrupts-p*
*interrupts-enabled*
*interrupt-pending*
#!+(and win32 sb-thread)
*gc-safe*
#!+(and win32 sb-thread)
*in-safepoint*
#!+(and win32 sb-thread)
*disable-safepoints*
*free-interrupt-context-index*
sb!kernel::*gc-epoch*
sb!vm::*unwind-to-frame-function*
sb!vm::*allocation-pointer*
sb!vm::*binding-stack-pointer*
sb!vm::*fp-constant-0d0*
sb!vm::*fp-constant-1d0*
sb!vm::*fp-constant-0f0*
sb!vm::*fp-constant-1f0*
sb!vm::*fp-constant-0l0*
sb!vm::*fp-constant-1l0*
sb!vm::*fp-constant-pi*
sb!vm::*fp-constant-l2t*
sb!vm::*fp-constant-l2e*
sb!vm::*fp-constant-lg2*
sb!vm::*fp-constant-ln2*
sb!vm:*alloc-signal*
sb!pcl::..slot-unbound..
sb!pcl::*cache-miss-values-stack*
sb!pcl::*dfun-miss-gfs-on-stack*))
| null | https://raw.githubusercontent.com/dmitryvk/sbcl-win32-threads/5abfd64b00a0937ba2df2919f177697d1d91bde4/src/code/early-impl.lisp | lisp | more information.
public domain. The software is in the public domain and is
provided with absolutely no warranty. See the COPYING and CREDITS
files for more information.
as though they're special variables
FIXME: These should be listed once and only once, instead of
slightly differently) elsewhere.
FIXME: The pseudo-atomic variable stuff should be
should be conditional on :X86, instead of the
pseudo-atomic stuff being directly conditional on
pseudo-atomicity too, but they handle it without
messing with special variables.) | This software is part of the SBCL system . See the README file for
This software is derived from the CMU CL system , which was
written at Carnegie Mellon University and released into the
(in-package "SB!IMPL")
entries in STATIC - SYMBOLS table , references to which can be compiled
listed here and then listed separately ( and by now , 2001 - 06 - 06 ,
(declaim (special *posix-argv*
*core-string*
*read-only-space-free-pointer*
sb!vm:*static-space-free-pointer*
sb!vm::*current-catch-block*
sb!vm::*current-unwind-protect-block*
sb!vm::*alien-stack*
sb!vm::*control-stack-start*
sb!vm::*control-stack-end*
sb!vm::*binding-stack-start*
conditional on : SB - PSEUDO - ATOMIC - SYMBOLS , which
: X86 . ( Note that non - X86 ports mention
#!+(or x86 x86-64) *pseudo-atomic-bits*
#!+(or hpux) sb!vm::*c-lra*
*allow-with-interrupts*
sb!unix::*unblock-deferrables-on-enabling-interrupts-p*
*interrupts-enabled*
*interrupt-pending*
#!+(and win32 sb-thread)
*gc-safe*
#!+(and win32 sb-thread)
*in-safepoint*
#!+(and win32 sb-thread)
*disable-safepoints*
*free-interrupt-context-index*
sb!kernel::*gc-epoch*
sb!vm::*unwind-to-frame-function*
sb!vm::*allocation-pointer*
sb!vm::*binding-stack-pointer*
sb!vm::*fp-constant-0d0*
sb!vm::*fp-constant-1d0*
sb!vm::*fp-constant-0f0*
sb!vm::*fp-constant-1f0*
sb!vm::*fp-constant-0l0*
sb!vm::*fp-constant-1l0*
sb!vm::*fp-constant-pi*
sb!vm::*fp-constant-l2t*
sb!vm::*fp-constant-l2e*
sb!vm::*fp-constant-lg2*
sb!vm::*fp-constant-ln2*
sb!vm:*alloc-signal*
sb!pcl::..slot-unbound..
sb!pcl::*cache-miss-values-stack*
sb!pcl::*dfun-miss-gfs-on-stack*))
|
3e5934d7ca4a6c29ba4ebd230c12c0375dddf6a6962433ca913af7d54978cee6 | steffan-westcott/clj-otel | programmatic_sdk_config.clj | ;!zprint {:style [:respect-nl]}
(ns example.programmatic_sdk_config
"An example application demonstrating programmatic configuration,
initialisation and shutdown of the OpenTelemetry SDK."
#_{:clj-kondo/ignore [:unsorted-required-namespaces]}
(:require
;; Require desired span exporters
[steffan-westcott.clj-otel.exporter.otlp.grpc.trace :as otlp-grpc-trace]
;[steffan-westcott.clj-otel.exporter.otlp.http.trace :as otlp-http-trace]
    ;[steffan-westcott.clj-otel.exporter.jaeger-grpc :as jaeger-grpc]
;[steffan-westcott.clj-otel.exporter.jaeger-thrift :as jaeger-thrift]
;[steffan-westcott.clj-otel.exporter.zipkin :as zipkin]
;[steffan-westcott.clj-otel.exporter.logging :as logging]
;[steffan-westcott.clj-otel.exporter.logging-otlp :as logging-otlp]
[steffan-westcott.clj-otel.api.trace.span :as span]
[steffan-westcott.clj-otel.resource.resources :as res]
[steffan-westcott.clj-otel.sdk.otel-sdk :as sdk]))
(defn init-otel!
"Configure and initialise the OpenTelemetry SDK as the global OpenTelemetry
instance used by the application. This function should be evaluated before
performing any OpenTelemetry API operations such as tracing. This function
may be evaluated once only, any attempts to evaluate it more than once will
result in error."
[]
(sdk/init-otel-sdk!
;; The service name is the minimum resource information.
"example-app"
{;; The collection of additional resources are merged with the service name
;; to form information about the entity for which telemetry is recorded.
;; Here the additional resources provide information on the host, OS,
     ;; process and JVM.
:resources [(res/host-resource)
(res/os-resource)
(res/process-resource)
(res/process-runtime-resource)]
;; Configuration options for the context propagation, sampling, batching
     ;; and export of traces. Here we configure export to a local Jaeger server
;; with default options. The exported spans are batched by default.
:tracer-provider
{:span-processors
;; Configure selected span exporter(s). See span exporter docstrings for
;; further configuration options.
[{:exporters [
                    ;; Export spans to locally deployed OpenTelemetry Collector
                    ;; via gRPC
                    (otlp-grpc-trace/span-exporter)
                    ;; Export spans to locally deployed OpenTelemetry Collector
                    ;; via HTTP
                    ; (otlp-http-trace/span-exporter)
                    ;; Export spans to locally deployed Jaeger via gRPC
; (jaeger-grpc/span-exporter)
                    ;; Export spans to locally deployed Jaeger via Thrift
                    ; (jaeger-thrift/span-exporter)
                    ;; Export spans to locally deployed Zipkin
                    ; (zipkin/span-exporter)
                    ;; Export spans to Honeycomb using OTLP via gRPC
                    ;(otlp-grpc-trace/span-exporter
                    ;  {:endpoint ":443"
                    ;   :headers {"x-honeycomb-team" "YOUR_HONEYCOMB_TEAM_API_KEY"
                    ;             "x-honeycomb-dataset" "YOUR_HONEYCOMB_DATASET"}})
                    ;; Export spans to Honeycomb using OTLP via HTTP
                    ;(otlp-http-trace/span-exporter
                    ;  {:endpoint ":443"
                    ;   :headers {"x-honeycomb-team" "YOUR_HONEYCOMB_TEAM_API_KEY"
                    ;             "x-honeycomb-dataset" "YOUR_HONEYCOMB_DATASET"}})
                    ;; Export spans to Lightstep using OTLP via gRPC
                    ;(otlp-grpc-trace/span-exporter
                    ; {:endpoint ":443"
                    ;  :headers {"lightstep-access-token" "YOUR_LIGHTSTEP_ACCESS_TOKEN"}})
;; Export spans to java.util.logging (used for debugging
;; only)
;(logging/span-exporter)
;; Export spans to java.util.logging in OTLP JSON format
;; (used for debugging only)
;(logging-otlp/span-exporter)
]}]}}))
(defn close-otel!
"Shut down OpenTelemetry SDK processes. This should be called before the
application exits."
[]
(sdk/close-otel-sdk!))
(defn square
"Returns the square of a number."
[n]
(span/with-span! {:name "squaring"}
(Thread/sleep 500)
(* n n)))
(comment
(init-otel!) ; once only
(square 7)
(close-otel!)
)
| null | https://raw.githubusercontent.com/steffan-westcott/clj-otel/3103cef4a0badf946e1511521ab764202e7eb1f7/examples/programmatic-sdk-config/src/example/programmatic_sdk_config.clj | clojure | Require desired span exporters
[steffan-westcott.clj-otel.exporter.otlp.http.trace :as otlp-http-trace]
[steffan-westcott.clj-otel.exporter.jaeger-thrift :as jaeger-thrift]
[steffan-westcott.clj-otel.exporter.zipkin :as zipkin]
[steffan-westcott.clj-otel.exporter.logging :as logging]
[steffan-westcott.clj-otel.exporter.logging-otlp :as logging-otlp]
The service name is the minimum resource information.
The collection of additional resources are merged with the service name
to form information about the entity for which telemetry is recorded.
Here the additional resources provide information on the host, OS,
Configuration options for the context propagation, sampling, batching
with default options. The exported spans are batched by default.
Configure selected span exporter(s). See span exporter docstrings for
further configuration options.
via gRPC
via HTTP
(otlp-http-trace/span-exporter)
(jaeger-grpc/span-exporter)
(jaeger-thrift/span-exporter)
(zipkin/span-exporter)
(otlp-grpc-trace/span-exporter
:headers {"x-honeycomb-team" "YOUR_HONEYCOMB_TEAM_API_KEY"
Export spans to Honeycomb using OTLP via HTTP
(otlp-http-trace/span-exporter
:headers {"x-honeycomb-team" "YOUR_HONEYCOMB_TEAM_API_KEY"
(otlp-grpc-trace/span-exporter
{:endpoint ":443"
Export spans to java.util.logging (used for debugging
only)
(logging/span-exporter)
Export spans to java.util.logging in OTLP JSON format
(used for debugging only)
(logging-otlp/span-exporter)
once only | ! { : style [: respect - nl ] }
(ns example.programmatic_sdk_config
"An example application demonstrating programmatic configuration,
initialisation and shutdown of the OpenTelemetry SDK."
#_{:clj-kondo/ignore [:unsorted-required-namespaces]}
(:require
[steffan-westcott.clj-otel.exporter.otlp.grpc.trace :as otlp-grpc-trace]
[ steffan - westcott.clj - otel.exporter.jaeger - grpc : as ]
[steffan-westcott.clj-otel.api.trace.span :as span]
[steffan-westcott.clj-otel.resource.resources :as res]
[steffan-westcott.clj-otel.sdk.otel-sdk :as sdk]))
(defn init-otel!
"Configure and initialise the OpenTelemetry SDK as the global OpenTelemetry
instance used by the application. This function should be evaluated before
performing any OpenTelemetry API operations such as tracing. This function
may be evaluated once only, any attempts to evaluate it more than once will
result in error."
[]
(sdk/init-otel-sdk!
"example-app"
process and JVM .
:resources [(res/host-resource)
(res/os-resource)
(res/process-resource)
(res/process-runtime-resource)]
and export of traces . Here we configure export to a local Jaeger server
:tracer-provider
{:span-processors
[{:exporters [
Export spans to locally deployed OpenTelemetry Collector
(otlp-grpc-trace/span-exporter)
Export spans to locally deployed OpenTelemetry Collector
Export spans to locally deployed Jaeger via gRPC
Export spans to locally deployed Jaeger via Thrift
Export spans to locally deployed
Export spans to Honeycomb using OTLP via gRPC
{ : endpoint " :443 "
" x - honeycomb - dataset " " YOUR_HONEYCOMB_DATASET " } } )
{ : endpoint " :443 "
" x - honeycomb - dataset " " YOUR_HONEYCOMB_DATASET " } } )
Export spans to Lightstep using OTLP via gRPC
: headers { " lightstep - access - token " " YOUR_LIGHTSTEP_ACCESS_TOKEN " } } )
]}]}}))
(defn close-otel!
"Shut down OpenTelemetry SDK processes. This should be called before the
application exits."
[]
(sdk/close-otel-sdk!))
(defn square
"Returns the square of a number."
[n]
(span/with-span! {:name "squaring"}
(Thread/sleep 500)
(* n n)))
(comment
(square 7)
(close-otel!)
)
|
d245bcde8131f301fa0bdb0a01989a3976be8d98e6a49c98898d1bb4a772f22b | haskell-CI/hackage-matrix-builder | Test.hs | {-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# OPTIONS_GHC -Wno-orphans #-}
module Main where
import Prelude
import Test.Tasty
-- import Test.Tasty.SmallCheck as SC
import Test.Tasty.QuickCheck as QC
-- import Test.Tasty.HUnit
import PkgId
import qualified PkgIdxRanges as IR
import qualified PkgIdxTsSet as PIS
import qualified Data.Set as Set
main :: IO ()
main = defaultMain tests
tests :: TestTree
tests = testGroup "Tests" [properties]
properties :: TestTree
properties = testGroup "Properties" [qcProps2,qcProps1]
qcProps1 :: TestTree
qcProps1 = testGroup "PkgIdxTsSet"
[ QC.testProperty "Eq" $
\x -> (x :: Int) == x
, QC.testProperty "toSet . fromSet" $
\s -> (PIS.toSet . PIS.fromSet) s == s
, QC.testProperty "member" $
\s -> (not (Set.null s)) QC.==>
let PkgIdxTs lx = Set.findMin s
PkgIdxTs ux = Set.findMin s
s' = PIS.fromSet s
in and [ PIS.member (PkgIdxTs x) s' == Set.member (PkgIdxTs x) s
| x <- [ lx - 50 .. lx + 50 ] <> [ ux - 50 .. ux + 50 ]
]
, QC.testProperty "member2" $
\s x -> (not (Set.null s)) QC.==>
let PkgIdxTs lx = Set.findMin s
PkgIdxTs ux = Set.findMin s
s' = PIS.fromSet s
x' = PkgIdxTs (wrapIntoRange (lx-2, ux+2) x)
in PIS.member x' s' == Set.member x' s
, QC.testProperty "lookupIndex" $
\s -> (not (Set.null s)) QC.==>
let PkgIdxTs lx = Set.findMin s
PkgIdxTs ux = Set.findMin s
s' = PIS.fromSet s
in and [ PIS.lookupIndex (PkgIdxTs x) s' == Set.lookupIndex (PkgIdxTs x) s
| x <- [ lx - 50 .. lx + 50 ] <> [ ux - 50 .. ux + 50 ]
]
, QC.testProperty "lookupIndex2" $
\s x -> (not (Set.null s)) QC.==>
let PkgIdxTs lx = Set.findMin s
PkgIdxTs ux = Set.findMin s
s' = PIS.fromSet s
x' = PkgIdxTs (wrapIntoRange (lx-2, ux+2) x)
in PIS.lookupIndex x' s' == Set.lookupIndex x' s
, QC.testProperty "lookupIndexLE" $
\s x -> (not (Set.null s)) QC.==>
let PkgIdxTs lx = Set.findMin s
PkgIdxTs ux = Set.findMin s
s' = PIS.fromSet s
x' = PkgIdxTs (wrapIntoRange (lx-2, ux+2) x)
in (fmap snd (PIS.lookupIndexLE x' s') == Set.lookupLE x' s) &&
(maybe True (\(i,v) -> Set.elemAt i s == v) (PIS.lookupIndexLE x' s'))
, QC.testProperty "lookupIndexGE" $
\s x -> (not (Set.null s)) QC.==>
let PkgIdxTs lx = Set.findMin s
PkgIdxTs ux = Set.findMin s
s' = PIS.fromSet s
x' = PkgIdxTs (wrapIntoRange (lx-2, ux+2) x)
in (fmap snd (PIS.lookupIndexGE x' s') == Set.lookupGE x' s) &&
(maybe True (\(i,v) -> Set.elemAt i s == v) (PIS.lookupIndexGE x' s'))
]
wrapIntoRange :: (Int,Int) -> Int -> Int
wrapIntoRange (l,u) x = l + ((x-l) `mod` (u-l))
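-- Added note: wrapIntoRange maps x into the half-open interval [l,u) by modular
-- arithmetic, e.g. wrapIntoRange (0,10) 23 == 3 and wrapIntoRange (0,10) (-1) == 9;
-- the properties above use it to coerce arbitrary Ints into a nearby PkgIdxTs range.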
----------------------------------------------------------------------------
qcProps2 :: TestTree
qcProps2 = testGroup "PkgIdxRanges"
[ QC.testProperty "eq1" $
\rs -> IR.fromList rs == IR.fromList (reverse rs)
, QC.testProperty "eq2" $
\rs -> irFromList8 rs == irFromList8 (reverse rs)
, QC.testProperty "eq3" $
\rs1 rs2 -> let rs1' = irFromList8 rs1
rs2' = irFromList8 rs2
in (irToSet rs1' == irToSet rs2') == (rs1' == rs2')
, QC.testProperty "union" $
\rs1 rs2 -> let rs1' = irFromList8 rs1
rs2' = irFromList8 rs2
rs12' = mappend rs1' rs2'
in (irToSet rs1' `mappend` irToSet rs2') == irToSet rs12'
, QC.testProperty "intersection" $
\rs1 rs2 -> let rs1' = irFromList8 rs1
rs2' = irFromList8 rs2
rs12' = IR.intersection rs1' rs2'
in (irToSet rs1' `Set.intersection` irToSet rs2') == irToSet rs12'
]
irFromList8 :: [(Word8, Word8)] -> IR.IdxRanges
irFromList8 = IR.fromList . fmap f
where
f :: (Word8,Word8) -> (PkgIdxTs,Maybe PkgIdxTs)
f (l,0) = (PkgIdxTs (fromIntegral l), Nothing)
f (l,b) = (PkgIdxTs (fromIntegral (min l b)), Just (PkgIdxTs (fromIntegral (max l b))))
irToSet :: IR.IdxRanges -> Set.Set PkgIdxTs
irToSet ir = Set.fromList [ PkgIdxTs j | j <- [ (-2) .. 260 ], IR.member (PkgIdxTs j) ir ]
deriving instance Arbitrary PkgIdxTs
| null | https://raw.githubusercontent.com/haskell-CI/hackage-matrix-builder/bb813e9e4cf0d08352f33004c00ede987f45da56/src-test/Test.hs | haskell | -------------------------------------------------------------------------- | # LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE StandaloneDeriving #
# OPTIONS_GHC -Wno - orphans #
module Main where
import Prelude
import Test.Tasty
import Test . Tasty . SmallCheck as SC
import Test.Tasty.QuickCheck as QC
import Test . Tasty . HUnit
import PkgId
import qualified PkgIdxRanges as IR
import qualified PkgIdxTsSet as PIS
import qualified Data.Set as Set
main :: IO ()
main = defaultMain tests
tests :: TestTree
tests = testGroup "Tests" [properties]
properties :: TestTree
properties = testGroup "Properties" [qcProps2,qcProps1]
qcProps1 :: TestTree
qcProps1 = testGroup "PkgIdxTsSet"
[ QC.testProperty "Eq" $
\x -> (x :: Int) == x
, QC.testProperty "toSet . fromSet" $
\s -> (PIS.toSet . PIS.fromSet) s == s
, QC.testProperty "member" $
\s -> (not (Set.null s)) QC.==>
let PkgIdxTs lx = Set.findMin s
PkgIdxTs ux = Set.findMin s
s' = PIS.fromSet s
in and [ PIS.member (PkgIdxTs x) s' == Set.member (PkgIdxTs x) s
| x <- [ lx - 50 .. lx + 50 ] <> [ ux - 50 .. ux + 50 ]
]
, QC.testProperty "member2" $
\s x -> (not (Set.null s)) QC.==>
let PkgIdxTs lx = Set.findMin s
PkgIdxTs ux = Set.findMin s
s' = PIS.fromSet s
x' = PkgIdxTs (wrapIntoRange (lx-2, ux+2) x)
in PIS.member x' s' == Set.member x' s
, QC.testProperty "lookupIndex" $
\s -> (not (Set.null s)) QC.==>
let PkgIdxTs lx = Set.findMin s
PkgIdxTs ux = Set.findMin s
s' = PIS.fromSet s
in and [ PIS.lookupIndex (PkgIdxTs x) s' == Set.lookupIndex (PkgIdxTs x) s
| x <- [ lx - 50 .. lx + 50 ] <> [ ux - 50 .. ux + 50 ]
]
, QC.testProperty "lookupIndex2" $
\s x -> (not (Set.null s)) QC.==>
let PkgIdxTs lx = Set.findMin s
PkgIdxTs ux = Set.findMin s
s' = PIS.fromSet s
x' = PkgIdxTs (wrapIntoRange (lx-2, ux+2) x)
in PIS.lookupIndex x' s' == Set.lookupIndex x' s
, QC.testProperty "lookupIndexLE" $
\s x -> (not (Set.null s)) QC.==>
let PkgIdxTs lx = Set.findMin s
PkgIdxTs ux = Set.findMin s
s' = PIS.fromSet s
x' = PkgIdxTs (wrapIntoRange (lx-2, ux+2) x)
in (fmap snd (PIS.lookupIndexLE x' s') == Set.lookupLE x' s) &&
(maybe True (\(i,v) -> Set.elemAt i s == v) (PIS.lookupIndexLE x' s'))
, QC.testProperty "lookupIndexGE" $
\s x -> (not (Set.null s)) QC.==>
let PkgIdxTs lx = Set.findMin s
PkgIdxTs ux = Set.findMin s
s' = PIS.fromSet s
x' = PkgIdxTs (wrapIntoRange (lx-2, ux+2) x)
in (fmap snd (PIS.lookupIndexGE x' s') == Set.lookupGE x' s) &&
(maybe True (\(i,v) -> Set.elemAt i s == v) (PIS.lookupIndexGE x' s'))
]
wrapIntoRange :: (Int,Int) -> Int -> Int
wrapIntoRange (l,u) x = l + ((x-l) `mod` (u-l))
qcProps2 :: TestTree
qcProps2 = testGroup "PkgIdxRanges"
[ QC.testProperty "eq1" $
\rs -> IR.fromList rs == IR.fromList (reverse rs)
, QC.testProperty "eq2" $
\rs -> irFromList8 rs == irFromList8 (reverse rs)
, QC.testProperty "eq3" $
\rs1 rs2 -> let rs1' = irFromList8 rs1
rs2' = irFromList8 rs2
in (irToSet rs1' == irToSet rs2') == (rs1' == rs2')
, QC.testProperty "union" $
\rs1 rs2 -> let rs1' = irFromList8 rs1
rs2' = irFromList8 rs2
rs12' = mappend rs1' rs2'
in (irToSet rs1' `mappend` irToSet rs2') == irToSet rs12'
, QC.testProperty "intersection" $
\rs1 rs2 -> let rs1' = irFromList8 rs1
rs2' = irFromList8 rs2
rs12' = IR.intersection rs1' rs2'
in (irToSet rs1' `Set.intersection` irToSet rs2') == irToSet rs12'
]
irFromList8 :: [(Word8, Word8)] -> IR.IdxRanges
irFromList8 = IR.fromList . fmap f
where
f :: (Word8,Word8) -> (PkgIdxTs,Maybe PkgIdxTs)
f (l,0) = (PkgIdxTs (fromIntegral l), Nothing)
f (l,b) = (PkgIdxTs (fromIntegral (min l b)), Just (PkgIdxTs (fromIntegral (max l b))))
irToSet :: IR.IdxRanges -> Set.Set PkgIdxTs
irToSet ir = Set.fromList [ PkgIdxTs j | j <- [ (-2) .. 260 ], IR.member (PkgIdxTs j) ir ]
deriving instance Arbitrary PkgIdxTs
|
b59ffbe3cd5a9956136972b7b1f6df82e0a77e649b85bd11cd0bcf43bae04f62 | haskell-numerics/hmatrix | Tests.hs | {-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -fno-warn-unused-imports -fno-warn-incomplete-patterns -fno-warn-missing-signatures #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE ViewPatterns #-}
-----------------------------------------------------------------------------
{- |
Module      :  Numeric.LinearAlgebra.Tests
Copyright   :  (c) Alberto Ruiz 2007-14
License     :  BSD3
Maintainer  :  Alberto Ruiz
Stability   :  provisional

Some tests.
-}
module Numeric.LinearAlgebra.Tests(
--  module Numeric.LinearAlgebra.Tests.Instances,
--  module Numeric.LinearAlgebra.Tests.Properties,
qCheck,
utest,
runTests,
runBenchmarks
, binaryTests
-- , findNaN
, runBigTests
) where
import Numeric.LinearAlgebra
import Numeric.LinearAlgebra.Devel
import Numeric.LinearAlgebra.Static(L)
import Numeric.LinearAlgebra.Tests.Instances
import Numeric.LinearAlgebra.Tests.Properties
import Test.HUnit hiding ((~:),test,Testable,State)
import System.Info
import Data.List(foldl1')
#if MIN_VERSION_base(4,11,0)
import Prelude hiding ((^),(<>))
#else
import Prelude hiding ((^))
#endif
import qualified Prelude
import System.CPUTime
import System.Exit
import Text.Printf
import Numeric.LinearAlgebra.Devel(unsafeFromForeignPtr,unsafeToForeignPtr)
import Control.Arrow((***))
import Debug.Trace
import Control.Monad(when)
import Control.Applicative
import Control.Monad(ap)
import Control.DeepSeq ( NFData(..) )
import Test.QuickCheck(Arbitrary,arbitrary,coarbitrary,choose,vector
,sized,classify,Testable,Property
,quickCheckWithResult,maxSize,stdArgs,shrink)
import qualified Test.QuickCheck as T
import Test.QuickCheck.Test(isSuccess)
--eps = peps :: Double
--i = 0:+1 :: Complex Double
qCheck n x = do
r <- quickCheckWithResult stdArgs {maxSize = n} x
when (not $ isSuccess r) (exitFailure)
a ^ b = a Prelude.^ (b :: Int)
utest str b = TestCase $ assertBool str b
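-- e.g. utest "arith" (2+2 == (4::Int)) builds a labelled HUnit test case (added example)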
feye n = flipud (ident n) :: Matrix Double
-----------------------------------------------------------
detTest1 = det m == 26
&& det mc == 38 :+ (-3)
&& det (feye 2) == -1
where
m = (3><3)
[ 1, 2, 3
, 4, 5, 7
, 2, 8, 4 :: Double
]
mc = (3><3)
[ 1, 2, 3
, 4, 5, 7
, 2, 8, iC
]
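-- Worked check (added): expanding det m along its first row,
--   1*(5*4-7*8) - 2*(4*4-7*2) + 3*(4*8-5*2) = -36 - 4 + 66 = 26,
-- which is the value asserted in detTest1.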
detTest2 = inv1 |~| inv2 && [det1] ~~ [det2]
where
m = complex (feye 6)
inv1 = inv m
det1 = det m
(inv2,(lda,sa)) = invlndet m
det2 = sa * exp lda
---------------------------------------------------------------------
nd1 = (3><3) [ 1/2, 1/4, 1/4
, 0/1, 1/2, 1/4
, 1/2, 1/4, 1/2 :: Double]
nd2 = (2><2) [1, 0, 1, 1:: Complex Double]
expmTest1 = expm nd1 :~14~: (3><3)
[ 1.762110887278176
, 0.478085470590435
, 0.478085470590435
, 0.104719410945666
, 1.709751181805343
, 0.425725765117601
, 0.851451530235203
, 0.530445176063267
, 1.814470592751009 ]
expmTest2 = expm nd2 :~15~: (2><2)
[ 2.718281828459045
, 0.000000000000000
, 2.718281828459045
, 2.718281828459045 ]
-----------------------------------------------------
mbCholTest = utest "mbCholTest" (ok1 && ok2) where
m1 = (2><2) [2,5,5,8 :: Double]
m2 = (2><2) [3,5,5,9 :: Complex Double]
ok1 = mbChol (trustSym m1) == Nothing
ok2 = mbChol (trustSym m2) == Just (chol $ trustSym m2)
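-- Added note: m1 is not positive definite (det m1 = 2*8 - 5*5 = -9), so mbChol
-- returns Nothing, while the Hermitian m2 has positive leading minors (3 and
-- 3*9-5*5 = 2), so mbChol agrees with chol.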
-----------------------------------------------------
triTest = utest "triTest" ok1 where
a :: Matrix R
a = (4><4)
[
4.30, 0.00, 0.00, 0.00,
-3.96, -4.87, 0.00, 0.00,
0.40, 0.31, -8.02, 0.00,
-0.27, 0.07, -5.95, 0.12
]
w :: Matrix R
w = (4><2)
[
-12.90, -21.50,
16.75, 14.93,
-17.55, 6.33,
-11.04, 8.09
]
v :: Matrix R
v = triSolve Lower a w
e :: Matrix R
e = (4><2)
[
-3.0000, -5.0000,
-1.0000, 1.0000,
2.0000, -1.0000,
1.0000, 6.0000
]
ok1 = (norm_Inf . flatten $ e - v) <= 1e-13
-----------------------------------------------------
triDiagTest = utest "triDiagTest" (ok1 && ok2) where
dL, d, dU :: Vector Double
dL = fromList [3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0]
d = fromList [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]
dU = fromList [4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0]
b :: Matrix R
b = (9><3)
[
1.0, 1.0, 1.0,
1.0, -1.0, 2.0,
1.0, 1.0, 3.0,
1.0, -1.0, 4.0,
1.0, 1.0, 5.0,
1.0, -1.0, 6.0,
1.0, 1.0, 7.0,
1.0, -1.0, 8.0,
1.0, 1.0, 9.0
]
y :: Matrix R
y = (9><9)
[
1.0, 4.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
3.0, 1.0, 4.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 3.0, 1.0, 4.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 3.0, 1.0, 4.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 3.0, 1.0, 4.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 3.0, 1.0, 4.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 3.0, 1.0, 4.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 3.0, 1.0, 4.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 3.0, 1.0
]
x :: Matrix R
x = triDiagSolve dL d dU b
z :: Matrix C
z = (4><4)
[
1.0 :+ 1.0, 4.0 :+ 4.0, 0.0 :+ 0.0, 0.0 :+ 0.0,
3.0 :+ 3.0, 1.0 :+ 1.0, 4.0 :+ 4.0, 0.0 :+ 0.0,
0.0 :+ 0.0, 3.0 :+ 3.0, 1.0 :+ 1.0, 4.0 :+ 4.0,
0.0 :+ 0.0, 0.0 :+ 0.0, 3.0 :+ 3.0, 1.0 :+ 1.0
]
zDL, zD, zDu :: Vector C
zDL = fromList [3.0 :+ 3.0, 3.0 :+ 3.0, 3.0 :+ 3.0]
zD = fromList [1.0 :+ 1.0, 1.0 :+ 1.0, 1.0 :+ 1.0, 1.0 :+ 1.0]
zDu = fromList [4.0 :+ 4.0, 4.0 :+ 4.0, 4.0 :+ 4.0]
zB :: Matrix C
zB = (4><3)
[
1.0 :+ 1.0, 1.0 :+ 1.0, 1.0 :+ (-1.0),
1.0 :+ 1.0, (-1.0) :+ (-1.0), 1.0 :+ (-1.0),
1.0 :+ 1.0, 1.0 :+ 1.0, 1.0 :+ (-1.0),
1.0 :+ 1.0, (-1.0) :+ (-1.0), 1.0 :+ (-1.0)
]
u :: Matrix C
u = triDiagSolve zDL zD zDu zB
ok1 = (maximum $ map abs $ concat $ toLists $ b - (y <> x)) <= 1e-15
ok2 = (maximum $ map magnitude $ concat $ toLists $ zB - (z <> u)) <= 1e-15
---------------------------------------------------------------------
triDiagRegression = utest "triDiagRegression" ok where
minusOnes, twos :: Vector R
minusOnes = fromList [-1, -1]
twos = fromList [2, 2, 2]
k :: Matrix R
k = (3><3)
[ 2, -1, 0
, -1, 2, -1
, 0, -1, 2
]
b :: Matrix R
b = (3><1) [10, 10, 10]
tridiag = triDiagSolve minusOnes twos minusOnes b
simple = linearSolve k b
ok = case simple of
Just m -> tridiag |~| m
Nothing -> False
---------------------------------------------------------------------
randomTestGaussian = (unSym c) :~3~: unSym (snd (meanCov dat))
where
a = (3><3) [1,2,3,
2,4,0,
-2,2,1]
m = 3 |> [1,2,3]
c = mTm a
dat = gaussianSample 7 (10^6) m c
randomTestUniform = c :~2~: unSym (snd (meanCov dat))
where
c = diag $ 3 |> map ((/12).(^2)) [1,2,3]
dat = uniformSample 7 (10^6) [(0,1),(1,3),(3,6)]
---------------------------------------------------------------------
rot :: Double -> Matrix Double
rot a = (3><3) [ c,0,s
, 0,1,0
,-s,0,c ]
where c = cos a
s = sin a
rotTest = fun (10^5) :~11~: rot 5E4
where fun n = foldl1' (<>) (map rot angles)
where angles = toList $ linspace n (0,1)
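-- Added note: rot a <> rot b == rot (a+b), and the angles in linspace n (0,1)
-- sum to about n/2, so folding 10^5 small rotations should approximate rot 5E4,
-- which rotTest checks to ~11 digits.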
---------------------------------------------------------------------
-- vector <= 0.6.0.2 bug discovered by
--
offsetTest = y == y' where
x = fromList [0..3 :: Double]
y = subVector 1 3 x
(f,o,n) = unsafeToForeignPtr y
y' = unsafeFromForeignPtr f o n
---------------------------------------------------------------------
normsVTest = TestList [
utest "normv2CD" $ norm2PropC v
-- , utest "normv2CF" $ norm2PropC (single v)
#ifndef NONORMVTEST
, utest "normv2D" $ norm2PropR x
-- , utest "normv2F" $ norm2PropR (single x)
#endif
, utest "normv1CD" $ norm_1 v == 8
, utest " normv1CF " $ norm_1 ( single v ) = = 8
, utest "normv1D" $ norm_1 x == 6
, utest " normv1F " $ norm_1 ( single x ) = = 6
, utest "normvInfCD" $ norm_Inf v == 5
, utest " normvInfCF " $ norm_Inf ( single v ) = = 5
, utest "normvInfD" $ norm_Inf x == 3
, utest " normvInfF " $ norm_Inf ( single x ) = = 3
] where v = fromList [1,-2,3:+4] :: Vector (Complex Double)
x = fromList [1,2,-3] :: Vector Double
#ifndef NONORMVTEST
norm2PropR a = norm_2 a =~= sqrt (udot a a)
#endif
norm2PropC a = norm_2 a =~= realPart (sqrt (a `dot` a))
a =~= b = fromList [a] |~| fromList [b]
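-- (added note) =~= compares two scalars approximately by lifting them into
-- one-element vectors and reusing the |~| approximate-equality relation.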
normsMTest = TestList [
utest "norm2mCD" $ norm_2 v =~= 8.86164970498005
, utest " norm2mCF " $ norm_2 ( single v ) = ~= 8.86164970498005
, utest "norm2mD" $ norm_2 x =~= 5.96667765076216
, utest " norm2mF " $ norm_2 ( single x ) = ~= 5.96667765076216
, utest "norm1mCD" $ norm_1 v == 9
, utest " norm1mCF " $ norm_1 ( single v ) = = 9
, utest "norm1mD" $ norm_1 x == 7
, utest " norm1mF " $ norm_1 ( single x ) = = 7
, utest "normmInfCD" $ norm_Inf v == 12
, utest " normmInfCF " $ norm_Inf ( single v ) = = 12
, utest "normmInfD" $ norm_Inf x == 8
, utest " normmInfF " $ norm_Inf ( single x ) = = 8
, utest "normmFroCD" $ norm_Frob v =~= 8.88819441731559
, utest " normmFroCF " $ norm_Frob ( single v ) = ~~= 8.88819441731559
, utest "normmFroD" $ norm_Frob x =~= 6.24499799839840
, utest " normmFroF " $ norm_Frob ( single x ) = ~~= 6.24499799839840
] where v = (2><2) [1,-2*iC,3:+4,7] :: Matrix (Complex Double)
x = (2><2) [1,2,-3,5] :: Matrix Double
a =~= b = fromList [a] :~10~: fromList [b]
-- a =~~= b = fromList [a] :~5~: fromList [b]
---------------------------------------------------------------------
sumprodTest = TestList [
utest "sumCD" $ sumElements z == 6
, utest "sumCF" $ sumElements (single z) == 6
, utest "sumD" $ sumElements v == 6
, utest "sumF" $ sumElements (single v) == 6
, utest "prodCD" $ prodProp z
, utest "prodCF" $ prodProp (single z)
, utest "prodD" $ prodProp v
, utest "prodF" $ prodProp (single v)
] where v = fromList [1,2,3] :: Vector Double
z = fromList [1,2-iC,3+iC]
prodProp x = prodElements x == product (toList x)
---------------------------------------------------------------------
chainTest = utest "chain" $ foldl1' (<>) ms |~| optimiseMult ms where
ms = [ diag (fromList [1,2,3 :: Double])
, konst 3 (3,5)
, (5><10) [1 .. ]
, konst 5 (10,2)
]
---------------------------------------------------------------------
conjuTest m = cmap conjugate (flatten (conj (tr m))) == flatten (tr m)
---------------------------------------------------------------------
newtype State s a = State { runState :: s -> (a,s) }
instance Functor (State s)
where
fmap f x = pure f <*> x
instance Applicative (State s)
where
pure = return
(<*>) = ap
instance Monad (State s) where
return a = State $ \s -> (a,s)
m >>= f = State $ \s -> let (a,s') = runState m s
in runState (f a) s'
state_get :: State s s
state_get = State $ \s -> (s,s)
state_put :: s -> State s ()
state_put s = State $ \_ -> ((),s)
evalState :: State s a -> s -> a
evalState m s = let (a,s') = runState m s
in seq s' a
newtype MaybeT m a = MaybeT { runMaybeT :: m (Maybe a) }
instance Monad m => Functor (MaybeT m)
where
fmap f x = pure f <*> x
instance Monad m => Applicative (MaybeT m)
where
pure = return
(<*>) = ap
instance Monad m => Monad (MaybeT m) where
return a = MaybeT $ return $ Just a
m >>= f = MaybeT $ do
res <- runMaybeT m
case res of
Nothing -> return Nothing
Just r -> runMaybeT (f r)
lift_maybe = MaybeT . fmap Just
-- apply a test to successive elements of a vector, evaluates to true iff test passes for all pairs
--successive_ :: Storable a => (a -> a -> Bool) -> Vector a -> Bool
successive_ t v = maybe False (\_ -> True) $ evalState (runMaybeT (mapVectorM_ stp (subVector 1 (size v - 1) v))) (v ! 0)
where stp e = do
ep <- lift_maybe $ state_get
if t e ep
then lift_maybe $ state_put e
else MaybeT $ return Nothing
-- operate on successive elements of a vector and return the resulting vector, whose length is 1 less than that of the input
--successive :: (Storable a, Storable b) => (a -> a -> b) -> Vector a -> Vector b
successive f v = evalState (mapVectorM stp (subVector 1 (size v - 1) v)) (v ! 0)
where stp e = do
ep <- state_get
state_put e
return $ f ep e
succTest = utest "successive" $
successive_ (>) (fromList [1 :: Double,2,3,4]) == True
&& successive_ (>) (fromList [1 :: Double,3,2,4]) == False
&& successive (+) (fromList [1..10 :: Double]) == 9 |> [3,5,7,9,11,13,15,17,19]
---------------------------------------------------------------------
findAssocTest = utest "findAssoc" ok
where
ok = m1 == m2
m1 = assoc (6,6) 7 $ zip (find (>0) (ident 5 :: Matrix Float)) [10 ..] :: Matrix Double
m2 = diagRect 7 (fromList[10..14]) 6 6
---------------------------------------------------------------------
condTest = utest "cond" ok
where
ok = step v * v == cond v 0 0 0 v
v = fromList [-7 .. 7 ] :: Vector Float
---------------------------------------------------------------------
conformTest = utest "conform" ok
where
ok = 1 + row [1,2,3] + col [10,20,30,40] + (4><3) [1..]
== (4><3) [13,15,17
,26,28,30
,39,41,43
,52,54,56]
---------------------------------------------------------------------
accumTest = utest "accum" ok
where
x = ident 3 :: Matrix Double
ok = accum x (+) [((1,2),7), ((2,2),3)]
== (3><3) [1,0,0
,0,1,7
,0,0,4]
&&
toList (flatten x) == [1,0,0,0,1,0,0,0,1]
--------------------------------------------------------------------------------
convolutionTest = utest "convolution" ok
where
a = fromList [1..10] :: Vector Double
b = fromList [1..3] :: Vector Double
c = (5><7) [1..] :: Matrix Double
-- d = (3><3) [0,-1,0,-1,4,-1,0,-1,0] :: Matrix Double
ok = separable (corr b) c == corr2 (outer b b) c
&& separable (conv b) c == conv2 (outer b b) c
--------------------------------------------------------------------------------
sparseTest = utest "sparse" (fst $ checkT (undefined :: GMatrix))
--------------------------------------------------------------------------------
staticTest = utest "static" (fst $ checkT (undefined :: L 3 5))
--------------------------------------------------------------------------------
intTest = utest "int ops" (fst $ checkT (undefined :: Matrix I))
--------------------------------------------------------------------------------
modularTest = utest "modular ops" (fst $ checkT (undefined :: Matrix (Mod 13 I)))
--------------------------------------------------------------------------------
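-- indexProp checks that maxIndex/minIndex combined with atIndex, the maxElement/minElement
-- shortcuts, and a plain list maximum/minimum (compared through g, e.g. magnitude for
-- complex data) all agree; f extracts the underlying vector (id for vectors, flatten for matrices).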
indexProp g f x = a1 == g a2 && a2 == a3 && b1 == g b2 && b2 == b3
where
l = map g (toList (f x))
a1 = maximum l
b1 = minimum l
a2 = x `atIndex` maxIndex x
b2 = x `atIndex` minIndex x
a3 = maxElement x
b3 = minElement x
--------------------------------------------------------------------------------
_sliceTest = TestList
[ testSlice (chol . trustSym) (gen 5 :: Matrix R)
, testSlice (chol . trustSym) (gen 5 :: Matrix C)
, testSlice qr (rec :: Matrix R)
, testSlice qr (rec :: Matrix C)
, testSlice hess (agen 5 :: Matrix R)
, testSlice hess (agen 5 :: Matrix C)
, testSlice schur (agen 5 :: Matrix R)
, testSlice schur (agen 5 :: Matrix C)
, testSlice lu (agen 5 :: Matrix R)
, testSlice lu (agen 5 :: Matrix C)
, testSlice (luSolve (luPacked (agen 5 :: Matrix R))) (agen 5)
, testSlice (luSolve (luPacked (agen 5 :: Matrix C))) (agen 5)
, test_lus (agen 5 :: Matrix R)
, test_lus (agen 5 :: Matrix C)
, testSlice eig (agen 5 :: Matrix R)
, testSlice eig (agen 5 :: Matrix C)
, testSlice (eigSH . trustSym) (gen 5 :: Matrix R)
, testSlice (eigSH . trustSym) (gen 5 :: Matrix C)
, testSlice eigenvalues (agen 5 :: Matrix R)
, testSlice eigenvalues (agen 5 :: Matrix C)
, testSlice (eigenvaluesSH . trustSym) (gen 5 :: Matrix R)
, testSlice (eigenvaluesSH . trustSym) (gen 5 :: Matrix C)
, testSlice svd (rec :: Matrix R)
, testSlice thinSVD (rec :: Matrix R)
, testSlice compactSVD (rec :: Matrix R)
, testSlice leftSV (rec :: Matrix R)
, testSlice rightSV (rec :: Matrix R)
, testSlice singularValues (rec :: Matrix R)
, testSlice svd (rec :: Matrix C)
, testSlice thinSVD (rec :: Matrix C)
, testSlice compactSVD (rec :: Matrix C)
, testSlice leftSV (rec :: Matrix C)
, testSlice rightSV (rec :: Matrix C)
, testSlice singularValues (rec :: Matrix C)
, testSlice (linearSolve (agen 5:: Matrix R)) (agen 5)
, testSlice (flip linearSolve (agen 5:: Matrix R)) (agen 5)
, testSlice (linearSolve (agen 5:: Matrix C)) (agen 5)
, testSlice (flip linearSolve (agen 5:: Matrix C)) (agen 5)
, testSlice (linearSolveLS (ogen 5:: Matrix R)) (ogen 5)
, testSlice (flip linearSolveLS (ogen 5:: Matrix R)) (ogen 5)
, testSlice (linearSolveLS (ogen 5:: Matrix C)) (ogen 5)
, testSlice (flip linearSolveLS (ogen 5:: Matrix C)) (ogen 5)
, testSlice (linearSolveSVD (ogen 5:: Matrix R)) (ogen 5)
, testSlice (flip linearSolveSVD (ogen 5:: Matrix R)) (ogen 5)
, testSlice (linearSolveSVD (ogen 5:: Matrix C)) (ogen 5)
, testSlice (flip linearSolveSVD (ogen 5:: Matrix C)) (ogen 5)
, testSlice (linearSolveLS (ugen 5:: Matrix R)) (ugen 5)
, testSlice (flip linearSolveLS (ugen 5:: Matrix R)) (ugen 5)
, testSlice (linearSolveLS (ugen 5:: Matrix C)) (ugen 5)
, testSlice (flip linearSolveLS (ugen 5:: Matrix C)) (ugen 5)
, testSlice (linearSolveSVD (ugen 5:: Matrix R)) (ugen 5)
, testSlice (flip linearSolveSVD (ugen 5:: Matrix R)) (ugen 5)
, testSlice (linearSolveSVD (ugen 5:: Matrix C)) (ugen 5)
, testSlice (flip linearSolveSVD (ugen 5:: Matrix C)) (ugen 5)
, testSlice ((<>) (ogen 5:: Matrix R)) (gen 5)
, testSlice (flip (<>) (gen 5:: Matrix R)) (ogen 5)
, testSlice ((<>) (ogen 5:: Matrix C)) (gen 5)
, testSlice (flip (<>) (gen 5:: Matrix C)) (ogen 5)
, testSlice ((<>) (ogen 5:: Matrix Float)) (gen 5)
, testSlice (flip (<>) (gen 5:: Matrix Float)) (ogen 5)
, testSlice ((<>) (ogen 5:: Matrix (Complex Float))) (gen 5)
, testSlice (flip (<>) (gen 5:: Matrix (Complex Float))) (ogen 5)
, testSlice ((<>) (ogen 5:: Matrix I)) (gen 5)
, testSlice (flip (<>) (gen 5:: Matrix I)) (ogen 5)
, testSlice ((<>) (ogen 5:: Matrix Z)) (gen 5)
, testSlice (flip (<>) (gen 5:: Matrix Z)) (ogen 5)
, testSlice ((<>) (ogen 5:: Matrix (I ./. 7))) (gen 5)
, testSlice (flip (<>) (gen 5:: Matrix (I ./. 7))) (ogen 5)
, testSlice ((<>) (ogen 5:: Matrix (Z ./. 7))) (gen 5)
, testSlice (flip (<>) (gen 5:: Matrix (Z ./. 7))) (ogen 5)
, testSlice (flip cholSolve (agen 5:: Matrix R)) (chol $ trustSym $ gen 5)
, testSlice (flip cholSolve (agen 5:: Matrix C)) (chol $ trustSym $ gen 5)
, testSlice (cholSolve (chol $ trustSym $ gen 5:: Matrix R)) (agen 5)
, testSlice (cholSolve (chol $ trustSym $ gen 5:: Matrix C)) (agen 5)
, ok_qrgr (rec :: Matrix R)
, ok_qrgr (rec :: Matrix C)
, testSlice (test_qrgr 4 tau1) qrr1
, testSlice (test_qrgr 4 tau2) qrr2
]
where
QR qrr1 tau1 = qrRaw (rec :: Matrix R)
QR qrr2 tau2 = qrRaw (rec :: Matrix C)
test_qrgr n t x = qrgr n (QR x t)
ok_qrgr x = TestCase . assertBool "ok_qrgr" $ simeq 1E-15 q q'
where
(q,_) = qr x
atau = qrRaw x
q' = qrgr (rows q) atau
simeq eps a b = not $ magnit eps (norm_1 $ flatten (a-b))
test_lus m = testSlice f lup
where
f x = luSolve (LU x p) m
(LU lup p) = luPacked m
gen :: Numeric t => Int -> Matrix t
gen n = diagRect 1 (konst 5 n) n n
agen :: (Numeric t, Num (Vector t))=> Int -> Matrix t
agen n = gen n + fromInt ((n><n)[0..])
ogen :: (Numeric t, Num (Vector t))=> Int -> Matrix t
ogen n = gen n === gen n
ugen :: (Numeric t, Num (Vector t))=> Int -> Matrix t
ugen n = takeRows 3 (gen n)
rec :: Numeric t => Matrix t
rec = subMatrix (0,0) (4,5) (gen 5)
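-- testSlice tiles x into a 3x3 block matrix (directly and via a transposed construction)
-- and checks that f gives the same answer on every same-sized submatrix slice as on the
-- original x, i.e. that the operation is insensitive to non-trivial strides and offsets.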
testSlice f x@(size->sz@(r,c)) =
TestList . map (TestCase . assertEqual "" (f x)) $ (map f (g y1 ++ g y2))
where
subm = subMatrix
g y = [ subm (a*r,b*c) sz y | a <-[0..2], b <- [0..2]]
h z = fromBlocks (replicate 3 (replicate 3 z))
y1 = h x
y2 = (tr . h . tr) x
--------------------------------------------------------------------------------
-- | All tests must pass with a maximum dimension of about 20
-- (some tests may fail with bigger sizes due to precision loss).
runTests :: Int -- ^ maximum dimension
-> IO ()
runTests n = do
let test :: forall t . T.Testable t => t -> IO ()
test p = qCheck n p
putStrLn "------ index"
test( \m -> indexProp id flatten (single (m :: RM)) )
test( \v -> indexProp id id (single (v :: Vector Double)) )
test( \m -> indexProp id flatten (m :: RM) )
test( \v -> indexProp id id (v :: Vector Double) )
test( \m -> indexProp magnitude flatten (single (m :: CM)) )
test( \v -> indexProp magnitude id (single (v :: Vector (Complex Double))) )
test( \m -> indexProp magnitude flatten (m :: CM) )
test( \v -> indexProp magnitude id (v :: Vector (Complex Double)) )
putStrLn "------ mult Double"
test (multProp1 10 . rConsist)
test (multProp1 10 . cConsist)
test (multProp2 10 . rConsist)
test (multProp2 10 . cConsist)
-- putStrLn "------ mult Float"
-- test (multProp1 6 . (single *** single) . rConsist)
-- test (multProp1 6 . (single *** single) . cConsist)
-- test (multProp2 6 . (single *** single) . rConsist)
-- test (multProp2 6 . (single *** single) . cConsist)
putStrLn "------ sub-trans"
test (subProp . rM)
test (subProp . cM)
putStrLn "------ ctrans"
test (conjuTest . cM)
test (conjuTest . zM)
putStrLn "------ lu"
test (luProp . rM)
test (luProp . cM)
putStrLn "------ inv (linearSolve)"
test (invProp . rSqWC)
test (invProp . cSqWC)
putStrLn "------ luSolve"
test (linearSolveProp (luSolve.luPacked) . rSqWC)
test (linearSolveProp (luSolve.luPacked) . cSqWC)
putStrLn "------ ldlSolve"
test (linearSolvePropH (ldlSolve.ldlPacked) . rSymWC)
test (linearSolvePropH (ldlSolve.ldlPacked) . cSymWC)
putStrLn "------ cholSolve"
test (linearSolveProp (cholSolve.chol.trustSym) . rPosDef)
test (linearSolveProp (cholSolve.chol.trustSym) . cPosDef)
putStrLn "------ luSolveLS"
test (linearSolveProp linearSolveLS . rSqWC)
test (linearSolveProp linearSolveLS . cSqWC)
test (linearSolveProp2 linearSolveLS . rConsist)
test (linearSolveProp2 linearSolveLS . cConsist)
putStrLn "------ pinv (linearSolveSVD)"
test (pinvProp . rM)
test (pinvProp . cM)
putStrLn "------ det"
test (detProp . rSqWC)
test (detProp . cSqWC)
putStrLn "------ svd"
test (svdProp1 . rM)
test (svdProp1 . cM)
test (svdProp1a svd . rM)
test (svdProp1a svd . cM)
-- test (svdProp1a svdRd)
test (svdProp1b svd . rM)
test (svdProp1b svd . cM)
-- test (svdProp1b svdRd)
test (svdProp2 thinSVD . rM)
test (svdProp2 thinSVD . cM)
-- test (svdProp2 thinSVDRd)
-- test (svdProp2 thinSVDCd)
test (svdProp3 . rM)
test (svdProp3 . cM)
test (svdProp4 . rM)
test (svdProp4 . cM)
test (svdProp5a)
test (svdProp5b)
test (svdProp6a)
test (svdProp6b)
test (svdProp7 . rM)
test (svdProp7 . cM)
-- putStrLn "------ svdCd"
#ifdef NOZGESDD
-- putStrLn "Omitted"
#else
-- test (svdProp1a svdCd)
-- test (svdProp1b svdCd)
#endif
putStrLn "------ eig"
test (eigSHProp . rHer)
test (eigSHProp . cHer)
test (eigProp . rSq)
test (eigProp . cSq)
test (eigSHProp2 . rHer)
test (eigSHProp2 . cHer)
test (eigProp2 . rSq)
test (eigProp2 . cSq)
putStrLn "------ geig"
test (uncurry geigProp . rSq2WC)
test (uncurry geigProp . cSq2WC)
putStrLn "------ nullSpace"
test (nullspaceProp . rM)
test (nullspaceProp . cM)
putStrLn "------ qr"
test (qrProp . rM)
test (qrProp . cM)
test (rqProp . rM)
-- test (rqProp . cM)
test (rqProp1 . cM)
test (rqProp2 . cM)
-- test (rqProp3 . cM)
putStrLn "------ hess"
test (hessProp . rSq)
test (hessProp . cSq)
putStrLn "------ schur"
test (schurProp2 . rSq)
test (schurProp1 . cSq)
putStrLn "------ chol"
test (cholProp . rPosDef)
test (cholProp . cPosDef)
-- test (exactProp . rPosDef)
-- test (exactProp . cPosDef)
putStrLn "------ expm"
test (expmDiagProp . complex. rSqWC)
test (expmDiagProp . cSqWC)
putStrLn "------ vector operations - Double"
test (\u -> sin u ^ 2 + cos u ^ 2 |~| (1::RM))
test $ (\u -> sin u ^ 2 + cos u ^ 2 |~| (1::CM)) . liftMatrix makeUnitary
test (\u -> sin u ** 2 + cos u ** 2 |~| (1::RM))
test (\u -> cos u * tan u |~| sin (u::RM))
test $ (\u -> cos u * tan u |~| sin (u::CM)) . liftMatrix makeUnitary
-- putStrLn "------ vector operations - Float"
-- test (\u -> sin u ^ 2 + cos u ^ 2 |~~| (1::FM))
-- test $ (\u -> sin u ^ 2 + cos u ^ 2 |~~| (1::ZM)) . liftMatrix makeUnitary
-- test (\u -> sin u ** 2 + cos u ** 2 |~~| (1::FM))
-- test (\u -> cos u * tan u |~~| sin (u::FM))
-- test $ (\u -> cos u * tan u |~~| sin (u::ZM)) . liftMatrix makeUnitary
putStrLn "------ read . show"
test (\m -> (m::RM) == read (show m))
test (\m -> (m::CM) == read (show m))
test (\m -> toRows (m::RM) == read (show (toRows m)))
test (\m -> toRows (m::CM) == read (show (toRows m)))
test (\m -> (m::FM) == read (show m))
test (\m -> (m::ZM) == read (show m))
test (\m -> toRows (m::FM) == read (show (toRows m)))
test (\m -> toRows (m::ZM) == read (show (toRows m)))
putStrLn "------ some unit tests"
c <- runTestTT $ TestList
[ utest "1E5 rots" rotTest
, utest "det1" detTest1
, utest "invlndet" detTest2
, utest "expm1" (expmTest1)
, utest "expm2" (expmTest2)
, utest "arith1" $ ((ones (100,100) * 5 + 2)/0.5 - 7)**2 |~| (49 :: RM)
, utest "arith2" $ ((scalar (1+iC) * ones (100,100) * 5 + 2)/0.5 - 7)**2 |~| ( scalar (140*iC-51) :: CM)
, utest "arith3" $ exp (scalar iC * ones(10,10)*pi) + 1 |~| 0
, utest "<\\>" $ (3><2) [2,0,0,3,1,1::Double] <\> 3|>[4,9,5] |~| 2|>[2,3]
-- , utest "gamma" (gamma 5 == 24.0)
-- , besselTest
-- , exponentialTest
, utest "randomGaussian" randomTestGaussian
, utest "randomUniform" randomTestUniform
, utest "buildVector/Matrix" $
complex (10 |> [0::Double ..]) == build 10 id
&& ident 5 == build (5,5) (\r c -> if r==c then 1::Double else 0)
, utest "rank" $ rank ((2><3)[1,0,0,1,5*peps,0::Double]) == 1
&& rank ((2><3)[1,0,0,1,7*peps,0::Double]) == 2
, utest "block" $ fromBlocks [[ident 3,0],[0,ident 4]] == (ident 7 :: CM)
, mbCholTest
, triTest
, triDiagTest
, triDiagRegression
, utest "offset" offsetTest
, normsVTest
, normsMTest
, sumprodTest
, chainTest
, succTest
, findAssocTest
, condTest
, conformTest
, accumTest
, convolutionTest
, sparseTest
, staticTest
, intTest
, modularTest
-- , sliceTest
]
when (errors c + failures c > 0) exitFailure
return ()
-- single precision approximate equality
infixl 4 |~~|
a |~~| b = a :~6~: b
makeUnitary v | realPart n > 1 = v / scalar n
| otherwise = v
where n = sqrt (v `dot` v)
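-- makeUnitary rescales a vector to (roughly) unit norm whenever its norm exceeds 1,
-- which keeps the complex sin/cos identity tests above numerically well behaved.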
binaryTests :: IO ()
binaryTests = do
let test :: forall t . T.Testable t => t -> IO ()
test = qCheck 100
test vectorBinaryRoundtripProp
test staticVectorBinaryRoundtripProp
qCheck 30 matrixBinaryRoundtripProp
qCheck 30 staticMatrixBinaryRoundtripProp
-- | Some additional tests on big matrices. They take a few minutes.
-- runBigTests :: IO ()
-- runBigTests = undefined
{-
-- | testcase for nonempty fpu stack
findNaN :: Int -> Bool
findNaN n = all (bugProp . eye) (take n $ cycle [1..20])
where eye m = ident m :: Matrix (Double)
-}
--------------------------------------------------------------------------------
-- | Performance measurements.
runBenchmarks :: IO ()
runBenchmarks = do
solveBench
subBench
mkVecBench
multBench
cholBench
luBench
luBench_2
svdBench
eigBench
putStrLn ""
--------------------------------
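-- time forces its action only to weak head normal form via `seq`, while timeR prints it
-- with show; callers of `time` therefore pass expressions whose WHNF already does the
-- interesting work (e.g. an indexed element or a norm).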
time msg act = do
putStr (msg++" ")
t0 <- getCPUTime
act `seq` putStr " "
t1 <- getCPUTime
printf "%6.2f s CPU\n" $ (fromIntegral (t1 - t0) / (10^12 :: Double)) :: IO ()
return ()
timeR msg act = do
putStr (msg++" ")
t0 <- getCPUTime
putStr (show act)
t1 <- getCPUTime
printf "%6.2f s CPU\n" $ (fromIntegral (t1 - t0) / (10^12 :: Double)) :: IO ()
return ()
--------------------------------
manymult n = foldl1' (<>) (map rot2 angles) where
angles = toList $ linspace n (0,1)
rot2 :: Double -> Matrix Double
rot2 a = (3><3) [ c,0,s
, 0,1,0
,-s,0,c ]
where c = cos a
s = sin a
multb n = foldl1' (<>) (replicate (10^6) (ident n :: Matrix Double))
--------------------------------
manyvec0 xs = sum $ map (\x -> x + x**2 + x**3) xs
manyvec1 xs = sumElements $ fromRows $ map (\x -> fromList [x,x**2,x**3]) xs
manyvec5 xs = sumElements $ fromRows $ map (\x -> vec3 x (x**2) (x**3)) xs
manyvec2 xs = sum $ map (\x -> sqrt(x^2 + (x**2)^2 +(x**3)^2)) xs
manyvec3 xs = sum $ map (norm_2 . (\x -> fromList [x,x**2,x**3])) xs
manyvec4 xs = sum $ map (norm_2 . (\x -> vec3 x (x**2) (x**3))) xs
vec3 :: Double -> Double -> Double -> Vector Double
vec3 a b c = runSTVector $ do
v <- newUndefinedVector 3
writeVector v 0 a
writeVector v 1 b
writeVector v 2 c
return v
mkVecBench = do
let n = 1000000
xs = toList $ linspace n (0,1::Double)
putStr "\neval data... "; print (sum xs)
timeR "listproc " $ manyvec0 xs
timeR "fromList matrix " $ manyvec1 xs
timeR "vec3 matrix " $ manyvec5 xs
timeR "listproc norm " $ manyvec2 xs
timeR "norm fromList " $ manyvec3 xs
timeR "norm vec3 " $ manyvec4 xs
--------------------------------
subBench = do
putStrLn ""
let g = foldl1' (.) (replicate (10^5) (\v -> subVector 1 (size v -1) v))
time "0.1M subVector " (g (konst 1 (1+10^5) :: Vector Double) ! 0)
let f = foldl1' (.) (replicate (10^5) (fromRows.toRows))
time "subVector-join 3" (f (ident 3 :: Matrix Double) `atIndex` (0,0))
time "subVector-join 10" (f (ident 10 :: Matrix Double) `atIndex` (0,0))
--------------------------------
multBench = do
let a = ident 1000 :: Matrix Double
let b = ident 2000 :: Matrix Double
a `seq` b `seq` putStrLn ""
time "product of 1M different 3x3 matrices" (manymult (10^6))
putStrLn ""
time "product of 1M constant 1x1 matrices" (multb 1)
time "product of 1M constant 3x3 matrices" (multb 3)
-- time "product of 1M constant 5x5 matrices" (multb 5)
time "product of 1M const. 10x10 matrices" (multb 10)
-- time "product of 1M const. 15x15 matrices" (multb 15)
time "product of 1M const. 20x20 matrices" (multb 20)
-- time "product of 1M const. 25x25 matrices" (multb 25)
putStrLn ""
time "product (1000 x 1000)<>(1000 x 1000)" (a<>a)
time "product (2000 x 2000)<>(2000 x 2000)" (b<>b)
--------------------------------
eigBench = do
let m = reshape 1000 (randomVector 777 Uniform (1000*1000))
s = m + tr m
m `seq` s `seq` putStrLn ""
time "eigenvalues symmetric 1000x1000" (eigenvaluesSH (trustSym m))
time "eigenvectors symmetric 1000x1000" (snd $ eigSH (trustSym m))
time "eigenvalues general 1000x1000" (eigenvalues m)
time "eigenvectors general 1000x1000" (snd $ eig m)
--------------------------------
svdBench = do
let a = reshape 500 (randomVector 777 Uniform (3000*500))
b = reshape 1000 (randomVector 777 Uniform (1000*1000))
fv (_,_,v) = v `atIndex` (0,0)
a `seq` b `seq` putStrLn ""
time "singular values 3000x500" (singularValues a)
time "thin svd 3000x500" (fv $ thinSVD a)
time "full svd 3000x500" (fv $ svd a)
time "singular values 1000x1000" (singularValues b)
time "full svd 1000x1000" (fv $ svd b)
--------------------------------
solveBenchN n = do
let x = uniformSample 777 (2*n) (replicate n (-1,1))
a = tr x <> x
b = asColumn $ randomVector 666 Uniform n
a `seq` b `seq` putStrLn ""
time ("svd solve " ++ show n) (linearSolveSVD a b)
time (" ls solve " ++ show n) (linearSolveLS a b)
time (" solve " ++ show n) (linearSolve a b)
-- time (" LU solve " ++ show n) (luSolve (luPacked a) b)
time ("LDL solve " ++ show n) (ldlSolve (ldlPacked (trustSym a)) b)
time ("cholSolve " ++ show n) (cholSolve (chol $ trustSym a) b)
solveBench = do
solveBenchN 500
solveBenchN 1000
solveBenchN 1500
--------------------------------
cholBenchN n = do
let x = uniformSample 777 (2*n) (replicate n (-1,1))
a = tr x <> x
a `seq` putStr ""
time ("chol " ++ show n) (chol $ trustSym a)
cholBench = do
putStrLn ""
cholBenchN 1200
cholBenchN 600
cholBenchN 300
cholBenchN 150
cholBenchN 50
--------------------------------------------------------------------------------
luBenchN f n x msg = do
let m = diagRect 1 (fromList (replicate n x)) n n
m `seq` putStr ""
time (msg ++ " "++ show n) (rnf $ f m)
luBench = do
putStrLn ""
luBenchN luPacked 1000 (5::R) "luPacked Double "
luBenchN luPacked' 1000 (5::R) "luPacked' Double "
luBenchN luPacked' 1000 (5::Mod 9973 I) "luPacked' I mod 9973"
luBenchN luPacked' 1000 (5::Mod 9973 Z) "luPacked' Z mod 9973"
luBenchN_2 f g n x msg = do
let m = diagRect 1 (fromList (replicate n x)) n n
b = flipud m
m `seq` b `seq` putStr ""
time (msg ++ " "++ show n) (f (g m) b)
luBench_2 = do
putStrLn ""
luBenchN_2 luSolve luPacked 500 (5::R) "luSolve .luPacked Double "
luBenchN_2 luSolve' luPacked' 500 (5::R) "luSolve'.luPacked' Double "
luBenchN_2 luSolve' luPacked' 500 (5::Mod 9973 I) "luSolve'.luPacked' I mod 9973"
luBenchN_2 luSolve' luPacked' 500 (5::Mod 9973 Z) "luSolve'.luPacked' Z mod 9973"
| null | https://raw.githubusercontent.com/haskell-numerics/hmatrix/2694f776c7b5034d239acb5d984c489417739225/packages/tests/src/Numeric/LinearAlgebra/Tests.hs | haskell | # LANGUAGE RankNTypes #
---------------------------------------------------------------------------
, findNaN
eps = peps :: Double
i = 0:+1 :: Complex Double
---------------------------------------------------------
-------------------------------------------------------------------
---------------------------------------------------
---------------------------------------------------
---------------------------------------------------
-------------------------------------------------------------------
-------------------------------------------------------------------
-------------------------------------------------------------------
-------------------------------------------------------------------
-------------------------------------------------------------------
, utest "normv2CF" $ norm2PropC (single v)
, utest "normv2F" $ norm2PropR (single x)
a =~~= b = fromList [a] :~5~: fromList [b]
-------------------------------------------------------------------
-------------------------------------------------------------------
-------------------------------------------------------------------
-------------------------------------------------------------------
apply a test to successive elements of a vector, evaluates to true iff test passes for all pairs
successive_ :: Storable a => (a -> a -> Bool) -> Vector a -> Bool
successive :: (Storable a, Storable b) => (a -> a -> b) -> Vector a -> Vector b
-------------------------------------------------------------------
-------------------------------------------------------------------
-------------------------------------------------------------------
-------------------------------------------------------------------
------------------------------------------------------------------------------
d = (3><3) [0,-1,0,-1,4,-1,0,-1,0] :: Matrix Double
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
(some tests may fail with bigger sizes due to precision loss).
^ maximum dimension
test (svdProp1b svdRd)
test (svdProp2 thinSVDRd)
test (svdProp2 thinSVDCd)
test (rqProp . cM)
test (exactProp . rPosDef)
test (exactProp . cPosDef)
, besselTest
, exponentialTest
, sliceTest
single precision approximate equality
| Some additional tests on big matrices. They take a few minutes.
runBigTests :: IO ()
runBigTests = undefined
| testcase for nonempty fpu stack
| testcase for nonempty fpu stack
------------------------------------------------------------------------------
| Performance measurements.
------------------------------
------------------------------
------------------------------
------------------------------
------------------------------
------------------------------
------------------------------
------------------------------
time (" LU solve " ++ show n) (luSolve (luPacked a) b)
------------------------------
------------------------------------------------------------------------------ | # LANGUAGE CPP #
{-# OPTIONS_GHC -fno-warn-unused-imports -fno-warn-incomplete-patterns -fno-warn-missing-signatures #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE ViewPatterns #-}
{- |
Module      :  Numeric.LinearAlgebra.Tests
Copyright   :  (c) Alberto Ruiz 2007-14
License     :  BSD3
Maintainer  :  Alberto Ruiz
Stability   :  provisional

Some tests.
-}
module Numeric.LinearAlgebra.Tests(
-- module Numeric.LinearAlgebra.Tests.Instances,
-- module Numeric.LinearAlgebra.Tests.Properties,
qCheck,
utest,
runTests,
runBenchmarks
, binaryTests
, runBigTests
) where
import Numeric.LinearAlgebra
import Numeric.LinearAlgebra.Devel
import Numeric.LinearAlgebra.Static(L)
import Numeric.LinearAlgebra.Tests.Instances
import Numeric.LinearAlgebra.Tests.Properties
import Test.HUnit hiding ((~:),test,Testable,State)
import System.Info
import Data.List(foldl1')
#if MIN_VERSION_base(4,11,0)
import Prelude hiding ((^),(<>))
#else
import Prelude hiding ((^))
#endif
import qualified Prelude
import System.CPUTime
import System.Exit
import Text.Printf
import Numeric.LinearAlgebra.Devel(unsafeFromForeignPtr,unsafeToForeignPtr)
import Control.Arrow((***))
import Debug.Trace
import Control.Monad(when)
import Control.Applicative
import Control.Monad(ap)
import Control.DeepSeq ( NFData(..) )
import Test.QuickCheck(Arbitrary,arbitrary,coarbitrary,choose,vector
,sized,classify,Testable,Property
,quickCheckWithResult,maxSize,stdArgs,shrink)
import qualified Test.QuickCheck as T
import Test.QuickCheck.Test(isSuccess)
qCheck n x = do
r <- quickCheckWithResult stdArgs {maxSize = n} x
when (not $ isSuccess r) (exitFailure)
a ^ b = a Prelude.^ (b :: Int)
utest str b = TestCase $ assertBool str b
feye n = flipud (ident n) :: Matrix Double
detTest1 = det m == 26
&& det mc == 38 :+ (-3)
&& det (feye 2) == -1
where
m = (3><3)
[ 1, 2, 3
, 4, 5, 7
, 2, 8, 4 :: Double
]
mc = (3><3)
[ 1, 2, 3
, 4, 5, 7
, 2, 8, iC
]
detTest2 = inv1 |~| inv2 && [det1] ~~ [det2]
where
m = complex (feye 6)
inv1 = inv m
det1 = det m
(inv2,(lda,sa)) = invlndet m
det2 = sa * exp lda
nd1 = (3><3) [ 1/2, 1/4, 1/4
, 0/1, 1/2, 1/4
, 1/2, 1/4, 1/2 :: Double]
nd2 = (2><2) [1, 0, 1, 1:: Complex Double]
expmTest1 = expm nd1 :~14~: (3><3)
[ 1.762110887278176
, 0.478085470590435
, 0.478085470590435
, 0.104719410945666
, 1.709751181805343
, 0.425725765117601
, 0.851451530235203
, 0.530445176063267
, 1.814470592751009 ]
expmTest2 = expm nd2 :~15~: (2><2)
[ 2.718281828459045
, 0.000000000000000
, 2.718281828459045
, 2.718281828459045 ]
mbCholTest = utest "mbCholTest" (ok1 && ok2) where
m1 = (2><2) [2,5,5,8 :: Double]
m2 = (2><2) [3,5,5,9 :: Complex Double]
ok1 = mbChol (trustSym m1) == Nothing
ok2 = mbChol (trustSym m2) == Just (chol $ trustSym m2)
triTest = utest "triTest" ok1 where
a :: Matrix R
a = (4><4)
[
4.30, 0.00, 0.00, 0.00,
-3.96, -4.87, 0.00, 0.00,
0.40, 0.31, -8.02, 0.00,
-0.27, 0.07, -5.95, 0.12
]
w :: Matrix R
w = (4><2)
[
-12.90, -21.50,
16.75, 14.93,
-17.55, 6.33,
-11.04, 8.09
]
v :: Matrix R
v = triSolve Lower a w
e :: Matrix R
e = (4><2)
[
-3.0000, -5.0000,
-1.0000, 1.0000,
2.0000, -1.0000,
1.0000, 6.0000
]
ok1 = (norm_Inf . flatten $ e - v) <= 1e-13
triDiagTest = utest "triDiagTest" (ok1 && ok2) where
dL, d, dU :: Vector Double
dL = fromList [3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0]
d = fromList [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]
dU = fromList [4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0]
b :: Matrix R
b = (9><3)
[
1.0, 1.0, 1.0,
1.0, -1.0, 2.0,
1.0, 1.0, 3.0,
1.0, -1.0, 4.0,
1.0, 1.0, 5.0,
1.0, -1.0, 6.0,
1.0, 1.0, 7.0,
1.0, -1.0, 8.0,
1.0, 1.0, 9.0
]
y :: Matrix R
y = (9><9)
[
1.0, 4.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
3.0, 1.0, 4.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 3.0, 1.0, 4.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 3.0, 1.0, 4.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 3.0, 1.0, 4.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 3.0, 1.0, 4.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 3.0, 1.0, 4.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 3.0, 1.0, 4.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 3.0, 1.0
]
x :: Matrix R
x = triDiagSolve dL d dU b
z :: Matrix C
z = (4><4)
[
1.0 :+ 1.0, 4.0 :+ 4.0, 0.0 :+ 0.0, 0.0 :+ 0.0,
3.0 :+ 3.0, 1.0 :+ 1.0, 4.0 :+ 4.0, 0.0 :+ 0.0,
0.0 :+ 0.0, 3.0 :+ 3.0, 1.0 :+ 1.0, 4.0 :+ 4.0,
0.0 :+ 0.0, 0.0 :+ 0.0, 3.0 :+ 3.0, 1.0 :+ 1.0
]
zDL, zD, zDu :: Vector C
zDL = fromList [3.0 :+ 3.0, 3.0 :+ 3.0, 3.0 :+ 3.0]
zD = fromList [1.0 :+ 1.0, 1.0 :+ 1.0, 1.0 :+ 1.0, 1.0 :+ 1.0]
zDu = fromList [4.0 :+ 4.0, 4.0 :+ 4.0, 4.0 :+ 4.0]
zB :: Matrix C
zB = (4><3)
[
1.0 :+ 1.0, 1.0 :+ 1.0, 1.0 :+ (-1.0),
1.0 :+ 1.0, (-1.0) :+ (-1.0), 1.0 :+ (-1.0),
1.0 :+ 1.0, 1.0 :+ 1.0, 1.0 :+ (-1.0),
1.0 :+ 1.0, (-1.0) :+ (-1.0), 1.0 :+ (-1.0)
]
u :: Matrix C
u = triDiagSolve zDL zD zDu zB
ok1 = (maximum $ map abs $ concat $ toLists $ b - (y <> x)) <= 1e-15
ok2 = (maximum $ map magnitude $ concat $ toLists $ zB - (z <> u)) <= 1e-15
triDiagRegression = utest "triDiagRegression" ok where
minusOnes, twos :: Vector R
minusOnes = fromList [-1, -1]
twos = fromList [2, 2, 2]
k :: Matrix R
k = (3><3)
[ 2, -1, 0
, -1, 2, -1
, 0, -1, 2
]
b :: Matrix R
b = (3><1) [10, 10, 10]
tridiag = triDiagSolve minusOnes twos minusOnes b
simple = linearSolve k b
ok = case simple of
Just m -> tridiag |~| m
Nothing -> False
randomTestGaussian = (unSym c) :~3~: unSym (snd (meanCov dat))
where
a = (3><3) [1,2,3,
2,4,0,
-2,2,1]
m = 3 |> [1,2,3]
c = mTm a
dat = gaussianSample 7 (10^6) m c
randomTestUniform = c :~2~: unSym (snd (meanCov dat))
where
c = diag $ 3 |> map ((/12).(^2)) [1,2,3]
dat = uniformSample 7 (10^6) [(0,1),(1,3),(3,6)]
rot :: Double -> Matrix Double
rot a = (3><3) [ c,0,s
, 0,1,0
,-s,0,c ]
where c = cos a
s = sin a
rotTest = fun (10^5) :~11~: rot 5E4
where fun n = foldl1' (<>) (map rot angles)
where angles = toList $ linspace n (0,1)
-- vector <= 0.6.0.2 bug discovered by
offsetTest = y == y' where
x = fromList [0..3 :: Double]
y = subVector 1 3 x
(f,o,n) = unsafeToForeignPtr y
y' = unsafeFromForeignPtr f o n
normsVTest = TestList [
utest "normv2CD" $ norm2PropC v
#ifndef NONORMVTEST
, utest "normv2D" $ norm2PropR x
#endif
, utest "normv1CD" $ norm_1 v == 8
-- , utest "normv1CF" $ norm_1 (single v) == 8
, utest "normv1D" $ norm_1 x == 6
-- , utest "normv1F" $ norm_1 (single x) == 6
, utest "normvInfCD" $ norm_Inf v == 5
-- , utest "normvInfCF" $ norm_Inf (single v) == 5
, utest "normvInfD" $ norm_Inf x == 3
-- , utest "normvInfF" $ norm_Inf (single x) == 3
] where v = fromList [1,-2,3:+4] :: Vector (Complex Double)
x = fromList [1,2,-3] :: Vector Double
#ifndef NONORMVTEST
norm2PropR a = norm_2 a =~= sqrt (udot a a)
#endif
norm2PropC a = norm_2 a =~= realPart (sqrt (a `dot` a))
a =~= b = fromList [a] |~| fromList [b]
normsMTest = TestList [
utest "norm2mCD" $ norm_2 v =~= 8.86164970498005
-- , utest "norm2mCF" $ norm_2 (single v) =~= 8.86164970498005
, utest "norm2mD" $ norm_2 x =~= 5.96667765076216
-- , utest "norm2mF" $ norm_2 (single x) =~= 5.96667765076216
, utest "norm1mCD" $ norm_1 v == 9
-- , utest "norm1mCF" $ norm_1 (single v) == 9
, utest "norm1mD" $ norm_1 x == 7
-- , utest "norm1mF" $ norm_1 (single x) == 7
, utest "normmInfCD" $ norm_Inf v == 12
-- , utest "normmInfCF" $ norm_Inf (single v) == 12
, utest "normmInfD" $ norm_Inf x == 8
-- , utest "normmInfF" $ norm_Inf (single x) == 8
, utest "normmFroCD" $ norm_Frob v =~= 8.88819441731559
-- , utest "normmFroCF" $ norm_Frob (single v) =~~= 8.88819441731559
, utest "normmFroD" $ norm_Frob x =~= 6.24499799839840
-- , utest "normmFroF" $ norm_Frob (single x) =~~= 6.24499799839840
] where v = (2><2) [1,-2*iC,3:+4,7] :: Matrix (Complex Double)
x = (2><2) [1,2,-3,5] :: Matrix Double
a =~= b = fromList [a] :~10~: fromList [b]
sumprodTest = TestList [
utest "sumCD" $ sumElements z == 6
, utest "sumCF" $ sumElements (single z) == 6
, utest "sumD" $ sumElements v == 6
, utest "sumF" $ sumElements (single v) == 6
, utest "prodCD" $ prodProp z
, utest "prodCF" $ prodProp (single z)
, utest "prodD" $ prodProp v
, utest "prodF" $ prodProp (single v)
] where v = fromList [1,2,3] :: Vector Double
z = fromList [1,2-iC,3+iC]
prodProp x = prodElements x == product (toList x)
chainTest = utest "chain" $ foldl1' (<>) ms |~| optimiseMult ms where
ms = [ diag (fromList [1,2,3 :: Double])
, konst 3 (3,5)
, (5><10) [1 .. ]
, konst 5 (10,2)
]
conjuTest m = cmap conjugate (flatten (conj (tr m))) == flatten (tr m)
newtype State s a = State { runState :: s -> (a,s) }
instance Functor (State s)
where
fmap f x = pure f <*> x
instance Applicative (State s)
where
pure = return
(<*>) = ap
instance Monad (State s) where
return a = State $ \s -> (a,s)
m >>= f = State $ \s -> let (a,s') = runState m s
in runState (f a) s'
state_get :: State s s
state_get = State $ \s -> (s,s)
state_put :: s -> State s ()
state_put s = State $ \_ -> ((),s)
evalState :: State s a -> s -> a
evalState m s = let (a,s') = runState m s
in seq s' a
newtype MaybeT m a = MaybeT { runMaybeT :: m (Maybe a) }
instance Monad m => Functor (MaybeT m)
where
fmap f x = pure f <*> x
instance Monad m => Applicative (MaybeT m)
where
pure = return
(<*>) = ap
instance Monad m => Monad (MaybeT m) where
return a = MaybeT $ return $ Just a
m >>= f = MaybeT $ do
res <- runMaybeT m
case res of
Nothing -> return Nothing
Just r -> runMaybeT (f r)
lift_maybe = MaybeT . fmap Just
successive_ t v = maybe False (\_ -> True) $ evalState (runMaybeT (mapVectorM_ stp (subVector 1 (size v - 1) v))) (v ! 0)
where stp e = do
ep <- lift_maybe $ state_get
if t e ep
then lift_maybe $ state_put e
else MaybeT $ return Nothing
-- operate on successive elements of a vector and return the resulting vector, whose length is 1 less than that of the input
successive f v = evalState (mapVectorM stp (subVector 1 (size v - 1) v)) (v ! 0)
where stp e = do
ep <- state_get
state_put e
return $ f ep e
succTest = utest "successive" $
successive_ (>) (fromList [1 :: Double,2,3,4]) == True
&& successive_ (>) (fromList [1 :: Double,3,2,4]) == False
&& successive (+) (fromList [1..10 :: Double]) == 9 |> [3,5,7,9,11,13,15,17,19]
findAssocTest = utest "findAssoc" ok
where
ok = m1 == m2
m1 = assoc (6,6) 7 $ zip (find (>0) (ident 5 :: Matrix Float)) [10 ..] :: Matrix Double
m2 = diagRect 7 (fromList[10..14]) 6 6
condTest = utest "cond" ok
where
ok = step v * v == cond v 0 0 0 v
v = fromList [-7 .. 7 ] :: Vector Float
conformTest = utest "conform" ok
where
ok = 1 + row [1,2,3] + col [10,20,30,40] + (4><3) [1..]
== (4><3) [13,15,17
,26,28,30
,39,41,43
,52,54,56]
accumTest = utest "accum" ok
where
x = ident 3 :: Matrix Double
ok = accum x (+) [((1,2),7), ((2,2),3)]
== (3><3) [1,0,0
,0,1,7
,0,0,4]
&&
toList (flatten x) == [1,0,0,0,1,0,0,0,1]
convolutionTest = utest "convolution" ok
where
a = fromList [1..10] :: Vector Double
b = fromList [1..3] :: Vector Double
c = (5><7) [1..] :: Matrix Double
ok = separable (corr b) c == corr2 (outer b b) c
&& separable (conv b) c == conv2 (outer b b) c
sparseTest = utest "sparse" (fst $ checkT (undefined :: GMatrix))
staticTest = utest "static" (fst $ checkT (undefined :: L 3 5))
intTest = utest "int ops" (fst $ checkT (undefined :: Matrix I))
modularTest = utest "modular ops" (fst $ checkT (undefined :: Matrix (Mod 13 I)))
indexProp g f x = a1 == g a2 && a2 == a3 && b1 == g b2 && b2 == b3
where
l = map g (toList (f x))
a1 = maximum l
b1 = minimum l
a2 = x `atIndex` maxIndex x
b2 = x `atIndex` minIndex x
a3 = maxElement x
b3 = minElement x
_sliceTest = TestList
[ testSlice (chol . trustSym) (gen 5 :: Matrix R)
, testSlice (chol . trustSym) (gen 5 :: Matrix C)
, testSlice qr (rec :: Matrix R)
, testSlice qr (rec :: Matrix C)
, testSlice hess (agen 5 :: Matrix R)
, testSlice hess (agen 5 :: Matrix C)
, testSlice schur (agen 5 :: Matrix R)
, testSlice schur (agen 5 :: Matrix C)
, testSlice lu (agen 5 :: Matrix R)
, testSlice lu (agen 5 :: Matrix C)
, testSlice (luSolve (luPacked (agen 5 :: Matrix R))) (agen 5)
, testSlice (luSolve (luPacked (agen 5 :: Matrix C))) (agen 5)
, test_lus (agen 5 :: Matrix R)
, test_lus (agen 5 :: Matrix C)
, testSlice eig (agen 5 :: Matrix R)
, testSlice eig (agen 5 :: Matrix C)
, testSlice (eigSH . trustSym) (gen 5 :: Matrix R)
, testSlice (eigSH . trustSym) (gen 5 :: Matrix C)
, testSlice eigenvalues (agen 5 :: Matrix R)
, testSlice eigenvalues (agen 5 :: Matrix C)
, testSlice (eigenvaluesSH . trustSym) (gen 5 :: Matrix R)
, testSlice (eigenvaluesSH . trustSym) (gen 5 :: Matrix C)
, testSlice svd (rec :: Matrix R)
, testSlice thinSVD (rec :: Matrix R)
, testSlice compactSVD (rec :: Matrix R)
, testSlice leftSV (rec :: Matrix R)
, testSlice rightSV (rec :: Matrix R)
, testSlice singularValues (rec :: Matrix R)
, testSlice svd (rec :: Matrix C)
, testSlice thinSVD (rec :: Matrix C)
, testSlice compactSVD (rec :: Matrix C)
, testSlice leftSV (rec :: Matrix C)
, testSlice rightSV (rec :: Matrix C)
, testSlice singularValues (rec :: Matrix C)
, testSlice (linearSolve (agen 5:: Matrix R)) (agen 5)
, testSlice (flip linearSolve (agen 5:: Matrix R)) (agen 5)
, testSlice (linearSolve (agen 5:: Matrix C)) (agen 5)
, testSlice (flip linearSolve (agen 5:: Matrix C)) (agen 5)
, testSlice (linearSolveLS (ogen 5:: Matrix R)) (ogen 5)
, testSlice (flip linearSolveLS (ogen 5:: Matrix R)) (ogen 5)
, testSlice (linearSolveLS (ogen 5:: Matrix C)) (ogen 5)
, testSlice (flip linearSolveLS (ogen 5:: Matrix C)) (ogen 5)
, testSlice (linearSolveSVD (ogen 5:: Matrix R)) (ogen 5)
, testSlice (flip linearSolveSVD (ogen 5:: Matrix R)) (ogen 5)
, testSlice (linearSolveSVD (ogen 5:: Matrix C)) (ogen 5)
, testSlice (flip linearSolveSVD (ogen 5:: Matrix C)) (ogen 5)
, testSlice (linearSolveLS (ugen 5:: Matrix R)) (ugen 5)
, testSlice (flip linearSolveLS (ugen 5:: Matrix R)) (ugen 5)
, testSlice (linearSolveLS (ugen 5:: Matrix C)) (ugen 5)
, testSlice (flip linearSolveLS (ugen 5:: Matrix C)) (ugen 5)
, testSlice (linearSolveSVD (ugen 5:: Matrix R)) (ugen 5)
, testSlice (flip linearSolveSVD (ugen 5:: Matrix R)) (ugen 5)
, testSlice (linearSolveSVD (ugen 5:: Matrix C)) (ugen 5)
, testSlice (flip linearSolveSVD (ugen 5:: Matrix C)) (ugen 5)
, testSlice ((<>) (ogen 5:: Matrix R)) (gen 5)
, testSlice (flip (<>) (gen 5:: Matrix R)) (ogen 5)
, testSlice ((<>) (ogen 5:: Matrix C)) (gen 5)
, testSlice (flip (<>) (gen 5:: Matrix C)) (ogen 5)
, testSlice ((<>) (ogen 5:: Matrix Float)) (gen 5)
, testSlice (flip (<>) (gen 5:: Matrix Float)) (ogen 5)
, testSlice ((<>) (ogen 5:: Matrix (Complex Float))) (gen 5)
, testSlice (flip (<>) (gen 5:: Matrix (Complex Float))) (ogen 5)
, testSlice ((<>) (ogen 5:: Matrix I)) (gen 5)
, testSlice (flip (<>) (gen 5:: Matrix I)) (ogen 5)
, testSlice ((<>) (ogen 5:: Matrix Z)) (gen 5)
, testSlice (flip (<>) (gen 5:: Matrix Z)) (ogen 5)
, testSlice ((<>) (ogen 5:: Matrix (I ./. 7))) (gen 5)
, testSlice (flip (<>) (gen 5:: Matrix (I ./. 7))) (ogen 5)
, testSlice ((<>) (ogen 5:: Matrix (Z ./. 7))) (gen 5)
, testSlice (flip (<>) (gen 5:: Matrix (Z ./. 7))) (ogen 5)
, testSlice (flip cholSolve (agen 5:: Matrix R)) (chol $ trustSym $ gen 5)
, testSlice (flip cholSolve (agen 5:: Matrix C)) (chol $ trustSym $ gen 5)
, testSlice (cholSolve (chol $ trustSym $ gen 5:: Matrix R)) (agen 5)
, testSlice (cholSolve (chol $ trustSym $ gen 5:: Matrix C)) (agen 5)
, ok_qrgr (rec :: Matrix R)
, ok_qrgr (rec :: Matrix C)
, testSlice (test_qrgr 4 tau1) qrr1
, testSlice (test_qrgr 4 tau2) qrr2
]
where
QR qrr1 tau1 = qrRaw (rec :: Matrix R)
QR qrr2 tau2 = qrRaw (rec :: Matrix C)
test_qrgr n t x = qrgr n (QR x t)
ok_qrgr x = TestCase . assertBool "ok_qrgr" $ simeq 1E-15 q q'
where
(q,_) = qr x
atau = qrRaw x
q' = qrgr (rows q) atau
simeq eps a b = not $ magnit eps (norm_1 $ flatten (a-b))
test_lus m = testSlice f lup
where
f x = luSolve (LU x p) m
(LU lup p) = luPacked m
gen :: Numeric t => Int -> Matrix t
gen n = diagRect 1 (konst 5 n) n n
agen :: (Numeric t, Num (Vector t))=> Int -> Matrix t
agen n = gen n + fromInt ((n><n)[0..])
ogen :: (Numeric t, Num (Vector t))=> Int -> Matrix t
ogen n = gen n === gen n
ugen :: (Numeric t, Num (Vector t))=> Int -> Matrix t
ugen n = takeRows 3 (gen n)
rec :: Numeric t => Matrix t
rec = subMatrix (0,0) (4,5) (gen 5)
testSlice f x@(size->sz@(r,c)) =
TestList . map (TestCase . assertEqual "" (f x)) $ (map f (g y1 ++ g y2))
where
subm = subMatrix
g y = [ subm (a*r,b*c) sz y | a <-[0..2], b <- [0..2]]
h z = fromBlocks (replicate 3 (replicate 3 z))
y1 = h x
y2 = (tr . h . tr) x
-- | All tests must pass with a maximum dimension of about 20
-- (some tests may fail with bigger sizes due to precision loss).
runTests :: Int -- ^ maximum dimension
-> IO ()
runTests n = do
let test :: forall t . T.Testable t => t -> IO ()
test p = qCheck n p
putStrLn "------ index"
test( \m -> indexProp id flatten (single (m :: RM)) )
test( \v -> indexProp id id (single (v :: Vector Double)) )
test( \m -> indexProp id flatten (m :: RM) )
test( \v -> indexProp id id (v :: Vector Double) )
test( \m -> indexProp magnitude flatten (single (m :: CM)) )
test( \v -> indexProp magnitude id (single (v :: Vector (Complex Double))) )
test( \m -> indexProp magnitude flatten (m :: CM) )
test( \v -> indexProp magnitude id (v :: Vector (Complex Double)) )
putStrLn "------ mult Double"
test (multProp1 10 . rConsist)
test (multProp1 10 . cConsist)
test (multProp2 10 . rConsist)
test (multProp2 10 . cConsist)
-- putStrLn "------ mult Float"
-- test (multProp1 6 . (single *** single) . rConsist)
-- test (multProp1 6 . (single *** single) . cConsist)
-- test (multProp2 6 . (single *** single) . rConsist)
-- test (multProp2 6 . (single *** single) . cConsist)
putStrLn "------ sub-trans"
test (subProp . rM)
test (subProp . cM)
putStrLn "------ ctrans"
test (conjuTest . cM)
test (conjuTest . zM)
putStrLn "------ lu"
test (luProp . rM)
test (luProp . cM)
putStrLn "------ inv (linearSolve)"
test (invProp . rSqWC)
test (invProp . cSqWC)
putStrLn "------ luSolve"
test (linearSolveProp (luSolve.luPacked) . rSqWC)
test (linearSolveProp (luSolve.luPacked) . cSqWC)
putStrLn "------ ldlSolve"
test (linearSolvePropH (ldlSolve.ldlPacked) . rSymWC)
test (linearSolvePropH (ldlSolve.ldlPacked) . cSymWC)
putStrLn "------ cholSolve"
test (linearSolveProp (cholSolve.chol.trustSym) . rPosDef)
test (linearSolveProp (cholSolve.chol.trustSym) . cPosDef)
putStrLn "------ luSolveLS"
test (linearSolveProp linearSolveLS . rSqWC)
test (linearSolveProp linearSolveLS . cSqWC)
test (linearSolveProp2 linearSolveLS . rConsist)
test (linearSolveProp2 linearSolveLS . cConsist)
putStrLn "------ pinv (linearSolveSVD)"
test (pinvProp . rM)
test (pinvProp . cM)
putStrLn "------ det"
test (detProp . rSqWC)
test (detProp . cSqWC)
putStrLn "------ svd"
test (svdProp1 . rM)
test (svdProp1 . cM)
test (svdProp1a svd . rM)
test (svdProp1a svd . cM)
-- test (svdProp1a svdRd)
test (svdProp1b svd . rM)
test (svdProp1b svd . cM)
test (svdProp2 thinSVD . rM)
test (svdProp2 thinSVD . cM)
test (svdProp3 . rM)
test (svdProp3 . cM)
test (svdProp4 . rM)
test (svdProp4 . cM)
test (svdProp5a)
test (svdProp5b)
test (svdProp6a)
test (svdProp6b)
test (svdProp7 . rM)
test (svdProp7 . cM)
-- putStrLn "------ svdCd"
#ifdef NOZGESDD
-- putStrLn "Omitted"
#else
-- test (svdProp1a svdCd)
-- test (svdProp1b svdCd)
#endif
putStrLn "------ eig"
test (eigSHProp . rHer)
test (eigSHProp . cHer)
test (eigProp . rSq)
test (eigProp . cSq)
test (eigSHProp2 . rHer)
test (eigSHProp2 . cHer)
test (eigProp2 . rSq)
test (eigProp2 . cSq)
putStrLn "------ geig"
test (uncurry geigProp . rSq2WC)
test (uncurry geigProp . cSq2WC)
putStrLn "------ nullSpace"
test (nullspaceProp . rM)
test (nullspaceProp . cM)
putStrLn "------ qr"
test (qrProp . rM)
test (qrProp . cM)
test (rqProp . rM)
test (rqProp1 . cM)
test (rqProp2 . cM)
-- test (rqProp3 . cM)
putStrLn "------ hess"
test (hessProp . rSq)
test (hessProp . cSq)
putStrLn "------ schur"
test (schurProp2 . rSq)
test (schurProp1 . cSq)
putStrLn "------ chol"
test (cholProp . rPosDef)
test (cholProp . cPosDef)
putStrLn "------ expm"
test (expmDiagProp . complex. rSqWC)
test (expmDiagProp . cSqWC)
putStrLn "------ vector operations - Double"
test (\u -> sin u ^ 2 + cos u ^ 2 |~| (1::RM))
test $ (\u -> sin u ^ 2 + cos u ^ 2 |~| (1::CM)) . liftMatrix makeUnitary
test (\u -> sin u ** 2 + cos u ** 2 |~| (1::RM))
test (\u -> cos u * tan u |~| sin (u::RM))
test $ (\u -> cos u * tan u |~| sin (u::CM)) . liftMatrix makeUnitary
-- putStrLn "------ vector operations - Float"
-- test (\u -> sin u ^ 2 + cos u ^ 2 |~~| (1::FM))
-- test $ (\u -> sin u ^ 2 + cos u ^ 2 |~~| (1::ZM)) . liftMatrix makeUnitary
-- test (\u -> sin u ** 2 + cos u ** 2 |~~| (1::FM))
-- test (\u -> cos u * tan u |~~| sin (u::FM))
-- test $ (\u -> cos u * tan u |~~| sin (u::ZM)) . liftMatrix makeUnitary
putStrLn "------ read . show"
test (\m -> (m::RM) == read (show m))
test (\m -> (m::CM) == read (show m))
test (\m -> toRows (m::RM) == read (show (toRows m)))
test (\m -> toRows (m::CM) == read (show (toRows m)))
test (\m -> (m::FM) == read (show m))
test (\m -> (m::ZM) == read (show m))
test (\m -> toRows (m::FM) == read (show (toRows m)))
test (\m -> toRows (m::ZM) == read (show (toRows m)))
putStrLn "------ some unit tests"
c <- runTestTT $ TestList
[ utest "1E5 rots" rotTest
, utest "det1" detTest1
, utest "invlndet" detTest2
, utest "expm1" (expmTest1)
, utest "expm2" (expmTest2)
, utest "arith1" $ ((ones (100,100) * 5 + 2)/0.5 - 7)**2 |~| (49 :: RM)
, utest "arith2" $ ((scalar (1+iC) * ones (100,100) * 5 + 2)/0.5 - 7)**2 |~| ( scalar (140*iC-51) :: CM)
, utest "arith3" $ exp (scalar iC * ones(10,10)*pi) + 1 |~| 0
, utest "<\\>" $ (3><2) [2,0,0,3,1,1::Double] <\> 3|>[4,9,5] |~| 2|>[2,3]
-- , utest "gamma" (gamma 5 == 24.0)
, utest "randomGaussian" randomTestGaussian
, utest "randomUniform" randomTestUniform
, utest "buildVector/Matrix" $
complex (10 |> [0::Double ..]) == build 10 id
&& ident 5 == build (5,5) (\r c -> if r==c then 1::Double else 0)
, utest "rank" $ rank ((2><3)[1,0,0,1,5*peps,0::Double]) == 1
&& rank ((2><3)[1,0,0,1,7*peps,0::Double]) == 2
, utest "block" $ fromBlocks [[ident 3,0],[0,ident 4]] == (ident 7 :: CM)
, mbCholTest
, triTest
, triDiagTest
, triDiagRegression
, utest "offset" offsetTest
, normsVTest
, normsMTest
, sumprodTest
, chainTest
, succTest
, findAssocTest
, condTest
, conformTest
, accumTest
, convolutionTest
, sparseTest
, staticTest
, intTest
, modularTest
]
when (errors c + failures c > 0) exitFailure
return ()
infixl 4 |~~|
a |~~| b = a :~6~: b
makeUnitary v | realPart n > 1 = v / scalar n
| otherwise = v
where n = sqrt (v `dot` v)
binaryTests :: IO ()
binaryTests = do
let test :: forall t . T.Testable t => t -> IO ()
test = qCheck 100
test vectorBinaryRoundtripProp
test staticVectorBinaryRoundtripProp
qCheck 30 matrixBinaryRoundtripProp
qCheck 30 staticMatrixBinaryRoundtripProp
{-
findNaN :: Int -> Bool
findNaN n = all (bugProp . eye) (take n $ cycle [1..20])
where eye m = ident m :: Matrix (Double)
-}
runBenchmarks :: IO ()
runBenchmarks = do
solveBench
subBench
mkVecBench
multBench
cholBench
luBench
luBench_2
svdBench
eigBench
putStrLn ""
time msg act = do
putStr (msg++" ")
t0 <- getCPUTime
act `seq` putStr " "
t1 <- getCPUTime
printf "%6.2f s CPU\n" $ (fromIntegral (t1 - t0) / (10^12 :: Double)) :: IO ()
return ()
timeR msg act = do
putStr (msg++" ")
t0 <- getCPUTime
putStr (show act)
t1 <- getCPUTime
printf "%6.2f s CPU\n" $ (fromIntegral (t1 - t0) / (10^12 :: Double)) :: IO ()
return ()
manymult n = foldl1' (<>) (map rot2 angles) where
angles = toList $ linspace n (0,1)
rot2 :: Double -> Matrix Double
rot2 a = (3><3) [ c,0,s
, 0,1,0
,-s,0,c ]
where c = cos a
s = sin a
multb n = foldl1' (<>) (replicate (10^6) (ident n :: Matrix Double))
manyvec0 xs = sum $ map (\x -> x + x**2 + x**3) xs
manyvec1 xs = sumElements $ fromRows $ map (\x -> fromList [x,x**2,x**3]) xs
manyvec5 xs = sumElements $ fromRows $ map (\x -> vec3 x (x**2) (x**3)) xs
manyvec2 xs = sum $ map (\x -> sqrt(x^2 + (x**2)^2 +(x**3)^2)) xs
manyvec3 xs = sum $ map (norm_2 . (\x -> fromList [x,x**2,x**3])) xs
manyvec4 xs = sum $ map (norm_2 . (\x -> vec3 x (x**2) (x**3))) xs
vec3 :: Double -> Double -> Double -> Vector Double
vec3 a b c = runSTVector $ do
v <- newUndefinedVector 3
writeVector v 0 a
writeVector v 1 b
writeVector v 2 c
return v
mkVecBench = do
let n = 1000000
xs = toList $ linspace n (0,1::Double)
putStr "\neval data... "; print (sum xs)
timeR "listproc " $ manyvec0 xs
timeR "fromList matrix " $ manyvec1 xs
timeR "vec3 matrix " $ manyvec5 xs
timeR "listproc norm " $ manyvec2 xs
timeR "norm fromList " $ manyvec3 xs
timeR "norm vec3 " $ manyvec4 xs
subBench = do
putStrLn ""
let g = foldl1' (.) (replicate (10^5) (\v -> subVector 1 (size v -1) v))
time "0.1M subVector " (g (konst 1 (1+10^5) :: Vector Double) ! 0)
let f = foldl1' (.) (replicate (10^5) (fromRows.toRows))
time "subVector-join 3" (f (ident 3 :: Matrix Double) `atIndex` (0,0))
time "subVector-join 10" (f (ident 10 :: Matrix Double) `atIndex` (0,0))
multBench = do
let a = ident 1000 :: Matrix Double
let b = ident 2000 :: Matrix Double
a `seq` b `seq` putStrLn ""
time "product of 1M different 3x3 matrices" (manymult (10^6))
putStrLn ""
time "product of 1M constant 1x1 matrices" (multb 1)
time "product of 1M constant 3x3 matrices" (multb 3)
-- time "product of 1M constant 5x5 matrices" (multb 5)
time "product of 1M const. 10x10 matrices" (multb 10)
-- time "product of 1M const. 15x15 matrices" (multb 15)
time "product of 1M const. 20x20 matrices" (multb 20)
-- time "product of 1M const. 25x25 matrices" (multb 25)
putStrLn ""
time "product (1000 x 1000)<>(1000 x 1000)" (a<>a)
time "product (2000 x 2000)<>(2000 x 2000)" (b<>b)
eigBench = do
let m = reshape 1000 (randomVector 777 Uniform (1000*1000))
s = m + tr m
m `seq` s `seq` putStrLn ""
time "eigenvalues symmetric 1000x1000" (eigenvaluesSH (trustSym m))
time "eigenvectors symmetric 1000x1000" (snd $ eigSH (trustSym m))
time "eigenvalues general 1000x1000" (eigenvalues m)
time "eigenvectors general 1000x1000" (snd $ eig m)
svdBench = do
let a = reshape 500 (randomVector 777 Uniform (3000*500))
b = reshape 1000 (randomVector 777 Uniform (1000*1000))
fv (_,_,v) = v `atIndex` (0,0)
a `seq` b `seq` putStrLn ""
time "singular values 3000x500" (singularValues a)
time "thin svd 3000x500" (fv $ thinSVD a)
time "full svd 3000x500" (fv $ svd a)
time "singular values 1000x1000" (singularValues b)
time "full svd 1000x1000" (fv $ svd b)
solveBenchN n = do
let x = uniformSample 777 (2*n) (replicate n (-1,1))
a = tr x <> x
b = asColumn $ randomVector 666 Uniform n
a `seq` b `seq` putStrLn ""
time ("svd solve " ++ show n) (linearSolveSVD a b)
time (" ls solve " ++ show n) (linearSolveLS a b)
time (" solve " ++ show n) (linearSolve a b)
time ("LDL solve " ++ show n) (ldlSolve (ldlPacked (trustSym a)) b)
time ("cholSolve " ++ show n) (cholSolve (chol $ trustSym a) b)
solveBench = do
solveBenchN 500
solveBenchN 1000
solveBenchN 1500
cholBenchN n = do
let x = uniformSample 777 (2*n) (replicate n (-1,1))
a = tr x <> x
a `seq` putStr ""
time ("chol " ++ show n) (chol $ trustSym a)
cholBench = do
putStrLn ""
cholBenchN 1200
cholBenchN 600
cholBenchN 300
cholBenchN 150
cholBenchN 50
luBenchN f n x msg = do
let m = diagRect 1 (fromList (replicate n x)) n n
m `seq` putStr ""
time (msg ++ " "++ show n) (rnf $ f m)
luBench = do
putStrLn ""
luBenchN luPacked 1000 (5::R) "luPacked Double "
luBenchN luPacked' 1000 (5::R) "luPacked' Double "
luBenchN luPacked' 1000 (5::Mod 9973 I) "luPacked' I mod 9973"
luBenchN luPacked' 1000 (5::Mod 9973 Z) "luPacked' Z mod 9973"
luBenchN_2 f g n x msg = do
let m = diagRect 1 (fromList (replicate n x)) n n
b = flipud m
m `seq` b `seq` putStr ""
time (msg ++ " "++ show n) (f (g m) b)
luBench_2 = do
putStrLn ""
luBenchN_2 luSolve luPacked 500 (5::R) "luSolve .luPacked Double "
luBenchN_2 luSolve' luPacked' 500 (5::R) "luSolve'.luPacked' Double "
luBenchN_2 luSolve' luPacked' 500 (5::Mod 9973 I) "luSolve'.luPacked' I mod 9973"
luBenchN_2 luSolve' luPacked' 500 (5::Mod 9973 Z) "luSolve'.luPacked' Z mod 9973"
|
f7b7a8e6309a7b172a9dcf49bc1dcf313820a8dcf0887baccbc468d51de83768 | dparis/gen-phzr | flex_grid.cljs | (ns phzr.flex-grid
(:require [phzr.impl.utils.core :refer [clj->phaser phaser->clj]]
[phzr.impl.extend :as ex]
[cljsjs.phaser]))
(defn ->FlexGrid
"WARNING: This is an EXPERIMENTAL class. The API will change significantly in the coming versions and is incomplete.
Please try to avoid using in production games with a long time to build.
This is also why the documentation is incomplete.
FlexGrid is a a responsive grid manager that works in conjunction with the ScaleManager RESIZE scaling mode and FlexLayers
to provide for game object positioning in a responsive manner.
Parameters:
* manager (Phaser.ScaleManager) - The ScaleManager.
* width (number) - The width of the game.
* height (number) - The height of the game."
([manager width height]
(js/Phaser.FlexGrid. (clj->phaser manager)
(clj->phaser width)
(clj->phaser height))))
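;; Illustrative usage sketch only -- `scale-manager` below is a placeholder for an
;; existing Phaser.ScaleManager instance, not something defined in this namespace.
(comment
  (let [grid (->FlexGrid scale-manager 800 600)]
    (create-fluid-layer grid)))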
(defn create-custom-layer
"A custom layer is centered on the game and maintains its aspect ratio as it scales up and down.
Parameters:
* flex-grid (Phaser.FlexGrid) - Targeted instance for method
* width (number) - Width of this layer in pixels.
* height (number) - Height of this layer in pixels.
* children (Array.<PIXI.DisplayObject>) {optional} - An array of children that are used to populate the FlexLayer.
Returns: Phaser.FlexLayer - The Layer object."
([flex-grid width height]
(phaser->clj
(.createCustomLayer flex-grid
(clj->phaser width)
(clj->phaser height))))
([flex-grid width height children]
(phaser->clj
(.createCustomLayer flex-grid
(clj->phaser width)
(clj->phaser height)
(clj->phaser children)))))
(defn create-fixed-layer
"A fixed layer is centered on the game and is the size of the required dimensions and is never scaled.
Parameters:
* flex-grid (Phaser.FlexGrid) - Targeted instance for method
* children (Array.<PIXI.DisplayObject>) {optional} - An array of children that are used to populate the FlexLayer.
Returns: Phaser.FlexLayer - The Layer object."
([flex-grid]
(phaser->clj
(.createFixedLayer flex-grid)))
([flex-grid children]
(phaser->clj
(.createFixedLayer flex-grid
(clj->phaser children)))))
(defn create-fluid-layer
"A fluid layer is centered on the game and maintains its aspect ratio as it scales up and down.
Parameters:
* flex-grid (Phaser.FlexGrid) - Targeted instance for method
* children (array) {optional} - An array of children that are used to populate the FlexLayer.
Returns: Phaser.FlexLayer - The Layer object."
([flex-grid]
(phaser->clj
(.createFluidLayer flex-grid)))
([flex-grid children]
(phaser->clj
(.createFluidLayer flex-grid
(clj->phaser children)))))
(defn create-full-layer
"A full layer is placed at 0,0 and extends to the full size of the game. Children are scaled according to the fluid ratios.
Parameters:
* flex-grid (Phaser.FlexGrid) - Targeted instance for method
* children (array) {optional} - An array of children that are used to populate the FlexLayer.
Returns: Phaser.FlexLayer - The Layer object."
([flex-grid]
(phaser->clj
(.createFullLayer flex-grid)))
([flex-grid children]
(phaser->clj
(.createFullLayer flex-grid
(clj->phaser children)))))
(defn debug
"Call in the render function to output the bounds rects."
([flex-grid]
(phaser->clj
(.debug flex-grid))))
(defn fit-sprite
"Fits a sprite's width to the bounds.
Parameters:
* flex-grid (Phaser.FlexGrid) - Targeted instance for method
* sprite (Phaser.Sprite) - The Sprite to fit."
([flex-grid sprite]
(phaser->clj
(.fitSprite flex-grid
(clj->phaser sprite)))))
(defn on-resize
"Called when the game container changes dimensions.
Parameters:
* flex-grid (Phaser.FlexGrid) - Targeted instance for method
* width (number) - The new width of the game container.
* height (number) - The new height of the game container."
([flex-grid width height]
(phaser->clj
(.onResize flex-grid
(clj->phaser width)
(clj->phaser height)))))
(defn refresh
"Updates all internal vars such as the bounds and scale values."
([flex-grid]
(phaser->clj
(.refresh flex-grid))))
(defn reset
"Resets the layer children references"
([flex-grid]
(phaser->clj
(.reset flex-grid))))
(defn set-size
"Sets the core game size. This resets the w/h parameters and bounds.
Parameters:
* flex-grid (Phaser.FlexGrid) - Targeted instance for method
* width (number) - The new dimensions.
* height (number) - The new dimensions."
([flex-grid width height]
(phaser->clj
(.setSize flex-grid
(clj->phaser width)
(clj->phaser height))))) | null | https://raw.githubusercontent.com/dparis/gen-phzr/e4c7b272e225ac343718dc15fc84f5f0dce68023/out/flex_grid.cljs | clojure | (ns phzr.flex-grid
(:require [phzr.impl.utils.core :refer [clj->phaser phaser->clj]]
[phzr.impl.extend :as ex]
[cljsjs.phaser]))
(defn ->FlexGrid
"WARNING: This is an EXPERIMENTAL class. The API will change significantly in the coming versions and is incomplete.
Please try to avoid using in production games with a long time to build.
This is also why the documentation is incomplete.
FlexGrid is a a responsive grid manager that works in conjunction with the ScaleManager RESIZE scaling mode and FlexLayers
to provide for game object positioning in a responsive manner.
Parameters:
* manager (Phaser.ScaleManager) - The ScaleManager.
* width (number) - The width of the game.
* height (number) - The height of the game."
([manager width height]
(js/Phaser.FlexGrid. (clj->phaser manager)
(clj->phaser width)
(clj->phaser height))))
(defn create-custom-layer
"A custom layer is centered on the game and maintains its aspect ratio as it scales up and down.
Parameters:
* flex-grid (Phaser.FlexGrid) - Targeted instance for method
* width (number) - Width of this layer in pixels.
* height (number) - Height of this layer in pixels.
* children (Array.<PIXI.DisplayObject>) {optional} - An array of children that are used to populate the FlexLayer.
Returns: Phaser.FlexLayer - The Layer object."
([flex-grid width height]
(phaser->clj
(.createCustomLayer flex-grid
(clj->phaser width)
(clj->phaser height))))
([flex-grid width height children]
(phaser->clj
(.createCustomLayer flex-grid
(clj->phaser width)
(clj->phaser height)
(clj->phaser children)))))
(defn create-fixed-layer
"A fixed layer is centered on the game and is the size of the required dimensions and is never scaled.
Parameters:
* flex-grid (Phaser.FlexGrid) - Targeted instance for method
* children (Array.<PIXI.DisplayObject>) {optional} - An array of children that are used to populate the FlexLayer.
Returns: Phaser.FlexLayer - The Layer object."
([flex-grid]
(phaser->clj
(.createFixedLayer flex-grid)))
([flex-grid children]
(phaser->clj
(.createFixedLayer flex-grid
(clj->phaser children)))))
(defn create-fluid-layer
"A fluid layer is centered on the game and maintains its aspect ratio as it scales up and down.
Parameters:
* flex-grid (Phaser.FlexGrid) - Targeted instance for method
* children (array) {optional} - An array of children that are used to populate the FlexLayer.
Returns: Phaser.FlexLayer - The Layer object."
([flex-grid]
(phaser->clj
(.createFluidLayer flex-grid)))
([flex-grid children]
(phaser->clj
(.createFluidLayer flex-grid
(clj->phaser children)))))
(defn create-full-layer
"A full layer is placed at 0,0 and extends to the full size of the game. Children are scaled according to the fluid ratios.
Parameters:
* flex-grid (Phaser.FlexGrid) - Targeted instance for method
* children (array) {optional} - An array of children that are used to populate the FlexLayer.
Returns: Phaser.FlexLayer - The Layer object."
([flex-grid]
(phaser->clj
(.createFullLayer flex-grid)))
([flex-grid children]
(phaser->clj
(.createFullLayer flex-grid
(clj->phaser children)))))
(defn debug
"Call in the render function to output the bounds rects."
([flex-grid]
(phaser->clj
(.debug flex-grid))))
(defn fit-sprite
"Fits a sprites width to the bounds.
Parameters:
* flex-grid (Phaser.FlexGrid) - Targeted instance for method
* sprite (Phaser.Sprite) - The Sprite to fit."
([flex-grid sprite]
(phaser->clj
(.fitSprite flex-grid
(clj->phaser sprite)))))
(defn on-resize
"Called when the game container changes dimensions.
Parameters:
* flex-grid (Phaser.FlexGrid) - Targeted instance for method
* width (number) - The new width of the game container.
* height (number) - The new height of the game container."
([flex-grid width height]
(phaser->clj
(.onResize flex-grid
(clj->phaser width)
(clj->phaser height)))))
(defn refresh
"Updates all internal vars such as the bounds and scale values."
([flex-grid]
(phaser->clj
(.refresh flex-grid))))
(defn reset
"Resets the layer children references"
([flex-grid]
(phaser->clj
(.reset flex-grid))))
(defn set-size
"Sets the core game size. This resets the w/h parameters and bounds.
Parameters:
* flex-grid (Phaser.FlexGrid) - Targeted instance for method
* width (number) - The new dimensions.
* height (number) - The new dimensions."
([flex-grid width height]
(phaser->clj
(.setSize flex-grid
(clj->phaser width)
(clj->phaser height))))) |
|
5b636b6eab720893ca4383d12541a36c6663dd3e2f072f110e466284b5f4b1c6 | clojure-interop/google-cloud-clients | TranslateRpc.clj | (ns com.google.cloud.translate.spi.v2.TranslateRpc
(:refer-clojure :only [require comment defn ->])
(:import [com.google.cloud.translate.spi.v2 TranslateRpc]))
(defn list-supported-languages
"Returns a list of the languages supported by Google Translation.
option-map - options to listing language translations - `java.util.Map`
returns: `java.util.List<com.google.api.services.translate.model.LanguagesResource>`"
(^java.util.List [^TranslateRpc this ^java.util.Map option-map]
(-> this (.listSupportedLanguages option-map))))
(defn detect
"Detects the language of the provided texts.
texts - the texts to translate - `java.util.List`
returns: a list of lists of detections, one list of detections for each provided text, in order - `java.util.List<java.util.List<com.google.api.services.translate.model.DetectionsResourceItems>>`"
(^java.util.List [^TranslateRpc this ^java.util.List texts]
(-> this (.detect texts))))
(defn translate
"Translates the provided texts.
texts - the texts to translate - `java.util.List`
option-map - options to text translation - `java.util.Map`
returns: a list of resources containing translation information, in the same order of the
provided texts - `java.util.List<com.google.api.services.translate.model.TranslationsResource>`"
(^java.util.List [^TranslateRpc this ^java.util.List texts ^java.util.Map option-map]
(-> this (.translate texts option-map))))
| null | https://raw.githubusercontent.com/clojure-interop/google-cloud-clients/80852d0496057c22f9cdc86d6f9ffc0fa3cd7904/com.google.cloud.translate/src/com/google/cloud/translate/spi/v2/TranslateRpc.clj | clojure | (ns com.google.cloud.translate.spi.v2.TranslateRpc
(:refer-clojure :only [require comment defn ->])
(:import [com.google.cloud.translate.spi.v2 TranslateRpc]))
(defn list-supported-languages
"Returns a list of the languages supported by Google Translation.
option-map - options to listing language translations - `java.util.Map`
returns: `java.util.List<com.google.api.services.translate.model.LanguagesResource>`"
(^java.util.List [^TranslateRpc this ^java.util.Map option-map]
(-> this (.listSupportedLanguages option-map))))
(defn detect
"Detects the language of the provided texts.
texts - the texts to translate - `java.util.List`
returns: a list of lists of detections, one list of detections for each provided text, in order - `java.util.List<java.util.List<com.google.api.services.translate.model.DetectionsResourceItems>>`"
(^java.util.List [^TranslateRpc this ^java.util.List texts]
(-> this (.detect texts))))
(defn translate
"Translates the provided texts.
texts - the texts to translate - `java.util.List`
option-map - options to text translation - `java.util.Map`
returns: a list of resources containing translation information, in the same order of the
provided texts - `java.util.List<com.google.api.services.translate.model.TranslationsResource>`"
(^java.util.List [^TranslateRpc this ^java.util.List texts ^java.util.Map option-map]
(-> this (.translate texts option-map))))
|
|
c64f1ccd678d0fdc530971311dff6a2daae49236bb611e77c1127b9fa0d09dbb | janestreet/bonsai | types.ml | open! Core
module type Name = sig
module Count : sig
type t
val zero : t
val succ : t -> t
end
type t
val to_string : t -> string
val create : unit -> t
val next : Count.t -> t * Count.t
include Comparable.S_binable with type t := t
include Sexpable.S with type t := t
end
module Default_id = struct
module Count = Int
module T = struct
type t =
| User of int
| Gen of int
[@@deriving bin_io, sexp, compare]
end
include T
include Comparable.Make_binable (T)
let count = ref 0
let create () =
count := !count + 1;
User !count
;;
let next count =
let count = Count.succ count in
Gen count, count
;;
let to_string = function
| User x -> Int.to_string x
| Gen x -> "gen_" ^ Int.to_string x
;;
end
module Make (Name : Name) = struct
(* This ugly recursive type / recursive module structure is
required in order to get sexp-deriving to work correctly *)
type kind =
| Bindings of
{ bindings : binding list
; last_body : computation
}
| Value of value
| Wrapping of
{ name : string
; introduces : Name.t list
; bodies : computation list
}
and binding =
{ bound : computation
; as_ : Name.t
}
and value_without_position =
| Fake
| Redirect of { name : Name.t }
| Named of Name.t
| Singleton
| Mapn of value list
and value =
{ value_kind : value_without_position
; value_here : Source_code_position.Stable.V1.t option
; value_id : Name.t
}
and computation =
{ kind : kind
; free_variables : Name.Set.t
; here : Source_code_position.Stable.V1.t option
}
[@@deriving sexp, compare]
module rec Kind : sig
type t = kind =
| Bindings of
{ bindings : binding list
; last_body : computation
}
| Value of value
| Wrapping of
{ name : string
; introduces : Name.t list
; bodies : computation list
}
[@@deriving sexp]
end =
Kind
and Binding : sig
type t = binding =
{ bound : computation
; as_ : Name.t
}
[@@deriving sexp, compare]
end = struct
type t = binding =
{ bound : computation
; as_ : Name.t
}
[@@deriving sexp, compare]
end
and Value : sig
type nonrec value_without_position = value_without_position =
| Fake
| Redirect of { name : Name.t }
| Named of Name.t
| Singleton
| Mapn of value list
[@@deriving sexp, compare]
and t = value =
{ value_kind : value_without_position
; value_here : Source_code_position.t option
; value_id : Name.t
}
[@@deriving sexp, compare]
end = struct
type nonrec value_without_position = value_without_position =
| Fake
| Redirect of { name : Name.t }
| Named of Name.t
| Singleton
| Mapn of value list
[@@deriving sexp, compare]
and t = value =
{ value_kind : value_without_position
; value_here : Source_code_position.Stable.V1.t option
; value_id : Name.t
}
[@@deriving sexp, compare]
end
and Computation : sig
type nonrec t = computation =
{ kind : kind
; free_variables : Name.Set.t
; here : Source_code_position.t option
}
[@@deriving sexp, compare]
end = struct
type nonrec t = computation =
{ kind : kind
; free_variables : Name.Set.t
; here : Source_code_position.Stable.V1.t option
}
[@@deriving sexp, compare]
end
class ['acc] fold =
object (self)
method name : Name.t -> 'acc -> 'acc = fun _ acc -> acc
method position : Source_code_position.t -> 'acc -> 'acc = fun _ acc -> acc
method value_kind : Value.value_without_position -> 'acc -> 'acc =
fun value_kind acc ->
match value_kind with
| Fake -> acc
| Redirect { name } -> self#name name acc
| Named name -> self#name name acc
| Singleton -> acc
| Mapn values ->
List.fold ~init:acc ~f:(fun acc value -> self#value value acc) values
method value : Value.t -> 'acc -> 'acc =
fun value acc ->
match value with
| { value_kind; value_here; value_id } ->
self#value_kind value_kind acc
|> fun acc ->
Option.value_map
value_here
~f:(fun value_here -> self#position value_here acc)
~default:acc
|> self#name value_id
method binding : Binding.t -> 'acc -> 'acc =
fun binding acc ->
let { bound : computation; as_ : Name.t } = binding in
self#computation bound acc |> self#name as_
method string : string -> 'acc -> 'acc = fun _ acc -> acc
method kind : Kind.t -> 'acc -> 'acc =
fun kind acc ->
match kind with
| Bindings { bindings; last_body } ->
List.fold ~init:acc bindings ~f:(Fn.flip self#binding)
|> self#computation last_body
| Value value -> self#value value acc
| Wrapping { name; introduces; bodies } ->
self#string name acc
|> fun acc ->
List.fold introduces ~init:acc ~f:(Fn.flip self#name)
|> fun acc -> List.fold bodies ~init:acc ~f:(Fn.flip self#computation)
method computation : Computation.t -> 'acc -> 'acc =
fun computation acc ->
let { kind : Kind.t
; free_variables : Name.Set.t
; here : Source_code_position.Stable.V1.t option
}
=
computation
in
self#kind kind acc
|> fun acc ->
Set.fold free_variables ~init:acc ~f:(Fn.flip self#name)
|> fun acc ->
Option.value_map here ~f:(fun here -> self#position here acc) ~default:acc
end
end
| null | https://raw.githubusercontent.com/janestreet/bonsai/4baeedc75bf73a0915e04dc02d8a49b78779e9b0/experimental/dagviz/src/types.ml | ocaml | This ugly recursive type / recursive module structure is
required in order to get sexp-deriving to work correctly | open! Core
module type Name = sig
module Count : sig
type t
val zero : t
val succ : t -> t
end
type t
val to_string : t -> string
val create : unit -> t
val next : Count.t -> t * Count.t
include Comparable.S_binable with type t := t
include Sexpable.S with type t := t
end
module Default_id = struct
module Count = Int
module T = struct
type t =
| User of int
| Gen of int
[@@deriving bin_io, sexp, compare]
end
include T
include Comparable.Make_binable (T)
let count = ref 0
let create () =
count := !count + 1;
User !count
;;
let next count =
let count = Count.succ count in
Gen count, count
;;
let to_string = function
| User x -> Int.to_string x
| Gen x -> "gen_" ^ Int.to_string x
;;
end
module Make (Name : Name) = struct
type kind =
| Bindings of
{ bindings : binding list
; last_body : computation
}
| Value of value
| Wrapping of
{ name : string
; introduces : Name.t list
; bodies : computation list
}
and binding =
{ bound : computation
; as_ : Name.t
}
and value_without_position =
| Fake
| Redirect of { name : Name.t }
| Named of Name.t
| Singleton
| Mapn of value list
and value =
{ value_kind : value_without_position
; value_here : Source_code_position.Stable.V1.t option
; value_id : Name.t
}
and computation =
{ kind : kind
; free_variables : Name.Set.t
; here : Source_code_position.Stable.V1.t option
}
[@@deriving sexp, compare]
module rec Kind : sig
type t = kind =
| Bindings of
{ bindings : binding list
; last_body : computation
}
| Value of value
| Wrapping of
{ name : string
; introduces : Name.t list
; bodies : computation list
}
[@@deriving sexp]
end =
Kind
and Binding : sig
type t = binding =
{ bound : computation
; as_ : Name.t
}
[@@deriving sexp, compare]
end = struct
type t = binding =
{ bound : computation
; as_ : Name.t
}
[@@deriving sexp, compare]
end
and Value : sig
type nonrec value_without_position = value_without_position =
| Fake
| Redirect of { name : Name.t }
| Named of Name.t
| Singleton
| Mapn of value list
[@@deriving sexp, compare]
and t = value =
{ value_kind : value_without_position
; value_here : Source_code_position.t option
; value_id : Name.t
}
[@@deriving sexp, compare]
end = struct
type nonrec value_without_position = value_without_position =
| Fake
| Redirect of { name : Name.t }
| Named of Name.t
| Singleton
| Mapn of value list
[@@deriving sexp, compare]
and t = value =
{ value_kind : value_without_position
; value_here : Source_code_position.Stable.V1.t option
; value_id : Name.t
}
[@@deriving sexp, compare]
end
and Computation : sig
type nonrec t = computation =
{ kind : kind
; free_variables : Name.Set.t
; here : Source_code_position.t option
}
[@@deriving sexp, compare]
end = struct
type nonrec t = computation =
{ kind : kind
; free_variables : Name.Set.t
; here : Source_code_position.Stable.V1.t option
}
[@@deriving sexp, compare]
end
class ['acc] fold =
object (self)
method name : Name.t -> 'acc -> 'acc = fun _ acc -> acc
method position : Source_code_position.t -> 'acc -> 'acc = fun _ acc -> acc
method value_kind : Value.value_without_position -> 'acc -> 'acc =
fun value_kind acc ->
match value_kind with
| Fake -> acc
| Redirect { name } -> self#name name acc
| Named name -> self#name name acc
| Singleton -> acc
| Mapn values ->
List.fold ~init:acc ~f:(fun acc value -> self#value value acc) values
method value : Value.t -> 'acc -> 'acc =
fun value acc ->
match value with
| { value_kind; value_here; value_id } ->
self#value_kind value_kind acc
|> fun acc ->
Option.value_map
value_here
~f:(fun value_here -> self#position value_here acc)
~default:acc
|> self#name value_id
method binding : Binding.t -> 'acc -> 'acc =
fun binding acc ->
let { bound : computation; as_ : Name.t } = binding in
self#computation bound acc |> self#name as_
method string : string -> 'acc -> 'acc = fun _ acc -> acc
method kind : Kind.t -> 'acc -> 'acc =
fun kind acc ->
match kind with
| Bindings { bindings; last_body } ->
List.fold ~init:acc bindings ~f:(Fn.flip self#binding)
|> self#computation last_body
| Value value -> self#value value acc
| Wrapping { name; introduces; bodies } ->
self#string name acc
|> fun acc ->
List.fold introduces ~init:acc ~f:(Fn.flip self#name)
|> fun acc -> List.fold bodies ~init:acc ~f:(Fn.flip self#computation)
method computation : Computation.t -> 'acc -> 'acc =
fun computation acc ->
let { kind : Kind.t
; free_variables : Name.Set.t
; here : Source_code_position.Stable.V1.t option
}
=
computation
in
self#kind kind acc
|> fun acc ->
Set.fold free_variables ~init:acc ~f:(Fn.flip self#name)
|> fun acc ->
Option.value_map here ~f:(fun here -> self#position here acc) ~default:acc
end
end
|
a7c3bdb26f678b74223435c14abd0e397408126108c856db5edc5b69a1796339 | ku-fpg/haskino | SemExample.hs | {-# OPTIONS_GHC -fplugin=System.Hardware.Haskino.ShallowDeepPlugin #-}
-------------------------------------------------------------------------------
-- |
Module : System . Hardware . Haskino . SamplePrograms . Rewrite.semExample
Copyright : ( c ) University of Kansas
-- License : BSD3
-- Stability : experimental
--
This is an example of using semaphores to communicate between two tasks .
One task gives a semaphore then delays for 2 seconds . The other task
waits for the semaphore then blinks the led rapidly 3 times .
-------------------------------------------------------------------------------
module Main where
import Data.Boolean
import Data.Boolean.Numbers
import Data.Word
import System.Hardware.Haskino
blinkDelay :: Word32
blinkDelay = 125
taskDelay :: Word32
taskDelay = 2000
semId :: Word8
semId = 0
count :: Word32
count = 3
led :: Word8
led = 13
myTask1 :: Word8 -> Arduino ()
myTask1 led = do
setPinMode led OUTPUT
myTask1'
where
myTask1' :: Arduino ()
myTask1' = do
takeSem semId
myTask1'' 0
myTask1'
myTask1'' :: Word32 -> Arduino ()
myTask1'' x = do
if x < count then do
digitalWrite led true
delayMillis blinkDelay
digitalWrite led false
delayMillis blinkDelay
myTask1'' (x + 1)
else return ()
myTask2 :: Arduino ()
myTask2 = do
myTask2' 0
where
myTask2' :: Word8 -> Arduino ()
myTask2' loopCount = do
giveSem semId
debug $ showB loopCount
delayMillis taskDelay
myTask2' (loopCount + 1)
initExample :: Arduino ()
initExample = do
-- Create the tasks
createTask 1 $ myTask1 led
createTask 2 myTask2
Schedule the tasks to start in 1 second , the second starting after the first
scheduleTask 1 1000
scheduleTask 2 1050
semExample :: IO ()
semExample = withArduino True "/dev/cu.usbmodem1421" $ do
initExample
-- Execute this function to generate C code to be used with the runtime.
compile :: IO ()
compile = compileProgram initExample "semExample.ino"
main :: IO ()
main = compile
| null | https://raw.githubusercontent.com/ku-fpg/haskino/9a0709c92c2da9b9371e292b00fd076e5539eb18/legacy/Rewrite/SemExample.hs | haskell | # OPTIONS_GHC -fplugin=System.Hardware.Haskino.ShallowDeepPlugin #
-----------------------------------------------------------------------------
|
License : BSD3
Stability : experimental
-----------------------------------------------------------------------------
Create the tasks
Execute this function to generate C code to be used with the runtime. | Module : System . Hardware . Haskino . SamplePrograms . Rewrite.semExample
Copyright : ( c ) University of Kansas
This is an example of using semaphores to communicate between two tasks .
One task gives a semaphore then delays for 2 seconds . The other task
waits for the semaphore then blinks the led rapidly 3 times .
module Main where
import Data.Boolean
import Data.Boolean.Numbers
import Data.Word
import System.Hardware.Haskino
blinkDelay :: Word32
blinkDelay = 125
taskDelay :: Word32
taskDelay = 2000
semId :: Word8
semId = 0
count :: Word32
count = 3
led :: Word8
led = 13
myTask1 :: Word8 -> Arduino ()
myTask1 led = do
setPinMode led OUTPUT
myTask1'
where
myTask1' :: Arduino ()
myTask1' = do
takeSem semId
myTask1'' 0
myTask1'
myTask1'' :: Word32 -> Arduino ()
myTask1'' x = do
if x < count then do
digitalWrite led true
delayMillis blinkDelay
digitalWrite led false
delayMillis blinkDelay
myTask1'' (x + 1)
else return ()
myTask2 :: Arduino ()
myTask2 = do
myTask2' 0
where
myTask2' :: Word8 -> Arduino ()
myTask2' loopCount = do
giveSem semId
debug $ showB loopCount
delayMillis taskDelay
myTask2' (loopCount + 1)
initExample :: Arduino ()
initExample = do
createTask 1 $ myTask1 led
createTask 2 myTask2
Schedule the tasks to start in 1 second , the second starting after the first
scheduleTask 1 1000
scheduleTask 2 1050
semExample :: IO ()
semExample = withArduino True "/dev/cu.usbmodem1421" $ do
initExample
compile :: IO ()
compile = compileProgram initExample "semExample.ino"
main :: IO ()
main = compile
|
50085c9efc088773aaff6a5599cb8fb65e23314ed8bec4ce8fc9248f77b9deae | CarlosMChica/HaskellBook | enumFromTo.hs | module EnumFromTo where
eftBool :: Bool -> Bool -> [Bool]
eftBool = eft'
eftOrd :: Ordering -> Ordering -> [Ordering]
eftOrd = eft'
eftInt :: Int -> Int -> [Int]
eftInt = eft'
eftChar :: Char -> Char -> String
eftChar = eft'
eft' :: (Ord a, Enum a) => a -> a -> [a]
eft' x y
| x > y = []
| otherwise = go x y []
where go x y acc
| x == y = acc ++ [x]
| otherwise = go (succ x) y (acc ++ [x])
| null | https://raw.githubusercontent.com/CarlosMChica/HaskellBook/86f82cf36cd00003b1a1aebf264e4b5d606ddfad/chapter9/enumFromTo.hs | haskell | module EnumFromTo where
eftBool :: Bool -> Bool -> [Bool]
eftBool = eft'
eftOrd :: Ordering -> Ordering -> [Ordering]
eftOrd = eft'
eftInt :: Int -> Int -> [Int]
eftInt = eft'
eftChar :: Char -> Char -> String
eftChar = eft'
eft' :: (Ord a, Enum a) => a -> a -> [a]
eft' x y
| x > y = []
| otherwise = go x y []
where go x y acc
| x == y = acc ++ [x]
| otherwise = go (succ x) y (acc ++ [x])
|
|
ac80526687aa26906cbf691242fcfa3978b5df9c34155eb73b92d8dd25b6e105 | PrincetonUniversity/lucid | tofinoCore.ml | Core syntax with a few extra nodes for the Tofino .
Core syntax with two new declarations that are
useful for organizing core Lucid code into a
form that is easy to translate into P4 .
The new declarations in tofinocore are :
1 . a " main handler " is the union of all
handlers . It is used to represent
the single P4 ingress block that
implements all of the handlers .
2 . a " labeled statement " that can be
executed by calling the label .
It is basically an argumentless
function with dynamically scoped
variables .
It is used to represent P4 actions
that are sets of statements .
Open question : do we want to add these nodes
directly to coreSyntax ? They are generally
useful for translation into P4 , but may not
be useful for translation to non - P4 targets .
Core syntax with two new declarations that are
useful for organizing core Lucid code into a
form that is easy to translate into P4.
The new declarations in tofinocore are:
1. a "main handler" is the union of all
handlers. It is used to represent
the single P4 ingress block that
implements all of the handlers.
2. a "labeled statement" that can be
executed by calling the label.
It is basically an argumentless
function with dynamically scoped
variables.
It is used to represent P4 actions
that are sets of statements.
Open question: do we want to add these nodes
directly to coreSyntax? They are generally
useful for translation into P4, but may not
be useful for translation to non-P4 targets. *)
open CoreSyntax
module Ctx = Collections.CidMap
exception Error of string
let error s = raise (Error s)
type id = [%import: (Id.t[@opaque])]
and cid = [%import: (Cid.t[@opqaue])]
and sp = [%import: Span.t]
and z = [%import: (Z.t[@opaque])]
and zint = [%import: (Integer.t[@with Z.t := (Z.t [@opaque])])]
and location = int
(* All sizes should be inlined and precomputed *)
and size = int
and sizes = size list
and raw_ty = [%import: CoreSyntax.raw_ty]
and tbl_ty = [%import: CoreSyntax.tbl_ty]
and acn_ty = [%import: CoreSyntax.acn_ty]
and func_ty = [%import: CoreSyntax.func_ty]
and ty = [%import: CoreSyntax.ty]
and tys = [%import: CoreSyntax.tys]
and op = [%import: CoreSyntax.op]
and pat = [%import: CoreSyntax.pat]
and v = [%import: CoreSyntax.v]
and event = [%import: CoreSyntax.event]
and value = [%import: CoreSyntax.value]
and pragma = [%import: CoreSyntax.pragma]
and e = [%import: CoreSyntax.e]
and exp = [%import: CoreSyntax.exp]
and branch = [%import: CoreSyntax.branch]
and gen_type = [%import: CoreSyntax.gen_type]
and s = [%import: CoreSyntax.s]
and tbl_def = [%import: CoreSyntax.tbl_def]
and tbl_match_out_param = [%import: CoreSyntax.tbl_match_out_param]
and tbl_match = [%import: CoreSyntax.tbl_match]
and tbl_entry = [%import: CoreSyntax.tbl_entry]
and statement = [%import: CoreSyntax.statement]
and params = [%import: CoreSyntax.params]
and body = [%import: CoreSyntax.body]
and event_sort = [%import: CoreSyntax.event_sort]
and handler_sort = [%import: CoreSyntax.handler_sort]
and conditional_return = [%import: CoreSyntax.conditional_return]
and complex_body = [%import: CoreSyntax.complex_body]
and memop_body = [%import: CoreSyntax.memop_body]
and memop = [%import: CoreSyntax.memop]
and action_body = [%import: CoreSyntax.action_body]
and action = [%import: CoreSyntax.action]
multicast i d space :
0 - 511 : recirculated event cloning
512 - 1024 : port flooding groups
1024 > : user groups
0 - 511: recirculated event cloning
512 - 1024: port flooding groups
1024 > : user groups *)
and td =
| TDGlobal of id * ty * exp
| TDEvent of id * event_sort * params
| TDHandler of id * handler_sort * body
| TDMemop of memop
| TDExtern of id * ty
| TDAction of action
| TDMain of main_handler
In P4 , actions and tables are " open functions " .
they can use variables that are defined in
the environment where they are _ called _ . The
cid list is the variables it can modify
they can use variables that are defined in
the environment where they are _called_. The
cid list is the variables it can modify *)
| TDOpenFunction of id * params * statement * (cid list)
and main_handler = {
main_id : id;
hdl_selector : (id * ty);
hdl_enum : (id * int) list;
hdl_params : (id * params) list;
default_hdl : id option;
shared_locals : (id * ty) list;
main_body : statement list;
event_output : event_output;
}
and event_output = {
(* count the number of recirc / self events
generated on this path *)
recirc_mcid_var : (id * ty);
(* all possible sequences of events
that this program can generate. *)
ev_gen_seqs : (id list list);
}
and tdecl = {td:td; tdspan: sp; tdpragma : pragma option;}
and tdecls = tdecl list
[@@deriving
visitors
{ name = "s_iter"
; variety = "iter"
; polymorphic = false
; data = true
; concrete = true
; nude = false
}
, visitors
{ name = "s_map"
; variety = "map"
; polymorphic = false
; data = true
; concrete = true
; nude = false
}]
let tdecl_of_decl decl =
match decl.d with
| DGlobal(id, ty, exp) -> {td=TDGlobal(id, ty, exp); tdspan=decl.dspan; tdpragma = decl.dpragma;}
| DEvent (id, es, ps) -> {td=TDEvent(id, es, ps); tdspan=decl.dspan; tdpragma = decl.dpragma;}
| DHandler(i,s,b) -> {td=TDHandler(i,s,b); tdspan=decl.dspan; tdpragma = decl.dpragma;}
| DMemop(m) -> {td=TDMemop(m); tdspan=decl.dspan; tdpragma = decl.dpragma;}
| DExtern(i, t) -> {td=TDExtern(i, t); tdspan=decl.dspan; tdpragma = decl.dpragma;}
| DAction(a) -> {td=TDAction(a); tdspan = decl.dspan; tdpragma = decl.dpragma;}
;;
module Seq = Core.Sequence
let dbgstr_of_ids cids = List.map (Id.to_string) cids |> String.concat ", ";;
let seq_eq eq s1 s2 =
match ((Seq.length s1) = (Seq.length s2)) with
| true ->
Seq.fold
(Seq.zip s1 s2)
~init:true
~f:(fun a (e1, e2) -> (a && (eq e1 e2)))
| false -> false
;;
let idseq_eq = seq_eq (Id.equal)
let dprint_seqs res =
Seq.iter res
~f:(fun pathseq ->
let plen = Seq.length pathseq in
print_endline@@"path: ("^(string_of_int plen)^") elements "^(dbgstr_of_ids (Seq.to_list pathseq)))
;;
(* find all the possible sequences of events that get generated *)
let rec find_ev_gen_seqs statement : id Core.Sequence.t Core.Sequence.t =
match statement.s with
| SGen(_, ev_exp) -> (
match ev_exp.e with
| ECall(ev_cid, _) -> (
let res = Seq.of_list [Seq.of_list [Cid.to_id ev_cid]] in
res
)
| _ -> error "[find_ev_gen_seqs] event should be a call by this point"
)
| SIf(_, s1, s2) ->
(* make sure we only find the _unique_ paths *)
let res = Seq.append (find_ev_gen_seqs s1) (find_ev_gen_seqs s2) in
let res = MiscUtils.unique_seq_of idseq_eq res in
(* print_endline ("IF"); *)
dprint_seqs res ;
res
(* |> (MiscUtils.unique_seq_of idseq_eq) *)
| SMatch(_, branches) ->
let res = List.fold_left
(fun seqs (_, stmt) -> Seq.append seqs (find_ev_gen_seqs stmt))
Seq.empty
branches
in
let res = MiscUtils.unique_seq_of idseq_eq res in
(* print_endline ("MATCH"); *)
dprint_seqs res ;
res
(* |> (MiscUtils.unique_seq_of idseq_eq) *)
| SSeq(s1, s2) -> (
let seqs_s1 = find_ev_gen_seqs s1 in
let seqs_s2 = find_ev_gen_seqs s2 in
for each sequence in s1 :
for each sequence in s2 :
create a new sequence : s1@s2
for each sequence in s2:
create a new sequence: s1@s2
*)
let res = match Seq.length seqs_s1, Seq.length seqs_s2 with
| (0, 0) -> seqs_s1
| (_, 0) -> seqs_s1
| (0, _) -> seqs_s2
| (_, _) -> (
(* print_endline ("seqs_s1 and seqs_s2 are both nonempty."); *)
Seq.fold seqs_s1
~init:(Seq.of_list [Seq.empty])
~f:(fun merged_seqs s1_seq ->
Seq.fold seqs_s2
~init:merged_seqs
~f:(fun merged_seqs s2_seq ->
let res = Seq.append merged_seqs (Seq.of_list [(Seq.append s1_seq s2_seq)]) in
(* print_endline ("inner loop result: "); *)
dprint_seqs res ;
res
)
)
)
in
(* print_endline "AFTER FOLD"; *)
dprint_seqs res ;
let =
Seq.map ( fun s - > Seq.append seq s )
in
Seq.map seqs (fun s -> Seq.append seq s)
in
*)
let res = Seq.fold
seqs_s2
~init:(Seq.of_list [ Seq.empty ] )
~f:(fun new_seqs_s1 seq - >
Seq.append new_seqs_s1 ( update_seqs seqs_s1 seq )
)
in
seqs_s2
~init:(Seq.of_list [Seq.empty])
~f:(fun new_seqs_s1 seq ->
Seq.append new_seqs_s1 (update_seqs seqs_s1 seq)
)
in *)
let res = MiscUtils.unique_seq_of idseq_eq res in
print_endline " AFTER UNIQUE " ;
dprint_seqs res ;
( match ( s1.s , s2.s ) with
| SGen ( _ ) , _ - > (
print_endline ( " SEQ " ) ;
print_endline ( " --------- " ) ;
print_endline ( CorePrinting.stmt_to_string statement ) ;
print_endline ( " --------- " ) ;
print_endline ( " s1 results : " ) ;
dprint_seqs seqs_s1 ;
print_endline ( " s2 results : " ) ;
dprint_seqs seqs_s2 ;
print_endline ( " merged results : " ) ;
;
exit 1 ;
)
| _ , ( _ ) - > (
print_endline ( " SEQ " ) ;
print_endline ( " --------- " ) ;
print_endline ( CorePrinting.stmt_to_string statement ) ;
print_endline ( " --------- " ) ;
print_endline ( " s1 results : " ) ;
dprint_seqs seqs_s1 ;
print_endline ( " s2 results : " ) ;
dprint_seqs seqs_s2 ;
print_endline ( " merged results : " ) ;
;
exit 1 ;
)
| _ , _ - > ( )
) ;
dprint_seqs res;
(match (s1.s, s2.s) with
| SGen(_), _ -> (
print_endline ("SEQ");
print_endline ("---------");
print_endline (CorePrinting.stmt_to_string statement);
print_endline ("---------");
print_endline ("s1 results: ");
dprint_seqs seqs_s1;
print_endline ("s2 results: ");
dprint_seqs seqs_s2;
print_endline ("merged results: ");
dprint_seqs res;
exit 1;
)
| _, SGen(_) -> (
print_endline ("SEQ");
print_endline ("---------");
print_endline (CorePrinting.stmt_to_string statement);
print_endline ("---------");
print_endline ("s1 results: ");
dprint_seqs seqs_s1;
print_endline ("s2 results: ");
dprint_seqs seqs_s2;
print_endline ("merged results: ");
dprint_seqs res;
exit 1;
)
| _, _ -> ()
); *)
(* let res = res |> (MiscUtils.unique_seq_of idseq_eq) in *)
res
List.fold_left
( fun new_seqs_s1 seq - >
Seq.append new_seqs_s1 ( update_seqs seqs_s1 seq )
)
Seq.of_list [ Seq.empty ]
seqs_s2
in
(fun new_seqs_s1 seq ->
Seq.append new_seqs_s1 (update_seqs seqs_s1 seq)
)
Seq.of_list [Seq.empty]
seqs_s2
in *)
Seq.of_list [ Seq.empty ]
)
(* no events in rest *)
| _ -> Seq.of_list [Seq.empty]
;;
let find_ev_gen_lists statement =
let res = Seq.map (find_ev_gen_seqs statement)
~f:(fun inner_seq -> Seq.to_list inner_seq)
|> Seq.to_list
in
print_endline ( " [ find_ev_gen_lists ] result : " ) ;
List.iter ( fun idlist - >
idlist | > List.map Id.to_string | > String.concat " , " | > print_endline ;
)
res ;
exit 1 ;
List.iter (fun idlist ->
idlist |> List.map Id.to_string |> String.concat ", " |> print_endline;
)
res;
exit 1; *)
res
;;
(* generate the main handler *)
let add_main_handler decls =
let main_id = Id.create "main_handler" in
let hdl_selector = (Id.create "event_id", (ty (TInt 8))) in
let hdl_enum, hdl_params, default_hdl, _=
let acc (enum, all_params, default_hdl, cur_ev_num) dec =
match dec.td with
| TDEvent(id, ev_sort, params) ->
let default_hdl = match ev_sort, default_hdl with
| EEntry _, None -> Some id
| EEntry _, Some _ -> error "[add_main_handler] only 1 entry event is supported"
| _, _ -> default_hdl
in
(id, cur_ev_num)::enum,
(id, params)::all_params,
default_hdl,
cur_ev_num+1
| _ -> enum, all_params, default_hdl, cur_ev_num
in
List.fold_left acc ([], [], None, 1) decls
in
let main_body =
let handler_branches branches dec =
match dec.td with
| TDHandler(hdl_id, _, (_, stmt)) -> (
let hdl_num = match List.assoc_opt hdl_id hdl_enum with
| None -> error "[generate_merged_handler] could not find handler id in enum. Do events and handlers have the same internal IDs?"
| Some hdl_num ->
hdl_num
in
branches@[([PNum (Z.of_int(hdl_num))], stmt)]
)
| _ -> branches
in
let ehdl_selector = var_sp
(Cid.id (fst hdl_selector))
((snd hdl_selector))
(Span.default)
in
let branches = List.fold_left handler_branches [] decls in
[smatch [ehdl_selector] branches]
in
let rec erase_handler_bodies decls =
match decls with
| [] -> []
| hd::tl -> (
match hd with
| {td=TDHandler(i, s, (p, _));} -> (
{hd with td=TDHandler(i, s, (p, snoop));}::(erase_handler_bodies tl)
)
| _ -> hd::(erase_handler_bodies tl)
)
in
let event_output = {
recirc_mcid_var = (Id.create "recirc_mcid", (ty (TInt 16)));
ev_gen_seqs = find_ev_gen_lists (List.hd main_body) |> MiscUtils.unique_list_of;
}
in
let tds =(erase_handler_bodies decls)
@[{td=
TDMain{main_id;hdl_selector;hdl_enum;hdl_params;default_hdl; main_body; shared_locals=[];event_output;}
;tdspan=Span.default; tdpragma = None;}]
in
tds
;;
let tdecls_of_decls decls =
let translated_decls = List.map tdecl_of_decl decls in
add_main_handler translated_decls
;;
(* generate the main handler, for a program where
the event gets compiled to a control block library *)
let add_lib_handler decls =
let hdl_selector = (Id.create "event_id", (ty (TInt 8))) in
let hdl_enum, hdl_params, default_hdl, _=
let acc (enum, all_params, default_hdl, cur_ev_num) dec =
match dec.td with
| TDEvent(id, ev_sort, params) ->
let default_hdl = match ev_sort, default_hdl with
| EEntry _, None -> Some id
| EEntry _, Some _ -> error "[add_main_handler] only 1 entry event is supported"
| _, _ -> default_hdl
in
(id, cur_ev_num)::enum,
(id, params)::all_params,
default_hdl,
cur_ev_num+1
| _ -> enum, all_params, default_hdl, cur_ev_num
in
List.fold_left acc ([], [], None, 1) decls
in
let main_body, main_id = List.filter_map
(fun dec ->
match dec.td with
| TDHandler(hdl_id, _, (_, stmt)) -> Some(stmt, hdl_id)
| _ -> None
)
decls
|> List.hd
in
let rec erase_handler_bodies decls =
match decls with
| [] -> []
| hd::tl -> (
match hd with
| {td=TDHandler(i, s, (p, _));} -> (
{hd with td=TDHandler(i, s, (p, snoop));}::(erase_handler_bodies tl)
)
| _ -> hd::(erase_handler_bodies tl)
)
in
let event_output = {
recirc_mcid_var = (Id.create "recirc_mcid", (ty (TInt 16)));
ev_gen_seqs = find_ev_gen_lists main_body |> MiscUtils.unique_list_of;
}
in
let tds =(erase_handler_bodies decls)
@[{td=
TDMain{main_id;hdl_selector;hdl_enum;hdl_params;default_hdl; main_body=[main_body]; shared_locals=[];event_output;}
;tdspan=Span.default; tdpragma = None;}]
in
tds
;;
let tdecls_of_decl_for_control_lib decls =
let translated_decls = List.map tdecl_of_decl decls in
add_lib_handler translated_decls
;;
(* get the main handler's signature *)
let main ds =
let main_decs = List.filter_map (fun dec ->
match dec.td with
| TDMain main_sig -> Some main_sig
| _ -> None
)
ds
in
match (main_decs) with
| [main_hdl] -> main_hdl
| [] -> error "[main] no main handler."
| _ -> error "[main] more than 1 main handler."
;;
(* replace the main handler's signature *)
let update_main ds new_main_d =
List.map
(fun dec -> match dec.td with
| TDMain _ -> {dec with td=TDMain(new_main_d);} | _ -> dec)
ds
;;
(* add a shared local to the main handler *)
let add_shared_local ds tmp_id tmp_ty =
(* let tmp_v = value_to_exp (vint 0 tmp_sz) in *)
let tmp_e = var_sp (Cid.id tmp_id) tmp_ty Span.default in
let old_main = (main ds) in
let new_main = {old_main with
shared_locals=((tmp_id, tmp_ty)::old_main.shared_locals);}
in
tmp_e, update_main ds new_main
;;
(* get assoc list of memops *)
let memops tds =
List.filter_map
(fun dec -> match dec.td with
|TDMemop(m) -> Some ((m.mid, m))
| _ -> None
)
tds
;;
(* returns assoc list: (arrayid : (slot width, num slots)) list *)
let array_dimensions tds =
List.filter_map
(fun dec -> match dec.td with
| TDGlobal(
id,
{raw_ty=TName(ty_cid, sizes, true); _},
{e=ECall(_, num_slots::_)}) -> (
match (Cid.names ty_cid |> List.hd) with
| "Array" ->
let num_slots = InterpHelpers.int_from_exp num_slots in
Some((id, (List.hd sizes, num_slots)))
| "PairArray" ->
let num_slots = InterpHelpers.int_from_exp num_slots in
Some((id, (2*(List.hd sizes), num_slots)))
| _ -> None
)
| _ -> None
)
tds
;;
(*** output ***)
let decl_of_tdecl tdecl =
match tdecl.td with
| TDGlobal(id, ty, exp) -> decl_pragma (DGlobal(id, ty, exp)) tdecl.tdspan tdecl.tdpragma
| TDEvent (id, es, ps) -> {d=DEvent(id, es, ps); dspan=tdecl.tdspan; dpragma = tdecl.tdpragma;}
| TDHandler(i, s, b) -> {d=DHandler(i, s, b); dspan=tdecl.tdspan; dpragma = tdecl.tdpragma; }
| TDMemop(m) -> {d=DMemop(m); dspan=tdecl.tdspan; dpragma = tdecl.tdpragma; }
| TDExtern(i, t) -> {d=DExtern(i, t); dspan=tdecl.tdspan; dpragma = tdecl.tdpragma; }
| TDAction(a) -> {d=DAction(a); dspan=tdecl.tdspan; dpragma = tdecl.tdpragma; }
| _ -> error "[decl_of_tdecl] not a directly translatable decl"
;;
let main_to_string mainsig =
"// shared locals:\n"
^(
(List.map
(fun (id, ty) ->
(CorePrinting.ty_to_string ty)
^" "
^(CorePrinting.id_to_string id)
^";")
mainsig.shared_locals)
|> String.concat "\n")
^"\n// MAIN HANDLER \n"
^"handler main(...){\n"
^((List.mapi
(fun i stg_stmt ->
"// Stage "^(string_of_int i)^"\n"
^(CorePrinting.statement_to_string stg_stmt))
)
mainsig.main_body
|> String.concat "\n"
)
^"}"
;;
let tdecl_to_string tdec =
match tdec.td with
| TDMain(mainsig) ->
main_to_string mainsig
| TDOpenFunction(id, _, stmt, _) ->
"labeled_statement "^(CorePrinting.id_to_string id)^"{\n"
^CorePrinting.statement_to_string stmt
^"\n}"
| _ -> CorePrinting.decl_to_string (decl_of_tdecl tdec)
;;
let tdecls_to_string tdecs =
List.map tdecl_to_string tdecs |>
String.concat "\n"
;;
let dump_prog fn tds =
let outf = (open_out fn) in
Printf.fprintf outf "%s" (tdecls_to_string tds);
flush outf
;;
( draft )
To go back to core syntax :
1 . create a handler for the multihandler with parameters :
handle_selector::shared_locals@(flatten handler_sigs.params )
2 . convert each existing event into an event that calls the
multihandler :
handler foo(int a , int b ) {
generate multihandler (
( find foo multihandler.handler_sigs).hselect_key ,
List.map ( fun hid , hsig - > if hid = i d then [ a ; b ] else hsig.hdefaultargs )
@sharedlocal_defaults
)
}
3 . delete the shared local and multihandler
4 . convert everything else back directly .
Note : we could do something fancier , and extract the tables / rules relevant
to each handler from the multihandler .
(draft)
To go back to core syntax:
1. create a handler for the multihandler with parameters:
handle_selector::shared_locals@(flatten handler_sigs.params)
2. convert each existing event into an event that calls the
multihandler:
handler foo(int a, int b) {
generate multihandler(
(find foo multihandler.handler_sigs).hselect_key,
List.map (fun hid, hsig -> if hid = id then [a; b] else hsig.hdefaultargs)
@sharedlocal_defaults
)
}
3. delete the shared local and multihandler
4. convert everything else back directly.
Note: we could do something fancier, and extract the tables / rules relevant
to each handler from the multihandler.
*)
let decls_of_tdecls tdecls =
let mh_rec =
match ( List.filter_map
( fun tdec - > match tdec.td with
| TMultiHandler(mh_rec ) - > Some(mh_rec )
| _ - > None )
tdecls ) with
| [ mh_rec ] - > mh_rec
| _ - > error " no multihandler in tdecls -- nothing to convert back . "
in
let shared_locals = List.filter_map
( fun tdec - > match tdec.td with
| , ty , default ) - >
Some((id , ty ) , default )
| _ - > None
)
tdecls
in
let shared_local_params , shared_local_defaults = List.split shared_locals in
let mh_params = ( mh_rec.handler_selector::shared_local_params )
@(List . )
let dmultihandler =
handler_sp
multihandler.id
let mh_rec =
match (List.filter_map
(fun tdec -> match tdec.td with
| TMultiHandler(mh_rec) -> Some(mh_rec)
| _ -> None)
tdecls) with
| [mh_rec] -> mh_rec
| _ -> error "no multihandler in tdecls -- nothing to convert back."
in
let shared_locals = List.filter_map
(fun tdec -> match tdec.td with
| TSharedLocal(id, ty, default) ->
Some((id, ty), default)
| _ -> None
)
tdecls
in
let shared_local_params, shared_local_defaults = List.split shared_locals in
let mh_params = (mh_rec.handler_selector::shared_local_params)
@(List.)
let dmultihandler =
handler_sp
multihandler.id
*)
;;
| null | https://raw.githubusercontent.com/PrincetonUniversity/lucid/8ca93fd803caaa80cf2e301154791564dd3fed7e/src/lib/backend/tofinoCore.ml | ocaml | All sizes should be inlined and precomputed
count the number of recirc / self events
generated on this path
all possible sequences of events
that this program can generate.
find all the possible sequences of events that get generated
make sure we only find the _unique_ paths
print_endline ("IF");
|> (MiscUtils.unique_seq_of idseq_eq)
print_endline ("MATCH");
|> (MiscUtils.unique_seq_of idseq_eq)
print_endline ("seqs_s1 and seqs_s2 are both nonempty.");
print_endline ("inner loop result: ");
print_endline "AFTER FOLD";
let res = res |> (MiscUtils.unique_seq_of idseq_eq) in
no events in rest
generate the main handler
generate the main handler, for a program where
the event gets compiled to a control block library
get the main handler's signature
replace the main handler's signature
add a shared local to the main handler
let tmp_v = value_to_exp (vint 0 tmp_sz) in
get assoc list of memops
returns assoc list: (arrayid : (slot width, num slots)) list
** output ** | Core syntax with a few extra nodes for the Tofino .
Core syntax with two new declarations that are
useful for organizing core Lucid code into a
form that is easy to translate into P4 .
The new declarations in tofinocore are :
1 . a " main handler " is the union of all
handlers . It is used to represent
the single P4 ingress block that
implements all of the handlers .
2 . a " labeled statement " that can be
executed by calling the label .
It is basically an argumentless
function with dynamically scoped
variables .
It is used to represent P4 actions
that are sets of statements .
Open question : do we want to add these nodes
directly to coreSyntax ? They are generally
useful for translation into P4 , but may not
be useful for translation to non - P4 targets .
Core syntax with two new declarations that are
useful for organizing core Lucid code into a
form that is easy to translate into P4.
The new declarations in tofinocore are:
1. a "main handler" is the union of all
handlers. It is used to represent
the single P4 ingress block that
implements all of the handlers.
2. a "labeled statement" that can be
executed by calling the label.
It is basically an argumentless
function with dynamically scoped
variables.
It is used to represent P4 actions
that are sets of statements.
Open question: do we want to add these nodes
directly to coreSyntax? They are generally
useful for translation into P4, but may not
be useful for translation to non-P4 targets. *)
open CoreSyntax
module Ctx = Collections.CidMap
exception Error of string
let error s = raise (Error s)
type id = [%import: (Id.t[@opaque])]
and cid = [%import: (Cid.t[@opqaue])]
and sp = [%import: Span.t]
and z = [%import: (Z.t[@opaque])]
and zint = [%import: (Integer.t[@with Z.t := (Z.t [@opaque])])]
and location = int
and size = int
and sizes = size list
and raw_ty = [%import: CoreSyntax.raw_ty]
and tbl_ty = [%import: CoreSyntax.tbl_ty]
and acn_ty = [%import: CoreSyntax.acn_ty]
and func_ty = [%import: CoreSyntax.func_ty]
and ty = [%import: CoreSyntax.ty]
and tys = [%import: CoreSyntax.tys]
and op = [%import: CoreSyntax.op]
and pat = [%import: CoreSyntax.pat]
and v = [%import: CoreSyntax.v]
and event = [%import: CoreSyntax.event]
and value = [%import: CoreSyntax.value]
and pragma = [%import: CoreSyntax.pragma]
and e = [%import: CoreSyntax.e]
and exp = [%import: CoreSyntax.exp]
and branch = [%import: CoreSyntax.branch]
and gen_type = [%import: CoreSyntax.gen_type]
and s = [%import: CoreSyntax.s]
and tbl_def = [%import: CoreSyntax.tbl_def]
and tbl_match_out_param = [%import: CoreSyntax.tbl_match_out_param]
and tbl_match = [%import: CoreSyntax.tbl_match]
and tbl_entry = [%import: CoreSyntax.tbl_entry]
and statement = [%import: CoreSyntax.statement]
and params = [%import: CoreSyntax.params]
and body = [%import: CoreSyntax.body]
and event_sort = [%import: CoreSyntax.event_sort]
and handler_sort = [%import: CoreSyntax.handler_sort]
and conditional_return = [%import: CoreSyntax.conditional_return]
and complex_body = [%import: CoreSyntax.complex_body]
and memop_body = [%import: CoreSyntax.memop_body]
and memop = [%import: CoreSyntax.memop]
and action_body = [%import: CoreSyntax.action_body]
and action = [%import: CoreSyntax.action]
multicast i d space :
0 - 511 : recirculated event cloning
512 - 1024 : port flooding groups
1024 > : user groups
0 - 511: recirculated event cloning
512 - 1024: port flooding groups
1024 > : user groups *)
and td =
| TDGlobal of id * ty * exp
| TDEvent of id * event_sort * params
| TDHandler of id * handler_sort * body
| TDMemop of memop
| TDExtern of id * ty
| TDAction of action
| TDMain of main_handler
In P4 , actions and tables are " open functions " .
they can use variables that are defined in
the environment where they are _ called _ . The
cid list is the variables it can modify
they can use variables that are defined in
the environment where they are _called_. The
cid list is the variables it can modify *)
| TDOpenFunction of id * params * statement * (cid list)
and main_handler = {
main_id : id;
hdl_selector : (id * ty);
hdl_enum : (id * int) list;
hdl_params : (id * params) list;
default_hdl : id option;
shared_locals : (id * ty) list;
main_body : statement list;
event_output : event_output;
}
and event_output = {
recirc_mcid_var : (id * ty);
ev_gen_seqs : (id list list);
}
and tdecl = {td:td; tdspan: sp; tdpragma : pragma option;}
and tdecls = tdecl list
[@@deriving
visitors
{ name = "s_iter"
; variety = "iter"
; polymorphic = false
; data = true
; concrete = true
; nude = false
}
, visitors
{ name = "s_map"
; variety = "map"
; polymorphic = false
; data = true
; concrete = true
; nude = false
}]
let tdecl_of_decl decl =
match decl.d with
| DGlobal(id, ty, exp) -> {td=TDGlobal(id, ty, exp); tdspan=decl.dspan; tdpragma = decl.dpragma;}
| DEvent (id, es, ps) -> {td=TDEvent(id, es, ps); tdspan=decl.dspan; tdpragma = decl.dpragma;}
| DHandler(i,s,b) -> {td=TDHandler(i,s,b); tdspan=decl.dspan; tdpragma = decl.dpragma;}
| DMemop(m) -> {td=TDMemop(m); tdspan=decl.dspan; tdpragma = decl.dpragma;}
| DExtern(i, t) -> {td=TDExtern(i, t); tdspan=decl.dspan; tdpragma = decl.dpragma;}
| DAction(a) -> {td=TDAction(a); tdspan = decl.dspan; tdpragma = decl.dpragma;}
;;
module Seq = Core.Sequence
let dbgstr_of_ids cids = List.map (Id.to_string) cids |> String.concat ", ";;
let seq_eq eq s1 s2 =
match ((Seq.length s1) = (Seq.length s2)) with
| true ->
Seq.fold
(Seq.zip s1 s2)
~init:true
~f:(fun a (e1, e2) -> (a && (eq e1 e2)))
| false -> false
;;
let idseq_eq = seq_eq (Id.equal)
let dprint_seqs res =
Seq.iter res
~f:(fun pathseq ->
let plen = Seq.length pathseq in
print_endline@@"path: ("^(string_of_int plen)^") elements "^(dbgstr_of_ids (Seq.to_list pathseq)))
;;
let rec find_ev_gen_seqs statement : id Core.Sequence.t Core.Sequence.t =
match statement.s with
| SGen(_, ev_exp) -> (
match ev_exp.e with
| ECall(ev_cid, _) -> (
let res = Seq.of_list [Seq.of_list [Cid.to_id ev_cid]] in
res
)
| _ -> error "[find_ev_gen_seqs] event should be a call by this point"
)
| SIf(_, s1, s2) ->
let res = Seq.append (find_ev_gen_seqs s1) (find_ev_gen_seqs s2) in
let res = MiscUtils.unique_seq_of idseq_eq res in
dprint_seqs res ;
res
| SMatch(_, branches) ->
let res = List.fold_left
(fun seqs (_, stmt) -> Seq.append seqs (find_ev_gen_seqs stmt))
Seq.empty
branches
in
let res = MiscUtils.unique_seq_of idseq_eq res in
dprint_seqs res ;
res
| SSeq(s1, s2) -> (
let seqs_s1 = find_ev_gen_seqs s1 in
let seqs_s2 = find_ev_gen_seqs s2 in
for each sequence in s1 :
for each sequence in s2 :
create a new sequence : s1@s2
for each sequence in s2:
create a new sequence: s1@s2
*)
let res = match Seq.length seqs_s1, Seq.length seqs_s2 with
| (0, 0) -> seqs_s1
| (_, 0) -> seqs_s1
| (0, _) -> seqs_s2
| (_, _) -> (
Seq.fold seqs_s1
~init:(Seq.of_list [Seq.empty])
~f:(fun merged_seqs s1_seq ->
Seq.fold seqs_s2
~init:merged_seqs
~f:(fun merged_seqs s2_seq ->
let res = Seq.append merged_seqs (Seq.of_list [(Seq.append s1_seq s2_seq)]) in
dprint_seqs res ;
res
)
)
)
in
dprint_seqs res ;
let =
Seq.map ( fun s - > Seq.append seq s )
in
Seq.map seqs (fun s -> Seq.append seq s)
in
*)
let res = Seq.fold
seqs_s2
~init:(Seq.of_list [ Seq.empty ] )
~f:(fun new_seqs_s1 seq - >
Seq.append new_seqs_s1 ( update_seqs seqs_s1 seq )
)
in
seqs_s2
~init:(Seq.of_list [Seq.empty])
~f:(fun new_seqs_s1 seq ->
Seq.append new_seqs_s1 (update_seqs seqs_s1 seq)
)
in *)
let res = MiscUtils.unique_seq_of idseq_eq res in
print_endline " AFTER UNIQUE " ;
dprint_seqs res ;
( match ( s1.s , s2.s ) with
| SGen ( _ ) , _ - > (
print_endline ( " SEQ " ) ;
print_endline ( " --------- " ) ;
print_endline ( CorePrinting.stmt_to_string statement ) ;
print_endline ( " --------- " ) ;
print_endline ( " s1 results : " ) ;
dprint_seqs seqs_s1 ;
print_endline ( " s2 results : " ) ;
dprint_seqs seqs_s2 ;
print_endline ( " merged results : " ) ;
;
exit 1 ;
)
| _ , ( _ ) - > (
print_endline ( " SEQ " ) ;
print_endline ( " --------- " ) ;
print_endline ( CorePrinting.stmt_to_string statement ) ;
print_endline ( " --------- " ) ;
print_endline ( " s1 results : " ) ;
dprint_seqs seqs_s1 ;
print_endline ( " s2 results : " ) ;
dprint_seqs seqs_s2 ;
print_endline ( " merged results : " ) ;
;
exit 1 ;
)
| _ , _ - > ( )
) ;
dprint_seqs res;
(match (s1.s, s2.s) with
| SGen(_), _ -> (
print_endline ("SEQ");
print_endline ("---------");
print_endline (CorePrinting.stmt_to_string statement);
print_endline ("---------");
print_endline ("s1 results: ");
dprint_seqs seqs_s1;
print_endline ("s2 results: ");
dprint_seqs seqs_s2;
print_endline ("merged results: ");
dprint_seqs res;
exit 1;
)
| _, SGen(_) -> (
print_endline ("SEQ");
print_endline ("---------");
print_endline (CorePrinting.stmt_to_string statement);
print_endline ("---------");
print_endline ("s1 results: ");
dprint_seqs seqs_s1;
print_endline ("s2 results: ");
dprint_seqs seqs_s2;
print_endline ("merged results: ");
dprint_seqs res;
exit 1;
)
| _, _ -> ()
); *)
res
List.fold_left
( fun new_seqs_s1 seq - >
Seq.append new_seqs_s1 ( update_seqs seqs_s1 seq )
)
Seq.of_list [ Seq.empty ]
seqs_s2
in
(fun new_seqs_s1 seq ->
Seq.append new_seqs_s1 (update_seqs seqs_s1 seq)
)
Seq.of_list [Seq.empty]
seqs_s2
in *)
Seq.of_list [ Seq.empty ]
)
| _ -> Seq.of_list [Seq.empty]
;;
let find_ev_gen_lists statement =
let res = Seq.map (find_ev_gen_seqs statement)
~f:(fun inner_seq -> Seq.to_list inner_seq)
|> Seq.to_list
in
print_endline ( " [ find_ev_gen_lists ] result : " ) ;
List.iter ( fun idlist - >
idlist | > List.map Id.to_string | > String.concat " , " | > print_endline ;
)
res ;
exit 1 ;
List.iter (fun idlist ->
idlist |> List.map Id.to_string |> String.concat ", " |> print_endline;
)
res;
exit 1; *)
res
;;
let add_main_handler decls =
let main_id = Id.create "main_handler" in
let hdl_selector = (Id.create "event_id", (ty (TInt 8))) in
let hdl_enum, hdl_params, default_hdl, _=
let acc (enum, all_params, default_hdl, cur_ev_num) dec =
match dec.td with
| TDEvent(id, ev_sort, params) ->
let default_hdl = match ev_sort, default_hdl with
| EEntry _, None -> Some id
| EEntry _, Some _ -> error "[add_main_handler] only 1 entry event is supported"
| _, _ -> default_hdl
in
(id, cur_ev_num)::enum,
(id, params)::all_params,
default_hdl,
cur_ev_num+1
| _ -> enum, all_params, default_hdl, cur_ev_num
in
List.fold_left acc ([], [], None, 1) decls
in
let main_body =
let handler_branches branches dec =
match dec.td with
| TDHandler(hdl_id, _, (_, stmt)) -> (
let hdl_num = match List.assoc_opt hdl_id hdl_enum with
| None -> error "[generate_merged_handler] could not find handler id in enum. Do events and handlers have the same internal IDs?"
| Some hdl_num ->
hdl_num
in
branches@[([PNum (Z.of_int(hdl_num))], stmt)]
)
| _ -> branches
in
let ehdl_selector = var_sp
(Cid.id (fst hdl_selector))
((snd hdl_selector))
(Span.default)
in
let branches = List.fold_left handler_branches [] decls in
[smatch [ehdl_selector] branches]
in
let rec erase_handler_bodies decls =
match decls with
| [] -> []
| hd::tl -> (
match hd with
| {td=TDHandler(i, s, (p, _));} -> (
{hd with td=TDHandler(i, s, (p, snoop));}::(erase_handler_bodies tl)
)
| _ -> hd::(erase_handler_bodies tl)
)
in
let event_output = {
recirc_mcid_var = (Id.create "recirc_mcid", (ty (TInt 16)));
ev_gen_seqs = find_ev_gen_lists (List.hd main_body) |> MiscUtils.unique_list_of;
}
in
let tds =(erase_handler_bodies decls)
@[{td=
TDMain{main_id;hdl_selector;hdl_enum;hdl_params;default_hdl; main_body; shared_locals=[];event_output;}
;tdspan=Span.default; tdpragma = None;}]
in
tds
;;
let tdecls_of_decls decls =
let translated_decls = List.map tdecl_of_decl decls in
add_main_handler translated_decls
;;
let add_lib_handler decls =
let hdl_selector = (Id.create "event_id", (ty (TInt 8))) in
let hdl_enum, hdl_params, default_hdl, _=
let acc (enum, all_params, default_hdl, cur_ev_num) dec =
match dec.td with
| TDEvent(id, ev_sort, params) ->
let default_hdl = match ev_sort, default_hdl with
| EEntry _, None -> Some id
| EEntry _, Some _ -> error "[add_main_handler] only 1 entry event is supported"
| _, _ -> default_hdl
in
(id, cur_ev_num)::enum,
(id, params)::all_params,
default_hdl,
cur_ev_num+1
| _ -> enum, all_params, default_hdl, cur_ev_num
in
List.fold_left acc ([], [], None, 1) decls
in
let main_body, main_id = List.filter_map
(fun dec ->
match dec.td with
| TDHandler(hdl_id, _, (_, stmt)) -> Some(stmt, hdl_id)
| _ -> None
)
decls
|> List.hd
in
let rec erase_handler_bodies decls =
match decls with
| [] -> []
| hd::tl -> (
match hd with
| {td=TDHandler(i, s, (p, _));} -> (
{hd with td=TDHandler(i, s, (p, snoop));}::(erase_handler_bodies tl)
)
| _ -> hd::(erase_handler_bodies tl)
)
in
let event_output = {
recirc_mcid_var = (Id.create "recirc_mcid", (ty (TInt 16)));
ev_gen_seqs = find_ev_gen_lists main_body |> MiscUtils.unique_list_of;
}
in
let tds =(erase_handler_bodies decls)
@[{td=
TDMain{main_id;hdl_selector;hdl_enum;hdl_params;default_hdl; main_body=[main_body]; shared_locals=[];event_output;}
;tdspan=Span.default; tdpragma = None;}]
in
tds
;;
let tdecls_of_decl_for_control_lib decls =
let translated_decls = List.map tdecl_of_decl decls in
add_lib_handler translated_decls
;;
let main ds =
let main_decs = List.filter_map (fun dec ->
match dec.td with
| TDMain main_sig -> Some main_sig
| _ -> None
)
ds
in
match (main_decs) with
| [main_hdl] -> main_hdl
| [] -> error "[main] no main handler."
| _ -> error "[main] more than 1 main handler."
;;
let update_main ds new_main_d =
List.map
(fun dec -> match dec.td with
| TDMain _ -> {dec with td=TDMain(new_main_d);} | _ -> dec)
ds
;;
let add_shared_local ds tmp_id tmp_ty =
let tmp_e = var_sp (Cid.id tmp_id) tmp_ty Span.default in
let old_main = (main ds) in
let new_main = {old_main with
shared_locals=((tmp_id, tmp_ty)::old_main.shared_locals);}
in
tmp_e, update_main ds new_main
;;
let memops tds =
List.filter_map
(fun dec -> match dec.td with
|TDMemop(m) -> Some ((m.mid, m))
| _ -> None
)
tds
;;
let array_dimensions tds =
List.filter_map
(fun dec -> match dec.td with
| TDGlobal(
id,
{raw_ty=TName(ty_cid, sizes, true); _},
{e=ECall(_, num_slots::_)}) -> (
match (Cid.names ty_cid |> List.hd) with
| "Array" ->
let num_slots = InterpHelpers.int_from_exp num_slots in
Some((id, (List.hd sizes, num_slots)))
| "PairArray" ->
let num_slots = InterpHelpers.int_from_exp num_slots in
Some((id, (2*(List.hd sizes), num_slots)))
| _ -> None
)
| _ -> None
)
tds
;;
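(* array_dimensions reports, for each Array or PairArray global, a pair of
   (cell width in bits, number of cells). For example, a global declared with
   a 32-bit cell type and 1024 slots maps to (32, 1024); a PairArray with the
   same parameters reports a doubled cell width, (64, 1024). *)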
let decl_of_tdecl tdecl =
match tdecl.td with
| TDGlobal(id, ty, exp) -> decl_pragma (DGlobal(id, ty, exp)) tdecl.tdspan tdecl.tdpragma
| TDEvent (id, es, ps) -> {d=DEvent(id, es, ps); dspan=tdecl.tdspan; dpragma = tdecl.tdpragma;}
| TDHandler(i, s, b) -> {d=DHandler(i, s, b); dspan=tdecl.tdspan; dpragma = tdecl.tdpragma; }
| TDMemop(m) -> {d=DMemop(m); dspan=tdecl.tdspan; dpragma = tdecl.tdpragma; }
| TDExtern(i, t) -> {d=DExtern(i, t); dspan=tdecl.tdspan; dpragma = tdecl.tdpragma; }
| TDAction(a) -> {d=DAction(a); dspan=tdecl.tdspan; dpragma = tdecl.tdpragma; }
| _ -> error "[decl_of_tdecl] not a directly translatable decl"
;;
let main_to_string mainsig =
"// shared locals:\n"
^(
(List.map
(fun (id, ty) ->
(CorePrinting.ty_to_string ty)
^" "
^(CorePrinting.id_to_string id)
^";")
mainsig.shared_locals)
|> String.concat "\n")
^"\n// MAIN HANDLER \n"
^"handler main(...){\n"
^((List.mapi
(fun i stg_stmt ->
"// Stage "^(string_of_int i)^"\n"
^(CorePrinting.statement_to_string stg_stmt))
)
mainsig.main_body
|> String.concat "\n"
)
^"}"
;;
let tdecl_to_string tdec =
match tdec.td with
| TDMain(mainsig) ->
main_to_string mainsig
| TDOpenFunction(id, _, stmt, _) ->
"labeled_statement "^(CorePrinting.id_to_string id)^"{\n"
^CorePrinting.statement_to_string stmt
^"\n}"
| _ -> CorePrinting.decl_to_string (decl_of_tdecl tdec)
;;
let tdecls_to_string tdecs =
List.map tdecl_to_string tdecs |>
String.concat "\n"
;;
let dump_prog fn tds =
let outf = (open_out fn) in
Printf.fprintf outf "%s" (tdecls_to_string tds);
flush outf
;;
(* (draft)
To go back to core syntax:
1. create a handler for the multihandler with parameters:
handle_selector::shared_locals@(flatten handler_sigs.params)
2. convert each existing event into an event that calls the
multihandler:
handler foo(int a, int b) {
generate multihandler(
(find foo multihandler.handler_sigs).hselect_key,
List.map (fun hid, hsig -> if hid = id then [a; b] else hsig.hdefaultargs)
@sharedlocal_defaults
)
}
3. delete the shared local and multihandler
4. convert everything else back directly.
Note: we could do something fancier, and extract the tables / rules relevant
to each handler from the multihandler.
*)
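(* The commented-out sketch below is a partial attempt at steps 1-3 of the
   plan above: it looks up the multihandler record, collects the shared
   locals with their defaults, and starts rebuilding the parameter list, but
   it was never finished (note the dangling "@(List.)"). *)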
(*
let decls_of_tdecls tdecls =
let mh_rec =
match (List.filter_map
(fun tdec -> match tdec.td with
| TMultiHandler(mh_rec) -> Some(mh_rec)
| _ -> None)
tdecls) with
| [mh_rec] -> mh_rec
| _ -> error "no multihandler in tdecls -- nothing to convert back."
in
let shared_locals = List.filter_map
(fun tdec -> match tdec.td with
| TSharedLocal(id, ty, default) ->
Some((id, ty), default)
| _ -> None
)
tdecls
in
let shared_local_params, shared_local_defaults = List.split shared_locals in
let mh_params = (mh_rec.handler_selector::shared_local_params)
@(List.)
let dmultihandler =
handler_sp
multihandler.id
*)
;;
|
77937c6484b48ee7ac1ccd3daf6ce0aa00f206c27f2c39619b9c89b1b325fb71 | TrustInSoft/tis-interpreter | property.ml | Modified by TrustInSoft
(**************************************************************************)
(*                                                                        *)
(*  This file is part of Frama-C.                                         *)
(*                                                                        *)
(*  Copyright (C) 2007-2015                                               *)
(*    CEA (Commissariat à l'énergie atomique et aux énergies              *)
(*         alternatives)                                                  *)
(*                                                                        *)
(*  you can redistribute it and/or modify it under the terms of the GNU   *)
(*  Lesser General Public License as published by the Free Software       *)
(*  Foundation, version 2.1.                                              *)
(*                                                                        *)
(*  It is distributed in the hope that it will be useful,                 *)
(*  but WITHOUT ANY WARRANTY; without even the implied warranty of        *)
(*  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the         *)
(*  GNU Lesser General Public License for more details.                   *)
(*                                                                        *)
(*  See the GNU Lesser General Public License version 2.1                 *)
(*  for more details (enclosed in the file licenses/LGPLv2.1).            *)
(*                                                                        *)
(**************************************************************************)
open Cil_types
open Cil_datatype
type behavior_or_loop =
Id_contract of Datatype.String.Set.t * funbehavior
| Id_loop of code_annotation
type identified_complete =
kernel_function * kinstr * Datatype.String.Set.t * string list
type identified_disjoint = identified_complete
type identified_code_annotation =
kernel_function * stmt * code_annotation
type identified_allocation =
kernel_function
* kinstr
* behavior_or_loop
* (identified_term list * identified_term list)
type identified_assigns =
kernel_function
* kinstr
* behavior_or_loop
* identified_term from list
type identified_from =
kernel_function
* kinstr
* behavior_or_loop
* (identified_term from (* * identified_term list *) )
type identified_decrease =
kernel_function * kinstr * code_annotation option * term variant
type identified_behavior =
kernel_function * kinstr * Datatype.String.Set.t * funbehavior
type predicate_kind =
| PKRequires of funbehavior
| PKAssumes of funbehavior
| PKEnsures of funbehavior * termination_kind
| PKTerminates
let pretty_predicate_kind fmt = function
| PKRequires _ -> Format.pp_print_string fmt "requires"
| PKAssumes _ -> Format.pp_print_string fmt "assumes"
| PKEnsures(_, tk) ->
Format.pp_print_string fmt
(match tk with
| Normal -> "ensures"
| Exits -> "exits"
| Breaks -> "breaks"
| Continues -> "continues"
| Returns -> "returns")
| PKTerminates -> Format.pp_print_string fmt "terminates"
type identified_predicate =
predicate_kind * kernel_function * kinstr * Cil_types.identified_predicate
type program_point = Before | After
type identified_reachable = kernel_function option * kinstr * program_point
type identified_type_invariant = string * typ * predicate named * location
type identified_global_invariant = string * predicate named * location
and identified_axiomatic = string * identified_property list
and identified_lemma =
string * logic_label list * string list * predicate named * location
and identified_axiom = identified_lemma
and identified_instance =
kernel_function option * kinstr * identified_property
and identified_property =
| IPPredicate of identified_predicate
| IPAxiom of identified_axiom
| IPAxiomatic of identified_axiomatic
| IPLemma of identified_lemma
| IPBehavior of identified_behavior
| IPComplete of identified_complete
| IPDisjoint of identified_disjoint
| IPCodeAnnot of identified_code_annotation
| IPAllocation of identified_allocation
| IPAssigns of identified_assigns
| IPFrom of identified_from
| IPDecrease of identified_decrease
| IPReachable of identified_reachable
| IPPropertyInstance of identified_instance
| IPTypeInvariant of identified_type_invariant
| IPGlobalInvariant of identified_global_invariant
| IPOther of string * kernel_function option * kinstr
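(* identified_property enumerates every kind of ACSL annotation that can carry
   a validity status: contract clauses (requires, assumes, ensures, assigns,
   from, allocates, complete/disjoint behaviors), code annotations, global
   annotations (axioms, lemmas, axiomatics, type and global invariants),
   variants, reachability facts, instances of a property at a given program
   point, and an escape hatch (IPOther) for plugin-specific properties. *)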
let get_kinstr = function
| IPPredicate (_,_,ki,_)
| IPBehavior(_, ki,_,_)
| IPComplete (_,ki,_,_)
| IPDisjoint(_,ki,_,_)
| IPAllocation (_,ki,_,_)
| IPAssigns (_,ki,_,_)
| IPFrom(_,ki,_,_)
| IPReachable (_, ki, _)
| IPDecrease (_,ki,_,_)
| IPPropertyInstance (_, ki, _) -> ki
| IPAxiom _
| IPAxiomatic _
| IPLemma _ -> Kglobal
| IPOther(_,_,ki) -> ki
| IPCodeAnnot (_,s,_) -> Kstmt s
| IPTypeInvariant _ | IPGlobalInvariant _ -> Kglobal
let get_kf = function
| IPPredicate (_,kf,_,_)
| IPBehavior(kf,_,_,_)
| IPCodeAnnot (kf,_,_)
| IPComplete (kf,_,_,_)
| IPDisjoint(kf,_,_,_)
| IPAllocation(kf,_,_,_)
| IPAssigns(kf,_,_,_)
| IPFrom(kf,_,_,_)
| IPDecrease (kf,_,_,_) -> Some kf
| IPAxiom _
| IPAxiomatic _
| IPLemma _ -> None
| IPReachable (kfopt, _, _)
| IPPropertyInstance (kfopt, _, _)
| IPOther(_,kfopt,_) -> kfopt
| IPTypeInvariant _ | IPGlobalInvariant _ -> None
let loc_of_kf_ki kf = function
| Kstmt s -> Cil_datatype.Stmt.loc s
| Kglobal -> Kernel_function.get_location kf
let rec location = function
| IPPredicate (_,_,_,ip) -> ip.ip_loc
| IPBehavior(kf,ki, _,_)
| IPComplete (kf,ki,_,_)
| IPDisjoint(kf,ki,_,_)
| IPReachable(Some kf, ki, _)
| IPPropertyInstance (Some kf, ki, _) -> loc_of_kf_ki kf ki
| IPPropertyInstance (None, Kstmt s, _)
| IPReachable(None, Kstmt s, _) -> Cil_datatype.Stmt.loc s
| IPCodeAnnot (_,s,ca) -> (
match Cil_datatype.Code_annotation.loc ca with
| None -> Cil_datatype.Stmt.loc s
| Some loc -> loc)
| IPPropertyInstance (None, Kglobal, _)
| IPReachable(None, Kglobal, _) -> Cil_datatype.Location.unknown
| IPAssigns(kf,ki,_,a) ->
(match a with
| [] -> loc_of_kf_ki kf ki
| (t,_) :: _ -> t.it_content.term_loc)
| IPAllocation(kf,ki,_,fa) ->
(match fa with
| [],[] -> loc_of_kf_ki kf ki
| (t :: _),_
| _,(t :: _) -> t.it_content.term_loc)
| IPFrom(_,_,_,(t,_)) -> t.it_content.term_loc
| IPDecrease (_,_,_,(t,_)) -> t.term_loc
| IPAxiom (_,_,_,_,loc) -> loc
| IPAxiomatic (_,l) ->
(match l with
| [] -> Cil_datatype.Location.unknown
| p :: _ -> location p)
| IPLemma (_,_,_,_,loc) -> loc
| IPOther(_,kf,ki) ->
(match kf with
| None -> Cil_datatype.Location.unknown
| Some kf -> loc_of_kf_ki kf ki)
| IPTypeInvariant(_,_,_,loc) | IPGlobalInvariant(_,_,loc) -> loc
(* Pretty information about the localization of an IPPropertyInstance *)
let pretty_instance_location fmt (kfopt, ki) =
match kfopt, ki with
| None, Kglobal -> Format.pp_print_string fmt "at global scope"
| Some kf, Kglobal ->
Format.fprintf fmt "in function %a" Kernel_function.pretty kf
| None, Kstmt stmt -> Format.fprintf fmt "at stmt %d" stmt.sid
| Some kf, Kstmt stmt
when Kernel_function.(equal kf (find_englobing_kf stmt)) ->
Format.fprintf fmt "at stmt %d" stmt.sid
| Some kf, Kstmt stmt ->
Format.fprintf fmt "at stmt %d and function %a"
stmt.sid Kernel_function.pretty kf
let get_pk_behavior = function
| PKRequires b | PKAssumes b | PKEnsures (b,_) -> Some b
| PKTerminates -> None
let get_behavior = function
| IPPredicate (pk,_,_,_) -> get_pk_behavior pk
| IPBehavior(_, _, _, b) -> Some b
| IPAllocation(_,_,Id_contract (_,b),_)
| IPAssigns(_,_,Id_contract (_,b),_)
| IPFrom(_,_,Id_contract (_,b),_) -> Some b
| IPAllocation(_,_,Id_loop _,_)
| IPAssigns(_,_,Id_loop _,_)
| IPFrom(_,_,Id_loop _,_)
| IPAxiom _
| IPAxiomatic _
| IPLemma _
| IPCodeAnnot (_,_,_)
| IPComplete (_,_,_,_)
| IPDisjoint(_,_,_,_)
| IPDecrease _
| IPReachable _
| IPPropertyInstance _
| IPTypeInvariant _
| IPGlobalInvariant _
| IPOther _ -> None
include Datatype.Make_with_collections
(struct
include Datatype.Serializable_undefined
type t = identified_property
let name = "Property.t"
let reprs = [ IPAxiom ("",[],[],Logic_const.ptrue,Location.unknown) ]
let mem_project = Datatype.never_any_project
let equal_opt eq a b =
match a,b with
| None,None -> true
| Some _,None | None,Some _ -> false
| Some x , Some y -> eq x y
let compare_opt cmp a b =
match a,b with
| None,None -> 0
| None,Some _ -> (-1)
| Some _,None -> 1
| Some x,Some y -> cmp x y
let pp_active fmt active =
let sep = ref false in
let print_one a =
Format.fprintf fmt "%s%s" (if !sep then ", " else "") a;
sep:=true
in
Datatype.String.Set.iter print_one active
let rec pretty fmt = function
| IPPredicate (kind,_,_,p) ->
Format.fprintf fmt "%a@ %a"
pretty_predicate_kind kind Cil_printer.pp_identified_predicate p
| IPAxiom (s,_,_,_,_) -> Format.fprintf fmt "axiom@ %s" s
| IPAxiomatic(s, _) -> Format.fprintf fmt "axiomatic@ %s" s
| IPLemma (s,_,_,_,_) -> Format.fprintf fmt "lemma@ %s" s
| IPTypeInvariant(s,ty,_,_) ->
Format.fprintf fmt "invariant@ %s for type %a" s Cil_printer.pp_typ ty
| IPGlobalInvariant(s,_,_) ->
Format.fprintf fmt "global invariant@ %s" s
| IPBehavior(_kf, ki, active, b) ->
if Cil.is_default_behavior b then
Format.pp_print_string fmt "default behavior"
else
Format.fprintf fmt "behavior %s" b.b_name;
(match ki with
| Kstmt s -> Format.fprintf fmt " for statement %d" s.sid
| Kglobal -> ());
pp_active fmt active
| IPCodeAnnot(_, _, a) -> Cil_printer.pp_code_annotation fmt a
| IPComplete(_, _, active, l) ->
Format.fprintf fmt "complete@ %a"
(Pretty_utils.pp_list ~sep:","
(fun fmt s -> Format.fprintf fmt "@ %s" s))
l;
pp_active fmt active
| IPDisjoint(_, _, active, l) ->
Format.fprintf fmt "disjoint@ %a"
(Pretty_utils.pp_list ~sep:","
(fun fmt s -> Format.fprintf fmt " %s" s))
l;
pp_active fmt active
| IPAllocation(_, _, _, (f,a)) ->
Cil_printer.pp_allocation fmt (FreeAlloc(f,a))
| IPAssigns(_, _, _, l) -> Cil_printer.pp_assigns fmt (Writes l)
| IPFrom (_,_,_, f) -> Cil_printer.pp_from fmt f
| IPDecrease(_, _, None,v) -> Cil_printer.pp_decreases fmt v
| IPDecrease(_, _, _,v) -> Cil_printer.pp_variant fmt v
| IPReachable(None, Kstmt _, _) -> assert false
| IPReachable(None, Kglobal, _) ->
Format.fprintf fmt "reachability of entry point"
| IPReachable(Some kf, Kglobal, _) ->
Format.fprintf fmt "reachability of function %a" Kf.pretty kf
| IPReachable(Some kf, Kstmt stmt, ba) ->
Format.fprintf fmt "reachability %s stmt %a in %a"
(match ba with Before -> "of" | After -> "post")
Cil_datatype.Location.pretty_line (Cil_datatype.Stmt.loc stmt)
Kf.pretty kf
| IPPropertyInstance (kfopt, ki, ip) ->
Format.fprintf fmt "status of '%a'%t %a"
pretty ip
(fun fmt -> match get_kf ip with
| Some kf -> Format.fprintf fmt " of %a" Kernel_function.pretty kf
| None -> ())
pretty_instance_location (kfopt, ki)
| IPOther(s,_,_) -> Format.pp_print_string fmt s
let rec hash =
let hash_bhv_loop = function
| Id_contract (a,b) -> (0, Hashtbl.hash (a,b.b_name))
| Id_loop ca -> (1, ca.annot_id)
in
function
| IPPredicate (_,_,_,x) -> Hashtbl.hash (1, x.ip_id)
| IPAxiom (x,_,_,_,_) -> Hashtbl.hash (2, (x:string))
| IPAxiomatic (x,_) -> Hashtbl.hash (3, (x:string))
| IPLemma (x,_,_,_,_) -> Hashtbl.hash (4, (x:string))
| IPCodeAnnot(_,_, ca) -> Hashtbl.hash (5, ca.annot_id)
| IPComplete(f, ki, x, y) ->
(* complete list is more likely to discriminate than active list. *)
Hashtbl.hash
(6, Kf.hash f, Kinstr.hash ki,
(y:string list), (x:Datatype.String.Set.t))
| IPDisjoint(f, ki, x, y) ->
Hashtbl.hash
(7, Kf.hash f, Kinstr.hash ki,
(y: string list), (x:Datatype.String.Set.t))
| IPAssigns(f, ki, b, _l) ->
Hashtbl.hash (8, Kf.hash f, Kinstr.hash ki, hash_bhv_loop b)
| IPFrom(kf,ki,b,(t,_)) ->
Hashtbl.hash
(9, Kf.hash kf, Kinstr.hash ki,
hash_bhv_loop b, Identified_term.hash t)
| IPDecrease(kf, ki, _ca, _v) ->
(* At most one loop variant per statement anyway, no
   need to discriminate against the code annotation itself *)
Hashtbl.hash (10, Kf.hash kf, Kinstr.hash ki)
| IPBehavior(kf, s, a, b) ->
Hashtbl.hash
(11, Kf.hash kf, Kinstr.hash s,
(b.b_name:string), (a:Datatype.String.Set.t))
| IPReachable(kf, ki, ba) ->
Hashtbl.hash(12, Extlib.may_map Kf.hash ~dft:0 kf,
Kinstr.hash ki, Hashtbl.hash ba)
| IPAllocation(f, ki, b, _fa) ->
Hashtbl.hash (13, Kf.hash f, Kinstr.hash ki, hash_bhv_loop b)
| IPPropertyInstance (kf_caller, ki, ip) ->
Hashtbl.hash (14, Extlib.opt_hash Kf.hash kf_caller,
Kinstr.hash ki, hash ip)
| IPOther(s,_,_) -> Hashtbl.hash (15, (s:string))
| IPTypeInvariant(s,_,_,_) -> Hashtbl.hash (16, (s:string))
| IPGlobalInvariant(s,_,_) -> Hashtbl.hash (17, (s:string))
let rec equal p1 p2 =
let eq_bhv (f1,ki1,b1) (f2,ki2,b2) =
Kf.equal f1 f2 && Kinstr.equal ki1 ki2
&&
(match b1, b2 with
| Id_loop ca1, Id_loop ca2 ->
ca1.annot_id = ca2.annot_id
| Id_contract (a1,b1), Id_contract (a2,b2) ->
Datatype.String.Set.equal a1 a2 &&
Datatype.String.equal b1.b_name b2.b_name
| Id_loop _, Id_contract _
| Id_contract _, Id_loop _ -> false)
in
match p1, p2 with
| IPPredicate (_,_,_,s1), IPPredicate (_,_,_,s2) -> s1.ip_id = s2.ip_id
| IPAxiom (s1,_,_,_,_), IPAxiom (s2,_,_,_,_)
| IPAxiomatic(s1, _), IPAxiomatic(s2, _)
| IPTypeInvariant(s1,_,_,_), IPTypeInvariant(s2,_,_,_)
| IPGlobalInvariant(s1,_,_), IPGlobalInvariant(s2,_,_)
| IPLemma (s1,_,_,_,_), IPLemma (s2,_,_,_,_) ->
Datatype.String.equal s1 s2
| IPCodeAnnot(_,_,ca1), IPCodeAnnot(_,_,ca2) ->
ca1.annot_id = ca2.annot_id
| IPComplete(f1, ki1, a1, x1), IPComplete(f2, ki2, a2, x2)
| IPDisjoint(f1, ki1, a1, x1), IPDisjoint(f2, ki2, a2, x2) ->
Kf.equal f1 f2 && Kinstr.equal ki1 ki2 && a1 = a2 && x1 = x2
| IPAllocation (f1, ki1, b1, _), IPAllocation (f2, ki2, b2, _) ->
eq_bhv (f1,ki1,b1) (f2,ki2,b2)
| IPAssigns (f1, ki1, b1, _), IPAssigns (f2, ki2, b2, _) ->
eq_bhv (f1,ki1,b1) (f2,ki2,b2)
| IPFrom (f1,ki1,b1,(t1,_)), IPFrom (f2, ki2,b2,(t2,_)) ->
eq_bhv (f1,ki1,b1) (f2,ki2,b2) && t1.it_id = t2.it_id
| IPDecrease(f1, ki1, _, _), IPDecrease(f2, ki2, _, _) ->
Kf.equal f1 f2 && Kinstr.equal ki1 ki2
| IPReachable(kf1, ki1, ba1), IPReachable(kf2, ki2, ba2) ->
Extlib.opt_equal Kf.equal kf1 kf2 && Kinstr.equal ki1 ki2 && ba1 = ba2
| IPBehavior(f1, k1, a1, b1), IPBehavior(f2, k2, a2, b2) ->
Kf.equal f1 f2
&& Kinstr.equal k1 k2
&& Datatype.String.Set.equal a1 a2
&& Datatype.String.equal b1.b_name b2.b_name
| IPOther(s1,kf1,ki1), IPOther(s2,kf2,ki2) ->
Datatype.String.equal s1 s2
&& Kinstr.equal ki1 ki2
&& equal_opt Kf.equal kf1 kf2
| IPPropertyInstance (kf1, ki1, ip1),
IPPropertyInstance (kf2, ki2, ip2) ->
Extlib.opt_equal Kernel_function.equal kf1 kf2 &&
Kinstr.equal ki1 ki2 && equal ip1 ip2
| (IPPredicate _ | IPAxiom _ | IPAxiomatic _ | IPLemma _
| IPCodeAnnot _ | IPComplete _ | IPDisjoint _ | IPAssigns _
| IPFrom _ | IPDecrease _ | IPBehavior _ | IPReachable _
| IPAllocation _ | IPOther _ | IPPropertyInstance _
| IPTypeInvariant _ | IPGlobalInvariant _), _ -> false
let rec compare x y =
let cmp_bhv (f1,ki1,b1) (f2,ki2,b2) =
let n = Kf.compare f1 f2 in
if n = 0 then
let n = Kinstr.compare ki1 ki2 in
if n = 0 then
match b1, b2 with
| Id_contract (a1,b1), Id_contract (a2,b2) ->
let n = Datatype.String.compare b1.b_name b2.b_name in
if n = 0 then Datatype.String.Set.compare a1 a2 else n
| Id_loop ca1, Id_loop ca2 ->
Datatype.Int.compare ca1.annot_id ca2.annot_id
| Id_contract _, Id_loop _ -> -1
| Id_loop _, Id_contract _ -> 1
else n
else n
in
match x, y with
| IPPredicate (_,_,_,s1), IPPredicate (_,_,_,s2) ->
Datatype.Int.compare s1.ip_id s2.ip_id
| IPCodeAnnot(_,_,ca1), IPCodeAnnot(_,_,ca2) ->
Datatype.Int.compare ca1.annot_id ca2.annot_id
| IPBehavior(f1, k1, a1, b1), IPBehavior(f2, k2, a2, b2) ->
cmp_bhv (f1, k1, Id_contract (a1,b1)) (f2, k2, Id_contract (a2,b2))
| IPComplete(f1, ki1, a1, x1), IPComplete(f2, ki2, a2, x2)
| IPDisjoint(f1, ki1, a1, x1), IPDisjoint(f2, ki2, a2, x2) ->
let n = Kf.compare f1 f2 in
if n = 0 then
let n = Kinstr.compare ki1 ki2 in
if n = 0 then
let n = Extlib.compare_basic x1 x2 in
if n = 0 then
Datatype.String.Set.compare a1 a2
else n
else n
else n
| IPAssigns (f1, ki1, b1, _), IPAssigns (f2, ki2, b2, _) ->
cmp_bhv (f1,ki1,b1) (f2,ki2,b2)
| IPFrom (f1,ki1,b1,(t1,_)), IPFrom(f2,ki2,b2,(t2,_)) ->
let n = cmp_bhv (f1,ki1,b1) (f2,ki2,b2) in
if n = 0 then Identified_term.compare t1 t2 else n
| IPDecrease(f1, ki1,_,_), IPDecrease(f2, ki2,_,_) ->
let n = Kf.compare f1 f2 in
if n = 0 then Kinstr.compare ki1 ki2 else n
| IPReachable(kf1, ki1, ba1), IPReachable(kf2, ki2, ba2) ->
let n = Extlib.opt_compare Kf.compare kf1 kf2 in
if n = 0 then
let n = Kinstr.compare ki1 ki2 in
if n = 0 then Pervasives.compare ba1 ba2 else n
else
n
| IPAxiom (s1,_,_,_,_), IPAxiom (s2,_,_,_,_)
| IPAxiomatic(s1, _), IPAxiomatic(s2, _)
| IPTypeInvariant(s1,_,_,_), IPTypeInvariant(s2,_,_,_)
| IPLemma (s1,_,_,_,_), IPLemma (s2,_,_,_,_) ->
Datatype.String.compare s1 s2
| IPOther(s1,kf1,ki1), IPOther(s2,kf2,ki2) ->
let s = Datatype.String.compare s1 s2 in
if s <> 0 then s else
let s = compare_opt Kf.compare kf1 kf2 in
if s <> 0 then s else
Kinstr.compare ki1 ki2
| IPAllocation (f1, ki1, b1, _), IPAllocation (f2, ki2, b2, _) ->
cmp_bhv (f1,ki1,b1) (f2,ki2,b2)
| IPPropertyInstance (kf1, ki1, ip1),
IPPropertyInstance (kf2, ki2, ip2) ->
let c = Extlib.opt_compare Kernel_function.compare kf1 kf2 in
if c <> 0 then c else
let c = Kinstr.compare ki1 ki2 in
if c <> 0 then c else compare ip1 ip2
| (IPPredicate _ | IPCodeAnnot _ | IPBehavior _ | IPComplete _ |
IPDisjoint _ | IPAssigns _ | IPFrom _ | IPDecrease _ |
IPReachable _ | IPAxiom _ | IPAxiomatic _ | IPLemma _ |
IPOther _ | IPAllocation _ | IPPropertyInstance _ |
IPTypeInvariant _ | IPGlobalInvariant _) as x, y ->
let nb = function
| IPPredicate _ -> 1
| IPAssigns _ -> 2
| IPDecrease _ -> 3
| IPAxiom _ -> 4
| IPAxiomatic _ -> 5
| IPLemma _ -> 6
| IPCodeAnnot _ -> 7
| IPComplete _ -> 8
| IPDisjoint _ -> 9
| IPFrom _ -> 10
| IPBehavior _ -> 11
| IPReachable _ -> 12
| IPAllocation _ -> 13
| IPOther _ -> 14
| IPPropertyInstance _ -> 15
| IPTypeInvariant _ -> 16
| IPGlobalInvariant _ -> 17
in
Datatype.Int.compare (nb x) (nb y)
end)
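(* The include of Datatype.Make_with_collections above equips this module with
   the standard Frama-C datatype operations (equal, compare, hash, pretty) and
   with Set, Map and Hashtbl submodules keyed by identified_property. A minimal
   usage sketch, assuming the module is referred to as Property as in the rest
   of Frama-C:

     let props = Property.Set.add ip Property.Set.empty in
     Property.Set.iter (Property.pretty Format.std_formatter) props
*)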
let rec short_pretty fmt p = match p with
| IPPredicate (_,_,_,{ ip_name = name :: _ }) ->
Format.pp_print_string fmt name
| IPPredicate _ -> pretty fmt p
| IPAxiom (name,_,_,_,_) | IPLemma(name,_,_,_,_)
| IPTypeInvariant(name,_,_,_) -> Format.pp_print_string fmt name
| IPGlobalInvariant(name,_,_) -> Format.pp_print_string fmt name
| IPAxiomatic (name,_) -> Format.pp_print_string fmt name
| IPBehavior(kf,_,_,{b_name = name }) ->
Format.fprintf fmt "behavior %s in function %a"
name Kernel_function.pretty kf
| IPComplete (kf,_,_,_) ->
Format.fprintf fmt "complete clause in function %a"
Kernel_function.pretty kf
| IPDisjoint (kf,_,_,_) ->
Format.fprintf fmt "disjoint clause in function %a"
Kernel_function.pretty kf
| IPCodeAnnot (_,_,{ annot_content = AAssert (_, { name = name :: _ })}) ->
Format.pp_print_string fmt name
| IPCodeAnnot(_,_,{annot_content = AInvariant (_,_, { name = name :: _ })})->
Format.pp_print_string fmt name
| IPCodeAnnot _ -> pretty fmt p
| IPAllocation (kf,_,_,_) ->
Format.fprintf fmt "allocates/frees clause in function %a"
Kernel_function.pretty kf
| IPAssigns (kf,_,_,_) ->
Format.fprintf fmt "assigns clause in function %a"
Kernel_function.pretty kf
| IPFrom (kf,_,_,(t,_)) ->
Format.fprintf fmt "from clause of term %a in function %a"
Cil_printer.pp_identified_term t Kernel_function.pretty kf
| IPDecrease(kf,_,_,_) ->
Format.fprintf fmt "decrease clause in function %a"
Kernel_function.pretty kf
| IPPropertyInstance (kfopt, ki, ip) ->
Format.fprintf fmt "specialization of %a %a" short_pretty ip
pretty_instance_location (kfopt, ki)
| IPReachable _ | IPOther _ -> pretty fmt p
module Names = struct
module NamesTbl =
State_builder.Hashtbl(Datatype.String.Hashtbl)(Datatype.Int)
(struct
let name = "PropertyNames"
let dependencies = [ ]
let size = 97
end)
module IndexTbl =
State_builder.Hashtbl(Hashtbl)(Datatype.String)
(struct
let name = "PropertyIndex"
let dependencies = [ Ast.self; NamesTbl.self; Globals.Functions.self ]
let size = 97
end)
let self = IndexTbl.self
let kf_prefix kf = (Ast_info.Function.get_vi kf.fundec).vname ^ "_"
let ident_names names =
List.filter (function "" -> true
| _ as n -> '\"' <> (String.get n 0) ) names
let pp_names fmt l =
let l = ident_names l in
match l with [] -> ()
| _ -> Format.fprintf fmt "_%a"
(Pretty_utils.pp_list ~sep:"_" Format.pp_print_string) l
let pp_code_annot_names fmt ca =
match ca.annot_content with
| AAssert(for_bhv,named_pred) | AInvariant(for_bhv,_,named_pred) ->
let pp_for_bhv fmt l =
match l with [] -> ()
| _ -> Format.fprintf fmt "_for_%a"
(Pretty_utils.pp_list ~sep:"_" Format.pp_print_string) l
in Format.fprintf fmt "%a%a" pp_names named_pred.name pp_for_bhv for_bhv
| AVariant(term, _) -> pp_names fmt term.term_name
| _ -> () (* TODO : add some more names ? *)
let behavior_prefix b =
if Cil.is_default_behavior b then ""
else b.b_name ^ "_"
let variant_suffix = function
| (_,Some s) -> s
| _ -> ""
let string_of_termination_kind = function
Normal -> "post"
| Exits -> "exit"
| Breaks -> "break"
| Continues -> "continue"
| Returns -> "return"
let ki_prefix = function
| Kglobal -> ""
| Kstmt _ -> "stmt_"
let predicate_kind_txt pk ki =
let name = match pk with
| PKRequires b -> (behavior_prefix b) ^ "pre"
| PKAssumes b -> (behavior_prefix b) ^ "assume"
| PKEnsures (b, tk) -> (behavior_prefix b) ^ string_of_termination_kind tk
| PKTerminates -> "term"
in
(ki_prefix ki) ^ name
let active_prefix fmt a =
let print_one a = Format.fprintf fmt "_%s" a in
Datatype.String.Set.iter print_one a
let rec id_prop_txt p = match p with
| IPPredicate (pk,kf,ki,idp) ->
Pretty_utils.sfprintf "%s%s%a"
(kf_prefix kf) (predicate_kind_txt pk ki) pp_names idp.ip_name
| IPCodeAnnot (kf,_, ca) ->
let name = match ca.annot_content with
| AAssert _ -> "assert"
| AInvariant (_,true,_) -> "loop_inv"
| AInvariant _ -> "inv"
| APragma _ -> "pragma"
| _ -> assert false
in Pretty_utils.sfprintf "%s%s%a" (kf_prefix kf) name pp_code_annot_names ca
| IPComplete (kf, ki, a, lb) ->
Pretty_utils.sfprintf "%s%s%acomplete%a"
(kf_prefix kf) (ki_prefix ki) active_prefix a pp_names lb
| IPDisjoint (kf, ki, a, lb) ->
Pretty_utils.sfprintf "%s%s%adisjoint%a"
(kf_prefix kf) (ki_prefix ki) active_prefix a pp_names lb
| IPDecrease (kf,_,None, variant) -> (kf_prefix kf) ^ "decr" ^ (variant_suffix variant)
| IPDecrease (kf,_,_,variant) -> (kf_prefix kf) ^ "loop_term" ^ (variant_suffix variant)
| IPAxiom (name,_,_,named_pred,_) ->
Pretty_utils.sfprintf "axiom_%s%a" name pp_names named_pred.name
| IPAxiomatic(name, _) -> "axiomatic_" ^ name
| IPLemma (name,_,_,named_pred,_) ->
Pretty_utils.sfprintf "lemma_%s%a" name pp_names named_pred.name
| IPTypeInvariant (name,_,named_pred,_) ->
Pretty_utils.sfprintf "type_invariant_%s%a" name pp_names named_pred.name
| IPGlobalInvariant (name,named_pred,_) ->
Pretty_utils.sfprintf "global_invariant_%s%a"name pp_names named_pred.name
| IPAllocation (kf, ki, (Id_contract (a,b)), _) ->
Pretty_utils.sfprintf "%s%s%a%salloc"
(kf_prefix kf) (ki_prefix ki) active_prefix a (behavior_prefix b)
| IPAllocation (kf, Kstmt _s, (Id_loop ca), _) ->
Pretty_utils.sfprintf "%sloop_alloc%a"
(kf_prefix kf) pp_code_annot_names ca
| IPAllocation _ -> assert false
| IPAssigns (kf, ki, (Id_contract (a,b)), _) ->
Pretty_utils.sfprintf "%s%s%a%sassign"
(kf_prefix kf) (ki_prefix ki) active_prefix a (behavior_prefix b)
| IPAssigns (kf, Kstmt _s, (Id_loop ca), _) ->
Pretty_utils.sfprintf "%sloop_assign%a"
(kf_prefix kf) pp_code_annot_names ca
| IPAssigns _ -> assert false
| IPFrom (_, _, _, (out,_)) ->
"from_id_"^(string_of_int (out.it_id))
| IPReachable _ -> "reachable_stmt"
| IPBehavior(kf, ki, a, b) ->
Pretty_utils.sfprintf "%s%s%a%s"
(kf_prefix kf) (ki_prefix ki) active_prefix a b.b_name
| IPPropertyInstance (kfopt, ki, ip) ->
Pretty_utils.sfprintf "specialization_%s_at_%t" (id_prop_txt ip)
(fun fmt -> match kfopt, ki with
| None, Kglobal -> Format.pp_print_string fmt "global"
| Some kf, Kglobal -> Kernel_function.pretty fmt kf
| None, Kstmt s -> Format.fprintf fmt "stmt_%d" s.sid
| Some kf, Kstmt s ->
Format.fprintf fmt "%a_stmt_%d" Kernel_function.pretty kf s.sid)
| IPOther(s,Some kf,ki) -> (kf_prefix kf) ^ (ki_prefix ki) ^ s
| IPOther(s,None,ki) -> (ki_prefix ki) ^ s
(** function used to normalize basename *)
let normalize_basename s =
let is_valid_id = ref true
and is_valid_char_id = function
| 'a'..'z' | 'A' .. 'Z' | '0' .. '9' | '_' -> true
| _ -> false
and is_numeric = function
| '0'..'9' -> true
| _ -> false
in
String.iter (fun c -> if not (is_valid_char_id c) then is_valid_id := false) s ;
let s = if !is_valid_id then s else
begin
let sn = String.copy s
and i = ref 0
in String.iter (fun c -> if not (is_valid_char_id c) then String.set sn !i '_' ; i := succ !i) s ;
sn
end
in if s = "" then "property" else
if is_numeric (String.get s 0) then "property_" ^ s else s
(** returns the name that should be returned by the function [get_prop_name_id] if the given property has [name] as basename. That name is reserved so that [get_prop_name_id prop] can never return an identical name. *)
let reserve_name_id basename =
let basename = normalize_basename basename in
try
let speed_up_start = NamesTbl.find basename in
(* this basename is already reserved *)
let n,unique_name = Extlib.make_unique_name NamesTbl.mem ~sep:"_" ~start:speed_up_start basename
in NamesTbl.replace basename (succ n) ; (* to speed up Extlib.make_unique_name for next time *)
unique_name
with Not_found -> (* first time that basename is reserved *)
NamesTbl.add basename 2 ;
basename
(** returns the basename of the property. *)
let get_prop_basename ip = normalize_basename (id_prop_txt ip)
(** returns a unique name identifying the property.
This name is built from the basename of the property. *)
let get_prop_name_id ip =
try IndexTbl.find ip
with Not_found -> (* first time we are asking for a name for that [ip] *)
let basename = get_prop_basename ip in
let unique_name = reserve_name_id basename in
IndexTbl.add ip unique_name ;
unique_name
(*
(** force computation of the unique name identifying the property *)
let make_prop_name_id ip =
ignore (get_prop_name_id ip)
let remove_prop_name_id ip =
try
ignore (IndexTbl.find ip);
IndexTbl.remove ip
with Not_found -> ()
*)
end
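(* Names.get_prop_name_id assigns each property a stable, identifier-like
   unique name derived from its basename (function prefix, property kind and
   user-supplied names), disambiguated with a numeric suffix when two
   properties share a basename; e.g. two unnamed assertions in a function f
   would typically be named "f_assert" and "f_assert_2" (illustrative values). *)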
let ip_other s kf ki = IPOther(s,kf,ki)
let ip_reachable_stmt kf ki = IPReachable(Some kf, Kstmt ki, Before)
let ip_reachable_ppt p =
let kf = get_kf p in
let ki = get_kinstr p in
let ba = match p with
| IPPredicate((PKRequires _ | PKAssumes _ | PKTerminates), _, _, _)
| IPAxiom _ | IPAxiomatic _ | IPLemma _ | IPComplete _
| IPDisjoint _ | IPCodeAnnot _ | IPAllocation _
| IPDecrease _ | IPPropertyInstance _ | IPOther _
| IPTypeInvariant _ | IPGlobalInvariant _
-> Before
| IPPredicate(PKEnsures _, _, _, _) | IPAssigns _ | IPFrom _
| IPBehavior _
-> After
| IPReachable _ -> Kernel.fatal "IPReachable(IPReachable _) is not possible"
in
IPReachable(kf, ki, ba)
let ip_of_ensures kf st b (k,p) = IPPredicate (PKEnsures(b,k),kf,st,p)
let ip_ensures_of_behavior kf st b =
List.map (ip_of_ensures kf st b) b.b_post_cond
let ip_of_allocation kf st loc = function
| FreeAllocAny -> None
| FreeAlloc(f,a) -> Some (IPAllocation (kf,st,loc,(f,a)))
let ip_allocation_of_behavior kf st ~active b =
let a = Datatype.String.Set.of_list active in
ip_of_allocation kf st (Id_contract (a,b)) b.b_allocation
let ip_of_assigns kf st loc = function
| WritesAny -> None
| Writes [(a,_)] when Logic_utils.is_result a.it_content ->
(* We're only assigning the result (with dependencies), but no
global variables, this amounts to \nothing.
*)
Some (IPAssigns (kf, st, loc, []))
| Writes a -> Some (IPAssigns (kf,st,loc,a))
let ip_assigns_of_behavior kf st ~active b =
let a = Datatype.String.Set.of_list active in
ip_of_assigns kf st (Id_contract (a,b)) b.b_assigns
let ip_of_from kf st loc from =
match snd from with
| FromAny -> None
| From _ -> Some (IPFrom (kf,st, loc, from))
let ip_from_of_behavior kf st ~active b =
match b.b_assigns with
| WritesAny -> []
| Writes l ->
let treat_from acc (out, froms) = match froms with
| FromAny -> acc
| From _ ->
let a = Datatype.String.Set.of_list active in
let ip =
Extlib.the (ip_of_from kf st (Id_contract (a,b)) (out, froms))
in
ip :: acc
in
List.fold_left treat_from [] l
let ip_allocation_of_code_annot kf st ca = match ca.annot_content with
| AAllocation (_,a) -> ip_of_allocation kf st (Id_loop ca) a
| _ -> None
let ip_assigns_of_code_annot kf st ca = match ca.annot_content with
| AAssigns (_,a) -> ip_of_assigns kf st (Id_loop ca) a
| _ -> None
let ip_from_of_code_annot kf st ca = match ca.annot_content with
| AAssigns(_,WritesAny) -> []
| AAssigns (_,Writes l) ->
let treat_from acc (out, froms) = match froms with FromAny -> acc
| From _ ->
let ip =
Extlib.the (ip_of_from kf st (Id_loop ca) (out, froms))
in
ip::acc
in
List.fold_left treat_from [] l
| _ -> []
let ip_post_cond_of_behavior kf st ~active b =
ip_ensures_of_behavior kf st b
@ (Extlib.list_of_opt (ip_assigns_of_behavior kf st ~active b))
@ ip_from_of_behavior kf st active b
@ (Extlib.list_of_opt (ip_allocation_of_behavior kf st ~active b))
let ip_of_behavior kf s ~active b =
let a = Datatype.String.Set.of_list active in
IPBehavior(kf, s, a, b)
let ip_of_requires kf st b p = IPPredicate (PKRequires b,kf,st,p)
let ip_requires_of_behavior kf st b =
List.map (ip_of_requires kf st b) b.b_requires
let ip_of_assumes kf st b p = IPPredicate (PKAssumes b,kf,st,p)
let ip_assumes_of_behavior kf st b =
List.map (ip_of_assumes kf st b) b.b_assumes
let ip_all_of_behavior kf st ~active b =
ip_of_behavior kf st ~active b
:: ip_requires_of_behavior kf st b
@ ip_assumes_of_behavior kf st b
@ ip_post_cond_of_behavior kf st ~active b
let ip_of_complete kf st ~active bhvs =
let a = Datatype.String.Set.of_list active in IPComplete(kf,st,a,bhvs)
let ip_complete_of_spec kf st ~active s =
List.map (ip_of_complete kf st ~active) s.spec_complete_behaviors
let ip_of_disjoint kf st ~active bhvs =
let a = Datatype.String.Set.of_list active in IPDisjoint(kf,st,a,bhvs)
let ip_disjoint_of_spec kf st ~active s =
List.map (ip_of_disjoint kf st ~active) s.spec_disjoint_behaviors
let ip_of_terminates kf st p = IPPredicate(PKTerminates,kf,st,p)
let ip_terminates_of_spec kf st s = match s.spec_terminates with
| None -> None
| Some p -> Some (ip_of_terminates kf st p)
let ip_of_decreases kf st d = IPDecrease(kf,st,None,d)
let ip_decreases_of_spec kf st s =
Extlib.opt_map (ip_of_decreases kf st) s.spec_variant
let ip_post_cond_of_spec kf st ~active s =
List.concat
(List.map (ip_post_cond_of_behavior kf st ~active) s.spec_behavior)
let ip_of_spec kf st ~active s =
List.concat (List.map (ip_all_of_behavior kf st ~active) s.spec_behavior)
@ ip_complete_of_spec kf st active s
@ ip_disjoint_of_spec kf st active s
@ (Extlib.list_of_opt (ip_terminates_of_spec kf st s))
@ (Extlib.list_of_opt (ip_decreases_of_spec kf st s))
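(* ip_of_spec flattens a whole function contract into individual properties:
   for each behavior it emits the IPBehavior itself plus its requires, assumes
   and post-condition clauses (ensures, assigns, from, allocates), then adds
   the complete/disjoint, terminates and decreases clauses of the spec. A
   contract with one behavior containing one requires and one ensures thus
   yields three properties: the behavior, the precondition and the
   postcondition. *)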
let ip_axiom s = IPAxiom s
let ip_lemma s = IPLemma s
let ip_type_invariant s = IPTypeInvariant s
let ip_global_invariant s = IPGlobalInvariant s
let ip_property_instance kfopt ki ip = IPPropertyInstance (kfopt, ki, ip)
let ip_of_code_annot kf ki ca =
let st = Kstmt ki in
match ca.annot_content with
| AAssert _ | AInvariant _ -> [ IPCodeAnnot(kf, ki, ca) ]
| AStmtSpec (active,s) -> ip_of_spec kf st active s
| AVariant t -> [ IPDecrease (kf,st,(Some ca),t) ]
| AAllocation _ ->
Extlib.list_of_opt (ip_allocation_of_code_annot kf st ca)
@ ip_from_of_code_annot kf st ca
| AAssigns _ ->
Extlib.list_of_opt (ip_assigns_of_code_annot kf st ca)
@ ip_from_of_code_annot kf st ca
| APragma p when Logic_utils.is_property_pragma p ->
[ IPCodeAnnot (kf,ki,ca) ]
| APragma _ -> []
let ip_of_code_annot_single kf ki ca = match ip_of_code_annot kf ki ca with
| [] ->
(* [JS 2011/06/07] using Kernel.error here seems very strange.
   Actually it is incorrect in case of pragma which is not a property (see
   function ip_of_code_annot above. *)
Kernel.error
"@[cannot find a property to extract from code annotation@\n%a@]"
Cil_printer.pp_code_annotation ca;
raise (Invalid_argument "ip_of_code_annot_single")
| [ ip ] -> ip
| ip :: _ ->
Kernel.warning
"@[choosing one of multiple properties associated \
to code annotation@\n%a@]"
Cil_printer.pp_code_annotation ca;
ip
(* Must ensure that the first property is the best one in order to represent
   the annotation (see ip_of_global_annotation_single) *)
let ip_of_global_annotation a =
let once = true in
let rec aux acc = function
| Daxiomatic(name, l, _) ->
let ppts = List.fold_left aux [] l in
IPAxiomatic(name, ppts) :: (ppts @ acc)
| Dlemma(name, true, a, b, c, d) -> ip_axiom (name,a,b,c,d) :: acc
| Dlemma(name, false, a, b, c, d) -> ip_lemma (name,a,b,c,d) :: acc
| Dinvariant(l, loc) ->
let pred = match l.l_body with
| LBpred p -> p
| _ -> assert false
in
IPGlobalInvariant(l.l_var_info.lv_name,pred,loc) :: acc
| Dtype_annot(l, loc) ->
let parameter = match l.l_profile with
| h :: [] -> h
| _ -> assert false
in
let ty = match parameter.lv_type with
| Ctype x -> x
| _ -> assert false
in
let pred = match l.l_body with
| LBpred p -> p
| _ -> assert false
in
IPTypeInvariant(l.l_var_info.lv_name,ty,pred,loc) :: acc
| Dcustom_annot(_c, _n, _) ->
(* TODO *)
Kernel.warning ~once "ignoring status of custom annotation";
acc
| Dmodel_annot _ | Dfun_or_pred _ | Dvolatile _ | Dtype _ ->
(* no associated status for these annotations *)
acc
in
aux [] a
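(* Note that a Daxiomatic yields the enclosing IPAxiomatic node first,
   followed by one property per annotation it contains, so a status can be
   attached either to the axiomatic as a whole or to its members; the head
   position matters for ip_of_global_annotation_single below. *)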
let ip_of_global_annotation_single a = match ip_of_global_annotation a with
| [] -> None
| ip :: _ ->
(* the first one is the good one, see ip_of_global_annotation *)
Some ip
(*
Local Variables:
compile-command: "make -C ../../.."
End:
*)
| null | https://raw.githubusercontent.com/TrustInSoft/tis-interpreter/33132ce4a825494ea48bf2dd6fd03a56b62cc5c3/src/kernel_services/ast_data/property.ml | ocaml | ************************************************************************
alternatives)
you can redistribute it and/or modify it under the terms of the GNU
It is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
************************************************************************
* identified_term list
Pretty information about the localization of a IPPropertyInstance
complete list is more likely to discriminate than active list.
TODO : add some more names ?
* function used to normanize basename
* returns the name that should be returned by the function [get_prop_name_id] if the given property has [name] as basename. That name is reserved so that [get_prop_name_id prop] can never return an identical name.
this basename is already reserved
to speed up Extlib.make_unique_name for next time
* returns the basename of the property.
* returns a unique name identifying the property.
This name is built from the basename of the property.
(** force computation of the unique name identifying the property
We're only assigning the result (with dependencies), but no
global variables, this amounts to \nothing.
no associated status for these annotations
Local Variables:
compile-command: "make -C ../../.."
End:
| Modified by TrustInSoft
This file is part of Frama - C.
Copyright ( C ) 2007 - 2015
CEA ( Commissariat à l'énergie atomique et aux énergies
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
open Cil_types
open Cil_datatype
type behavior_or_loop =
Id_contract of Datatype.String.Set.t * funbehavior
| Id_loop of code_annotation
type identified_complete =
kernel_function * kinstr * Datatype.String.Set.t * string list
type identified_disjoint = identified_complete
type identified_code_annotation =
kernel_function * stmt * code_annotation
type identified_allocation =
kernel_function
* kinstr
* behavior_or_loop
* (identified_term list * identified_term list)
type identified_assigns =
kernel_function
* kinstr
* behavior_or_loop
* identified_term from list
type identified_from =
kernel_function
* kinstr
* behavior_or_loop
type identified_decrease =
kernel_function * kinstr * code_annotation option * term variant
type identified_behavior =
kernel_function * kinstr * Datatype.String.Set.t * funbehavior
type predicate_kind =
| PKRequires of funbehavior
| PKAssumes of funbehavior
| PKEnsures of funbehavior * termination_kind
| PKTerminates
let pretty_predicate_kind fmt = function
| PKRequires _ -> Format.pp_print_string fmt "requires"
| PKAssumes _ -> Format.pp_print_string fmt "assumes"
| PKEnsures(_, tk) ->
Format.pp_print_string fmt
(match tk with
| Normal -> "ensures"
| Exits -> "exits"
| Breaks -> "breaks"
| Continues -> "continues"
| Returns -> "returns")
| PKTerminates -> Format.pp_print_string fmt "terminates"
type identified_predicate =
predicate_kind * kernel_function * kinstr * Cil_types.identified_predicate
type program_point = Before | After
type identified_reachable = kernel_function option * kinstr * program_point
type identified_type_invariant = string * typ * predicate named * location
type identified_global_invariant = string * predicate named * location
and identified_axiomatic = string * identified_property list
and identified_lemma =
string * logic_label list * string list * predicate named * location
and identified_axiom = identified_lemma
and identified_instance =
kernel_function option * kinstr * identified_property
and identified_property =
| IPPredicate of identified_predicate
| IPAxiom of identified_axiom
| IPAxiomatic of identified_axiomatic
| IPLemma of identified_lemma
| IPBehavior of identified_behavior
| IPComplete of identified_complete
| IPDisjoint of identified_disjoint
| IPCodeAnnot of identified_code_annotation
| IPAllocation of identified_allocation
| IPAssigns of identified_assigns
| IPFrom of identified_from
| IPDecrease of identified_decrease
| IPReachable of identified_reachable
| IPPropertyInstance of identified_instance
| IPTypeInvariant of identified_type_invariant
| IPGlobalInvariant of identified_global_invariant
| IPOther of string * kernel_function option * kinstr
let get_kinstr = function
| IPPredicate (_,_,ki,_)
| IPBehavior(_, ki,_,_)
| IPComplete (_,ki,_,_)
| IPDisjoint(_,ki,_,_)
| IPAllocation (_,ki,_,_)
| IPAssigns (_,ki,_,_)
| IPFrom(_,ki,_,_)
| IPReachable (_, ki, _)
| IPDecrease (_,ki,_,_)
| IPPropertyInstance (_, ki, _) -> ki
| IPAxiom _
| IPAxiomatic _
| IPLemma _ -> Kglobal
| IPOther(_,_,ki) -> ki
| IPCodeAnnot (_,s,_) -> Kstmt s
| IPTypeInvariant _ | IPGlobalInvariant _ -> Kglobal
let get_kf = function
| IPPredicate (_,kf,_,_)
| IPBehavior(kf,_,_,_)
| IPCodeAnnot (kf,_,_)
| IPComplete (kf,_,_,_)
| IPDisjoint(kf,_,_,_)
| IPAllocation(kf,_,_,_)
| IPAssigns(kf,_,_,_)
| IPFrom(kf,_,_,_)
| IPDecrease (kf,_,_,_) -> Some kf
| IPAxiom _
| IPAxiomatic _
| IPLemma _ -> None
| IPReachable (kfopt, _, _)
| IPPropertyInstance (kfopt, _, _)
| IPOther(_,kfopt,_) -> kfopt
| IPTypeInvariant _ | IPGlobalInvariant _ -> None
let loc_of_kf_ki kf = function
| Kstmt s -> Cil_datatype.Stmt.loc s
| Kglobal -> Kernel_function.get_location kf
let rec location = function
| IPPredicate (_,_,_,ip) -> ip.ip_loc
| IPBehavior(kf,ki, _,_)
| IPComplete (kf,ki,_,_)
| IPDisjoint(kf,ki,_,_)
| IPReachable(Some kf, ki, _)
| IPPropertyInstance (Some kf, ki, _) -> loc_of_kf_ki kf ki
| IPPropertyInstance (None, Kstmt s, _)
| IPReachable(None, Kstmt s, _) -> Cil_datatype.Stmt.loc s
| IPCodeAnnot (_,s,ca) -> (
match Cil_datatype.Code_annotation.loc ca with
| None -> Cil_datatype.Stmt.loc s
| Some loc -> loc)
| IPPropertyInstance (None, Kglobal, _)
| IPReachable(None, Kglobal, _) -> Cil_datatype.Location.unknown
| IPAssigns(kf,ki,_,a) ->
(match a with
| [] -> loc_of_kf_ki kf ki
| (t,_) :: _ -> t.it_content.term_loc)
| IPAllocation(kf,ki,_,fa) ->
(match fa with
| [],[] -> loc_of_kf_ki kf ki
| (t :: _),_
| _,(t :: _) -> t.it_content.term_loc)
| IPFrom(_,_,_,(t,_)) -> t.it_content.term_loc
| IPDecrease (_,_,_,(t,_)) -> t.term_loc
| IPAxiom (_,_,_,_,loc) -> loc
| IPAxiomatic (_,l) ->
(match l with
| [] -> Cil_datatype.Location.unknown
| p :: _ -> location p)
| IPLemma (_,_,_,_,loc) -> loc
| IPOther(_,kf,ki) ->
(match kf with
| None -> Cil_datatype.Location.unknown
| Some kf -> loc_of_kf_ki kf ki)
| IPTypeInvariant(_,_,_,loc) | IPGlobalInvariant(_,_,loc) -> loc
let pretty_instance_location fmt (kfopt, ki) =
match kfopt, ki with
| None, Kglobal -> Format.pp_print_string fmt "at global scope"
| Some kf, Kglobal ->
Format.fprintf fmt "in function %a" Kernel_function.pretty kf
| None, Kstmt stmt -> Format.fprintf fmt "at stmt %d" stmt.sid
| Some kf, Kstmt stmt
when Kernel_function.(equal kf (find_englobing_kf stmt)) ->
Format.fprintf fmt "at stmt %d" stmt.sid
| Some kf, Kstmt stmt ->
Format.fprintf fmt "at stmt %d and function %a"
stmt.sid Kernel_function.pretty kf
let get_pk_behavior = function
| PKRequires b | PKAssumes b | PKEnsures (b,_) -> Some b
| PKTerminates -> None
let get_behavior = function
| IPPredicate (pk,_,_,_) -> get_pk_behavior pk
| IPBehavior(_, _, _, b) -> Some b
| IPAllocation(_,_,Id_contract (_,b),_)
| IPAssigns(_,_,Id_contract (_,b),_)
| IPFrom(_,_,Id_contract (_,b),_) -> Some b
| IPAllocation(_,_,Id_loop _,_)
| IPAssigns(_,_,Id_loop _,_)
| IPFrom(_,_,Id_loop _,_)
| IPAxiom _
| IPAxiomatic _
| IPLemma _
| IPCodeAnnot (_,_,_)
| IPComplete (_,_,_,_)
| IPDisjoint(_,_,_,_)
| IPDecrease _
| IPReachable _
| IPPropertyInstance _
| IPTypeInvariant _
| IPGlobalInvariant _
| IPOther _ -> None
include Datatype.Make_with_collections
(struct
include Datatype.Serializable_undefined
type t = identified_property
let name = "Property.t"
let reprs = [ IPAxiom ("",[],[],Logic_const.ptrue,Location.unknown) ]
let mem_project = Datatype.never_any_project
let equal_opt eq a b =
match a,b with
| None,None -> true
| Some _,None | None,Some _ -> false
| Some x , Some y -> eq x y
let compare_opt cmp a b =
match a,b with
| None,None -> 0
| None,Some _ -> (-1)
| Some _,None -> 1
| Some x,Some y -> cmp x y
let pp_active fmt active =
let sep = ref false in
let print_one a =
Format.fprintf fmt "%s%s" (if !sep then ", " else "") a;
sep:=true
in
Datatype.String.Set.iter print_one active
let rec pretty fmt = function
| IPPredicate (kind,_,_,p) ->
Format.fprintf fmt "%a@ %a"
pretty_predicate_kind kind Cil_printer.pp_identified_predicate p
| IPAxiom (s,_,_,_,_) -> Format.fprintf fmt "axiom@ %s" s
| IPAxiomatic(s, _) -> Format.fprintf fmt "axiomatic@ %s" s
| IPLemma (s,_,_,_,_) -> Format.fprintf fmt "lemma@ %s" s
| IPTypeInvariant(s,ty,_,_) ->
Format.fprintf fmt "invariant@ %s for type %a" s Cil_printer.pp_typ ty
| IPGlobalInvariant(s,_,_) ->
Format.fprintf fmt "global invariant@ %s" s
| IPBehavior(_kf, ki, active, b) ->
if Cil.is_default_behavior b then
Format.pp_print_string fmt "default behavior"
else
Format.fprintf fmt "behavior %s" b.b_name;
(match ki with
| Kstmt s -> Format.fprintf fmt " for statement %d" s.sid
| Kglobal -> ());
pp_active fmt active
| IPCodeAnnot(_, _, a) -> Cil_printer.pp_code_annotation fmt a
| IPComplete(_, _, active, l) ->
Format.fprintf fmt "complete@ %a"
(Pretty_utils.pp_list ~sep:","
(fun fmt s -> Format.fprintf fmt "@ %s" s))
l;
pp_active fmt active
| IPDisjoint(_, _, active, l) ->
Format.fprintf fmt "disjoint@ %a"
(Pretty_utils.pp_list ~sep:","
(fun fmt s -> Format.fprintf fmt " %s" s))
l;
pp_active fmt active
| IPAllocation(_, _, _, (f,a)) ->
Cil_printer.pp_allocation fmt (FreeAlloc(f,a))
| IPAssigns(_, _, _, l) -> Cil_printer.pp_assigns fmt (Writes l)
| IPFrom (_,_,_, f) -> Cil_printer.pp_from fmt f
| IPDecrease(_, _, None,v) -> Cil_printer.pp_decreases fmt v
| IPDecrease(_, _, _,v) -> Cil_printer.pp_variant fmt v
| IPReachable(None, Kstmt _, _) -> assert false
| IPReachable(None, Kglobal, _) ->
Format.fprintf fmt "reachability of entry point"
| IPReachable(Some kf, Kglobal, _) ->
Format.fprintf fmt "reachability of function %a" Kf.pretty kf
| IPReachable(Some kf, Kstmt stmt, ba) ->
Format.fprintf fmt "reachability %s stmt %a in %a"
(match ba with Before -> "of" | After -> "post")
Cil_datatype.Location.pretty_line (Cil_datatype.Stmt.loc stmt)
Kf.pretty kf
| IPPropertyInstance (kfopt, ki, ip) ->
Format.fprintf fmt "status of '%a'%t %a"
pretty ip
(fun fmt -> match get_kf ip with
| Some kf -> Format.fprintf fmt " of %a" Kernel_function.pretty kf
| None -> ())
pretty_instance_location (kfopt, ki)
| IPOther(s,_,_) -> Format.pp_print_string fmt s
let rec hash =
let hash_bhv_loop = function
| Id_contract (a,b) -> (0, Hashtbl.hash (a,b.b_name))
| Id_loop ca -> (1, ca.annot_id)
in
function
| IPPredicate (_,_,_,x) -> Hashtbl.hash (1, x.ip_id)
| IPAxiom (x,_,_,_,_) -> Hashtbl.hash (2, (x:string))
| IPAxiomatic (x,_) -> Hashtbl.hash (3, (x:string))
| IPLemma (x,_,_,_,_) -> Hashtbl.hash (4, (x:string))
| IPCodeAnnot(_,_, ca) -> Hashtbl.hash (5, ca.annot_id)
| IPComplete(f, ki, x, y) ->
Hashtbl.hash
(6, Kf.hash f, Kinstr.hash ki,
(y:string list), (x:Datatype.String.Set.t))
| IPDisjoint(f, ki, x, y) ->
Hashtbl.hash
(7, Kf.hash f, Kinstr.hash ki,
(y: string list), (x:Datatype.String.Set.t))
| IPAssigns(f, ki, b, _l) ->
Hashtbl.hash (8, Kf.hash f, Kinstr.hash ki, hash_bhv_loop b)
| IPFrom(kf,ki,b,(t,_)) ->
Hashtbl.hash
(9, Kf.hash kf, Kinstr.hash ki,
hash_bhv_loop b, Identified_term.hash t)
| IPDecrease(kf, ki, _ca, _v) ->
At most one loop variant per statement anyway , no
need to discriminate against the code annotation itself
need to discriminate against the code annotation itself *)
Hashtbl.hash (10, Kf.hash kf, Kinstr.hash ki)
| IPBehavior(kf, s, a, b) ->
Hashtbl.hash
(11, Kf.hash kf, Kinstr.hash s,
(b.b_name:string), (a:Datatype.String.Set.t))
| IPReachable(kf, ki, ba) ->
Hashtbl.hash(12, Extlib.may_map Kf.hash ~dft:0 kf,
Kinstr.hash ki, Hashtbl.hash ba)
| IPAllocation(f, ki, b, _fa) ->
Hashtbl.hash (13, Kf.hash f, Kinstr.hash ki, hash_bhv_loop b)
| IPPropertyInstance (kf_caller, ki, ip) ->
Hashtbl.hash (14, Extlib.opt_hash Kf.hash kf_caller,
Kinstr.hash ki, hash ip)
| IPOther(s,_,_) -> Hashtbl.hash (15, (s:string))
| IPTypeInvariant(s,_,_,_) -> Hashtbl.hash (16, (s:string))
| IPGlobalInvariant(s,_,_) -> Hashtbl.hash (17, (s:string))
let rec equal p1 p2 =
let eq_bhv (f1,ki1,b1) (f2,ki2,b2) =
Kf.equal f1 f2 && Kinstr.equal ki1 ki2
&&
(match b1, b2 with
| Id_loop ca1, Id_loop ca2 ->
ca1.annot_id = ca2.annot_id
| Id_contract (a1,b1), Id_contract (a2,b2) ->
Datatype.String.Set.equal a1 a2 &&
Datatype.String.equal b1.b_name b2.b_name
| Id_loop _, Id_contract _
| Id_contract _, Id_loop _ -> false)
in
match p1, p2 with
| IPPredicate (_,_,_,s1), IPPredicate (_,_,_,s2) -> s1.ip_id = s2.ip_id
| IPAxiom (s1,_,_,_,_), IPAxiom (s2,_,_,_,_)
| IPAxiomatic(s1, _), IPAxiomatic(s2, _)
| IPTypeInvariant(s1,_,_,_), IPTypeInvariant(s2,_,_,_)
| IPGlobalInvariant(s1,_,_), IPGlobalInvariant(s2,_,_)
| IPLemma (s1,_,_,_,_), IPLemma (s2,_,_,_,_) ->
Datatype.String.equal s1 s2
| IPCodeAnnot(_,_,ca1), IPCodeAnnot(_,_,ca2) ->
ca1.annot_id = ca2.annot_id
| IPComplete(f1, ki1, a1, x1), IPComplete(f2, ki2, a2, x2)
| IPDisjoint(f1, ki1, a1, x1), IPDisjoint(f2, ki2, a2, x2) ->
Kf.equal f1 f2 && Kinstr.equal ki1 ki2 && a1 = a2 && x1 = x2
| IPAllocation (f1, ki1, b1, _), IPAllocation (f2, ki2, b2, _) ->
eq_bhv (f1,ki1,b1) (f2,ki2,b2)
| IPAssigns (f1, ki1, b1, _), IPAssigns (f2, ki2, b2, _) ->
eq_bhv (f1,ki1,b1) (f2,ki2,b2)
| IPFrom (f1,ki1,b1,(t1,_)), IPFrom (f2, ki2,b2,(t2,_)) ->
eq_bhv (f1,ki1,b1) (f2,ki2,b2) && t1.it_id = t2.it_id
| IPDecrease(f1, ki1, _, _), IPDecrease(f2, ki2, _, _) ->
Kf.equal f1 f2 && Kinstr.equal ki1 ki2
| IPReachable(kf1, ki1, ba1), IPReachable(kf2, ki2, ba2) ->
Extlib.opt_equal Kf.equal kf1 kf2 && Kinstr.equal ki1 ki2 && ba1 = ba2
| IPBehavior(f1, k1, a1, b1), IPBehavior(f2, k2, a2, b2) ->
Kf.equal f1 f2
&& Kinstr.equal k1 k2
&& Datatype.String.Set.equal a1 a2
&& Datatype.String.equal b1.b_name b2.b_name
| IPOther(s1,kf1,ki1), IPOther(s2,kf2,ki2) ->
Datatype.String.equal s1 s2
&& Kinstr.equal ki1 ki2
&& equal_opt Kf.equal kf1 kf2
| IPPropertyInstance (kf1, ki1, ip1),
IPPropertyInstance (kf2, ki2, ip2) ->
Extlib.opt_equal Kernel_function.equal kf1 kf2 &&
Kinstr.equal ki1 ki2 && equal ip1 ip2
| (IPPredicate _ | IPAxiom _ | IPAxiomatic _ | IPLemma _
| IPCodeAnnot _ | IPComplete _ | IPDisjoint _ | IPAssigns _
| IPFrom _ | IPDecrease _ | IPBehavior _ | IPReachable _
| IPAllocation _ | IPOther _ | IPPropertyInstance _
| IPTypeInvariant _ | IPGlobalInvariant _), _ -> false
let rec compare x y =
let cmp_bhv (f1,ki1,b1) (f2,ki2,b2) =
let n = Kf.compare f1 f2 in
if n = 0 then
let n = Kinstr.compare ki1 ki2 in
if n = 0 then
match b1, b2 with
| Id_contract (a1,b1), Id_contract (a2,b2) ->
let n = Datatype.String.compare b1.b_name b2.b_name in
if n = 0 then Datatype.String.Set.compare a1 a2 else n
| Id_loop ca1, Id_loop ca2 ->
Datatype.Int.compare ca1.annot_id ca2.annot_id
| Id_contract _, Id_loop _ -> -1
| Id_loop _, Id_contract _ -> 1
else n
else n
in
match x, y with
| IPPredicate (_,_,_,s1), IPPredicate (_,_,_,s2) ->
Datatype.Int.compare s1.ip_id s2.ip_id
| IPCodeAnnot(_,_,ca1), IPCodeAnnot(_,_,ca2) ->
Datatype.Int.compare ca1.annot_id ca2.annot_id
| IPBehavior(f1, k1, a1, b1), IPBehavior(f2, k2, a2, b2) ->
cmp_bhv (f1, k1, Id_contract (a1,b1)) (f2, k2, Id_contract (a2,b2))
| IPComplete(f1, ki1, a1, x1), IPComplete(f2, ki2, a2, x2)
| IPDisjoint(f1, ki1, a1, x1), IPDisjoint(f2, ki2, a2, x2) ->
let n = Kf.compare f1 f2 in
if n = 0 then
let n = Kinstr.compare ki1 ki2 in
if n = 0 then
let n = Extlib.compare_basic x1 x2 in
if n = 0 then
Datatype.String.Set.compare a1 a2
else n
else n
else n
| IPAssigns (f1, ki1, b1, _), IPAssigns (f2, ki2, b2, _) ->
cmp_bhv (f1,ki1,b1) (f2,ki2,b2)
| IPFrom (f1,ki1,b1,(t1,_)), IPFrom(f2,ki2,b2,(t2,_)) ->
let n = cmp_bhv (f1,ki1,b1) (f2,ki2,b2) in
if n = 0 then Identified_term.compare t1 t2 else n
| IPDecrease(f1, ki1,_,_), IPDecrease(f2, ki2,_,_) ->
let n = Kf.compare f1 f2 in
if n = 0 then Kinstr.compare ki1 ki2 else n
| IPReachable(kf1, ki1, ba1), IPReachable(kf2, ki2, ba2) ->
let n = Extlib.opt_compare Kf.compare kf1 kf2 in
if n = 0 then
let n = Kinstr.compare ki1 ki2 in
if n = 0 then Pervasives.compare ba1 ba2 else n
else
n
| IPAxiom (s1,_,_,_,_), IPAxiom (s2,_,_,_,_)
| IPAxiomatic(s1, _), IPAxiomatic(s2, _)
| IPTypeInvariant(s1,_,_,_), IPTypeInvariant(s2,_,_,_)
| IPLemma (s1,_,_,_,_), IPLemma (s2,_,_,_,_) ->
Datatype.String.compare s1 s2
| IPOther(s1,kf1,ki1), IPOther(s2,kf2,ki2) ->
let s = Datatype.String.compare s1 s2 in
if s <> 0 then s else
let s = compare_opt Kf.compare kf1 kf2 in
if s <> 0 then s else
Kinstr.compare ki1 ki2
| IPAllocation (f1, ki1, b1, _), IPAllocation (f2, ki2, b2, _) ->
cmp_bhv (f1,ki1,b1) (f2,ki2,b2)
| IPPropertyInstance (kf1, ki1, ip1),
IPPropertyInstance (kf2, ki2, ip2) ->
let c = Extlib.opt_compare Kernel_function.compare kf1 kf2 in
if c <> 0 then c else
let c = Kinstr.compare ki1 ki2 in
if c <> 0 then c else compare ip1 ip2
| (IPPredicate _ | IPCodeAnnot _ | IPBehavior _ | IPComplete _ |
IPDisjoint _ | IPAssigns _ | IPFrom _ | IPDecrease _ |
IPReachable _ | IPAxiom _ | IPAxiomatic _ | IPLemma _ |
IPOther _ | IPAllocation _ | IPPropertyInstance _ |
IPTypeInvariant _ | IPGlobalInvariant _) as x, y ->
let nb = function
| IPPredicate _ -> 1
| IPAssigns _ -> 2
| IPDecrease _ -> 3
| IPAxiom _ -> 4
| IPAxiomatic _ -> 5
| IPLemma _ -> 6
| IPCodeAnnot _ -> 7
| IPComplete _ -> 8
| IPDisjoint _ -> 9
| IPFrom _ -> 10
| IPBehavior _ -> 11
| IPReachable _ -> 12
| IPAllocation _ -> 13
| IPOther _ -> 14
| IPPropertyInstance _ -> 15
| IPTypeInvariant _ -> 16
| IPGlobalInvariant _ -> 17
in
Datatype.Int.compare (nb x) (nb y)
end)
let rec short_pretty fmt p = match p with
| IPPredicate (_,_,_,{ ip_name = name :: _ }) ->
Format.pp_print_string fmt name
| IPPredicate _ -> pretty fmt p
| IPAxiom (name,_,_,_,_) | IPLemma(name,_,_,_,_)
| IPTypeInvariant(name,_,_,_) -> Format.pp_print_string fmt name
| IPGlobalInvariant(name,_,_) -> Format.pp_print_string fmt name
| IPAxiomatic (name,_) -> Format.pp_print_string fmt name
| IPBehavior(kf,_,_,{b_name = name }) ->
Format.fprintf fmt "behavior %s in function %a"
name Kernel_function.pretty kf
| IPComplete (kf,_,_,_) ->
Format.fprintf fmt "complete clause in function %a"
Kernel_function.pretty kf
| IPDisjoint (kf,_,_,_) ->
Format.fprintf fmt "disjoint clause in function %a"
Kernel_function.pretty kf
| IPCodeAnnot (_,_,{ annot_content = AAssert (_, { name = name :: _ })}) ->
Format.pp_print_string fmt name
| IPCodeAnnot(_,_,{annot_content = AInvariant (_,_, { name = name :: _ })})->
Format.pp_print_string fmt name
| IPCodeAnnot _ -> pretty fmt p
| IPAllocation (kf,_,_,_) ->
Format.fprintf fmt "allocates/frees clause in function %a"
Kernel_function.pretty kf
| IPAssigns (kf,_,_,_) ->
Format.fprintf fmt "assigns clause in function %a"
Kernel_function.pretty kf
| IPFrom (kf,_,_,(t,_)) ->
Format.fprintf fmt "from clause of term %a in function %a"
Cil_printer.pp_identified_term t Kernel_function.pretty kf
| IPDecrease(kf,_,_,_) ->
Format.fprintf fmt "decrease clause in function %a"
Kernel_function.pretty kf
| IPPropertyInstance (kfopt, ki, ip) ->
Format.fprintf fmt "specialization of %a %a" short_pretty ip
pretty_instance_location (kfopt, ki)
| IPReachable _ | IPOther _ -> pretty fmt p
module Names = struct
module NamesTbl =
State_builder.Hashtbl(Datatype.String.Hashtbl)(Datatype.Int)
(struct
let name = "PropertyNames"
let dependencies = [ ]
let size = 97
end)
module IndexTbl =
State_builder.Hashtbl(Hashtbl)(Datatype.String)
(struct
let name = "PropertyIndex"
let dependencies = [ Ast.self; NamesTbl.self; Globals.Functions.self ]
let size = 97
end)
let self = IndexTbl.self
let kf_prefix kf = (Ast_info.Function.get_vi kf.fundec).vname ^ "_"
let ident_names names =
List.filter (function "" -> true
| _ as n -> '\"' <> (String.get n 0) ) names
let pp_names fmt l =
let l = ident_names l in
match l with [] -> ()
| _ -> Format.fprintf fmt "_%a"
(Pretty_utils.pp_list ~sep:"_" Format.pp_print_string) l
let pp_code_annot_names fmt ca =
match ca.annot_content with
| AAssert(for_bhv,named_pred) | AInvariant(for_bhv,_,named_pred) ->
let pp_for_bhv fmt l =
match l with [] -> ()
| _ -> Format.fprintf fmt "_for_%a"
(Pretty_utils.pp_list ~sep:"_" Format.pp_print_string) l
in Format.fprintf fmt "%a%a" pp_names named_pred.name pp_for_bhv for_bhv
| AVariant(term, _) -> pp_names fmt term.term_name
let behavior_prefix b =
if Cil.is_default_behavior b then ""
else b.b_name ^ "_"
let variant_suffix = function
| (_,Some s) -> s
| _ -> ""
let string_of_termination_kind = function
Normal -> "post"
| Exits -> "exit"
| Breaks -> "break"
| Continues -> "continue"
| Returns -> "return"
let ki_prefix = function
| Kglobal -> ""
| Kstmt _ -> "stmt_"
let predicate_kind_txt pk ki =
let name = match pk with
| PKRequires b -> (behavior_prefix b) ^ "pre"
| PKAssumes b -> (behavior_prefix b) ^ "assume"
| PKEnsures (b, tk) -> (behavior_prefix b) ^ string_of_termination_kind tk
| PKTerminates -> "term"
in
(ki_prefix ki) ^ name
let active_prefix fmt a =
let print_one a = Format.fprintf fmt "_%s" a in
Datatype.String.Set.iter print_one a
let rec id_prop_txt p = match p with
| IPPredicate (pk,kf,ki,idp) ->
Pretty_utils.sfprintf "%s%s%a"
(kf_prefix kf) (predicate_kind_txt pk ki) pp_names idp.ip_name
| IPCodeAnnot (kf,_, ca) ->
let name = match ca.annot_content with
| AAssert _ -> "assert"
| AInvariant (_,true,_) -> "loop_inv"
| AInvariant _ -> "inv"
| APragma _ -> "pragma"
| _ -> assert false
in Pretty_utils.sfprintf "%s%s%a" (kf_prefix kf) name pp_code_annot_names ca
| IPComplete (kf, ki, a, lb) ->
Pretty_utils.sfprintf "%s%s%acomplete%a"
(kf_prefix kf) (ki_prefix ki) active_prefix a pp_names lb
| IPDisjoint (kf, ki, a, lb) ->
Pretty_utils.sfprintf "%s%s%adisjoint%a"
(kf_prefix kf) (ki_prefix ki) active_prefix a pp_names lb
| IPDecrease (kf,_,None, variant) -> (kf_prefix kf) ^ "decr" ^ (variant_suffix variant)
| IPDecrease (kf,_,_,variant) -> (kf_prefix kf) ^ "loop_term" ^ (variant_suffix variant)
| IPAxiom (name,_,_,named_pred,_) ->
Pretty_utils.sfprintf "axiom_%s%a" name pp_names named_pred.name
| IPAxiomatic(name, _) -> "axiomatic_" ^ name
| IPLemma (name,_,_,named_pred,_) ->
Pretty_utils.sfprintf "lemma_%s%a" name pp_names named_pred.name
| IPTypeInvariant (name,_,named_pred,_) ->
Pretty_utils.sfprintf "type_invariant_%s%a" name pp_names named_pred.name
| IPGlobalInvariant (name,named_pred,_) ->
Pretty_utils.sfprintf "global_invariant_%s%a"name pp_names named_pred.name
| IPAllocation (kf, ki, (Id_contract (a,b)), _) ->
Pretty_utils.sfprintf "%s%s%a%salloc"
(kf_prefix kf) (ki_prefix ki) active_prefix a (behavior_prefix b)
| IPAllocation (kf, Kstmt _s, (Id_loop ca), _) ->
Pretty_utils.sfprintf "%sloop_alloc%a"
(kf_prefix kf) pp_code_annot_names ca
| IPAllocation _ -> assert false
| IPAssigns (kf, ki, (Id_contract (a,b)), _) ->
Pretty_utils.sfprintf "%s%s%a%sassign"
(kf_prefix kf) (ki_prefix ki) active_prefix a (behavior_prefix b)
| IPAssigns (kf, Kstmt _s, (Id_loop ca), _) ->
Pretty_utils.sfprintf "%sloop_assign%a"
(kf_prefix kf) pp_code_annot_names ca
| IPAssigns _ -> assert false
| IPFrom (_, _, _, (out,_)) ->
"from_id_"^(string_of_int (out.it_id))
| IPReachable _ -> "reachable_stmt"
| IPBehavior(kf, ki, a, b) ->
Pretty_utils.sfprintf "%s%s%a%s"
(kf_prefix kf) (ki_prefix ki) active_prefix a b.b_name
| IPPropertyInstance (kfopt, ki, ip) ->
Pretty_utils.sfprintf "specialization_%s_at_%t" (id_prop_txt ip)
(fun fmt -> match kfopt, ki with
| None, Kglobal -> Format.pp_print_string fmt "global"
| Some kf, Kglobal -> Kernel_function.pretty fmt kf
| None, Kstmt s -> Format.fprintf fmt "stmt_%d" s.sid
| Some kf, Kstmt s ->
Format.fprintf fmt "%a_stmt_%d" Kernel_function.pretty kf s.sid)
| IPOther(s,Some kf,ki) -> (kf_prefix kf) ^ (ki_prefix ki) ^ s
| IPOther(s,None,ki) -> (ki_prefix ki) ^ s
let normalize_basename s =
let is_valid_id = ref true
and is_valid_char_id = function
| 'a'..'z' | 'A' .. 'Z' | '0' .. '9' | '_' -> true
| _ -> false
and is_numeric = function
| '0'..'9' -> true
| _ -> false
in
String.iter (fun c -> if not (is_valid_char_id c) then is_valid_id := false) s ;
let s = if !is_valid_id then s else
begin
let sn = String.copy s
and i = ref 0
in String.iter (fun c -> if not (is_valid_char_id c) then String.set sn !i '_' ; i := succ !i) s ;
sn
end
in if s = "" then "property" else
if is_numeric (String.get s 0) then "property_" ^ s else s
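(* Illustrative examples (added annotation, not part of the original source):
   normalize_basename "foo$bar" yields "foo_bar", normalize_basename "0cond"
   yields "property_0cond", and normalize_basename "" yields "property". *)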
let reserve_name_id basename =
let basename = normalize_basename basename in
try
let speed_up_start = NamesTbl.find basename in
let n,unique_name = Extlib.make_unique_name NamesTbl.mem ~sep:"_" ~start:speed_up_start basename
in NamesTbl.replace basename (succ n) ; (* to speed up Extlib.make_unique_name next time *)
unique_name
with Not_found -> (* first time that basename is reserved *)
NamesTbl.add basename 2 ;
basename
let get_prop_basename ip = normalize_basename (id_prop_txt ip)
let get_prop_name_id ip =
try IndexTbl.find ip
with Not_found -> (* first time we are asking for a name for that [ip] *)
let basename = get_prop_basename ip in
let unique_name = reserve_name_id basename in
IndexTbl.add ip unique_name ;
unique_name
(*
let make_prop_name_id ip =
ignore (get_prop_name_id ip)
let remove_prop_name_id ip =
try
ignore (IndexTbl.find ip);
IndexTbl.remove ip
with Not_found -> ()
*)
end
let ip_other s kf ki = IPOther(s,kf,ki)
let ip_reachable_stmt kf ki = IPReachable(Some kf, Kstmt ki, Before)
let ip_reachable_ppt p =
let kf = get_kf p in
let ki = get_kinstr p in
let ba = match p with
| IPPredicate((PKRequires _ | PKAssumes _ | PKTerminates), _, _, _)
| IPAxiom _ | IPAxiomatic _ | IPLemma _ | IPComplete _
| IPDisjoint _ | IPCodeAnnot _ | IPAllocation _
| IPDecrease _ | IPPropertyInstance _ | IPOther _
| IPTypeInvariant _ | IPGlobalInvariant _
-> Before
| IPPredicate(PKEnsures _, _, _, _) | IPAssigns _ | IPFrom _
| IPBehavior _
-> After
| IPReachable _ -> Kernel.fatal "IPReachable(IPReachable _) is not possible"
in
IPReachable(kf, ki, ba)
let ip_of_ensures kf st b (k,p) = IPPredicate (PKEnsures(b,k),kf,st,p)
let ip_ensures_of_behavior kf st b =
List.map (ip_of_ensures kf st b) b.b_post_cond
let ip_of_allocation kf st loc = function
| FreeAllocAny -> None
| FreeAlloc(f,a) -> Some (IPAllocation (kf,st,loc,(f,a)))
let ip_allocation_of_behavior kf st ~active b =
let a = Datatype.String.Set.of_list active in
ip_of_allocation kf st (Id_contract (a,b)) b.b_allocation
let ip_of_assigns kf st loc = function
| WritesAny -> None
| Writes [(a,_)] when Logic_utils.is_result a.it_content ->
Some (IPAssigns (kf, st, loc, []))
| Writes a -> Some (IPAssigns (kf,st,loc,a))
let ip_assigns_of_behavior kf st ~active b =
let a = Datatype.String.Set.of_list active in
ip_of_assigns kf st (Id_contract (a,b)) b.b_assigns
let ip_of_from kf st loc from =
match snd from with
| FromAny -> None
| From _ -> Some (IPFrom (kf,st, loc, from))
let ip_from_of_behavior kf st ~active b =
match b.b_assigns with
| WritesAny -> []
| Writes l ->
let treat_from acc (out, froms) = match froms with
| FromAny -> acc
| From _ ->
let a = Datatype.String.Set.of_list active in
let ip =
Extlib.the (ip_of_from kf st (Id_contract (a,b)) (out, froms))
in
ip :: acc
in
List.fold_left treat_from [] l
let ip_allocation_of_code_annot kf st ca = match ca.annot_content with
| AAllocation (_,a) -> ip_of_allocation kf st (Id_loop ca) a
| _ -> None
let ip_assigns_of_code_annot kf st ca = match ca.annot_content with
| AAssigns (_,a) -> ip_of_assigns kf st (Id_loop ca) a
| _ -> None
let ip_from_of_code_annot kf st ca = match ca.annot_content with
| AAssigns(_,WritesAny) -> []
| AAssigns (_,Writes l) ->
let treat_from acc (out, froms) = match froms with FromAny -> acc
| From _ ->
let ip =
Extlib.the (ip_of_from kf st (Id_loop ca) (out, froms))
in
ip::acc
in
List.fold_left treat_from [] l
| _ -> []
let ip_post_cond_of_behavior kf st ~active b =
ip_ensures_of_behavior kf st b
@ (Extlib.list_of_opt (ip_assigns_of_behavior kf st ~active b))
@ ip_from_of_behavior kf st active b
@ (Extlib.list_of_opt (ip_allocation_of_behavior kf st ~active b))
let ip_of_behavior kf s ~active b =
let a = Datatype.String.Set.of_list active in
IPBehavior(kf, s, a, b)
let ip_of_requires kf st b p = IPPredicate (PKRequires b,kf,st,p)
let ip_requires_of_behavior kf st b =
List.map (ip_of_requires kf st b) b.b_requires
let ip_of_assumes kf st b p = IPPredicate (PKAssumes b,kf,st,p)
let ip_assumes_of_behavior kf st b =
List.map (ip_of_assumes kf st b) b.b_assumes
let ip_all_of_behavior kf st ~active b =
ip_of_behavior kf st ~active b
:: ip_requires_of_behavior kf st b
@ ip_assumes_of_behavior kf st b
@ ip_post_cond_of_behavior kf st ~active b
let ip_of_complete kf st ~active bhvs =
let a = Datatype.String.Set.of_list active in IPComplete(kf,st,a,bhvs)
let ip_complete_of_spec kf st ~active s =
List.map (ip_of_complete kf st ~active) s.spec_complete_behaviors
let ip_of_disjoint kf st ~active bhvs =
let a = Datatype.String.Set.of_list active in IPDisjoint(kf,st,a,bhvs)
let ip_disjoint_of_spec kf st ~active s =
List.map (ip_of_disjoint kf st ~active) s.spec_disjoint_behaviors
let ip_of_terminates kf st p = IPPredicate(PKTerminates,kf,st,p)
let ip_terminates_of_spec kf st s = match s.spec_terminates with
| None -> None
| Some p -> Some (ip_of_terminates kf st p)
let ip_of_decreases kf st d = IPDecrease(kf,st,None,d)
let ip_decreases_of_spec kf st s =
Extlib.opt_map (ip_of_decreases kf st) s.spec_variant
let ip_post_cond_of_spec kf st ~active s =
List.concat
(List.map (ip_post_cond_of_behavior kf st ~active) s.spec_behavior)
let ip_of_spec kf st ~active s =
List.concat (List.map (ip_all_of_behavior kf st ~active) s.spec_behavior)
@ ip_complete_of_spec kf st active s
@ ip_disjoint_of_spec kf st active s
@ (Extlib.list_of_opt (ip_terminates_of_spec kf st s))
@ (Extlib.list_of_opt (ip_decreases_of_spec kf st s))
let ip_axiom s = IPAxiom s
let ip_lemma s = IPLemma s
let ip_type_invariant s = IPTypeInvariant s
let ip_global_invariant s = IPGlobalInvariant s
let ip_property_instance kfopt ki ip = IPPropertyInstance (kfopt, ki, ip)
let ip_of_code_annot kf ki ca =
let st = Kstmt ki in
match ca.annot_content with
| AAssert _ | AInvariant _ -> [ IPCodeAnnot(kf, ki, ca) ]
| AStmtSpec (active,s) -> ip_of_spec kf st active s
| AVariant t -> [ IPDecrease (kf,st,(Some ca),t) ]
| AAllocation _ ->
Extlib.list_of_opt (ip_allocation_of_code_annot kf st ca)
@ ip_from_of_code_annot kf st ca
| AAssigns _ ->
Extlib.list_of_opt (ip_assigns_of_code_annot kf st ca)
@ ip_from_of_code_annot kf st ca
| APragma p when Logic_utils.is_property_pragma p ->
[ IPCodeAnnot (kf,ki,ca) ]
| APragma _ -> []
let ip_of_code_annot_single kf ki ca = match ip_of_code_annot kf ki ca with
| [] ->
(* [JS 2011/06/07] using Kernel.error here seems very strange.
Actually it is incorrect in case of pragma which is not a property (see
function ip_of_code_annot above. *)
Kernel.error
"@[cannot find a property to extract from code annotation@\n%a@]"
Cil_printer.pp_code_annotation ca;
raise (Invalid_argument "ip_of_code_annot_single")
| [ ip ] -> ip
| ip :: _ ->
Kernel.warning
"@[choosing one of multiple properties associated \
to code annotation@\n%a@]"
Cil_printer.pp_code_annotation ca;
ip
(* Must ensure that the first property is the best one in order to represent
the annotation (see ip_of_global_annotation_single) *)
let ip_of_global_annotation a =
let once = true in
let rec aux acc = function
| Daxiomatic(name, l, _) ->
let ppts = List.fold_left aux [] l in
IPAxiomatic(name, ppts) :: (ppts @ acc)
| Dlemma(name, true, a, b, c, d) -> ip_axiom (name,a,b,c,d) :: acc
| Dlemma(name, false, a, b, c, d) -> ip_lemma (name,a,b,c,d) :: acc
| Dinvariant(l, loc) ->
let pred = match l.l_body with
| LBpred p -> p
| _ -> assert false
in
IPGlobalInvariant(l.l_var_info.lv_name,pred,loc) :: acc
| Dtype_annot(l, loc) ->
let parameter = match l.l_profile with
| h :: [] -> h
| _ -> assert false
in
let ty = match parameter.lv_type with
| Ctype x -> x
| _ -> assert false
in
let pred = match l.l_body with
| LBpred p -> p
| _ -> assert false
in
IPTypeInvariant(l.l_var_info.lv_name,ty,pred,loc) :: acc
| Dcustom_annot(_c, _n, _) ->
(* TODO *)
Kernel.warning ~once "ignoring status of custom annotation";
acc
| Dmodel_annot _ | Dfun_or_pred _ | Dvolatile _ | Dtype _ ->
acc
in
aux [] a
let ip_of_global_annotation_single a = match ip_of_global_annotation a with
| [] -> None
| ip :: _ ->
(* the first one is the good one, see ip_of_global_annotation *)
Some ip
|
9d13f94f28a95fc073ec73916c65e30ad7c6266e07f20d48c7ad70f48e6b5ce3 | ghc/packages-base | Utils.hs | {-# LANGUAGE Trustworthy #-}
{-# LANGUAGE NoImplicitPrelude #-}
-----------------------------------------------------------------------------
-- |
-- Module      : Foreign.Marshal.Utils
-- Copyright   : (c) The FFI task force 2001
-- License     : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer  :
-- Stability   : provisional
-- Portability : portable
--
-- Utilities for primitive marshaling
--
-----------------------------------------------------------------------------
module Foreign.Marshal.Utils (
-- * General marshalling utilities
-- ** Combined allocation and marshalling
--
with,
new,
-- ** Marshalling of Boolean values (non-zero corresponds to 'True')
--
fromBool,
toBool,
-- ** Marshalling of Maybe values
--
maybeNew,
maybeWith,
maybePeek,
-- ** Marshalling lists of storable objects
--
withMany,
-- ** Haskellish interface to memcpy and memmove
-- | (argument order: destination, source)
--
copyBytes,
moveBytes,
) where
import Data.Maybe
import Foreign.Ptr ( Ptr, nullPtr )
import Foreign.Storable ( Storable(poke) )
import Foreign.C.Types ( CSize(..) )
import Foreign.Marshal.Alloc ( malloc, alloca )
import GHC.Real ( fromIntegral )
import GHC.Num
import GHC.Base
-- combined allocation and marshalling
-- -----------------------------------
-- |Allocate a block of memory and marshal a value into it
-- (the combination of 'malloc' and 'poke').
-- The size of the area allocated is determined by the 'Foreign.Storable.sizeOf'
-- method from the instance of 'Foreign.Storable.Storable' for the appropriate type.
--
-- The memory may be deallocated using 'Foreign.Marshal.Alloc.free' or
-- 'Foreign.Marshal.Alloc.finalizerFree' when no longer required.
--
new :: Storable a => a -> IO (Ptr a)
new val =
do
ptr <- malloc
poke ptr val
return ptr
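-- Illustrative usage sketch (added for clarity, not part of the original
-- module): allocate an 'Int' on the C heap with 'new', read it back with
-- 'Foreign.Storable.peek', and release it with 'Foreign.Marshal.Alloc.free'.
--
-- > exampleNew :: IO Int
-- > exampleNew = do
-- >   p <- new (42 :: Int)   -- malloc + poke
-- >   v <- peek p            -- from Foreign.Storable
-- >   free p                 -- from Foreign.Marshal.Alloc
-- >   return v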
-- |@'with' val f@ executes the computation @f@, passing as argument
-- a pointer to a temporarily allocated block of memory into which
-- @val@ has been marshalled (the combination of 'alloca' and 'poke').
--
-- The memory is freed when @f@ terminates (either normally or via an
-- exception), so the pointer passed to @f@ must /not/ be used after this.
--
with :: Storable a => a -> (Ptr a -> IO b) -> IO b
with val f =
alloca $ \ptr -> do
poke ptr val
res <- f ptr
return res
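-- Illustrative usage sketch (added for clarity, not part of the original
-- module): pass a temporary 'CInt' by pointer to a C function; the foreign
-- import shown here is hypothetical, not a real binding.
--
-- > -- foreign import ccall unsafe "inc_in_place" c_inc :: Ptr CInt -> IO ()
-- > incExample :: CInt -> IO CInt
-- > incExample n = with n $ \p -> do
-- >   c_inc p     -- C side mutates the pointee
-- >   peek p      -- observe the updated value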
-- marshalling of Boolean values (non-zero corresponds to 'True')
-- -----------------------------
-- |Convert a Haskell 'Bool' to its numeric representation
--
fromBool :: Num a => Bool -> a
fromBool False = 0
fromBool True = 1
-- |Convert a Boolean in numeric representation to a Haskell value
--
toBool :: (Eq a, Num a) => a -> Bool
toBool = (/= 0)
-- marshalling of Maybe values
-- ---------------------------
-- |Allocate storage and marshal a storable value wrapped into a 'Maybe'
--
-- * the 'nullPtr' is used to represent 'Nothing'
--
maybeNew :: ( a -> IO (Ptr b))
-> (Maybe a -> IO (Ptr b))
maybeNew = maybe (return nullPtr)
-- |Converts a @withXXX@ combinator into one marshalling a value wrapped
-- into a 'Maybe', using 'nullPtr' to represent 'Nothing'.
--
maybeWith :: ( a -> (Ptr b -> IO c) -> IO c)
-> (Maybe a -> (Ptr b -> IO c) -> IO c)
maybeWith = maybe ($ nullPtr)
-- |Convert a peek combinator into a one returning 'Nothing' if applied to a
-- 'nullPtr'
--
maybePeek :: (Ptr a -> IO b) -> Ptr a -> IO (Maybe b)
maybePeek peek ptr | ptr == nullPtr = return Nothing
| otherwise = do a <- peek ptr; return (Just a)
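-- Illustrative usage sketch (added for clarity, not part of the original
-- module): turning a possibly-NULL pointer returned by a hypothetical C
-- lookup function into a 'Maybe' value.
--
-- > -- foreign import ccall unsafe "lookup_thing" c_lookup :: CInt -> IO (Ptr CInt)
-- > lookupThing :: CInt -> IO (Maybe CInt)
-- > lookupThing key = c_lookup key >>= maybePeek peek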
-- marshalling lists of storable objects
-- -------------------------------------
-- |Replicates a @withXXX@ combinator over a list of objects, yielding a list of
-- marshalled objects
--
withMany :: (a -> (b -> res) -> res) -- withXXX combinator for one object
-> [a] -- storable objects
-> ([b] -> res) -- action on list of marshalled obj.s
-> res
withMany _ [] f = f []
withMany withFoo (x:xs) f = withFoo x $ \x' ->
withMany withFoo xs (\xs' -> f (x':xs'))
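-- Illustrative usage sketch (added for clarity, not part of the original
-- module): marshal a list of Haskell strings with
-- 'Foreign.C.String.withCString' and hand the resulting @[CString]@ to an
-- action in one go.
--
-- > withStrings :: [String] -> ([CString] -> IO a) -> IO a
-- > withStrings = withMany withCString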
-- Haskellish interface to memcpy and memmove
-- ------------------------------------------
-- |Copies the given number of bytes from the second area (source) into the
-- first (destination); the copied areas may /not/ overlap
--
copyBytes :: Ptr a -> Ptr a -> Int -> IO ()
copyBytes dest src size = do _ <- memcpy dest src (fromIntegral size)
return ()
-- |Copies the given number of bytes from the second area (source) into the
-- first (destination); the copied areas /may/ overlap
--
moveBytes :: Ptr a -> Ptr a -> Int -> IO ()
moveBytes dest src size = do _ <- memmove dest src (fromIntegral size)
return ()
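-- Illustrative usage sketch (added for clarity, not part of the original
-- module): shifting a buffer's contents one byte to the left; source and
-- destination overlap, so 'moveBytes' (memmove semantics) is required rather
-- than 'copyBytes' (memcpy semantics).
--
-- > shiftLeft :: Ptr CChar -> Int -> IO ()
-- > shiftLeft buf n = moveBytes buf (buf `plusPtr` 1) (n - 1)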
-- auxiliary routines
-- -------------------
-- |Basic C routines needed for memory copying
--
foreign import ccall unsafe "string.h" memcpy :: Ptr a -> Ptr a -> CSize -> IO (Ptr a)
foreign import ccall unsafe "string.h" memmove :: Ptr a -> Ptr a -> CSize -> IO (Ptr a)
| null | https://raw.githubusercontent.com/ghc/packages-base/52c0b09036c36f1ed928663abb2f295fd36a88bb/Foreign/Marshal/Utils.hs | haskell | ---------------------------------------------------------------------------
|
Module : Foreign.Marshal.Utils
License : BSD-style (see the file libraries/base/LICENSE)
Stability : provisional
Portability : portable
---------------------------------------------------------------------------
* General marshalling utilities
** Combined allocation and marshalling
** Marshalling of Maybe values
** Marshalling lists of storable objects
| (argument order: destination, source)
combined allocation and marshalling
-----------------------------------
|Allocate a block of memory and marshal a value into it
(the combination of 'malloc' and 'poke').
The size of the area allocated is determined by the 'Foreign.Storable.sizeOf'
The memory may be deallocated using 'Foreign.Marshal.Alloc.free' or
a pointer to a temporarily allocated block of memory into which
@val@ has been marshalled (the combination of 'alloca' and 'poke').
-----------------------------
marshalling of Maybe values
---------------------------
* the 'nullPtr' is used to represent 'Nothing'
into a 'Maybe', using 'nullPtr' to represent 'Nothing'.
|Convert a peek combinator into a one returning 'Nothing' if applied to a
'nullPtr'
marshalling lists of storable objects
-------------------------------------
marshalled objects
storable objects
action on list of marshalled obj.s
------------------------------------------
first (destination); the copied areas /may/ overlap
auxilliary routines
-------------------
| {-# LANGUAGE Trustworthy #-}
{-# LANGUAGE NoImplicitPrelude #-}
-- Copyright   : (c) The FFI task force 2001
-- Maintainer  :
-- Utilities for primitive marshaling
module Foreign.Marshal.Utils (
with,
new,
-- ** Marshalling of Boolean values (non-zero corresponds to 'True')
fromBool,
toBool,
maybeNew,
maybeWith,
maybePeek,
withMany,
-- ** Haskellish interface to memcpy and memmove
copyBytes,
moveBytes,
) where
import Data.Maybe
import Foreign.Ptr ( Ptr, nullPtr )
import Foreign.Storable ( Storable(poke) )
import Foreign.C.Types ( CSize(..) )
import Foreign.Marshal.Alloc ( malloc, alloca )
import GHC.Real ( fromIntegral )
import GHC.Num
import GHC.Base
-- method from the instance of 'Foreign.Storable.Storable' for the appropriate type.
-- 'Foreign.Marshal.Alloc.finalizerFree' when no longer required.
new :: Storable a => a -> IO (Ptr a)
new val =
do
ptr <- malloc
poke ptr val
return ptr
-- |@'with' val f@ executes the computation @f@, passing as argument
-- The memory is freed when @f@ terminates (either normally or via an
-- exception), so the pointer passed to @f@ must /not/ be used after this.
with :: Storable a => a -> (Ptr a -> IO b) -> IO b
with val f =
alloca $ \ptr -> do
poke ptr val
res <- f ptr
return res
-- marshalling of Boolean values (non-zero corresponds to 'True')
-- |Convert a Haskell 'Bool' to its numeric representation
fromBool :: Num a => Bool -> a
fromBool False = 0
fromBool True = 1
-- |Convert a Boolean in numeric representation to a Haskell value
toBool :: (Eq a, Num a) => a -> Bool
toBool = (/= 0)
-- |Allocate storage and marshal a storable value wrapped into a 'Maybe'
maybeNew :: ( a -> IO (Ptr b))
-> (Maybe a -> IO (Ptr b))
maybeNew = maybe (return nullPtr)
-- |Converts a @withXXX@ combinator into one marshalling a value wrapped
maybeWith :: ( a -> (Ptr b -> IO c) -> IO c)
-> (Maybe a -> (Ptr b -> IO c) -> IO c)
maybeWith = maybe ($ nullPtr)
maybePeek :: (Ptr a -> IO b) -> Ptr a -> IO (Maybe b)
maybePeek peek ptr | ptr == nullPtr = return Nothing
| otherwise = do a <- peek ptr; return (Just a)
-- |Replicates a @withXXX@ combinator over a list of objects, yielding a list of
-- marshalled objects
withMany :: (a -> (b -> res) -> res) -- withXXX combinator for one object
         -> [a]                      -- storable objects
         -> ([b] -> res)             -- action on list of marshalled obj.s
         -> res
withMany _ [] f = f []
withMany withFoo (x:xs) f = withFoo x $ \x' ->
withMany withFoo xs (\xs' -> f (x':xs'))
-- Haskellish interface to memcpy and memmove
-- |Copies the given number of bytes from the second area (source) into the
-- first (destination); the copied areas may /not/ overlap
copyBytes :: Ptr a -> Ptr a -> Int -> IO ()
copyBytes dest src size = do _ <- memcpy dest src (fromIntegral size)
return ()
-- |Copies the given number of bytes from the second area (source) into the
moveBytes :: Ptr a -> Ptr a -> Int -> IO ()
moveBytes dest src size = do _ <- memmove dest src (fromIntegral size)
return ()
-- |Basic C routines needed for memory copying
foreign import ccall unsafe "string.h" memcpy :: Ptr a -> Ptr a -> CSize -> IO (Ptr a)
foreign import ccall unsafe "string.h" memmove :: Ptr a -> Ptr a -> CSize -> IO (Ptr a)
|
fd54f237aab924c9002832571772f8bba37f91c9b0afb8ce3aa5bb3d32de1aa8 | input-output-hk/ouroboros-network | ChainDB.hs | -- | The storage layer is a highly specialized database for storing the blockchain.
It consists of five subcomponents :
--
* An abstract file system API , ' Ouroboros . Consensus . Storage . FS.API.HasFS ' ,
-- that smooths out over some differences between the file systems of
-- different operating systems and, more importantly, allows us to simulate
-- all kinds of failures. This is then used for stress-testing the other
-- components below.
* The _ _ [ Immutable . Consensus . Storage . " ) _ _ , stores
-- the part of the chain that is immutable, that is, no longer subject to
-- rollback. It is an append-only database, providing efficient access to the
chain . ' Ouroboros . Consensus . Storage . ImmutableDB.API.ImmutableDB ' defines the
-- immutable DB API.
* The _ _ [ Volatile DB]("Ouroboros . Consensus . Storage . VolatileDB " ) _ _ , stores the
-- part of the chain near its tip. This doesn't really store a __chain__ as
-- such, but rather simply a collection of blocks from which we might
_ _ construct _ _ a chain . ' Ouroboros . Consensus . Storage . VolatileDB.API.VolatileDB '
-- defines the volatile DB API.
-- * The ledger DB, stores the state of the ledger. The
-- __[on disk]("Ouroboros.Consensus.Storage.LedgerDB.OnDisk")__ part only stores
-- snapshots of the ledger state that correspond to immutable blocks. The
_ _ [ in memory]("Ouroboros . Consensus . Storage . LedgerDB.InMemory " ) _ _ part
-- stores various snapshots of the ledger state corresponding to blocks near
-- the current tip of the chain, and provides an efficient way of computing
-- any ledger state for the last @k@ blocks of the chain.
-- * The Chain DB finally combines all of these components. It makes decisions
-- about which chains to adopt (chain selection), switches to forks when
-- needed, deals with clock skew, and provides various interfaces to the rest
-- of the consensus layer for things like finding out which blocks were
-- invalid (so we can disconnect from the clients who sent them), cursors that
-- follow the tip of the chain (so that we can inform our downstream peers of
-- how our chain evolves), etc. In many ways, the chain DB is the component
-- that is responsible for "consensus": deciding which chain is the one true
chain . ' Ouroboros . Consensus . Storage . ChainDB.API.ChainDB ' defines the chain
-- DB API.
--
module Ouroboros.Consensus.Storage.ChainDB (
module Ouroboros.Consensus.Storage.ChainDB.API
, module Ouroboros.Consensus.Storage.ChainDB.Impl
) where
import Ouroboros.Consensus.Storage.ChainDB.API
import Ouroboros.Consensus.Storage.ChainDB.Impl
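-- Illustrative usage sketch (added for clarity, not part of the original
-- module): downstream consensus code is expected to depend on this re-export
-- module rather than on the API and Impl modules separately, e.g.
--
-- > import Ouroboros.Consensus.Storage.ChainDB (ChainDB (..))
-- >
-- > -- 'ChainDB' is the record of operations exported via the API module;
-- > -- the Impl module provides the machinery that constructs such a record.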
| null | https://raw.githubusercontent.com/input-output-hk/ouroboros-network/959fa23d0a2bf963b109063ad998fb83edce73e4/ouroboros-consensus/src/Ouroboros/Consensus/Storage/ChainDB.hs | haskell | | The storage layer is a highly specialized database for storing the blockchain.
that smooths out over some differences between the file systems of
different operating systems and, more importantly, allows us to simulate
all kinds of failures. This is then used for stress-testing the other
components below.
the part of the chain that is immutable, that is, no longer subject to
rollback. It is an append-only database, providing efficient access to the
immutable DB API.
part of the chain near its tip. This doesn't really store a __chain__ as
such, but rather simply a collection of blocks from which we might
defines the volatile DB API.
* The ledger DB, stores the state of the ledger. The
__[on disk]("Ouroboros.Consensus.Storage.LedgerDB.OnDisk")__ part only stores
snapshots of the ledger state that correspond to immutable blocks. The
stores various snapshots of the ledger state corresponding to blocks near
the current tip of the chain, and provides an efficient way of computing
any ledger state for the last @k@ blocks of the chain.
* The Chain DB finally combines all of these components. It makes decisions
about which chains to adopt (chain selection), switches to forks when
needed, deals with clock skew, and provides various interfaces to the rest
of the consensus layer for things like finding out which blocks were
invalid (so we can disconnect from the clients who sent them), cursors that
follow the tip of the chain (so that we can inform our downstream peers of
how our chain evolves), etc. In many ways, the chain DB is the component
that is responsible for "consensus": deciding which chain is the one true
DB API.
| It consists of five subcomponents :
* An abstract file system API , ' Ouroboros . Consensus . Storage . FS.API.HasFS ' ,
* The _ _ [ Immutable . Consensus . Storage . " ) _ _ , stores
chain . ' Ouroboros . Consensus . Storage . ImmutableDB.API.ImmutableDB ' defines the
* The _ _ [ Volatile DB]("Ouroboros . Consensus . Storage . VolatileDB " ) _ _ , stores the
_ _ construct _ _ a chain . ' Ouroboros . Consensus . Storage . VolatileDB.API.VolatileDB '
_ _ [ in memory]("Ouroboros . Consensus . Storage . LedgerDB.InMemory " ) _ _ part
chain . ' Ouroboros . Consensus . Storage . ChainDB.API.ChainDB ' defines the chain
module Ouroboros.Consensus.Storage.ChainDB (
module Ouroboros.Consensus.Storage.ChainDB.API
, module Ouroboros.Consensus.Storage.ChainDB.Impl
) where
import Ouroboros.Consensus.Storage.ChainDB.API
import Ouroboros.Consensus.Storage.ChainDB.Impl
|
869fa2e57aabecb5b40995e997d6131f39c69bf2574b2a0453f24ce729e12317 | ghc/packages-Cabal | ShowBuildInfo.hs | -- |
-- This module defines a simple JSON-based format for exporting basic
information about a Cabal package and the compiler configuration Cabal
-- would use to build it. This can be produced with the
-- @new-show-build-info@ command.
--
--
-- This format is intended for consumption by external tooling and should
-- therefore be rather stable. Moreover, this allows tooling users to avoid
-- linking against Cabal. This is an important advantage as direct API usage
-- tends to be rather fragile in the presence of user-initiated upgrades of
-- Cabal.
--
-- Below is an example of the output this module produces,
--
-- @
-- { "cabal-version": "1.23.0.0",
-- "compiler": {
" flavour " : " GHC " ,
-- "compiler-id": "ghc-7.10.2",
-- "path": "/usr/bin/ghc",
-- },
-- "components": [
-- { "type": "lib",
" name " : " lib : " ,
-- "compiler-args":
-- ["-O", "-XHaskell98", "-Wall",
-- "-package-id", "parallel-3.2.0.6-b79c38c5c25fff77f3ea7271851879eb"]
" modules " : [ " Project . " , " Project . ModB " , " Paths_project " ] ,
-- "src-files": [],
-- "src-dirs": ["src"]
-- }
-- ]
-- }
-- @
--
-- The @cabal-version@ property provides the version of the Cabal library
-- which generated the output. The @compiler@ property gives some basic
-- information about the compiler Cabal would use to compile the package.
--
-- The @components@ property gives a list of the Cabal 'Component's defined by
-- the package. Each has,
--
-- * @type@: the type of the component (one of @lib@, @exe@,
-- @test@, @bench@, or @flib@)
-- * @name@: a string serving to uniquely identify the component within the
-- package.
-- * @compiler-args@: the command-line arguments Cabal would pass to the
-- compiler to compile the component
-- * @modules@: the modules belonging to the component
-- * @src-dirs@: a list of directories where the modules might be found
-- * @src-files@: any other Haskell sources needed by the component
--
-- Note: At the moment this is only supported when using the GHC compiler.
--
module Distribution.Simple.ShowBuildInfo (mkBuildInfo) where
import Distribution.Compat.Prelude
import Prelude ()
import qualified Distribution.Simple.GHC as GHC
import qualified Distribution.Simple.Program.GHC as GHC
import Distribution.PackageDescription
import Distribution.Compiler
import Distribution.Verbosity
import Distribution.Simple.Compiler
import Distribution.Simple.LocalBuildInfo
import Distribution.Simple.Program
import Distribution.Simple.Setup
import Distribution.Simple.Utils (cabalVersion)
import Distribution.Simple.Utils.Json
import Distribution.Types.TargetInfo
import Distribution.Text
import Distribution.Pretty
-- | Construct a JSON document describing the build information for a
-- package.
mkBuildInfo
:: PackageDescription -- ^ Mostly information from the .cabal file
-> LocalBuildInfo -- ^ Configuration information
-> BuildFlags -- ^ Flags that the user passed to build
-> [TargetInfo]
-> Json
mkBuildInfo pkg_descr lbi _flags targetsToBuild = info
where
targetToNameAndLBI target =
(componentLocalName $ targetCLBI target, targetCLBI target)
componentsToBuild = map targetToNameAndLBI targetsToBuild
(.=) :: String -> Json -> (String, Json)
k .= v = (k, v)
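-- Illustrative sketch (added for clarity, not part of the original module):
-- the document assembled below is an ordinary 'Json' value, so a minimal
-- build-info document of the same shape can be written directly as
--
-- > JsonObject
-- >   [ ("cabal-version", JsonString "1.23.0.0")
-- >   , ("compiler", JsonObject [("flavour", JsonString "GHC")])
-- >   , ("components", JsonArray [])
-- >   ]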
info = JsonObject
[ "cabal-version" .= JsonString (display cabalVersion)
, "compiler" .= mkCompilerInfo
, "components" .= JsonArray (map mkComponentInfo componentsToBuild)
]
mkCompilerInfo = JsonObject
[ "flavour" .= JsonString (prettyShow $ compilerFlavor $ compiler lbi)
, "compiler-id" .= JsonString (showCompilerId $ compiler lbi)
, "path" .= path
]
where
path = maybe JsonNull (JsonString . programPath)
$ (flavorToProgram . compilerFlavor $ compiler lbi)
>>= flip lookupProgram (withPrograms lbi)
flavorToProgram :: CompilerFlavor -> Maybe Program
flavorToProgram GHC = Just ghcProgram
flavorToProgram GHCJS = Just ghcjsProgram
flavorToProgram UHC = Just uhcProgram
flavorToProgram JHC = Just jhcProgram
flavorToProgram _ = Nothing
mkComponentInfo (name, clbi) = JsonObject
[ "type" .= JsonString compType
, "name" .= JsonString (prettyShow name)
, "unit-id" .= JsonString (prettyShow $ componentUnitId clbi)
, "compiler-args" .= JsonArray (map JsonString $ getCompilerArgs bi lbi clbi)
, "modules" .= JsonArray (map (JsonString . display) modules)
, "src-files" .= JsonArray (map JsonString sourceFiles)
, "src-dirs" .= JsonArray (map JsonString $ hsSourceDirs bi)
]
where
bi = componentBuildInfo comp
comp = fromMaybe (error $ "mkBuildInfo: no component " ++ prettyShow name) $ lookupComponent pkg_descr name
compType = case comp of
CLib _ -> "lib"
CExe _ -> "exe"
CTest _ -> "test"
CBench _ -> "bench"
CFLib _ -> "flib"
modules = case comp of
CLib lib -> explicitLibModules lib
CExe exe -> exeModules exe
_ -> []
sourceFiles = case comp of
CLib _ -> []
CExe exe -> [modulePath exe]
_ -> []
-- | Get the command-line arguments that would be passed
-- to the compiler to build the given component.
getCompilerArgs
:: BuildInfo
-> LocalBuildInfo
-> ComponentLocalBuildInfo
-> [String]
getCompilerArgs bi lbi clbi =
case compilerFlavor $ compiler lbi of
GHC -> ghc
GHCJS -> ghc
c -> error $ "ShowBuildInfo.getCompilerArgs: Don't know how to get "++
"build arguments for compiler "++show c
where
-- This is absolutely awful
ghc = GHC.renderGhcOptions (compiler lbi) (hostPlatform lbi) baseOpts
where
baseOpts = GHC.componentGhcOptions normal lbi bi clbi (buildDir lbi)
| null | https://raw.githubusercontent.com/ghc/packages-Cabal/6f22f2a789fa23edb210a2591d74ea6a5f767872/Cabal/Distribution/Simple/ShowBuildInfo.hs | haskell | |
This module defines a simple JSON-based format for exporting basic
would use to build it. This can be produced with the
This format is intended for consumption by external tooling and should
therefore be rather stable. Moreover, this allows tooling users to avoid
tends to be rather fragile in the presence of user-initiated upgrades of
Below is an example of the output this module produces,
@
{ "cabal-version": "1.23.0.0",
"compiler": {
"compiler-id": "ghc-7.10.2",
"path": "/usr/bin/ghc",
},
"components": [
{ "type": "lib",
"compiler-args":
["-O", "-XHaskell98", "-Wall",
"-package-id", "parallel-3.2.0.6-b79c38c5c25fff77f3ea7271851879eb"]
"src-files": [],
"src-dirs": ["src"]
}
]
}
@
which generated the output. The @compiler@ property gives some basic
The @components@ property gives a list of the Cabal 'Component's defined by
the package. Each has,
* @name@: a string serving to uniquely identify the component within the
package.
compiler to compile the component
* @modules@: the modules belonging to the component
* @src-dirs@: a list of directories where the modules might be found
| Construct a JSON document describing the build information for a
package.
^ Mostly information from the .cabal file
^ Configuration information
^ Flags that the user passed to build
| Get the command-line arguments that would be passed
to the compiler to build the given component.
This is absolutely awful | information about a Cabal package and the compiler configuration Cabal
new - show - build - info@ command .
linking against Cabal . This is an important advantage as direct API usage
Cabal .
" flavour " : " GHC " ,
" name " : " lib : " ,
" modules " : [ " Project . " , " Project . ModB " , " Paths_project " ] ,
The - version@ property provides the version of the Cabal library
information about the compiler Cabal would use to compile the package .
* @type@ : the type of the component ( one of @lib@ , @exe@ ,
@test@ , @bench@ , or @flib@ )
* @compiler - args@ : the command - line arguments Cabal would pass to the
* @src - files@ : any other Haskell sources needed by the component
Note : At the moment this is only supported when using the GHC compiler .
module Distribution.Simple.ShowBuildInfo (mkBuildInfo) where
import Distribution.Compat.Prelude
import Prelude ()
import qualified Distribution.Simple.GHC as GHC
import qualified Distribution.Simple.Program.GHC as GHC
import Distribution.PackageDescription
import Distribution.Compiler
import Distribution.Verbosity
import Distribution.Simple.Compiler
import Distribution.Simple.LocalBuildInfo
import Distribution.Simple.Program
import Distribution.Simple.Setup
import Distribution.Simple.Utils (cabalVersion)
import Distribution.Simple.Utils.Json
import Distribution.Types.TargetInfo
import Distribution.Text
import Distribution.Pretty
mkBuildInfo
-> [TargetInfo]
-> Json
mkBuildInfo pkg_descr lbi _flags targetsToBuild = info
where
targetToNameAndLBI target =
(componentLocalName $ targetCLBI target, targetCLBI target)
componentsToBuild = map targetToNameAndLBI targetsToBuild
(.=) :: String -> Json -> (String, Json)
k .= v = (k, v)
info = JsonObject
[ "cabal-version" .= JsonString (display cabalVersion)
, "compiler" .= mkCompilerInfo
, "components" .= JsonArray (map mkComponentInfo componentsToBuild)
]
mkCompilerInfo = JsonObject
[ "flavour" .= JsonString (prettyShow $ compilerFlavor $ compiler lbi)
, "compiler-id" .= JsonString (showCompilerId $ compiler lbi)
, "path" .= path
]
where
path = maybe JsonNull (JsonString . programPath)
$ (flavorToProgram . compilerFlavor $ compiler lbi)
>>= flip lookupProgram (withPrograms lbi)
flavorToProgram :: CompilerFlavor -> Maybe Program
flavorToProgram GHC = Just ghcProgram
flavorToProgram GHCJS = Just ghcjsProgram
flavorToProgram UHC = Just uhcProgram
flavorToProgram JHC = Just jhcProgram
flavorToProgram _ = Nothing
mkComponentInfo (name, clbi) = JsonObject
[ "type" .= JsonString compType
, "name" .= JsonString (prettyShow name)
, "unit-id" .= JsonString (prettyShow $ componentUnitId clbi)
, "compiler-args" .= JsonArray (map JsonString $ getCompilerArgs bi lbi clbi)
, "modules" .= JsonArray (map (JsonString . display) modules)
, "src-files" .= JsonArray (map JsonString sourceFiles)
, "src-dirs" .= JsonArray (map JsonString $ hsSourceDirs bi)
]
where
bi = componentBuildInfo comp
comp = fromMaybe (error $ "mkBuildInfo: no component " ++ prettyShow name) $ lookupComponent pkg_descr name
compType = case comp of
CLib _ -> "lib"
CExe _ -> "exe"
CTest _ -> "test"
CBench _ -> "bench"
CFLib _ -> "flib"
modules = case comp of
CLib lib -> explicitLibModules lib
CExe exe -> exeModules exe
_ -> []
sourceFiles = case comp of
CLib _ -> []
CExe exe -> [modulePath exe]
_ -> []
getCompilerArgs
:: BuildInfo
-> LocalBuildInfo
-> ComponentLocalBuildInfo
-> [String]
getCompilerArgs bi lbi clbi =
case compilerFlavor $ compiler lbi of
GHC -> ghc
GHCJS -> ghc
c -> error $ "ShowBuildInfo.getCompilerArgs: Don't know how to get "++
"build arguments for compiler "++show c
where
ghc = GHC.renderGhcOptions (compiler lbi) (hostPlatform lbi) baseOpts
where
baseOpts = GHC.componentGhcOptions normal lbi bi clbi (buildDir lbi)
|
71edfdcee37b4b6efb83006b78c83ad64b9f12b03151822c1bbe43ecea11e272 | MyDataFlow/ttalk-server | cowboy_req.erl | Copyright ( c ) 2011 - 2014 , < >
Copyright ( c ) 2011 , < >
%%
%% Permission to use, copy, modify, and/or distribute this software for any
%% purpose with or without fee is hereby granted, provided that the above
%% copyright notice and this permission notice appear in all copies.
%%
%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-module(cowboy_req).
%% Request API.
-export([new/14]).
-export([method/1]).
-export([version/1]).
-export([peer/1]).
-export([host/1]).
-export([host_info/1]).
-export([port/1]).
-export([path/1]).
-export([path_info/1]).
-export([qs/1]).
-export([qs_val/2]).
-export([qs_val/3]).
-export([qs_vals/1]).
-export([host_url/1]).
-export([url/1]).
-export([binding/2]).
-export([binding/3]).
-export([bindings/1]).
-export([header/2]).
-export([header/3]).
-export([headers/1]).
-export([parse_header/2]).
-export([parse_header/3]).
-export([cookie/2]).
-export([cookie/3]).
-export([cookies/1]).
-export([meta/2]).
-export([meta/3]).
-export([set_meta/3]).
%% Request body API.
-export([has_body/1]).
-export([body_length/1]).
-export([body/1]).
-export([body/2]).
-export([body_qs/1]).
-export([body_qs/2]).
%% Multipart API.
-export([part/1]).
-export([part/2]).
-export([part_body/1]).
-export([part_body/2]).
%% Response API.
-export([set_resp_cookie/4]).
-export([set_resp_header/3]).
-export([set_resp_body/2]).
-export([set_resp_body_fun/2]).
-export([set_resp_body_fun/3]).
-export([has_resp_header/2]).
-export([has_resp_body/1]).
-export([delete_resp_header/2]).
-export([reply/2]).
-export([reply/3]).
-export([reply/4]).
-export([chunked_reply/2]).
-export([chunked_reply/3]).
-export([chunk/2]).
-export([upgrade_reply/3]).
-export([continue/1]).
-export([maybe_reply/2]).
-export([ensure_response/2]).
%% Private setter/getter API.
-export([append_buffer/2]).
-export([get/2]).
-export([set/2]).
-export([set_bindings/4]).
%% Misc API.
-export([compact/1]).
-export([lock/1]).
-export([to_list/1]).
-type cookie_opts() :: cow_cookie:cookie_opts().
-export_type([cookie_opts/0]).
-type content_decode_fun() :: fun((binary())
-> {ok, binary()}
| {error, atom()}).
-type transfer_decode_fun() :: fun((binary(), any())
-> cow_http_te:decode_ret()).
-type body_opts() :: [{continue, boolean()}
| {length, non_neg_integer()}
| {read_length, non_neg_integer()}
| {read_timeout, timeout()}
| {transfer_decode, transfer_decode_fun(), any()}
| {content_decode, content_decode_fun()}].
-export_type([body_opts/0]).
-type resp_body_fun() :: fun((any(), module()) -> ok).
-type send_chunk_fun() :: fun((iodata()) -> ok | {error, atom()}).
-type resp_chunked_fun() :: fun((send_chunk_fun()) -> ok).
-record(http_req, {
%% Transport.
socket = undefined :: any(),
transport = undefined :: undefined | module(),
connection = keepalive :: keepalive | close,
%% Request.
pid = undefined :: pid(),
method = <<"GET">> :: binary(),
version = 'HTTP/1.1' :: cowboy:http_version(),
peer = undefined :: undefined | {inet:ip_address(), inet:port_number()},
host = undefined :: undefined | binary(),
host_info = undefined :: undefined | cowboy_router:tokens(),
port = undefined :: undefined | inet:port_number(),
path = undefined :: binary(),
path_info = undefined :: undefined | cowboy_router:tokens(),
qs = undefined :: binary(),
qs_vals = undefined :: undefined | list({binary(), binary() | true}),
bindings = undefined :: undefined | cowboy_router:bindings(),
headers = [] :: cowboy:http_headers(),
p_headers = [] :: [any()],
cookies = undefined :: undefined | [{binary(), binary()}],
meta = [] :: [{atom(), any()}],
%% Request body.
body_state = waiting :: waiting | done | {stream, non_neg_integer(),
transfer_decode_fun(), any(), content_decode_fun()},
buffer = <<>> :: binary(),
multipart = undefined :: undefined | {binary(), binary()},
%% Response.
resp_compress = false :: boolean(),
resp_state = waiting :: locked | waiting | waiting_stream
| chunks | stream | done,
resp_headers = [] :: cowboy:http_headers(),
resp_body = <<>> :: iodata() | resp_body_fun()
| {non_neg_integer(), resp_body_fun()}
| {chunked, resp_chunked_fun()},
%% Functions.
onresponse = undefined :: undefined | already_called
| cowboy:onresponse_fun()
}).
-opaque req() :: #http_req{}.
-export_type([req/0]).
%% Request API.
-spec new(any(), module(),
undefined | {inet:ip_address(), inet:port_number()},
binary(), binary(), binary(),
cowboy:http_version(), cowboy:http_headers(), binary(),
inet:port_number() | undefined, binary(), boolean(), boolean(),
undefined | cowboy:onresponse_fun())
-> req().
new(Socket, Transport, Peer, Method, Path, Query,
Version, Headers, Host, Port, Buffer, CanKeepalive,
Compress, OnResponse) ->
Req = #http_req{socket=Socket, transport=Transport, pid=self(), peer=Peer,
method=Method, path=Path, qs=Query, version=Version,
headers=Headers, host=Host, port=Port, buffer=Buffer,
resp_compress=Compress, onresponse=OnResponse},
case CanKeepalive of
false ->
Req#http_req{connection=close};
true ->
case lists:keyfind(<<"connection">>, 1, Headers) of
false ->
case Version of
'HTTP/1.1' -> Req; %% keepalive
'HTTP/1.0' -> Req#http_req{connection=close}
end;
{_, ConnectionHeader} ->
Tokens = cow_http_hd:parse_connection(ConnectionHeader),
Connection = connection_to_atom(Tokens),
Req#http_req{connection=Connection,
p_headers=[{<<"connection">>, Tokens}]}
end
end.
-spec method(Req) -> {binary(), Req} when Req::req().
method(Req) ->
{Req#http_req.method, Req}.
-spec version(Req) -> {cowboy:http_version(), Req} when Req::req().
version(Req) ->
{Req#http_req.version, Req}.
-spec peer(Req)
-> {{inet:ip_address(), inet:port_number()}, Req}
when Req::req().
peer(Req) ->
{Req#http_req.peer, Req}.
-spec host(Req) -> {binary(), Req} when Req::req().
host(Req) ->
{Req#http_req.host, Req}.
-spec host_info(Req)
-> {cowboy_router:tokens() | undefined, Req} when Req::req().
host_info(Req) ->
{Req#http_req.host_info, Req}.
-spec port(Req) -> {inet:port_number(), Req} when Req::req().
port(Req) ->
{Req#http_req.port, Req}.
-spec path(Req) -> {binary(), Req} when Req::req().
path(Req) ->
{Req#http_req.path, Req}.
-spec path_info(Req)
-> {cowboy_router:tokens() | undefined, Req} when Req::req().
path_info(Req) ->
{Req#http_req.path_info, Req}.
-spec qs(Req) -> {binary(), Req} when Req::req().
qs(Req) ->
{Req#http_req.qs, Req}.
-spec qs_val(binary(), Req)
-> {binary() | true | undefined, Req} when Req::req().
qs_val(Name, Req) when is_binary(Name) ->
qs_val(Name, Req, undefined).
-spec qs_val(binary(), Req, Default)
-> {binary() | true | Default, Req} when Req::req(), Default::any().
qs_val(Name, Req=#http_req{qs=RawQs, qs_vals=undefined}, Default)
when is_binary(Name) ->
QsVals = cow_qs:parse_qs(RawQs),
qs_val(Name, Req#http_req{qs_vals=QsVals}, Default);
qs_val(Name, Req, Default) ->
case lists:keyfind(Name, 1, Req#http_req.qs_vals) of
{Name, Value} -> {Value, Req};
false -> {Default, Req}
end.
-spec qs_vals(Req) -> {list({binary(), binary() | true}), Req} when Req::req().
qs_vals(Req=#http_req{qs=RawQs, qs_vals=undefined}) ->
QsVals = cow_qs:parse_qs(RawQs),
qs_vals(Req#http_req{qs_vals=QsVals});
qs_vals(Req=#http_req{qs_vals=QsVals}) ->
{QsVals, Req}.
%% The URL includes the scheme, host and port only.
-spec host_url(Req) -> {undefined | binary(), Req} when Req::req().
host_url(Req=#http_req{port=undefined}) ->
{undefined, Req};
host_url(Req=#http_req{transport=Transport, host=Host, port=Port}) ->
TransportName = Transport:name(),
Secure = case TransportName of
ssl -> <<"s">>;
_ -> <<>>
end,
PortBin = case {TransportName, Port} of
{ssl, 443} -> <<>>;
{tcp, 80} -> <<>>;
_ -> << ":", (integer_to_binary(Port))/binary >>
end,
{<< "http", Secure/binary, "://", Host/binary, PortBin/binary >>, Req}.
%% The URL includes the scheme, host, port, path and query string.
-spec url(Req) -> {undefined | binary(), Req} when Req::req().
url(Req=#http_req{}) ->
{HostURL, Req2} = host_url(Req),
url(HostURL, Req2).
url(undefined, Req=#http_req{}) ->
{undefined, Req};
url(HostURL, Req=#http_req{path=Path, qs=QS}) ->
QS2 = case QS of
<<>> -> <<>>;
_ -> << "?", QS/binary >>
end,
{<< HostURL/binary, Path/binary, QS2/binary >>, Req}.
-spec binding(atom(), Req) -> {any() | undefined, Req} when Req::req().
binding(Name, Req) when is_atom(Name) ->
binding(Name, Req, undefined).
-spec binding(atom(), Req, Default)
-> {any() | Default, Req} when Req::req(), Default::any().
binding(Name, Req, Default) when is_atom(Name) ->
case lists:keyfind(Name, 1, Req#http_req.bindings) of
{Name, Value} -> {Value, Req};
false -> {Default, Req}
end.
-spec bindings(Req) -> {[{atom(), any()}], Req} when Req::req().
bindings(Req) ->
{Req#http_req.bindings, Req}.
-spec header(binary(), Req)
-> {binary() | undefined, Req} when Req::req().
header(Name, Req) ->
header(Name, Req, undefined).
-spec header(binary(), Req, Default)
-> {binary() | Default, Req} when Req::req(), Default::any().
header(Name, Req, Default) ->
case lists:keyfind(Name, 1, Req#http_req.headers) of
{Name, Value} -> {Value, Req};
false -> {Default, Req}
end.
-spec headers(Req) -> {cowboy:http_headers(), Req} when Req::req().
headers(Req) ->
{Req#http_req.headers, Req}.
-spec parse_header(binary(), Req)
-> {ok, any(), Req} | {undefined, binary(), Req}
| {error, badarg} when Req::req().
parse_header(Name, Req=#http_req{p_headers=PHeaders}) ->
case lists:keyfind(Name, 1, PHeaders) of
false -> parse_header(Name, Req, parse_header_default(Name));
{Name, Value} -> {ok, Value, Req}
end.
-spec parse_header_default(binary()) -> any().
parse_header_default(<<"transfer-encoding">>) -> [<<"identity">>];
parse_header_default(_Name) -> undefined.
-spec parse_header(binary(), Req, any())
-> {ok, any(), Req} | {undefined, binary(), Req}
| {error, badarg} when Req::req().
parse_header(Name = <<"accept">>, Req, Default) ->
parse_header(Name, Req, Default,
fun (Value) ->
cowboy_http:list(Value, fun cowboy_http:media_range/2)
end);
parse_header(Name = <<"accept-charset">>, Req, Default) ->
parse_header(Name, Req, Default,
fun (Value) ->
cowboy_http:nonempty_list(Value, fun cowboy_http:conneg/2)
end);
parse_header(Name = <<"accept-encoding">>, Req, Default) ->
parse_header(Name, Req, Default,
fun (Value) ->
cowboy_http:list(Value, fun cowboy_http:conneg/2)
end);
parse_header(Name = <<"accept-language">>, Req, Default) ->
parse_header(Name, Req, Default,
fun (Value) ->
cowboy_http:nonempty_list(Value, fun cowboy_http:language_range/2)
end);
parse_header(Name = <<"authorization">>, Req, Default) ->
parse_header(Name, Req, Default,
fun (Value) ->
cowboy_http:token_ci(Value, fun cowboy_http:authorization/2)
end);
parse_header(Name = <<"content-length">>, Req, Default) ->
parse_header(Name, Req, Default, fun cow_http_hd:parse_content_length/1);
parse_header(Name = <<"content-type">>, Req, Default) ->
parse_header(Name, Req, Default, fun cowboy_http:content_type/1);
parse_header(Name = <<"cookie">>, Req, Default) ->
parse_header(Name, Req, Default, fun cow_cookie:parse_cookie/1);
parse_header(Name = <<"expect">>, Req, Default) ->
parse_header(Name, Req, Default,
fun (Value) ->
cowboy_http:nonempty_list(Value, fun cowboy_http:expectation/2)
end);
parse_header(Name, Req, Default)
when Name =:= <<"if-match">>;
Name =:= <<"if-none-match">> ->
parse_header(Name, Req, Default, fun cowboy_http:entity_tag_match/1);
parse_header(Name, Req, Default)
when Name =:= <<"if-modified-since">>;
Name =:= <<"if-unmodified-since">> ->
parse_header(Name, Req, Default, fun cowboy_http:http_date/1);
parse_header(Name = <<"range">>, Req, Default) ->
parse_header(Name, Req, Default, fun cowboy_http:range/1);
parse_header(Name, Req, Default)
when Name =:= <<"sec-websocket-protocol">>;
Name =:= <<"x-forwarded-for">> ->
parse_header(Name, Req, Default,
fun (Value) ->
cowboy_http:nonempty_list(Value, fun cowboy_http:token/2)
end);
parse_header(Name = <<"transfer-encoding">>, Req, Default) ->
parse_header(Name, Req, Default, fun cow_http_hd:parse_transfer_encoding/1);
%% @todo Product version.
parse_header(Name = <<"upgrade">>, Req, Default) ->
parse_header(Name, Req, Default,
fun (Value) ->
cowboy_http:nonempty_list(Value, fun cowboy_http:token_ci/2)
end);
parse_header(Name = <<"sec-websocket-extensions">>, Req, Default) ->
parse_header(Name, Req, Default, fun cowboy_http:parameterized_tokens/1);
parse_header(Name, Req, Default) ->
{Value, Req2} = header(Name, Req, Default),
{undefined, Value, Req2}.
parse_header(Name, Req=#http_req{p_headers=PHeaders}, Default, Fun) ->
case header(Name, Req) of
{undefined, Req2} ->
{ok, Default, Req2#http_req{p_headers=[{Name, Default}|PHeaders]}};
{Value, Req2} ->
case Fun(Value) of
{error, badarg} ->
{error, badarg};
P ->
{ok, P, Req2#http_req{p_headers=[{Name, P}|PHeaders]}}
end
end.
-spec cookie(binary(), Req)
-> {binary() | undefined, Req} when Req::req().
cookie(Name, Req) when is_binary(Name) ->
cookie(Name, Req, undefined).
-spec cookie(binary(), Req, Default)
-> {binary() | Default, Req} when Req::req(), Default::any().
cookie(Name, Req=#http_req{cookies=undefined}, Default) when is_binary(Name) ->
case parse_header(<<"cookie">>, Req) of
{ok, undefined, Req2} ->
{Default, Req2#http_req{cookies=[]}};
{ok, Cookies, Req2} ->
cookie(Name, Req2#http_req{cookies=Cookies}, Default)
end;
cookie(Name, Req, Default) ->
case lists:keyfind(Name, 1, Req#http_req.cookies) of
{Name, Value} -> {Value, Req};
false -> {Default, Req}
end.
-spec cookies(Req) -> {list({binary(), binary()}), Req} when Req::req().
cookies(Req=#http_req{cookies=undefined}) ->
case parse_header(<<"cookie">>, Req) of
{ok, undefined, Req2} ->
{[], Req2#http_req{cookies=[]}};
{ok, Cookies, Req2} ->
cookies(Req2#http_req{cookies=Cookies});
%% Flash player incorrectly sends an empty Cookie header.
{error, badarg} ->
{[], Req#http_req{cookies=[]}}
end;
cookies(Req=#http_req{cookies=Cookies}) ->
{Cookies, Req}.
-spec meta(atom(), Req) -> {any() | undefined, Req} when Req::req().
meta(Name, Req) ->
meta(Name, Req, undefined).
-spec meta(atom(), Req, any()) -> {any(), Req} when Req::req().
meta(Name, Req, Default) ->
case lists:keyfind(Name, 1, Req#http_req.meta) of
{Name, Value} -> {Value, Req};
false -> {Default, Req}
end.
-spec set_meta(atom(), any(), Req) -> Req when Req::req().
set_meta(Name, Value, Req=#http_req{meta=Meta}) ->
Req#http_req{meta=lists:keystore(Name, 1, Meta, {Name, Value})}.
%% Request Body API.
-spec has_body(req()) -> boolean().
has_body(Req) ->
case lists:keyfind(<<"content-length">>, 1, Req#http_req.headers) of
{_, <<"0">>} ->
false;
{_, _} ->
true;
_ ->
lists:keymember(<<"transfer-encoding">>, 1, Req#http_req.headers)
end.
%% The length may not be known if Transfer-Encoding is not identity,
%% and the body hasn't been read at the time of the call.
-spec body_length(Req) -> {undefined | non_neg_integer(), Req} when Req::req().
body_length(Req) ->
case parse_header(<<"transfer-encoding">>, Req) of
{ok, [<<"identity">>], Req2} ->
{ok, Length, Req3} = parse_header(<<"content-length">>, Req2, 0),
{Length, Req3};
{ok, _, Req2} ->
{undefined, Req2}
end.
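%% Illustrative usage sketch (added for clarity, not part of the original
%% module): a handler that only accepts requests with a known body length of
%% at most 1 MB could guard on the result of body_length/1. The reject/1 and
%% read_body/1 helpers below are hypothetical.
%%
%% check_length(Req) ->
%% case cowboy_req:body_length(Req) of
%% {undefined, Req2} -> reject(Req2);
%% {Len, Req2} when Len =< 1048576 -> read_body(Req2);
%% {_, Req2} -> reject(Req2)
%% end.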
-spec body(Req)
-> {ok, binary(), Req} | {more, binary(), Req}
| {error, atom()} when Req::req().
body(Req) ->
body(Req, []).
-spec body(Req, body_opts())
-> {ok, binary(), Req} | {more, binary(), Req}
| {error, atom()} when Req::req().
body(Req=#http_req{body_state=waiting}, Opts) ->
%% Send a 100 continue if needed (enabled by default).
Req1 = case lists:keyfind(continue, 1, Opts) of
{_, false} ->
Req;
_ ->
{ok, ExpectHeader, Req0} = parse_header(<<"expect">>, Req),
ok = case ExpectHeader of
[<<"100-continue">>] -> continue(Req0);
_ -> ok
end,
Req0
end,
%% Initialize body streaming state.
CFun = case lists:keyfind(content_decode, 1, Opts) of
false ->
fun cowboy_http:ce_identity/1;
{_, CFun0} ->
CFun0
end,
case lists:keyfind(transfer_decode, 1, Opts) of
false ->
case parse_header(<<"transfer-encoding">>, Req1) of
{ok, [<<"chunked">>], Req2} ->
body(Req2#http_req{body_state={stream, 0,
fun cow_http_te:stream_chunked/2, {0, 0}, CFun}}, Opts);
{ok, [<<"identity">>], Req2} ->
{Len, Req3} = body_length(Req2),
case Len of
0 ->
{ok, <<>>, Req3#http_req{body_state=done}};
_ ->
body(Req3#http_req{body_state={stream, Len,
fun cow_http_te:stream_identity/2, {0, Len},
CFun}}, Opts)
end
end;
{_, TFun, TState} ->
body(Req1#http_req{body_state={stream, 0,
TFun, TState, CFun}}, Opts)
end;
body(Req=#http_req{body_state=done}, _) ->
{ok, <<>>, Req};
body(Req, Opts) ->
ChunkLen = case lists:keyfind(length, 1, Opts) of
false -> 8000000;
{_, ChunkLen0} -> ChunkLen0
end,
ReadLen = case lists:keyfind(read_length, 1, Opts) of
false -> 1000000;
{_, ReadLen0} -> ReadLen0
end,
ReadTimeout = case lists:keyfind(read_timeout, 1, Opts) of
false -> 15000;
{_, ReadTimeout0} -> ReadTimeout0
end,
body_loop(Req, ReadTimeout, ReadLen, ChunkLen, <<>>).
body_loop(Req=#http_req{buffer=Buffer, body_state={stream, Length, _, _, _}},
ReadTimeout, ReadLength, ChunkLength, Acc) ->
{Tag, Res, Req2} = case Buffer of
<<>> ->
body_recv(Req, ReadTimeout, min(Length, ReadLength));
_ ->
body_decode(Req, ReadTimeout)
end,
case {Tag, Res} of
{ok, {ok, Data}} ->
{ok, << Acc/binary, Data/binary >>, Req2};
{more, {ok, Data}} ->
Acc2 = << Acc/binary, Data/binary >>,
case byte_size(Acc2) >= ChunkLength of
true -> {more, Acc2, Req2};
false -> body_loop(Req2, ReadTimeout, ReadLength, ChunkLength, Acc2)
end;
_ -> %% Error.
Res
end.
body_recv(Req=#http_req{transport=Transport, socket=Socket, buffer=Buffer},
ReadTimeout, ReadLength) ->
case Transport:recv(Socket, ReadLength, ReadTimeout) of
{ok, Data} ->
body_decode(Req#http_req{buffer= << Buffer/binary, Data/binary >>},
ReadTimeout);
Error = {error, _} ->
{error, Error, Req}
end.
%% Two decodings happen. First a decoding function is applied to the
%% transferred data, and then another is applied to the actual content.
%%
%% Transfer encoding is generally used for chunked bodies. The decoding
%% function uses a state to keep track of how much it has read, which is
%% also initialized through this function.
%%
%% Content encoding is generally used for compression.
%%
%% @todo Handle chunked after-the-facts headers.
%% @todo Depending on the length returned we might want to 0 or +5 it.
body_decode(Req=#http_req{buffer=Data, body_state={stream, _,
TDecode, TState, CDecode}}, ReadTimeout) ->
case TDecode(Data, TState) of
more ->
body_recv(Req#http_req{body_state={stream, 0,
TDecode, TState, CDecode}}, ReadTimeout, 0);
{more, Data2, TState2} ->
{more, CDecode(Data2), Req#http_req{body_state={stream, 0,
TDecode, TState2, CDecode}, buffer= <<>>}};
{more, Data2, Length, TState2} when is_integer(Length) ->
{more, CDecode(Data2), Req#http_req{body_state={stream, Length,
TDecode, TState2, CDecode}, buffer= <<>>}};
{more, Data2, Rest, TState2} ->
{more, CDecode(Data2), Req#http_req{body_state={stream, 0,
TDecode, TState2, CDecode}, buffer=Rest}};
{done, TotalLength, Rest} ->
{ok, {ok, <<>>}, body_decode_end(Req, TotalLength, Rest)};
{done, Data2, TotalLength, Rest} ->
{ok, CDecode(Data2), body_decode_end(Req, TotalLength, Rest)}
end.
body_decode_end(Req=#http_req{headers=Headers, p_headers=PHeaders},
TotalLength, Rest) ->
Headers2 = lists:keystore(<<"content-length">>, 1, Headers,
{<<"content-length">>, integer_to_binary(TotalLength)}),
%% At this point we just assume TEs were all decoded.
Headers3 = lists:keydelete(<<"transfer-encoding">>, 1, Headers2),
PHeaders2 = lists:keystore(<<"content-length">>, 1, PHeaders,
{<<"content-length">>, TotalLength}),
PHeaders3 = lists:keydelete(<<"transfer-encoding">>, 1, PHeaders2),
Req#http_req{buffer=Rest, body_state=done,
headers=Headers3, p_headers=PHeaders3}.
-spec body_qs(Req)
-> {ok, [{binary(), binary() | true}], Req} | {error, atom()}
when Req::req().
body_qs(Req) ->
body_qs(Req, [
{length, 64000},
{read_length, 64000},
{read_timeout, 5000}]).
-spec body_qs(Req, body_opts()) -> {ok, [{binary(), binary() | true}], Req}
| {badlength, Req} | {error, atom()} when Req::req().
body_qs(Req, Opts) ->
case body(Req, Opts) of
{ok, Body, Req2} ->
{ok, cow_qs:parse_qs(Body), Req2};
{more, _, Req2} ->
{badlength, Req2};
{error, Reason} ->
{error, Reason}
end.
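%% Illustrative sketch (assumed handler context, arbitrary limit): parsing a
%% small application/x-www-form-urlencoded body and rejecting oversized ones.
%%
%% case cowboy_req:body_qs(Req, [{length, 16000}]) of
%% {ok, KVs, Req2} ->
%% Name = proplists:get_value(<<"name">>, KVs),
%% use_name(Name, Req2);
%% {badlength, Req2} ->
%% cowboy_req:reply(413, Req2)
%% end.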
%% Multipart API.
-spec part(Req)
-> {ok, cow_multipart:headers(), Req} | {done, Req}
when Req::req().
part(Req) ->
part(Req, [
{length, 64000},
{read_length, 64000},
{read_timeout, 5000}]).
-spec part(Req, body_opts())
-> {ok, cow_multipart:headers(), Req} | {done, Req}
when Req::req().
part(Req=#http_req{multipart=undefined}, Opts) ->
part(init_multipart(Req), Opts);
part(Req, Opts) ->
{Data, Req2} = stream_multipart(Req, Opts),
part(Data, Opts, Req2).
part(Buffer, Opts, Req=#http_req{multipart={Boundary, _}}) ->
case cow_multipart:parse_headers(Buffer, Boundary) of
more ->
{Data, Req2} = stream_multipart(Req, Opts),
part(<< Buffer/binary, Data/binary >>, Opts, Req2);
{more, Buffer2} ->
{Data, Req2} = stream_multipart(Req, Opts),
part(<< Buffer2/binary, Data/binary >>, Opts, Req2);
{ok, Headers, Rest} ->
{ok, Headers, Req#http_req{multipart={Boundary, Rest}}};
%% Ignore epilogue.
{done, _} ->
{done, Req#http_req{multipart=undefined}}
end.
-spec part_body(Req)
-> {ok, binary(), Req} | {more, binary(), Req}
when Req::req().
part_body(Req) ->
part_body(Req, []).
-spec part_body(Req, body_opts())
-> {ok, binary(), Req} | {more, binary(), Req}
when Req::req().
part_body(Req=#http_req{multipart=undefined}, Opts) ->
part_body(init_multipart(Req), Opts);
part_body(Req, Opts) ->
part_body(<<>>, Opts, Req, <<>>).
part_body(Buffer, Opts, Req=#http_req{multipart={Boundary, _}}, Acc) ->
ChunkLen = case lists:keyfind(length, 1, Opts) of
false -> 8000000;
{_, ChunkLen0} -> ChunkLen0
end,
case byte_size(Acc) > ChunkLen of
true ->
{more, Acc, Req#http_req{multipart={Boundary, Buffer}}};
false ->
{Data, Req2} = stream_multipart(Req, Opts),
case cow_multipart:parse_body(<< Buffer/binary, Data/binary >>, Boundary) of
{ok, Body} ->
part_body(<<>>, Opts, Req2, << Acc/binary, Body/binary >>);
{ok, Body, Rest} ->
part_body(Rest, Opts, Req2, << Acc/binary, Body/binary >>);
done ->
{ok, Acc, Req2};
{done, Body} ->
{ok, << Acc/binary, Body/binary >>, Req2};
{done, Body, Rest} ->
{ok, << Acc/binary, Body/binary >>,
Req2#http_req{multipart={Boundary, Rest}}}
end
end.
init_multipart(Req) ->
{ok, {<<"multipart">>, _, Params}, Req2}
= parse_header(<<"content-type">>, Req),
{_, Boundary} = lists:keyfind(<<"boundary">>, 1, Params),
Req2#http_req{multipart={Boundary, <<>>}}.
stream_multipart(Req=#http_req{body_state=BodyState, multipart={_, <<>>}}, Opts) ->
true = BodyState =/= done,
{_, Data, Req2} = body(Req, Opts),
{Data, Req2};
stream_multipart(Req=#http_req{multipart={Boundary, Buffer}}, _) ->
{Buffer, Req#http_req{multipart={Boundary, <<>>}}}.
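%% Illustrative sketch of a multipart read loop (not upstream code; read_parts
%% is a hypothetical helper). Each part/1 call yields the next part's headers;
%% part_body/1 may return {more, ...} for large parts, in which case it must
%% be called again on the returned Req.
%%
%% read_parts(Req) ->
%% case cowboy_req:part(Req) of
%% {ok, _Headers, Req2} ->
%% {ok, _Body, Req3} = cowboy_req:part_body(Req2),
%% read_parts(Req3);
%% {done, Req2} ->
%% {ok, Req2}
%% end.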
%% Response API.
%% The cookie name cannot contain any of the following characters:
%% =,;\s\t\r\n\013\014
%%
%% The cookie value cannot contain any of the following characters:
%% ,; \t\r\n\013\014
-spec set_resp_cookie(iodata(), iodata(), cookie_opts(), Req)
-> Req when Req::req().
set_resp_cookie(Name, Value, Opts, Req) ->
Cookie = cow_cookie:setcookie(Name, Value, Opts),
set_resp_header(<<"set-cookie">>, Cookie, Req).
-spec set_resp_header(binary(), iodata(), Req)
-> Req when Req::req().
set_resp_header(Name, Value, Req=#http_req{resp_headers=RespHeaders}) ->
Req#http_req{resp_headers=[{Name, Value}|RespHeaders]}.
-spec set_resp_body(iodata(), Req) -> Req when Req::req().
set_resp_body(Body, Req) ->
Req#http_req{resp_body=Body}.
-spec set_resp_body_fun(resp_body_fun(), Req) -> Req when Req::req().
set_resp_body_fun(StreamFun, Req) when is_function(StreamFun) ->
Req#http_req{resp_body=StreamFun}.
%% If the body function crashes while writing the response body or writes
%% fewer bytes than declared the behaviour is undefined.
-spec set_resp_body_fun(non_neg_integer(), resp_body_fun(), Req)
-> Req when Req::req();
(chunked, resp_chunked_fun(), Req)
-> Req when Req::req().
set_resp_body_fun(StreamLen, StreamFun, Req)
when is_integer(StreamLen), is_function(StreamFun) ->
Req#http_req{resp_body={StreamLen, StreamFun}};
set_resp_body_fun(chunked, StreamFun, Req)
when is_function(StreamFun) ->
Req#http_req{resp_body={chunked, StreamFun}}.
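%% Illustrative sketch of a streaming body fun (assumptions: Path and Size are
%% bound by the caller, and the Transport module provides sendfile/2 as the
%% Ranch TCP/SSL transports do).
%%
%% SendFile = fun(Socket, Transport) ->
%% {ok, _} = Transport:sendfile(Socket, Path)
%% end,
%% Req2 = cowboy_req:set_resp_body_fun(Size, SendFile, Req).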
-spec has_resp_header(binary(), req()) -> boolean().
has_resp_header(Name, #http_req{resp_headers=RespHeaders}) ->
lists:keymember(Name, 1, RespHeaders).
-spec has_resp_body(req()) -> boolean().
has_resp_body(#http_req{resp_body=RespBody}) when is_function(RespBody) ->
true;
has_resp_body(#http_req{resp_body={chunked, _}}) ->
true;
has_resp_body(#http_req{resp_body={Length, _}}) ->
Length > 0;
has_resp_body(#http_req{resp_body=RespBody}) ->
iolist_size(RespBody) > 0.
-spec delete_resp_header(binary(), Req)
-> Req when Req::req().
delete_resp_header(Name, Req=#http_req{resp_headers=RespHeaders}) ->
RespHeaders2 = lists:keydelete(Name, 1, RespHeaders),
Req#http_req{resp_headers=RespHeaders2}.
-spec reply(cowboy:http_status(), Req) -> {ok, Req} when Req::req().
reply(Status, Req=#http_req{resp_body=Body}) ->
reply(Status, [], Body, Req).
-spec reply(cowboy:http_status(), cowboy:http_headers(), Req)
-> {ok, Req} when Req::req().
reply(Status, Headers, Req=#http_req{resp_body=Body}) ->
reply(Status, Headers, Body, Req).
-spec reply(cowboy:http_status(), cowboy:http_headers(),
iodata() | {non_neg_integer() | resp_body_fun()}, Req)
-> {ok, Req} when Req::req().
reply(Status, Headers, Body, Req=#http_req{
socket=Socket, transport=Transport,
version=Version, connection=Connection,
method=Method, resp_compress=Compress,
resp_state=RespState, resp_headers=RespHeaders})
when RespState =:= waiting; RespState =:= waiting_stream ->
HTTP11Headers = if
Transport =/= cowboy_spdy, Version =:= 'HTTP/1.0', Connection =:= keepalive ->
[{<<"connection">>, atom_to_connection(Connection)}];
Transport =/= cowboy_spdy, Version =:= 'HTTP/1.1', Connection =:= close ->
[{<<"connection">>, atom_to_connection(Connection)}];
true ->
[]
end,
Req3 = case Body of
BodyFun when is_function(BodyFun) ->
%% We stream the response body until we close the connection.
RespConn = close,
{RespType, Req2} = if
Transport =:= cowboy_spdy ->
response(Status, Headers, RespHeaders, [
{<<"date">>, cowboy_clock:rfc1123()},
{<<"server">>, <<"Cowboy">>}
], stream, Req);
true ->
response(Status, Headers, RespHeaders, [
{<<"connection">>, <<"close">>},
{<<"date">>, cowboy_clock:rfc1123()},
{<<"server">>, <<"Cowboy">>},
{<<"transfer-encoding">>, <<"identity">>}
], <<>>, Req)
end,
if RespType =/= hook, Method =/= <<"HEAD">> ->
BodyFun(Socket, Transport);
true -> ok
end,
Req2#http_req{connection=RespConn};
{chunked, BodyFun} ->
%% We stream the response body in chunks.
{RespType, Req2} = chunked_response(Status, Headers, Req),
if RespType =/= hook, Method =/= <<"HEAD">> ->
ChunkFun = fun(IoData) -> chunk(IoData, Req2) end,
BodyFun(ChunkFun),
%% Send the last chunk if chunked encoding was used.
if
Version =:= 'HTTP/1.0'; RespState =:= waiting_stream ->
Req2;
true ->
last_chunk(Req2)
end;
true -> Req2
end;
{ContentLength, BodyFun} ->
%% We stream the response body for ContentLength bytes.
RespConn = response_connection(Headers, Connection),
{RespType, Req2} = response(Status, Headers, RespHeaders, [
{<<"content-length">>, integer_to_list(ContentLength)},
{<<"date">>, cowboy_clock:rfc1123()},
{<<"server">>, <<"Cowboy">>}
|HTTP11Headers], stream, Req),
if RespType =/= hook, Method =/= <<"HEAD">> ->
BodyFun(Socket, Transport);
true -> ok
end,
Req2#http_req{connection=RespConn};
_ when Compress ->
RespConn = response_connection(Headers, Connection),
Req2 = reply_may_compress(Status, Headers, Body, Req,
RespHeaders, HTTP11Headers, Method),
Req2#http_req{connection=RespConn};
_ ->
RespConn = response_connection(Headers, Connection),
Req2 = reply_no_compress(Status, Headers, Body, Req,
RespHeaders, HTTP11Headers, Method, iolist_size(Body)),
Req2#http_req{connection=RespConn}
end,
{ok, Req3#http_req{resp_state=done, resp_headers=[], resp_body= <<>>}}.
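%% Illustrative sketch (header and body values are arbitrary): a plain-text
%% reply. reply/4 sends the response immediately and returns {ok, Req2}.
%%
%% {ok, Req2} = cowboy_req:reply(200,
%% [{<<"content-type">>, <<"text/plain">>}],
%% <<"Hello world!">>, Req).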
reply_may_compress(Status, Headers, Body, Req,
RespHeaders, HTTP11Headers, Method) ->
BodySize = iolist_size(Body),
case parse_header(<<"accept-encoding">>, Req) of
{ok, Encodings, Req2} ->
CanGzip = (BodySize > 300)
andalso (false =:= lists:keyfind(<<"content-encoding">>,
1, Headers))
andalso (false =:= lists:keyfind(<<"content-encoding">>,
1, RespHeaders))
andalso (false =:= lists:keyfind(<<"transfer-encoding">>,
1, Headers))
andalso (false =:= lists:keyfind(<<"transfer-encoding">>,
1, RespHeaders))
andalso (Encodings =/= undefined)
andalso (false =/= lists:keyfind(<<"gzip">>, 1, Encodings)),
case CanGzip of
true ->
GzBody = zlib:gzip(Body),
{_, Req3} = response(Status, Headers, RespHeaders, [
{<<"content-length">>, integer_to_list(byte_size(GzBody))},
{<<"content-encoding">>, <<"gzip">>},
{<<"date">>, cowboy_clock:rfc1123()},
{<<"server">>, <<"Cowboy">>}
|HTTP11Headers],
case Method of <<"HEAD">> -> <<>>; _ -> GzBody end,
Req2),
Req3;
false ->
reply_no_compress(Status, Headers, Body, Req,
RespHeaders, HTTP11Headers, Method, BodySize)
end;
{error, badarg} ->
reply_no_compress(Status, Headers, Body, Req,
RespHeaders, HTTP11Headers, Method, BodySize)
end.
reply_no_compress(Status, Headers, Body, Req,
RespHeaders, HTTP11Headers, Method, BodySize) ->
{_, Req2} = response(Status, Headers, RespHeaders, [
{<<"content-length">>, integer_to_list(BodySize)},
{<<"date">>, cowboy_clock:rfc1123()},
{<<"server">>, <<"Cowboy">>}
|HTTP11Headers],
case Method of <<"HEAD">> -> <<>>; _ -> Body end,
Req),
Req2.
-spec chunked_reply(cowboy:http_status(), Req) -> {ok, Req} when Req::req().
chunked_reply(Status, Req) ->
chunked_reply(Status, [], Req).
-spec chunked_reply(cowboy:http_status(), cowboy:http_headers(), Req)
-> {ok, Req} when Req::req().
chunked_reply(Status, Headers, Req) ->
{_, Req2} = chunked_response(Status, Headers, Req),
{ok, Req2}.
-spec chunk(iodata(), req()) -> ok | {error, atom()}.
chunk(_Data, #http_req{method= <<"HEAD">>}) ->
ok;
chunk(Data, #http_req{socket=Socket, transport=cowboy_spdy,
resp_state=chunks}) ->
cowboy_spdy:stream_data(Socket, Data);
chunk(Data, #http_req{socket=Socket, transport=Transport,
resp_state=stream}) ->
Transport:send(Socket, Data);
chunk(Data, #http_req{socket=Socket, transport=Transport,
resp_state=chunks}) ->
Transport:send(Socket, [integer_to_list(iolist_size(Data), 16),
<<"\r\n">>, Data, <<"\r\n">>]).
%% If ever made public, need to send nothing if HEAD.
-spec last_chunk(Req) -> Req when Req::req().
last_chunk(Req=#http_req{socket=Socket, transport=cowboy_spdy}) ->
_ = cowboy_spdy:stream_close(Socket),
Req#http_req{resp_state=done};
last_chunk(Req=#http_req{socket=Socket, transport=Transport}) ->
_ = Transport:send(Socket, <<"0\r\n\r\n">>),
Req#http_req{resp_state=done}.
-spec upgrade_reply(cowboy:http_status(), cowboy:http_headers(), Req)
-> {ok, Req} when Req::req().
upgrade_reply(Status, Headers, Req=#http_req{transport=Transport,
resp_state=waiting, resp_headers=RespHeaders})
when Transport =/= cowboy_spdy ->
{_, Req2} = response(Status, Headers, RespHeaders, [
{<<"connection">>, <<"Upgrade">>}
], <<>>, Req),
{ok, Req2#http_req{resp_state=done, resp_headers=[], resp_body= <<>>}}.
-spec continue(req()) -> ok | {error, atom()}.
continue(#http_req{socket=Socket, transport=Transport,
version=Version}) ->
HTTPVer = atom_to_binary(Version, latin1),
Transport:send(Socket,
<< HTTPVer/binary, " ", (status(100))/binary, "\r\n\r\n" >>).
%% Meant to be used internally for sending errors after crashes.
-spec maybe_reply([{module(), atom(), arity() | [term()], _}], req()) -> ok.
maybe_reply(Stacktrace, Req) ->
receive
{cowboy_req, resp_sent} -> ok
after 0 ->
_ = do_maybe_reply(Stacktrace, Req),
ok
end.
do_maybe_reply([
{cow_http_hd, _, _, _},
{cowboy_req, parse_header, _, _}|_], Req) ->
cowboy_req:reply(400, Req);
do_maybe_reply(_, Req) ->
cowboy_req:reply(500, Req).
-spec ensure_response(req(), cowboy:http_status()) -> ok.
%% The response has already been fully sent to the client.
ensure_response(#http_req{resp_state=done}, _) ->
ok;
%% No response has been sent but everything apparently went fine.
%% Reply with the status code found in the second argument.
ensure_response(Req=#http_req{resp_state=RespState}, Status)
when RespState =:= waiting; RespState =:= waiting_stream ->
_ = reply(Status, [], [], Req),
ok;
%% Terminate the chunked body for HTTP/1.1 only.
ensure_response(#http_req{method= <<"HEAD">>}, _) ->
ok;
ensure_response(Req=#http_req{resp_state=chunks}, _) ->
_ = last_chunk(Req),
ok;
ensure_response(#http_req{}, _) ->
ok.
%% Private setter/getter API.
-spec append_buffer(binary(), Req) -> Req when Req::req().
append_buffer(Suffix, Req=#http_req{buffer=Buffer}) ->
Req#http_req{buffer= << Buffer/binary, Suffix/binary >>}.
-spec get(atom(), req()) -> any(); ([atom()], req()) -> any().
get(List, Req) when is_list(List) ->
[g(Atom, Req) || Atom <- List];
get(Atom, Req) when is_atom(Atom) ->
g(Atom, Req).
g(bindings, #http_req{bindings=Ret}) -> Ret;
g(body_state, #http_req{body_state=Ret}) -> Ret;
g(buffer, #http_req{buffer=Ret}) -> Ret;
g(connection, #http_req{connection=Ret}) -> Ret;
g(cookies, #http_req{cookies=Ret}) -> Ret;
g(headers, #http_req{headers=Ret}) -> Ret;
g(host, #http_req{host=Ret}) -> Ret;
g(host_info, #http_req{host_info=Ret}) -> Ret;
g(meta, #http_req{meta=Ret}) -> Ret;
g(method, #http_req{method=Ret}) -> Ret;
g(multipart, #http_req{multipart=Ret}) -> Ret;
g(onresponse, #http_req{onresponse=Ret}) -> Ret;
g(p_headers, #http_req{p_headers=Ret}) -> Ret;
g(path, #http_req{path=Ret}) -> Ret;
g(path_info, #http_req{path_info=Ret}) -> Ret;
g(peer, #http_req{peer=Ret}) -> Ret;
g(pid, #http_req{pid=Ret}) -> Ret;
g(port, #http_req{port=Ret}) -> Ret;
g(qs, #http_req{qs=Ret}) -> Ret;
g(qs_vals, #http_req{qs_vals=Ret}) -> Ret;
g(resp_body, #http_req{resp_body=Ret}) -> Ret;
g(resp_compress, #http_req{resp_compress=Ret}) -> Ret;
g(resp_headers, #http_req{resp_headers=Ret}) -> Ret;
g(resp_state, #http_req{resp_state=Ret}) -> Ret;
g(socket, #http_req{socket=Ret}) -> Ret;
g(transport, #http_req{transport=Ret}) -> Ret;
g(version, #http_req{version=Ret}) -> Ret.
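%% Illustrative sketch of the private getter (returned values depend on the
%% request; Req assumed bound): a single atom returns one field, a list
%% returns the requested fields in order.
%%
%% Method = cowboy_req:get(method, Req),
%% [Path, Qs] = cowboy_req:get([path, qs], Req).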
-spec set([{atom(), any()}], Req) -> Req when Req::req().
set([], Req) -> Req;
set([{bindings, Val}|Tail], Req) -> set(Tail, Req#http_req{bindings=Val});
set([{body_state, Val}|Tail], Req) -> set(Tail, Req#http_req{body_state=Val});
set([{buffer, Val}|Tail], Req) -> set(Tail, Req#http_req{buffer=Val});
set([{connection, Val}|Tail], Req) -> set(Tail, Req#http_req{connection=Val});
set([{cookies, Val}|Tail], Req) -> set(Tail, Req#http_req{cookies=Val});
set([{headers, Val}|Tail], Req) -> set(Tail, Req#http_req{headers=Val});
set([{host, Val}|Tail], Req) -> set(Tail, Req#http_req{host=Val});
set([{host_info, Val}|Tail], Req) -> set(Tail, Req#http_req{host_info=Val});
set([{meta, Val}|Tail], Req) -> set(Tail, Req#http_req{meta=Val});
set([{method, Val}|Tail], Req) -> set(Tail, Req#http_req{method=Val});
set([{multipart, Val}|Tail], Req) -> set(Tail, Req#http_req{multipart=Val});
set([{onresponse, Val}|Tail], Req) -> set(Tail, Req#http_req{onresponse=Val});
set([{p_headers, Val}|Tail], Req) -> set(Tail, Req#http_req{p_headers=Val});
set([{path, Val}|Tail], Req) -> set(Tail, Req#http_req{path=Val});
set([{path_info, Val}|Tail], Req) -> set(Tail, Req#http_req{path_info=Val});
set([{peer, Val}|Tail], Req) -> set(Tail, Req#http_req{peer=Val});
set([{pid, Val}|Tail], Req) -> set(Tail, Req#http_req{pid=Val});
set([{port, Val}|Tail], Req) -> set(Tail, Req#http_req{port=Val});
set([{qs, Val}|Tail], Req) -> set(Tail, Req#http_req{qs=Val});
set([{qs_vals, Val}|Tail], Req) -> set(Tail, Req#http_req{qs_vals=Val});
set([{resp_body, Val}|Tail], Req) -> set(Tail, Req#http_req{resp_body=Val});
set([{resp_headers, Val}|Tail], Req) -> set(Tail, Req#http_req{resp_headers=Val});
set([{resp_state, Val}|Tail], Req) -> set(Tail, Req#http_req{resp_state=Val});
set([{socket, Val}|Tail], Req) -> set(Tail, Req#http_req{socket=Val});
set([{transport, Val}|Tail], Req) -> set(Tail, Req#http_req{transport=Val});
set([{version, Val}|Tail], Req) -> set(Tail, Req#http_req{version=Val}).
-spec set_bindings(cowboy_router:tokens(), cowboy_router:tokens(),
cowboy_router:bindings(), Req) -> Req when Req::req().
set_bindings(HostInfo, PathInfo, Bindings, Req) ->
Req#http_req{host_info=HostInfo, path_info=PathInfo,
bindings=Bindings}.
%% Misc API.
-spec compact(Req) -> Req when Req::req().
compact(Req) ->
Req#http_req{host_info=undefined,
path_info=undefined, qs_vals=undefined,
bindings=undefined, headers=[],
p_headers=[], cookies=[]}.
-spec lock(Req) -> Req when Req::req().
lock(Req) ->
Req#http_req{resp_state=locked}.
-spec to_list(req()) -> [{atom(), any()}].
to_list(Req) ->
lists:zip(record_info(fields, http_req), tl(tuple_to_list(Req))).
%% Internal.
-spec chunked_response(cowboy:http_status(), cowboy:http_headers(), Req) ->
{normal | hook, Req} when Req::req().
chunked_response(Status, Headers, Req=#http_req{
transport=cowboy_spdy, resp_state=waiting,
resp_headers=RespHeaders}) ->
{RespType, Req2} = response(Status, Headers, RespHeaders, [
{<<"date">>, cowboy_clock:rfc1123()},
{<<"server">>, <<"Cowboy">>}
], stream, Req),
{RespType, Req2#http_req{resp_state=chunks,
resp_headers=[], resp_body= <<>>}};
chunked_response(Status, Headers, Req=#http_req{
version=Version, connection=Connection,
resp_state=RespState, resp_headers=RespHeaders})
when RespState =:= waiting; RespState =:= waiting_stream ->
RespConn = response_connection(Headers, Connection),
HTTP11Headers = if
Version =:= 'HTTP/1.0', Connection =:= keepalive ->
[{<<"connection">>, atom_to_connection(Connection)}];
Version =:= 'HTTP/1.0' -> [];
true ->
MaybeTE = if
RespState =:= waiting_stream -> [];
true -> [{<<"transfer-encoding">>, <<"chunked">>}]
end,
if
Connection =:= close ->
[{<<"connection">>, atom_to_connection(Connection)}|MaybeTE];
true ->
MaybeTE
end
end,
RespState2 = if
Version =:= 'HTTP/1.1', RespState =:= 'waiting' -> chunks;
true -> stream
end,
{RespType, Req2} = response(Status, Headers, RespHeaders, [
{<<"date">>, cowboy_clock:rfc1123()},
{<<"server">>, <<"Cowboy">>}
|HTTP11Headers], <<>>, Req),
{RespType, Req2#http_req{connection=RespConn, resp_state=RespState2,
resp_headers=[], resp_body= <<>>}}.
-spec response(cowboy:http_status(), cowboy:http_headers(),
cowboy:http_headers(), cowboy:http_headers(), stream | iodata(), Req)
-> {normal | hook, Req} when Req::req().
response(Status, Headers, RespHeaders, DefaultHeaders, Body, Req=#http_req{
socket=Socket, transport=Transport, version=Version,
pid=ReqPid, onresponse=OnResponse}) ->
FullHeaders = case OnResponse of
already_called -> Headers;
_ -> response_merge_headers(Headers, RespHeaders, DefaultHeaders)
end,
Body2 = case Body of stream -> <<>>; _ -> Body end,
{Status2, FullHeaders2, Req2} = case OnResponse of
already_called -> {Status, FullHeaders, Req};
undefined -> {Status, FullHeaders, Req};
OnResponse ->
case OnResponse(Status, FullHeaders, Body2,
%% Don't call 'onresponse' from the hook itself.
Req#http_req{resp_headers=[], resp_body= <<>>,
onresponse=already_called}) of
StHdReq = {_, _, _} ->
StHdReq;
Req1 ->
{Status, FullHeaders, Req1}
end
end,
ReplyType = case Req2#http_req.resp_state of
waiting when Transport =:= cowboy_spdy, Body =:= stream ->
cowboy_spdy:stream_reply(Socket, status(Status2), FullHeaders2),
ReqPid ! {?MODULE, resp_sent},
normal;
waiting when Transport =:= cowboy_spdy ->
cowboy_spdy:reply(Socket, status(Status2), FullHeaders2, Body),
ReqPid ! {?MODULE, resp_sent},
normal;
RespState when RespState =:= waiting; RespState =:= waiting_stream ->
HTTPVer = atom_to_binary(Version, latin1),
StatusLine = << HTTPVer/binary, " ",
(status(Status2))/binary, "\r\n" >>,
HeaderLines = [[Key, <<": ">>, Value, <<"\r\n">>]
|| {Key, Value} <- FullHeaders2],
Transport:send(Socket, [StatusLine, HeaderLines, <<"\r\n">>, Body2]),
ReqPid ! {?MODULE, resp_sent},
normal;
_ ->
hook
end,
{ReplyType, Req2}.
-spec response_connection(cowboy:http_headers(), keepalive | close)
-> keepalive | close.
response_connection([], Connection) ->
Connection;
response_connection([{Name, Value}|Tail], Connection) ->
case Name of
<<"connection">> ->
Tokens = cow_http_hd:parse_connection(Value),
connection_to_atom(Tokens);
_ ->
response_connection(Tail, Connection)
end.
-spec response_merge_headers(cowboy:http_headers(), cowboy:http_headers(),
cowboy:http_headers()) -> cowboy:http_headers().
response_merge_headers(Headers, RespHeaders, DefaultHeaders) ->
Headers2 = [{Key, Value} || {Key, Value} <- Headers],
merge_headers(
merge_headers(Headers2, RespHeaders),
DefaultHeaders).
-spec merge_headers(cowboy:http_headers(), cowboy:http_headers())
-> cowboy:http_headers().
%% Merge headers by prepending the tuples in the second list to the
%% first list. It also handles Set-Cookie properly, which supports
%% duplicated entries. Note that, while RFC 2109 does allow more than
%% one cookie to be set per Set-Cookie header, we follow the common
%% behaviour of web servers and applications, which is to return one
%% distinct Set-Cookie header per cookie, to avoid issues with
%% clients/browsers that may not support multiple cookies in a single header.
merge_headers(Headers, []) ->
Headers;
merge_headers(Headers, [{<<"set-cookie">>, Value}|Tail]) ->
merge_headers([{<<"set-cookie">>, Value}|Headers], Tail);
merge_headers(Headers, [{Name, Value}|Tail]) ->
Headers2 = case lists:keymember(Name, 1, Headers) of
true -> Headers;
false -> [{Name, Value}|Headers]
end,
merge_headers(Headers2, Tail).
-spec atom_to_connection(keepalive) -> <<_:80>>;
(close) -> <<_:40>>.
atom_to_connection(keepalive) ->
<<"keep-alive">>;
atom_to_connection(close) ->
<<"close">>.
%% We don't match on "keep-alive" since it is the default value.
-spec connection_to_atom([binary()]) -> keepalive | close.
connection_to_atom([]) ->
keepalive;
connection_to_atom([<<"close">>|_]) ->
close;
connection_to_atom([_|Tail]) ->
connection_to_atom(Tail).
-spec status(cowboy:http_status()) -> binary().
status(100) -> <<"100 Continue">>;
status(101) -> <<"101 Switching Protocols">>;
status(102) -> <<"102 Processing">>;
status(200) -> <<"200 OK">>;
status(201) -> <<"201 Created">>;
status(202) -> <<"202 Accepted">>;
status(203) -> <<"203 Non-Authoritative Information">>;
status(204) -> <<"204 No Content">>;
status(205) -> <<"205 Reset Content">>;
status(206) -> <<"206 Partial Content">>;
status(207) -> <<"207 Multi-Status">>;
status(226) -> <<"226 IM Used">>;
status(300) -> <<"300 Multiple Choices">>;
status(301) -> <<"301 Moved Permanently">>;
status(302) -> <<"302 Found">>;
status(303) -> <<"303 See Other">>;
status(304) -> <<"304 Not Modified">>;
status(305) -> <<"305 Use Proxy">>;
status(306) -> <<"306 Switch Proxy">>;
status(307) -> <<"307 Temporary Redirect">>;
status(400) -> <<"400 Bad Request">>;
status(401) -> <<"401 Unauthorized">>;
status(402) -> <<"402 Payment Required">>;
status(403) -> <<"403 Forbidden">>;
status(404) -> <<"404 Not Found">>;
status(405) -> <<"405 Method Not Allowed">>;
status(406) -> <<"406 Not Acceptable">>;
status(407) -> <<"407 Proxy Authentication Required">>;
status(408) -> <<"408 Request Timeout">>;
status(409) -> <<"409 Conflict">>;
status(410) -> <<"410 Gone">>;
status(411) -> <<"411 Length Required">>;
status(412) -> <<"412 Precondition Failed">>;
status(413) -> <<"413 Request Entity Too Large">>;
status(414) -> <<"414 Request-URI Too Long">>;
status(415) -> <<"415 Unsupported Media Type">>;
status(416) -> <<"416 Requested Range Not Satisfiable">>;
status(417) -> <<"417 Expectation Failed">>;
status(418) -> <<"418 I'm a teapot">>;
status(422) -> <<"422 Unprocessable Entity">>;
status(423) -> <<"423 Locked">>;
status(424) -> <<"424 Failed Dependency">>;
status(425) -> <<"425 Unordered Collection">>;
status(426) -> <<"426 Upgrade Required">>;
status(428) -> <<"428 Precondition Required">>;
status(429) -> <<"429 Too Many Requests">>;
status(431) -> <<"431 Request Header Fields Too Large">>;
status(500) -> <<"500 Internal Server Error">>;
status(501) -> <<"501 Not Implemented">>;
status(502) -> <<"502 Bad Gateway">>;
status(503) -> <<"503 Service Unavailable">>;
status(504) -> <<"504 Gateway Timeout">>;
status(505) -> <<"505 HTTP Version Not Supported">>;
status(506) -> <<"506 Variant Also Negotiates">>;
status(507) -> <<"507 Insufficient Storage">>;
status(510) -> <<"510 Not Extended">>;
status(511) -> <<"511 Network Authentication Required">>;
status(B) when is_binary(B) -> B.
%% Tests.
-ifdef(TEST).
url_test() ->
{undefined, _} =
url(#http_req{transport=ranch_tcp, host= <<>>, port= undefined,
path= <<>>, qs= <<>>, pid=self()}),
{<<"">>, _ } =
url(#http_req{transport=ranch_tcp, host= <<"localhost">>, port=80,
path= <<"/path">>, qs= <<>>, pid=self()}),
{<<":443/path">>, _} =
url(#http_req{transport=ranch_tcp, host= <<"localhost">>, port=443,
path= <<"/path">>, qs= <<>>, pid=self()}),
{<<":8080/path">>, _} =
url(#http_req{transport=ranch_tcp, host= <<"localhost">>, port=8080,
path= <<"/path">>, qs= <<>>, pid=self()}),
{<<":8080/path?dummy=2785">>, _} =
url(#http_req{transport=ranch_tcp, host= <<"localhost">>, port=8080,
path= <<"/path">>, qs= <<"dummy=2785">>, pid=self()}),
{<<"">>, _} =
url(#http_req{transport=ranch_ssl, host= <<"localhost">>, port=443,
path= <<"/path">>, qs= <<>>, pid=self()}),
{<<":8443/path">>, _} =
url(#http_req{transport=ranch_ssl, host= <<"localhost">>, port=8443,
path= <<"/path">>, qs= <<>>, pid=self()}),
{<<":8443/path?dummy=2785">>, _} =
url(#http_req{transport=ranch_ssl, host= <<"localhost">>, port=8443,
path= <<"/path">>, qs= <<"dummy=2785">>, pid=self()}),
ok.
connection_to_atom_test_() ->
Tests = [
{[<<"close">>], close},
{[<<"keep-alive">>], keepalive},
{[<<"keep-alive">>, <<"upgrade">>], keepalive}
],
[{lists:flatten(io_lib:format("~p", [T])),
fun() -> R = connection_to_atom(T) end} || {T, R} <- Tests].
merge_headers_test_() ->
Tests = [
{[{<<"content-length">>,<<"13">>},{<<"server">>,<<"Cowboy">>}],
[{<<"set-cookie">>,<<"foo=bar">>},{<<"content-length">>,<<"11">>}],
[{<<"set-cookie">>,<<"foo=bar">>},
{<<"content-length">>,<<"13">>},
{<<"server">>,<<"Cowboy">>}]},
{[{<<"content-length">>,<<"13">>},{<<"server">>,<<"Cowboy">>}],
[{<<"set-cookie">>,<<"foo=bar">>},{<<"set-cookie">>,<<"bar=baz">>}],
[{<<"set-cookie">>,<<"bar=baz">>},
{<<"set-cookie">>,<<"foo=bar">>},
{<<"content-length">>,<<"13">>},
{<<"server">>,<<"Cowboy">>}]}
],
[fun() -> Res = merge_headers(L,R) end || {L, R, Res} <- Tests].
-endif.
| null | https://raw.githubusercontent.com/MyDataFlow/ttalk-server/07a60d5d74cd86aedd1f19c922d9d3abf2ebf28d/deps/cowboy/src/cowboy_req.erl | erlang |
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
Request API.
Request body API.
Multipart API.
Response API.
Private setter/getter API.
Misc API.
Transport.
Request.
Request body.
Response.
Functions.
Request API.
keepalive
The URL includes the scheme, host and port only.
The URL includes the scheme, host, port, path and query string.
@todo Product version.
Flash player incorrectly sends an empty Cookie header.
Request Body API.
and the body hasn't been read at the time of the call.
Error.
transferred data, and then another is applied to the actual content.
Transfer encoding is generally used for chunked bodies. The decoding
function uses a state to keep track of how much it has read, which is
also initialized through this function.
@todo Handle chunked after-the-facts headers.
@todo Depending on the length returned we might want to 0 or +5 it.
Multipart API.
Ignore epilogue.
Response API.
The cookie name cannot contain any of the following characters:
=,;\s\t\r\n\013\014
The cookie value cannot contain any of the following characters:
,; \t\r\n\013\014
If the body function crashes while writing the response body or writes
fewer bytes than declared the behaviour is undefined.
We stream the response body until we close the connection.
We stream the response body in chunks.
Send the last chunk if chunked encoding was used.
If ever made public, need to send nothing if HEAD.
Meant to be used internally for sending errors after crashes.
The response has already been fully sent to the client.
No response has been sent but everything apparently went fine.
Terminate the chunked body for HTTP/1.1 only.
Private setter/getter API.
Misc API.
Don't call 'onresponse' from the hook itself.
duplicated entries. Notice that, while the RFC2109 does allow more
than one cookie to be set per Set-Cookie header, we are following
the implementation of common web servers and applications which
return many distinct headers per each Set-Cookie entry to avoid
issues with clients/browser which may not support it.
We don't match on "keep-alive" since it is the default value.
Tests. | Copyright ( c ) 2011 - 2014 , < >
Copyright ( c ) 2011 , < >
THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
-module(cowboy_req).
-export([new/14]).
-export([method/1]).
-export([version/1]).
-export([peer/1]).
-export([host/1]).
-export([host_info/1]).
-export([port/1]).
-export([path/1]).
-export([path_info/1]).
-export([qs/1]).
-export([qs_val/2]).
-export([qs_val/3]).
-export([qs_vals/1]).
-export([host_url/1]).
-export([url/1]).
-export([binding/2]).
-export([binding/3]).
-export([bindings/1]).
-export([header/2]).
-export([header/3]).
-export([headers/1]).
-export([parse_header/2]).
-export([parse_header/3]).
-export([cookie/2]).
-export([cookie/3]).
-export([cookies/1]).
-export([meta/2]).
-export([meta/3]).
-export([set_meta/3]).
-export([has_body/1]).
-export([body_length/1]).
-export([body/1]).
-export([body/2]).
-export([body_qs/1]).
-export([body_qs/2]).
-export([part/1]).
-export([part/2]).
-export([part_body/1]).
-export([part_body/2]).
-export([set_resp_cookie/4]).
-export([set_resp_header/3]).
-export([set_resp_body/2]).
-export([set_resp_body_fun/2]).
-export([set_resp_body_fun/3]).
-export([has_resp_header/2]).
-export([has_resp_body/1]).
-export([delete_resp_header/2]).
-export([reply/2]).
-export([reply/3]).
-export([reply/4]).
-export([chunked_reply/2]).
-export([chunked_reply/3]).
-export([chunk/2]).
-export([upgrade_reply/3]).
-export([continue/1]).
-export([maybe_reply/2]).
-export([ensure_response/2]).
-export([append_buffer/2]).
-export([get/2]).
-export([set/2]).
-export([set_bindings/4]).
-export([compact/1]).
-export([lock/1]).
-export([to_list/1]).
-type cookie_opts() :: cow_cookie:cookie_opts().
-export_type([cookie_opts/0]).
-type content_decode_fun() :: fun((binary())
-> {ok, binary()}
| {error, atom()}).
-type transfer_decode_fun() :: fun((binary(), any())
-> cow_http_te:decode_ret()).
-type body_opts() :: [{continue, boolean()}
| {length, non_neg_integer()}
| {read_length, non_neg_integer()}
| {read_timeout, timeout()}
| {transfer_decode, transfer_decode_fun(), any()}
| {content_decode, content_decode_fun()}].
-export_type([body_opts/0]).
-type resp_body_fun() :: fun((any(), module()) -> ok).
-type send_chunk_fun() :: fun((iodata()) -> ok | {error, atom()}).
-type resp_chunked_fun() :: fun((send_chunk_fun()) -> ok).
-record(http_req, {
socket = undefined :: any(),
transport = undefined :: undefined | module(),
connection = keepalive :: keepalive | close,
pid = undefined :: pid(),
method = <<"GET">> :: binary(),
version = 'HTTP/1.1' :: cowboy:http_version(),
peer = undefined :: undefined | {inet:ip_address(), inet:port_number()},
host = undefined :: undefined | binary(),
host_info = undefined :: undefined | cowboy_router:tokens(),
port = undefined :: undefined | inet:port_number(),
path = undefined :: binary(),
path_info = undefined :: undefined | cowboy_router:tokens(),
qs = undefined :: binary(),
qs_vals = undefined :: undefined | list({binary(), binary() | true}),
bindings = undefined :: undefined | cowboy_router:bindings(),
headers = [] :: cowboy:http_headers(),
p_headers = [] :: [any()],
cookies = undefined :: undefined | [{binary(), binary()}],
meta = [] :: [{atom(), any()}],
body_state = waiting :: waiting | done | {stream, non_neg_integer(),
transfer_decode_fun(), any(), content_decode_fun()},
buffer = <<>> :: binary(),
multipart = undefined :: undefined | {binary(), binary()},
resp_compress = false :: boolean(),
resp_state = waiting :: locked | waiting | waiting_stream
| chunks | stream | done,
resp_headers = [] :: cowboy:http_headers(),
resp_body = <<>> :: iodata() | resp_body_fun()
| {non_neg_integer(), resp_body_fun()}
| {chunked, resp_chunked_fun()},
onresponse = undefined :: undefined | already_called
| cowboy:onresponse_fun()
}).
-opaque req() :: #http_req{}.
-export_type([req/0]).
-spec new(any(), module(),
undefined | {inet:ip_address(), inet:port_number()},
binary(), binary(), binary(),
cowboy:http_version(), cowboy:http_headers(), binary(),
inet:port_number() | undefined, binary(), boolean(), boolean(),
undefined | cowboy:onresponse_fun())
-> req().
new(Socket, Transport, Peer, Method, Path, Query,
Version, Headers, Host, Port, Buffer, CanKeepalive,
Compress, OnResponse) ->
Req = #http_req{socket=Socket, transport=Transport, pid=self(), peer=Peer,
method=Method, path=Path, qs=Query, version=Version,
headers=Headers, host=Host, port=Port, buffer=Buffer,
resp_compress=Compress, onresponse=OnResponse},
case CanKeepalive of
false ->
Req#http_req{connection=close};
true ->
case lists:keyfind(<<"connection">>, 1, Headers) of
false ->
case Version of
'HTTP/1.0' -> Req#http_req{connection=close}
end;
{_, ConnectionHeader} ->
Tokens = cow_http_hd:parse_connection(ConnectionHeader),
Connection = connection_to_atom(Tokens),
Req#http_req{connection=Connection,
p_headers=[{<<"connection">>, Tokens}]}
end
end.
-spec method(Req) -> {binary(), Req} when Req::req().
method(Req) ->
{Req#http_req.method, Req}.
-spec version(Req) -> {cowboy:http_version(), Req} when Req::req().
version(Req) ->
{Req#http_req.version, Req}.
-spec peer(Req)
-> {{inet:ip_address(), inet:port_number()}, Req}
when Req::req().
peer(Req) ->
{Req#http_req.peer, Req}.
-spec host(Req) -> {binary(), Req} when Req::req().
host(Req) ->
{Req#http_req.host, Req}.
-spec host_info(Req)
-> {cowboy_router:tokens() | undefined, Req} when Req::req().
host_info(Req) ->
{Req#http_req.host_info, Req}.
-spec port(Req) -> {inet:port_number(), Req} when Req::req().
port(Req) ->
{Req#http_req.port, Req}.
-spec path(Req) -> {binary(), Req} when Req::req().
path(Req) ->
{Req#http_req.path, Req}.
-spec path_info(Req)
-> {cowboy_router:tokens() | undefined, Req} when Req::req().
path_info(Req) ->
{Req#http_req.path_info, Req}.
-spec qs(Req) -> {binary(), Req} when Req::req().
qs(Req) ->
{Req#http_req.qs, Req}.
-spec qs_val(binary(), Req)
-> {binary() | true | undefined, Req} when Req::req().
qs_val(Name, Req) when is_binary(Name) ->
qs_val(Name, Req, undefined).
-spec qs_val(binary(), Req, Default)
-> {binary() | true | Default, Req} when Req::req(), Default::any().
qs_val(Name, Req=#http_req{qs=RawQs, qs_vals=undefined}, Default)
when is_binary(Name) ->
QsVals = cow_qs:parse_qs(RawQs),
qs_val(Name, Req#http_req{qs_vals=QsVals}, Default);
qs_val(Name, Req, Default) ->
case lists:keyfind(Name, 1, Req#http_req.qs_vals) of
{Name, Value} -> {Value, Req};
false -> {Default, Req}
end.
-spec qs_vals(Req) -> {list({binary(), binary() | true}), Req} when Req::req().
qs_vals(Req=#http_req{qs=RawQs, qs_vals=undefined}) ->
QsVals = cow_qs:parse_qs(RawQs),
qs_vals(Req#http_req{qs_vals=QsVals});
qs_vals(Req=#http_req{qs_vals=QsVals}) ->
{QsVals, Req}.
-spec host_url(Req) -> {undefined | binary(), Req} when Req::req().
host_url(Req=#http_req{port=undefined}) ->
{undefined, Req};
host_url(Req=#http_req{transport=Transport, host=Host, port=Port}) ->
TransportName = Transport:name(),
Secure = case TransportName of
ssl -> <<"s">>;
_ -> <<>>
end,
PortBin = case {TransportName, Port} of
{ssl, 443} -> <<>>;
{tcp, 80} -> <<>>;
_ -> << ":", (integer_to_binary(Port))/binary >>
end,
{<< "http", Secure/binary, "://", Host/binary, PortBin/binary >>, Req}.
-spec url(Req) -> {undefined | binary(), Req} when Req::req().
url(Req=#http_req{}) ->
{HostURL, Req2} = host_url(Req),
url(HostURL, Req2).
url(undefined, Req=#http_req{}) ->
{undefined, Req};
url(HostURL, Req=#http_req{path=Path, qs=QS}) ->
QS2 = case QS of
<<>> -> <<>>;
_ -> << "?", QS/binary >>
end,
{<< HostURL/binary, Path/binary, QS2/binary >>, Req}.
-spec binding(atom(), Req) -> {any() | undefined, Req} when Req::req().
binding(Name, Req) when is_atom(Name) ->
binding(Name, Req, undefined).
-spec binding(atom(), Req, Default)
-> {any() | Default, Req} when Req::req(), Default::any().
binding(Name, Req, Default) when is_atom(Name) ->
case lists:keyfind(Name, 1, Req#http_req.bindings) of
{Name, Value} -> {Value, Req};
false -> {Default, Req}
end.
-spec bindings(Req) -> {[{atom(), any()}], Req} when Req::req().
bindings(Req) ->
{Req#http_req.bindings, Req}.
-spec header(binary(), Req)
-> {binary() | undefined, Req} when Req::req().
header(Name, Req) ->
header(Name, Req, undefined).
-spec header(binary(), Req, Default)
-> {binary() | Default, Req} when Req::req(), Default::any().
header(Name, Req, Default) ->
case lists:keyfind(Name, 1, Req#http_req.headers) of
{Name, Value} -> {Value, Req};
false -> {Default, Req}
end.
-spec headers(Req) -> {cowboy:http_headers(), Req} when Req::req().
headers(Req) ->
{Req#http_req.headers, Req}.
-spec parse_header(binary(), Req)
-> {ok, any(), Req} | {undefined, binary(), Req}
| {error, badarg} when Req::req().
parse_header(Name, Req=#http_req{p_headers=PHeaders}) ->
case lists:keyfind(Name, 1, PHeaders) of
false -> parse_header(Name, Req, parse_header_default(Name));
{Name, Value} -> {ok, Value, Req}
end.
-spec parse_header_default(binary()) -> any().
parse_header_default(<<"transfer-encoding">>) -> [<<"identity">>];
parse_header_default(_Name) -> undefined.
-spec parse_header(binary(), Req, any())
-> {ok, any(), Req} | {undefined, binary(), Req}
| {error, badarg} when Req::req().
parse_header(Name = <<"accept">>, Req, Default) ->
parse_header(Name, Req, Default,
fun (Value) ->
cowboy_http:list(Value, fun cowboy_http:media_range/2)
end);
parse_header(Name = <<"accept-charset">>, Req, Default) ->
parse_header(Name, Req, Default,
fun (Value) ->
cowboy_http:nonempty_list(Value, fun cowboy_http:conneg/2)
end);
parse_header(Name = <<"accept-encoding">>, Req, Default) ->
parse_header(Name, Req, Default,
fun (Value) ->
cowboy_http:list(Value, fun cowboy_http:conneg/2)
end);
parse_header(Name = <<"accept-language">>, Req, Default) ->
parse_header(Name, Req, Default,
fun (Value) ->
cowboy_http:nonempty_list(Value, fun cowboy_http:language_range/2)
end);
parse_header(Name = <<"authorization">>, Req, Default) ->
parse_header(Name, Req, Default,
fun (Value) ->
cowboy_http:token_ci(Value, fun cowboy_http:authorization/2)
end);
parse_header(Name = <<"content-length">>, Req, Default) ->
parse_header(Name, Req, Default, fun cow_http_hd:parse_content_length/1);
parse_header(Name = <<"content-type">>, Req, Default) ->
parse_header(Name, Req, Default, fun cowboy_http:content_type/1);
parse_header(Name = <<"cookie">>, Req, Default) ->
parse_header(Name, Req, Default, fun cow_cookie:parse_cookie/1);
parse_header(Name = <<"expect">>, Req, Default) ->
parse_header(Name, Req, Default,
fun (Value) ->
cowboy_http:nonempty_list(Value, fun cowboy_http:expectation/2)
end);
parse_header(Name, Req, Default)
when Name =:= <<"if-match">>;
Name =:= <<"if-none-match">> ->
parse_header(Name, Req, Default, fun cowboy_http:entity_tag_match/1);
parse_header(Name, Req, Default)
when Name =:= <<"if-modified-since">>;
Name =:= <<"if-unmodified-since">> ->
parse_header(Name, Req, Default, fun cowboy_http:http_date/1);
parse_header(Name = <<"range">>, Req, Default) ->
parse_header(Name, Req, Default, fun cowboy_http:range/1);
parse_header(Name, Req, Default)
when Name =:= <<"sec-websocket-protocol">>;
Name =:= <<"x-forwarded-for">> ->
parse_header(Name, Req, Default,
fun (Value) ->
cowboy_http:nonempty_list(Value, fun cowboy_http:token/2)
end);
parse_header(Name = <<"transfer-encoding">>, Req, Default) ->
parse_header(Name, Req, Default, fun cow_http_hd:parse_transfer_encoding/1);
parse_header(Name = <<"upgrade">>, Req, Default) ->
parse_header(Name, Req, Default,
fun (Value) ->
cowboy_http:nonempty_list(Value, fun cowboy_http:token_ci/2)
end);
parse_header(Name = <<"sec-websocket-extensions">>, Req, Default) ->
parse_header(Name, Req, Default, fun cowboy_http:parameterized_tokens/1);
parse_header(Name, Req, Default) ->
{Value, Req2} = header(Name, Req, Default),
{undefined, Value, Req2}.
parse_header(Name, Req=#http_req{p_headers=PHeaders}, Default, Fun) ->
case header(Name, Req) of
{undefined, Req2} ->
{ok, Default, Req2#http_req{p_headers=[{Name, Default}|PHeaders]}};
{Value, Req2} ->
case Fun(Value) of
{error, badarg} ->
{error, badarg};
P ->
{ok, P, Req2#http_req{p_headers=[{Name, P}|PHeaders]}}
end
end.
-spec cookie(binary(), Req)
-> {binary() | undefined, Req} when Req::req().
cookie(Name, Req) when is_binary(Name) ->
cookie(Name, Req, undefined).
-spec cookie(binary(), Req, Default)
-> {binary() | Default, Req} when Req::req(), Default::any().
cookie(Name, Req=#http_req{cookies=undefined}, Default) when is_binary(Name) ->
case parse_header(<<"cookie">>, Req) of
{ok, undefined, Req2} ->
{Default, Req2#http_req{cookies=[]}};
{ok, Cookies, Req2} ->
cookie(Name, Req2#http_req{cookies=Cookies}, Default)
end;
cookie(Name, Req, Default) ->
case lists:keyfind(Name, 1, Req#http_req.cookies) of
{Name, Value} -> {Value, Req};
false -> {Default, Req}
end.
-spec cookies(Req) -> {list({binary(), binary()}), Req} when Req::req().
cookies(Req=#http_req{cookies=undefined}) ->
case parse_header(<<"cookie">>, Req) of
{ok, undefined, Req2} ->
{[], Req2#http_req{cookies=[]}};
{ok, Cookies, Req2} ->
cookies(Req2#http_req{cookies=Cookies});
{error, badarg} ->
{[], Req#http_req{cookies=[]}}
end;
cookies(Req=#http_req{cookies=Cookies}) ->
{Cookies, Req}.
-spec meta(atom(), Req) -> {any() | undefined, Req} when Req::req().
meta(Name, Req) ->
meta(Name, Req, undefined).
-spec meta(atom(), Req, any()) -> {any(), Req} when Req::req().
meta(Name, Req, Default) ->
case lists:keyfind(Name, 1, Req#http_req.meta) of
{Name, Value} -> {Value, Req};
false -> {Default, Req}
end.
-spec set_meta(atom(), any(), Req) -> Req when Req::req().
set_meta(Name, Value, Req=#http_req{meta=Meta}) ->
Req#http_req{meta=lists:keystore(Name, 1, Meta, {Name, Value})}.
-spec has_body(req()) -> boolean().
has_body(Req) ->
case lists:keyfind(<<"content-length">>, 1, Req#http_req.headers) of
{_, <<"0">>} ->
false;
{_, _} ->
true;
_ ->
lists:keymember(<<"transfer-encoding">>, 1, Req#http_req.headers)
end.
The length may not be known if Transfer - Encoding is not identity ,
-spec body_length(Req) -> {undefined | non_neg_integer(), Req} when Req::req().
body_length(Req) ->
case parse_header(<<"transfer-encoding">>, Req) of
{ok, [<<"identity">>], Req2} ->
{ok, Length, Req3} = parse_header(<<"content-length">>, Req2, 0),
{Length, Req3};
{ok, _, Req2} ->
{undefined, Req2}
end.
-spec body(Req)
-> {ok, binary(), Req} | {more, binary(), Req}
| {error, atom()} when Req::req().
body(Req) ->
body(Req, []).
-spec body(Req, body_opts())
-> {ok, binary(), Req} | {more, binary(), Req}
| {error, atom()} when Req::req().
body(Req=#http_req{body_state=waiting}, Opts) ->
Send a 100 continue if needed ( enabled by default ) .
Req1 = case lists:keyfind(continue, 1, Opts) of
{_, false} ->
Req;
_ ->
{ok, ExpectHeader, Req0} = parse_header(<<"expect">>, Req),
ok = case ExpectHeader of
[<<"100-continue">>] -> continue(Req0);
_ -> ok
end,
Req0
end,
Initialize body streaming state .
CFun = case lists:keyfind(content_decode, 1, Opts) of
false ->
fun cowboy_http:ce_identity/1;
{_, CFun0} ->
CFun0
end,
case lists:keyfind(transfer_decode, 1, Opts) of
false ->
case parse_header(<<"transfer-encoding">>, Req1) of
{ok, [<<"chunked">>], Req2} ->
body(Req2#http_req{body_state={stream, 0,
fun cow_http_te:stream_chunked/2, {0, 0}, CFun}}, Opts);
{ok, [<<"identity">>], Req2} ->
{Len, Req3} = body_length(Req2),
case Len of
0 ->
{ok, <<>>, Req3#http_req{body_state=done}};
_ ->
body(Req3#http_req{body_state={stream, Len,
fun cow_http_te:stream_identity/2, {0, Len},
CFun}}, Opts)
end
end;
{_, TFun, TState} ->
body(Req1#http_req{body_state={stream, 0,
TFun, TState, CFun}}, Opts)
end;
body(Req=#http_req{body_state=done}, _) ->
{ok, <<>>, Req};
body(Req, Opts) ->
ChunkLen = case lists:keyfind(length, 1, Opts) of
false -> 8000000;
{_, ChunkLen0} -> ChunkLen0
end,
ReadLen = case lists:keyfind(read_length, 1, Opts) of
false -> 1000000;
{_, ReadLen0} -> ReadLen0
end,
ReadTimeout = case lists:keyfind(read_timeout, 1, Opts) of
false -> 15000;
{_, ReadTimeout0} -> ReadTimeout0
end,
body_loop(Req, ReadTimeout, ReadLen, ChunkLen, <<>>).
body_loop(Req=#http_req{buffer=Buffer, body_state={stream, Length, _, _, _}},
ReadTimeout, ReadLength, ChunkLength, Acc) ->
{Tag, Res, Req2} = case Buffer of
<<>> ->
body_recv(Req, ReadTimeout, min(Length, ReadLength));
_ ->
body_decode(Req, ReadTimeout)
end,
case {Tag, Res} of
{ok, {ok, Data}} ->
{ok, << Acc/binary, Data/binary >>, Req2};
{more, {ok, Data}} ->
Acc2 = << Acc/binary, Data/binary >>,
case byte_size(Acc2) >= ChunkLength of
true -> {more, Acc2, Req2};
false -> body_loop(Req2, ReadTimeout, ReadLength, ChunkLength, Acc2)
end;
Res
end.
body_recv(Req=#http_req{transport=Transport, socket=Socket, buffer=Buffer},
ReadTimeout, ReadLength) ->
case Transport:recv(Socket, ReadLength, ReadTimeout) of
{ok, Data} ->
body_decode(Req#http_req{buffer= << Buffer/binary, Data/binary >>},
ReadTimeout);
Error = {error, _} ->
{error, Error, Req}
end.
Two decodings happen . First a decoding function is applied to the
Content encoding is generally used for compression .
body_decode(Req=#http_req{buffer=Data, body_state={stream, _,
TDecode, TState, CDecode}}, ReadTimeout) ->
case TDecode(Data, TState) of
more ->
body_recv(Req#http_req{body_state={stream, 0,
TDecode, TState, CDecode}}, ReadTimeout, 0);
{more, Data2, TState2} ->
{more, CDecode(Data2), Req#http_req{body_state={stream, 0,
TDecode, TState2, CDecode}, buffer= <<>>}};
{more, Data2, Length, TState2} when is_integer(Length) ->
{more, CDecode(Data2), Req#http_req{body_state={stream, Length,
TDecode, TState2, CDecode}, buffer= <<>>}};
{more, Data2, Rest, TState2} ->
{more, CDecode(Data2), Req#http_req{body_state={stream, 0,
TDecode, TState2, CDecode}, buffer=Rest}};
{done, TotalLength, Rest} ->
{ok, {ok, <<>>}, body_decode_end(Req, TotalLength, Rest)};
{done, Data2, TotalLength, Rest} ->
{ok, CDecode(Data2), body_decode_end(Req, TotalLength, Rest)}
end.
body_decode_end(Req=#http_req{headers=Headers, p_headers=PHeaders},
TotalLength, Rest) ->
Headers2 = lists:keystore(<<"content-length">>, 1, Headers,
{<<"content-length">>, integer_to_binary(TotalLength)}),
At this point we just assume TEs were all decoded .
Headers3 = lists:keydelete(<<"transfer-encoding">>, 1, Headers2),
PHeaders2 = lists:keystore(<<"content-length">>, 1, PHeaders,
{<<"content-length">>, TotalLength}),
PHeaders3 = lists:keydelete(<<"transfer-encoding">>, 1, PHeaders2),
Req#http_req{buffer=Rest, body_state=done,
headers=Headers3, p_headers=PHeaders3}.
-spec body_qs(Req)
-> {ok, [{binary(), binary() | true}], Req} | {error, atom()}
when Req::req().
body_qs(Req) ->
body_qs(Req, [
{length, 64000},
{read_length, 64000},
{read_timeout, 5000}]).
-spec body_qs(Req, body_opts()) -> {ok, [{binary(), binary() | true}], Req}
| {badlength, Req} | {error, atom()} when Req::req().
body_qs(Req, Opts) ->
case body(Req, Opts) of
{ok, Body, Req2} ->
{ok, cow_qs:parse_qs(Body), Req2};
{more, _, Req2} ->
{badlength, Req2};
{error, Reason} ->
{error, Reason}
end.
-spec part(Req)
-> {ok, cow_multipart:headers(), Req} | {done, Req}
when Req::req().
part(Req) ->
part(Req, [
{length, 64000},
{read_length, 64000},
{read_timeout, 5000}]).
-spec part(Req, body_opts())
-> {ok, cow_multipart:headers(), Req} | {done, Req}
when Req::req().
part(Req=#http_req{multipart=undefined}, Opts) ->
part(init_multipart(Req), Opts);
part(Req, Opts) ->
{Data, Req2} = stream_multipart(Req, Opts),
part(Data, Opts, Req2).
part(Buffer, Opts, Req=#http_req{multipart={Boundary, _}}) ->
case cow_multipart:parse_headers(Buffer, Boundary) of
more ->
{Data, Req2} = stream_multipart(Req, Opts),
part(<< Buffer/binary, Data/binary >>, Opts, Req2);
{more, Buffer2} ->
{Data, Req2} = stream_multipart(Req, Opts),
part(<< Buffer2/binary, Data/binary >>, Opts, Req2);
{ok, Headers, Rest} ->
{ok, Headers, Req#http_req{multipart={Boundary, Rest}}};
{done, _} ->
{done, Req#http_req{multipart=undefined}}
end.
-spec part_body(Req)
-> {ok, binary(), Req} | {more, binary(), Req}
when Req::req().
part_body(Req) ->
part_body(Req, []).
-spec part_body(Req, body_opts())
-> {ok, binary(), Req} | {more, binary(), Req}
when Req::req().
part_body(Req=#http_req{multipart=undefined}, Opts) ->
part_body(init_multipart(Req), Opts);
part_body(Req, Opts) ->
part_body(<<>>, Opts, Req, <<>>).
part_body(Buffer, Opts, Req=#http_req{multipart={Boundary, _}}, Acc) ->
ChunkLen = case lists:keyfind(length, 1, Opts) of
false -> 8000000;
{_, ChunkLen0} -> ChunkLen0
end,
case byte_size(Acc) > ChunkLen of
true ->
{more, Acc, Req#http_req{multipart={Boundary, Buffer}}};
false ->
{Data, Req2} = stream_multipart(Req, Opts),
case cow_multipart:parse_body(<< Buffer/binary, Data/binary >>, Boundary) of
{ok, Body} ->
part_body(<<>>, Opts, Req2, << Acc/binary, Body/binary >>);
{ok, Body, Rest} ->
part_body(Rest, Opts, Req2, << Acc/binary, Body/binary >>);
done ->
{ok, Acc, Req2};
{done, Body} ->
{ok, << Acc/binary, Body/binary >>, Req2};
{done, Body, Rest} ->
{ok, << Acc/binary, Body/binary >>,
Req2#http_req{multipart={Boundary, Rest}}}
end
end.
init_multipart(Req) ->
{ok, {<<"multipart">>, _, Params}, Req2}
= parse_header(<<"content-type">>, Req),
{_, Boundary} = lists:keyfind(<<"boundary">>, 1, Params),
Req2#http_req{multipart={Boundary, <<>>}}.
stream_multipart(Req=#http_req{body_state=BodyState, multipart={_, <<>>}}, Opts) ->
true = BodyState =/= done,
{_, Data, Req2} = body(Req, Opts),
{Data, Req2};
stream_multipart(Req=#http_req{multipart={Boundary, Buffer}}, _) ->
{Buffer, Req#http_req{multipart={Boundary, <<>>}}}.
-spec set_resp_cookie(iodata(), iodata(), cookie_opts(), Req)
-> Req when Req::req().
set_resp_cookie(Name, Value, Opts, Req) ->
Cookie = cow_cookie:setcookie(Name, Value, Opts),
set_resp_header(<<"set-cookie">>, Cookie, Req).
-spec set_resp_header(binary(), iodata(), Req)
-> Req when Req::req().
set_resp_header(Name, Value, Req=#http_req{resp_headers=RespHeaders}) ->
Req#http_req{resp_headers=[{Name, Value}|RespHeaders]}.
-spec set_resp_body(iodata(), Req) -> Req when Req::req().
set_resp_body(Body, Req) ->
Req#http_req{resp_body=Body}.
-spec set_resp_body_fun(resp_body_fun(), Req) -> Req when Req::req().
set_resp_body_fun(StreamFun, Req) when is_function(StreamFun) ->
Req#http_req{resp_body=StreamFun}.
-spec set_resp_body_fun(non_neg_integer(), resp_body_fun(), Req)
-> Req when Req::req();
(chunked, resp_chunked_fun(), Req)
-> Req when Req::req().
set_resp_body_fun(StreamLen, StreamFun, Req)
when is_integer(StreamLen), is_function(StreamFun) ->
Req#http_req{resp_body={StreamLen, StreamFun}};
set_resp_body_fun(chunked, StreamFun, Req)
when is_function(StreamFun) ->
Req#http_req{resp_body={chunked, StreamFun}}.
-spec has_resp_header(binary(), req()) -> boolean().
has_resp_header(Name, #http_req{resp_headers=RespHeaders}) ->
lists:keymember(Name, 1, RespHeaders).
-spec has_resp_body(req()) -> boolean().
has_resp_body(#http_req{resp_body=RespBody}) when is_function(RespBody) ->
true;
has_resp_body(#http_req{resp_body={chunked, _}}) ->
true;
has_resp_body(#http_req{resp_body={Length, _}}) ->
Length > 0;
has_resp_body(#http_req{resp_body=RespBody}) ->
iolist_size(RespBody) > 0.
-spec delete_resp_header(binary(), Req)
-> Req when Req::req().
delete_resp_header(Name, Req=#http_req{resp_headers=RespHeaders}) ->
RespHeaders2 = lists:keydelete(Name, 1, RespHeaders),
Req#http_req{resp_headers=RespHeaders2}.
-spec reply(cowboy:http_status(), Req) -> {ok, Req} when Req::req().
reply(Status, Req=#http_req{resp_body=Body}) ->
reply(Status, [], Body, Req).
-spec reply(cowboy:http_status(), cowboy:http_headers(), Req)
-> {ok, Req} when Req::req().
reply(Status, Headers, Req=#http_req{resp_body=Body}) ->
reply(Status, Headers, Body, Req).
-spec reply(cowboy:http_status(), cowboy:http_headers(),
iodata() | {non_neg_integer() | resp_body_fun()}, Req)
-> {ok, Req} when Req::req().
reply(Status, Headers, Body, Req=#http_req{
socket=Socket, transport=Transport,
version=Version, connection=Connection,
method=Method, resp_compress=Compress,
resp_state=RespState, resp_headers=RespHeaders})
when RespState =:= waiting; RespState =:= waiting_stream ->
HTTP11Headers = if
Transport =/= cowboy_spdy, Version =:= 'HTTP/1.0', Connection =:= keepalive ->
[{<<"connection">>, atom_to_connection(Connection)}];
Transport =/= cowboy_spdy, Version =:= 'HTTP/1.1', Connection =:= close ->
[{<<"connection">>, atom_to_connection(Connection)}];
true ->
[]
end,
Req3 = case Body of
BodyFun when is_function(BodyFun) ->
RespConn = close,
{RespType, Req2} = if
Transport =:= cowboy_spdy ->
response(Status, Headers, RespHeaders, [
{<<"date">>, cowboy_clock:rfc1123()},
{<<"server">>, <<"Cowboy">>}
], stream, Req);
true ->
response(Status, Headers, RespHeaders, [
{<<"connection">>, <<"close">>},
{<<"date">>, cowboy_clock:rfc1123()},
{<<"server">>, <<"Cowboy">>},
{<<"transfer-encoding">>, <<"identity">>}
], <<>>, Req)
end,
if RespType =/= hook, Method =/= <<"HEAD">> ->
BodyFun(Socket, Transport);
true -> ok
end,
Req2#http_req{connection=RespConn};
{chunked, BodyFun} ->
{RespType, Req2} = chunked_response(Status, Headers, Req),
if RespType =/= hook, Method =/= <<"HEAD">> ->
ChunkFun = fun(IoData) -> chunk(IoData, Req2) end,
BodyFun(ChunkFun),
if
Version =:= 'HTTP/1.0'; RespState =:= waiting_stream ->
Req2;
true ->
last_chunk(Req2)
end;
true -> Req2
end;
{ContentLength, BodyFun} ->
We stream the response body for ContentLength bytes .
RespConn = response_connection(Headers, Connection),
{RespType, Req2} = response(Status, Headers, RespHeaders, [
{<<"content-length">>, integer_to_list(ContentLength)},
{<<"date">>, cowboy_clock:rfc1123()},
{<<"server">>, <<"Cowboy">>}
|HTTP11Headers], stream, Req),
if RespType =/= hook, Method =/= <<"HEAD">> ->
BodyFun(Socket, Transport);
true -> ok
end,
Req2#http_req{connection=RespConn};
_ when Compress ->
RespConn = response_connection(Headers, Connection),
Req2 = reply_may_compress(Status, Headers, Body, Req,
RespHeaders, HTTP11Headers, Method),
Req2#http_req{connection=RespConn};
_ ->
RespConn = response_connection(Headers, Connection),
Req2 = reply_no_compress(Status, Headers, Body, Req,
RespHeaders, HTTP11Headers, Method, iolist_size(Body)),
Req2#http_req{connection=RespConn}
end,
{ok, Req3#http_req{resp_state=done, resp_headers=[], resp_body= <<>>}}.
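%% Illustrative usage (not part of the original source): a typical handler
%% replies with a status, headers and an iodata body in one call, e.g.
%%   {ok, Req2} = cowboy_req:reply(200,
%%       [{<<"content-type">>, <<"text/plain">>}], <<"Hello world!">>, Req).
%% The helpers below decide whether such a body may be gzip-compressed.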
reply_may_compress(Status, Headers, Body, Req,
RespHeaders, HTTP11Headers, Method) ->
BodySize = iolist_size(Body),
case parse_header(<<"accept-encoding">>, Req) of
{ok, Encodings, Req2} ->
CanGzip = (BodySize > 300)
andalso (false =:= lists:keyfind(<<"content-encoding">>,
1, Headers))
andalso (false =:= lists:keyfind(<<"content-encoding">>,
1, RespHeaders))
andalso (false =:= lists:keyfind(<<"transfer-encoding">>,
1, Headers))
andalso (false =:= lists:keyfind(<<"transfer-encoding">>,
1, RespHeaders))
andalso (Encodings =/= undefined)
andalso (false =/= lists:keyfind(<<"gzip">>, 1, Encodings)),
case CanGzip of
true ->
GzBody = zlib:gzip(Body),
{_, Req3} = response(Status, Headers, RespHeaders, [
{<<"content-length">>, integer_to_list(byte_size(GzBody))},
{<<"content-encoding">>, <<"gzip">>},
{<<"date">>, cowboy_clock:rfc1123()},
{<<"server">>, <<"Cowboy">>}
|HTTP11Headers],
case Method of <<"HEAD">> -> <<>>; _ -> GzBody end,
Req2),
Req3;
false ->
reply_no_compress(Status, Headers, Body, Req,
RespHeaders, HTTP11Headers, Method, BodySize)
end;
{error, badarg} ->
reply_no_compress(Status, Headers, Body, Req,
RespHeaders, HTTP11Headers, Method, BodySize)
end.
reply_no_compress(Status, Headers, Body, Req,
RespHeaders, HTTP11Headers, Method, BodySize) ->
{_, Req2} = response(Status, Headers, RespHeaders, [
{<<"content-length">>, integer_to_list(BodySize)},
{<<"date">>, cowboy_clock:rfc1123()},
{<<"server">>, <<"Cowboy">>}
|HTTP11Headers],
case Method of <<"HEAD">> -> <<>>; _ -> Body end,
Req),
Req2.
-spec chunked_reply(cowboy:http_status(), Req) -> {ok, Req} when Req::req().
chunked_reply(Status, Req) ->
chunked_reply(Status, [], Req).
-spec chunked_reply(cowboy:http_status(), cowboy:http_headers(), Req)
-> {ok, Req} when Req::req().
chunked_reply(Status, Headers, Req) ->
{_, Req2} = chunked_response(Status, Headers, Req),
{ok, Req2}.
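%% Illustrative usage (not part of the original source): start a chunked
%% response, then send chunks as data becomes available, e.g.
%%   {ok, Req2} = cowboy_req:chunked_reply(200, Req),
%%   ok = cowboy_req:chunk(<<"hello ">>, Req2),
%%   ok = cowboy_req:chunk(<<"world">>, Req2).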
-spec chunk(iodata(), req()) -> ok | {error, atom()}.
chunk(_Data, #http_req{method= <<"HEAD">>}) ->
ok;
chunk(Data, #http_req{socket=Socket, transport=cowboy_spdy,
resp_state=chunks}) ->
cowboy_spdy:stream_data(Socket, Data);
chunk(Data, #http_req{socket=Socket, transport=Transport,
resp_state=stream}) ->
Transport:send(Socket, Data);
chunk(Data, #http_req{socket=Socket, transport=Transport,
resp_state=chunks}) ->
Transport:send(Socket, [integer_to_list(iolist_size(Data), 16),
<<"\r\n">>, Data, <<"\r\n">>]).
-spec last_chunk(Req) -> Req when Req::req().
last_chunk(Req=#http_req{socket=Socket, transport=cowboy_spdy}) ->
_ = cowboy_spdy:stream_close(Socket),
Req#http_req{resp_state=done};
last_chunk(Req=#http_req{socket=Socket, transport=Transport}) ->
_ = Transport:send(Socket, <<"0\r\n\r\n">>),
Req#http_req{resp_state=done}.
-spec upgrade_reply(cowboy:http_status(), cowboy:http_headers(), Req)
-> {ok, Req} when Req::req().
upgrade_reply(Status, Headers, Req=#http_req{transport=Transport,
resp_state=waiting, resp_headers=RespHeaders})
when Transport =/= cowboy_spdy ->
{_, Req2} = response(Status, Headers, RespHeaders, [
{<<"connection">>, <<"Upgrade">>}
], <<>>, Req),
{ok, Req2#http_req{resp_state=done, resp_headers=[], resp_body= <<>>}}.
-spec continue(req()) -> ok | {error, atom()}.
continue(#http_req{socket=Socket, transport=Transport,
version=Version}) ->
HTTPVer = atom_to_binary(Version, latin1),
Transport:send(Socket,
<< HTTPVer/binary, " ", (status(100))/binary, "\r\n\r\n" >>).
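%% Note: continue/1 above sends an interim "100 Continue" status line; it is
%% typically used before reading the request body when the client sent an
%% "expect: 100-continue" header.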
-spec maybe_reply([{module(), atom(), arity() | [term()], _}], req()) -> ok.
maybe_reply(Stacktrace, Req) ->
receive
{cowboy_req, resp_sent} -> ok
after 0 ->
_ = do_maybe_reply(Stacktrace, Req),
ok
end.
do_maybe_reply([
{cow_http_hd, _, _, _},
{cowboy_req, parse_header, _, _}|_], Req) ->
cowboy_req:reply(400, Req);
do_maybe_reply(_, Req) ->
cowboy_req:reply(500, Req).
-spec ensure_response(req(), cowboy:http_status()) -> ok.
ensure_response(#http_req{resp_state=done}, _) ->
ok;
%% Reply with the status code found in the second argument.
ensure_response(Req=#http_req{resp_state=RespState}, Status)
when RespState =:= waiting; RespState =:= waiting_stream ->
_ = reply(Status, [], [], Req),
ok;
ensure_response(#http_req{method= <<"HEAD">>}, _) ->
ok;
ensure_response(Req=#http_req{resp_state=chunks}, _) ->
_ = last_chunk(Req),
ok;
ensure_response(#http_req{}, _) ->
ok.
-spec append_buffer(binary(), Req) -> Req when Req::req().
append_buffer(Suffix, Req=#http_req{buffer=Buffer}) ->
Req#http_req{buffer= << Buffer/binary, Suffix/binary >>}.
-spec get(atom(), req()) -> any(); ([atom()], req()) -> any().
get(List, Req) when is_list(List) ->
[g(Atom, Req) || Atom <- List];
get(Atom, Req) when is_atom(Atom) ->
g(Atom, Req).
g(bindings, #http_req{bindings=Ret}) -> Ret;
g(body_state, #http_req{body_state=Ret}) -> Ret;
g(buffer, #http_req{buffer=Ret}) -> Ret;
g(connection, #http_req{connection=Ret}) -> Ret;
g(cookies, #http_req{cookies=Ret}) -> Ret;
g(headers, #http_req{headers=Ret}) -> Ret;
g(host, #http_req{host=Ret}) -> Ret;
g(host_info, #http_req{host_info=Ret}) -> Ret;
g(meta, #http_req{meta=Ret}) -> Ret;
g(method, #http_req{method=Ret}) -> Ret;
g(multipart, #http_req{multipart=Ret}) -> Ret;
g(onresponse, #http_req{onresponse=Ret}) -> Ret;
g(p_headers, #http_req{p_headers=Ret}) -> Ret;
g(path, #http_req{path=Ret}) -> Ret;
g(path_info, #http_req{path_info=Ret}) -> Ret;
g(peer, #http_req{peer=Ret}) -> Ret;
g(pid, #http_req{pid=Ret}) -> Ret;
g(port, #http_req{port=Ret}) -> Ret;
g(qs, #http_req{qs=Ret}) -> Ret;
g(qs_vals, #http_req{qs_vals=Ret}) -> Ret;
g(resp_body, #http_req{resp_body=Ret}) -> Ret;
g(resp_compress, #http_req{resp_compress=Ret}) -> Ret;
g(resp_headers, #http_req{resp_headers=Ret}) -> Ret;
g(resp_state, #http_req{resp_state=Ret}) -> Ret;
g(socket, #http_req{socket=Ret}) -> Ret;
g(transport, #http_req{transport=Ret}) -> Ret;
g(version, #http_req{version=Ret}) -> Ret.
-spec set([{atom(), any()}], Req) -> Req when Req::req().
set([], Req) -> Req;
set([{bindings, Val}|Tail], Req) -> set(Tail, Req#http_req{bindings=Val});
set([{body_state, Val}|Tail], Req) -> set(Tail, Req#http_req{body_state=Val});
set([{buffer, Val}|Tail], Req) -> set(Tail, Req#http_req{buffer=Val});
set([{connection, Val}|Tail], Req) -> set(Tail, Req#http_req{connection=Val});
set([{cookies, Val}|Tail], Req) -> set(Tail, Req#http_req{cookies=Val});
set([{headers, Val}|Tail], Req) -> set(Tail, Req#http_req{headers=Val});
set([{host, Val}|Tail], Req) -> set(Tail, Req#http_req{host=Val});
set([{host_info, Val}|Tail], Req) -> set(Tail, Req#http_req{host_info=Val});
set([{meta, Val}|Tail], Req) -> set(Tail, Req#http_req{meta=Val});
set([{method, Val}|Tail], Req) -> set(Tail, Req#http_req{method=Val});
set([{multipart, Val}|Tail], Req) -> set(Tail, Req#http_req{multipart=Val});
set([{onresponse, Val}|Tail], Req) -> set(Tail, Req#http_req{onresponse=Val});
set([{p_headers, Val}|Tail], Req) -> set(Tail, Req#http_req{p_headers=Val});
set([{path, Val}|Tail], Req) -> set(Tail, Req#http_req{path=Val});
set([{path_info, Val}|Tail], Req) -> set(Tail, Req#http_req{path_info=Val});
set([{peer, Val}|Tail], Req) -> set(Tail, Req#http_req{peer=Val});
set([{pid, Val}|Tail], Req) -> set(Tail, Req#http_req{pid=Val});
set([{port, Val}|Tail], Req) -> set(Tail, Req#http_req{port=Val});
set([{qs, Val}|Tail], Req) -> set(Tail, Req#http_req{qs=Val});
set([{qs_vals, Val}|Tail], Req) -> set(Tail, Req#http_req{qs_vals=Val});
set([{resp_body, Val}|Tail], Req) -> set(Tail, Req#http_req{resp_body=Val});
set([{resp_headers, Val}|Tail], Req) -> set(Tail, Req#http_req{resp_headers=Val});
set([{resp_state, Val}|Tail], Req) -> set(Tail, Req#http_req{resp_state=Val});
set([{socket, Val}|Tail], Req) -> set(Tail, Req#http_req{socket=Val});
set([{transport, Val}|Tail], Req) -> set(Tail, Req#http_req{transport=Val});
set([{version, Val}|Tail], Req) -> set(Tail, Req#http_req{version=Val}).
-spec set_bindings(cowboy_router:tokens(), cowboy_router:tokens(),
cowboy_router:bindings(), Req) -> Req when Req::req().
set_bindings(HostInfo, PathInfo, Bindings, Req) ->
Req#http_req{host_info=HostInfo, path_info=PathInfo,
bindings=Bindings}.
-spec compact(Req) -> Req when Req::req().
compact(Req) ->
Req#http_req{host_info=undefined,
path_info=undefined, qs_vals=undefined,
bindings=undefined, headers=[],
p_headers=[], cookies=[]}.
-spec lock(Req) -> Req when Req::req().
lock(Req) ->
Req#http_req{resp_state=locked}.
-spec to_list(req()) -> [{atom(), any()}].
to_list(Req) ->
lists:zip(record_info(fields, http_req), tl(tuple_to_list(Req))).
%% Internal.
-spec chunked_response(cowboy:http_status(), cowboy:http_headers(), Req) ->
{normal | hook, Req} when Req::req().
chunked_response(Status, Headers, Req=#http_req{
transport=cowboy_spdy, resp_state=waiting,
resp_headers=RespHeaders}) ->
{RespType, Req2} = response(Status, Headers, RespHeaders, [
{<<"date">>, cowboy_clock:rfc1123()},
{<<"server">>, <<"Cowboy">>}
], stream, Req),
{RespType, Req2#http_req{resp_state=chunks,
resp_headers=[], resp_body= <<>>}};
chunked_response(Status, Headers, Req=#http_req{
version=Version, connection=Connection,
resp_state=RespState, resp_headers=RespHeaders})
when RespState =:= waiting; RespState =:= waiting_stream ->
RespConn = response_connection(Headers, Connection),
HTTP11Headers = if
Version =:= 'HTTP/1.0', Connection =:= keepalive ->
[{<<"connection">>, atom_to_connection(Connection)}];
Version =:= 'HTTP/1.0' -> [];
true ->
MaybeTE = if
RespState =:= waiting_stream -> [];
true -> [{<<"transfer-encoding">>, <<"chunked">>}]
end,
if
Connection =:= close ->
[{<<"connection">>, atom_to_connection(Connection)}|MaybeTE];
true ->
MaybeTE
end
end,
RespState2 = if
Version =:= 'HTTP/1.1', RespState =:= 'waiting' -> chunks;
true -> stream
end,
{RespType, Req2} = response(Status, Headers, RespHeaders, [
{<<"date">>, cowboy_clock:rfc1123()},
{<<"server">>, <<"Cowboy">>}
|HTTP11Headers], <<>>, Req),
{RespType, Req2#http_req{connection=RespConn, resp_state=RespState2,
resp_headers=[], resp_body= <<>>}}.
-spec response(cowboy:http_status(), cowboy:http_headers(),
cowboy:http_headers(), cowboy:http_headers(), stream | iodata(), Req)
-> {normal | hook, Req} when Req::req().
response(Status, Headers, RespHeaders, DefaultHeaders, Body, Req=#http_req{
socket=Socket, transport=Transport, version=Version,
pid=ReqPid, onresponse=OnResponse}) ->
FullHeaders = case OnResponse of
already_called -> Headers;
_ -> response_merge_headers(Headers, RespHeaders, DefaultHeaders)
end,
Body2 = case Body of stream -> <<>>; _ -> Body end,
{Status2, FullHeaders2, Req2} = case OnResponse of
already_called -> {Status, FullHeaders, Req};
undefined -> {Status, FullHeaders, Req};
OnResponse ->
case OnResponse(Status, FullHeaders, Body2,
Req#http_req{resp_headers=[], resp_body= <<>>,
onresponse=already_called}) of
StHdReq = {_, _, _} ->
StHdReq;
Req1 ->
{Status, FullHeaders, Req1}
end
end,
ReplyType = case Req2#http_req.resp_state of
waiting when Transport =:= cowboy_spdy, Body =:= stream ->
cowboy_spdy:stream_reply(Socket, status(Status2), FullHeaders2),
ReqPid ! {?MODULE, resp_sent},
normal;
waiting when Transport =:= cowboy_spdy ->
cowboy_spdy:reply(Socket, status(Status2), FullHeaders2, Body),
ReqPid ! {?MODULE, resp_sent},
normal;
RespState when RespState =:= waiting; RespState =:= waiting_stream ->
HTTPVer = atom_to_binary(Version, latin1),
StatusLine = << HTTPVer/binary, " ",
(status(Status2))/binary, "\r\n" >>,
HeaderLines = [[Key, <<": ">>, Value, <<"\r\n">>]
|| {Key, Value} <- FullHeaders2],
Transport:send(Socket, [StatusLine, HeaderLines, <<"\r\n">>, Body2]),
ReqPid ! {?MODULE, resp_sent},
normal;
_ ->
hook
end,
{ReplyType, Req2}.
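%% Note on the flow above: an onresponse hook may rewrite the status, headers
%% and request before anything is sent; if the hook already replied itself
%% (resp_state is no longer waiting), response/6 sends nothing and returns 'hook'.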
-spec response_connection(cowboy:http_headers(), keepalive | close)
-> keepalive | close.
response_connection([], Connection) ->
Connection;
response_connection([{Name, Value}|Tail], Connection) ->
case Name of
<<"connection">> ->
Tokens = cow_http_hd:parse_connection(Value),
connection_to_atom(Tokens);
_ ->
response_connection(Tail, Connection)
end.
-spec response_merge_headers(cowboy:http_headers(), cowboy:http_headers(),
cowboy:http_headers()) -> cowboy:http_headers().
response_merge_headers(Headers, RespHeaders, DefaultHeaders) ->
Headers2 = [{Key, Value} || {Key, Value} <- Headers],
merge_headers(
merge_headers(Headers2, RespHeaders),
DefaultHeaders).
-spec merge_headers(cowboy:http_headers(), cowboy:http_headers())
-> cowboy:http_headers().
%% Merge headers by prepending the tuples in the second list to the
%% first list. It also handles Set-Cookie properly, which supports
%% duplicates.
merge_headers(Headers, []) ->
Headers;
merge_headers(Headers, [{<<"set-cookie">>, Value}|Tail]) ->
merge_headers([{<<"set-cookie">>, Value}|Headers], Tail);
merge_headers(Headers, [{Name, Value}|Tail]) ->
Headers2 = case lists:keymember(Name, 1, Headers) of
true -> Headers;
false -> [{Name, Value}|Headers]
end,
merge_headers(Headers2, Tail).
-spec atom_to_connection(keepalive) -> <<_:80>>;
(close) -> <<_:40>>.
atom_to_connection(keepalive) ->
<<"keep-alive">>;
atom_to_connection(close) ->
<<"close">>.
-spec connection_to_atom([binary()]) -> keepalive | close.
connection_to_atom([]) ->
keepalive;
connection_to_atom([<<"close">>|_]) ->
close;
connection_to_atom([_|Tail]) ->
connection_to_atom(Tail).
-spec status(cowboy:http_status()) -> binary().
status(100) -> <<"100 Continue">>;
status(101) -> <<"101 Switching Protocols">>;
status(102) -> <<"102 Processing">>;
status(200) -> <<"200 OK">>;
status(201) -> <<"201 Created">>;
status(202) -> <<"202 Accepted">>;
status(203) -> <<"203 Non-Authoritative Information">>;
status(204) -> <<"204 No Content">>;
status(205) -> <<"205 Reset Content">>;
status(206) -> <<"206 Partial Content">>;
status(207) -> <<"207 Multi-Status">>;
status(226) -> <<"226 IM Used">>;
status(300) -> <<"300 Multiple Choices">>;
status(301) -> <<"301 Moved Permanently">>;
status(302) -> <<"302 Found">>;
status(303) -> <<"303 See Other">>;
status(304) -> <<"304 Not Modified">>;
status(305) -> <<"305 Use Proxy">>;
status(306) -> <<"306 Switch Proxy">>;
status(307) -> <<"307 Temporary Redirect">>;
status(400) -> <<"400 Bad Request">>;
status(401) -> <<"401 Unauthorized">>;
status(402) -> <<"402 Payment Required">>;
status(403) -> <<"403 Forbidden">>;
status(404) -> <<"404 Not Found">>;
status(405) -> <<"405 Method Not Allowed">>;
status(406) -> <<"406 Not Acceptable">>;
status(407) -> <<"407 Proxy Authentication Required">>;
status(408) -> <<"408 Request Timeout">>;
status(409) -> <<"409 Conflict">>;
status(410) -> <<"410 Gone">>;
status(411) -> <<"411 Length Required">>;
status(412) -> <<"412 Precondition Failed">>;
status(413) -> <<"413 Request Entity Too Large">>;
status(414) -> <<"414 Request-URI Too Long">>;
status(415) -> <<"415 Unsupported Media Type">>;
status(416) -> <<"416 Requested Range Not Satisfiable">>;
status(417) -> <<"417 Expectation Failed">>;
status(418) -> <<"418 I'm a teapot">>;
status(422) -> <<"422 Unprocessable Entity">>;
status(423) -> <<"423 Locked">>;
status(424) -> <<"424 Failed Dependency">>;
status(425) -> <<"425 Unordered Collection">>;
status(426) -> <<"426 Upgrade Required">>;
status(428) -> <<"428 Precondition Required">>;
status(429) -> <<"429 Too Many Requests">>;
status(431) -> <<"431 Request Header Fields Too Large">>;
status(500) -> <<"500 Internal Server Error">>;
status(501) -> <<"501 Not Implemented">>;
status(502) -> <<"502 Bad Gateway">>;
status(503) -> <<"503 Service Unavailable">>;
status(504) -> <<"504 Gateway Timeout">>;
status(505) -> <<"505 HTTP Version Not Supported">>;
status(506) -> <<"506 Variant Also Negotiates">>;
status(507) -> <<"507 Insufficient Storage">>;
status(510) -> <<"510 Not Extended">>;
status(511) -> <<"511 Network Authentication Required">>;
status(B) when is_binary(B) -> B.
-ifdef(TEST).
url_test() ->
{undefined, _} =
url(#http_req{transport=ranch_tcp, host= <<>>, port= undefined,
path= <<>>, qs= <<>>, pid=self()}),
{<<"http://localhost/path">>, _} =
url(#http_req{transport=ranch_tcp, host= <<"localhost">>, port=80,
path= <<"/path">>, qs= <<>>, pid=self()}),
{<<"http://localhost:443/path">>, _} =
url(#http_req{transport=ranch_tcp, host= <<"localhost">>, port=443,
path= <<"/path">>, qs= <<>>, pid=self()}),
{<<"http://localhost:8080/path">>, _} =
url(#http_req{transport=ranch_tcp, host= <<"localhost">>, port=8080,
path= <<"/path">>, qs= <<>>, pid=self()}),
{<<"http://localhost:8080/path?dummy=2785">>, _} =
url(#http_req{transport=ranch_tcp, host= <<"localhost">>, port=8080,
path= <<"/path">>, qs= <<"dummy=2785">>, pid=self()}),
{<<"https://localhost/path">>, _} =
url(#http_req{transport=ranch_ssl, host= <<"localhost">>, port=443,
path= <<"/path">>, qs= <<>>, pid=self()}),
{<<"https://localhost:8443/path">>, _} =
url(#http_req{transport=ranch_ssl, host= <<"localhost">>, port=8443,
path= <<"/path">>, qs= <<>>, pid=self()}),
{<<"https://localhost:8443/path?dummy=2785">>, _} =
url(#http_req{transport=ranch_ssl, host= <<"localhost">>, port=8443,
path= <<"/path">>, qs= <<"dummy=2785">>, pid=self()}),
ok.
connection_to_atom_test_() ->
Tests = [
{[<<"close">>], close},
{[<<"keep-alive">>], keepalive},
{[<<"keep-alive">>, <<"upgrade">>], keepalive}
],
[{lists:flatten(io_lib:format("~p", [T])),
fun() -> R = connection_to_atom(T) end} || {T, R} <- Tests].
merge_headers_test_() ->
Tests = [
{[{<<"content-length">>,<<"13">>},{<<"server">>,<<"Cowboy">>}],
[{<<"set-cookie">>,<<"foo=bar">>},{<<"content-length">>,<<"11">>}],
[{<<"set-cookie">>,<<"foo=bar">>},
{<<"content-length">>,<<"13">>},
{<<"server">>,<<"Cowboy">>}]},
{[{<<"content-length">>,<<"13">>},{<<"server">>,<<"Cowboy">>}],
[{<<"set-cookie">>,<<"foo=bar">>},{<<"set-cookie">>,<<"bar=baz">>}],
[{<<"set-cookie">>,<<"bar=baz">>},
{<<"set-cookie">>,<<"foo=bar">>},
{<<"content-length">>,<<"13">>},
{<<"server">>,<<"Cowboy">>}]}
],
[fun() -> Res = merge_headers(L,R) end || {L, R, Res} <- Tests].
-endif.
|
95c7d087c0cb479b605e4207ff1cd2836bc06bb55993dce254773ab64d6b0c1e | petelliott/pscheme | string.scm | (define-library (pscheme string)
(import (scheme base)
(scheme read))
(export string-join
string-starts-with
string-split
string->object)
(begin
(define (string-join sep strings)
(if (null? strings)
""
(apply string-append
(car strings)
(map (lambda (str) (string-append sep str))
(cdr strings)))))
(define (string-starts-with str needle)
(define slen (string-length str))
(define nlen (string-length needle))
(and (>= slen nlen)
(string=? (substring str 0 nlen) needle)))
(define (string-split str char)
(define l (string-length str))
(let loop ((i 0))
(if (>= i l)
'()
(do ((j i (+ j 1)))
((or (= l j) (equal? (string-ref str j) char))
(cons (string-copy str i j) (loop (+ j 1))))))))
(define (string->object str)
(call-with-port (open-input-string str)
(lambda (port)
(read port))))
))
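;; Illustrative usage (not part of the original file):
;; (string-join ", " '("a" "b" "c")) ;=> "a, b, c"
;; (string-starts-with "foobar" "foo") ;=> #t
;; (string-split "a,b,c" #\,) ;=> ("a" "b" "c")
;; (string->object "(1 2 3)") ;=> (1 2 3)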
| null | https://raw.githubusercontent.com/petelliott/pscheme/3be8671116c1d9ea840a9cc7cffd3632fe03d6ac/scm/pscheme/string.scm | scheme |
|
9f94fc4e42d3fae1c7cf86533f65f49d5581d51c85effb1b34e5f93a926e0af8 | xtdb/xtdb | api.clj | (ns xtdb.api
"Public API of XTDB."
(:refer-clojure :exclude [sync])
(:require [clojure.spec.alpha :as s]
[xtdb.codec :as c]
[xtdb.system :as sys])
(:import [xtdb.api IXtdb IXtdbSubmitClient RemoteClientOptions]
java.lang.AutoCloseable
java.time.Duration
[java.util Date Map]
java.util.function.Supplier))
(def ^:private date? (partial instance? Date))
(s/def ::tx-id nat-int?)
(s/def ::tx-time date?)
(s/def ::tx (s/keys :opt [::tx-id ::tx-time]))
(s/def ::submit-tx-opts (s/keys :opt [::tx-time]))
(s/def :crux.db/id c/valid-id?)
(s/def :xt/id c/valid-id?)
(s/def ::evicted? boolean?)
(s/def :crux.db.fn/args (s/coll-of any? :kind vector?))
(s/def ::fn
(s/cat :fn #{'fn}
:args (s/coll-of symbol? :kind vector? :min-count 1)
:body (s/* any?)))
(defprotocol PXtdb
"Provides API access to XTDB."
(status [node]
"Returns the status of this node as a map.")
(tx-committed? [node submitted-tx]
"Checks if a submitted tx was successfully committed.
submitted-tx must be a map returned from `submit-tx`.
Returns true if the submitted transaction was committed,
false if the transaction was not committed, and throws `NodeOutOfSyncException`
if the node has not yet indexed the transaction.")
(sync
[node]
[node ^Duration timeout]
[node tx-time ^Duration timeout]
"Blocks until the node has caught up indexing to the latest tx available at
the time this method is called. Will throw an exception on timeout. The
returned date is the latest transaction time indexed by this node. This can be
used as the second parameter in (db valid-time transaction-time) for
consistent reads.
timeout – max time to wait, can be nil for the default.
Returns the latest known transaction time.")
(await-tx-time
[node ^Date tx-time]
[node ^Date tx-time ^Duration timeout]
"Blocks until the node has indexed a transaction that is past the supplied
txTime. Will throw on timeout. The returned date is the latest index time when
this node has caught up as of this call.")
(await-tx
[node tx]
[node tx ^Duration timeout]
"Blocks until the node has indexed a transaction that is at or past the
supplied tx. Will throw on timeout. Returns the most recent tx indexed by the
node.")
(listen ^java.lang.AutoCloseable [node event-opts f]
"Attaches a listener to XTDB's event bus, the supplied callback `f` will be invoked as new events occur, receiving the event as the first argument.
Specify an event type `:xtdb.api/event-type` and options in the map `event-opts`
For example, this listener prints indexed transactions:
(def listener (xtdb.api/listen node {:xtdb.api/event-type :xtdb.api/indexed-tx} prn))
Use `.close` on the returned object to detach the listener:
(.close listener)
---
A listener will receive events in the order they were submitted (say by the indexer) each listener receives events asynchronously on a standalone thread. It does not block query, writes, indexing or other listeners.
If you start/stop many listeners over the lifetime of your program, ensure you .close listeners you no longer need to free those threads.
See below for detail on supported event types:
---
Event type `:xtdb.api/indexed-tx`
Occurs when a transaction has been processed by the indexer, and as such its effects will be visible to query if it was committed.
Example event:
{:xtdb.api/event-type :xtdb.api/indexed-tx
:xtdb.api/tx-time #inst \"2022-11-09T10:13:33.028-00:00\",
:xtdb.api/tx-id 4
;; can be false in the case of a rollback or failure, for example, if an ::xt/fn op throws, or a match condition is not met.
:committed? true
;; if :with-tx-ops? is true
:xtdb.api/tx-ops ([:xtdb.api/put {:name \"Fred\", :xt/id \"foo\"}])}
This event might be useful if you require reactive or dependent processing of indexed transactions, and do not want to block with `await-tx`.
Because you receive an event for transactions that did not commit (:committed? false), you could respond to transaction function failures in some way,
such as specific logging, or raising an alert.
Options:
- :with-tx-ops? (default false)
If true includes the indexed tx ops itself the key :xtdb.api/tx-ops
For transaction functions and match ops, you will see the expansion (i.e results), not the function call or match.
Be aware this option may require fetching transactions from storage per event if the transactions are not in cache.")
(latest-completed-tx [node]
"Returns the latest transaction to have been indexed by this node.")
(latest-submitted-tx [node]
"Returns the latest transaction to have been submitted to this cluster")
(attribute-stats [node]
"Returns frequencies map for indexed attributes")
(active-queries [node]
"Returns a list of currently running queries")
(recent-queries [node]
"Returns a list of recently completed/failed queries")
(slowest-queries [node]
"Returns a list of slowest completed/failed queries ran on the node"))
(defprotocol PXtdbSubmitClient
"Provides API access to XTDB transaction submission."
(submit-tx-async
[node tx-ops]
[node tx-ops opts]
"Writes transactions to the log for processing tx-ops datalog
style transactions. Non-blocking. Returns a deref with map with
details about the submitted transaction, including tx-time and
tx-id.
opts (map):
- ::tx-time
overrides tx-time for the transaction.
mustn't be earlier than any previous tx-time, and mustn't be later than the tx-log's clock.")
(submit-tx
[node tx-ops]
[node tx-ops opts]
"Writes transactions to the log for processing
tx-ops datalog style transactions.
Returns a map with details about the submitted transaction,
including tx-time and tx-id.
opts (map):
- ::tx-time
overrides tx-time for the transaction.
mustn't be earlier than any previous tx-time, and mustn't be later than the tx-log's clock.")
(open-tx-log ^java.io.Closeable [this after-tx-id with-ops?]
"Reads the transaction log. Optionally includes
operations, which allow the contents under the ::tx-ops
key to be piped into (submit-tx tx-ops) of another
XTDB instance.
after-tx-id optional transaction id to start after.
with-ops? should the operations with documents be included?
Returns an iterator of the TxLog"))
(defprotocol PXtdbDatasource
"Represents the database as of a specific valid and
transaction time."
(entity [db eid]
"queries a document map for an entity.
eid is an object which can be coerced into an entity id.
returns the entity document map.")
(entity-tx [db eid]
"returns the transaction details for an entity. Details
include tx-id and tx-time.
eid is an object that can be coerced into an entity id.")
(q* [db query args]
"q[uery] an XTDB db.
query param is a datalog query in map, vector or string form.
This function will return a set of result tuples if you do not specify `:order-by`, `:limit` or `:offset`;
otherwise, it will return a vector of result tuples.")
(open-q* ^xtdb.api.ICursor [db query args]
"lazily q[uery] an XTDB db.
query param is a datalog query in map, vector or string form.
This function returns a Cursor of result tuples - once you've consumed
as much of the sequence as you need to, you'll need to `.close` the sequence.
A common way to do this is using `with-open`:
(with-open [res (xt/open-q db '{:find [...]
:where [...]})]
(doseq [row (iterator-seq res)]
...))
Once the sequence is closed, attempting to iterate it is undefined.
Therefore, be cautious with lazy evaluation.")
(pull [db query eid]
"Returns the requested data for the given entity ID, based on the projection spec
e.g. `(pull db [:film/name :film/year] :spectre)`
=> `{:film/name \"Spectre\", :film/year 2015}`
See #pull for details of the spec format.")
(pull-many [db query eids]
"Returns the requested data for the given entity IDs, based on the projection spec
e.g. `(pull-many db [:film/name :film/year] #{:spectre :skyfall})`
=> `[{:film/name \"Spectre\", :film/year 2015}, {:film/name \"Skyfall\", :film/year 2012}]`
See #pull for details of the spec format.")
(entity-history
[db eid sort-order]
[db eid sort-order opts]
"Eagerly retrieves entity history for the given entity.
Options:
* `sort-order`: `#{:asc :desc}`
* `:with-docs?` (boolean, default false): specifies whether to include documents in the entries under the `:xtdb.api/doc` key
* `:with-corrections?` (boolean, default false): specifies whether to include bitemporal corrections in the sequence, sorted first by valid-time, then tx-id
* `:start-valid-time` (inclusive, default unbounded)
* `:start-tx`: (map, all keys optional)
- `:xtdb.api/tx-time` (Date, inclusive, default unbounded)
- `:xtdb.api/tx-id` (Long, inclusive, default unbounded)
* `:end-valid-time` (exclusive, default unbounded)
* `:end-tx`: (map, all keys optional)
- `:xtdb.api/tx-time` (Date, exclusive, default unbounded)
- `:xtdb.api/tx-id` (Long, exclusive, default unbounded)
No matter what `start-*` and `end-*` parameters you specify, you won't receive results later than the valid-time and tx-id of this DB value.
Each entry in the result contains the following keys:
* `:xtdb.api/valid-time`,
* `:xtdb.api/tx-time`,
* `:xtdb.api/tx-id`,
* `:xtdb.api/content-hash`
* `:xtdb.api/doc` (see `with-docs?`).")
(open-entity-history
^xtdb.api.ICursor [db eid sort-order]
^xtdb.api.ICursor [db eid sort-order opts]
"Lazily retrieves entity history for the given entity.
Don't forget to close the cursor when you've consumed enough history!
Consuming after the cursor is closed is undefined (e.g. may cause a JVM
segfault crash when using RocksDB). Therefore, be cautious with lazy
evaluation.
See `entity-history` for all the options")
(valid-time [db]
"returns the valid time of the db.
If valid time wasn't specified at the moment of the db value retrieval
then valid time will be time of db value retrieval.")
(transaction-time [db]
"returns the time of the latest transaction applied to this db value.
If a tx time was specified when db value was acquired then returns
the specified time.")
(db-basis [db]
"returns the basis of this db snapshot - a map containing `:xtdb.api/valid-time` and `:xtdb.api/tx`")
(^java.io.Closeable with-tx [db tx-ops]
"Returns a new db value with the tx-ops speculatively applied.
The tx-ops will only be visible in the value returned from this function - they're not submitted to the cluster, nor are they visible to any other database value in your application.
If the transaction doesn't commit (eg because of a failed 'match'), this function returns nil."))
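;; Illustrative sketch (not part of the original source): with a db value in
;; hand, entity history could be fetched with documents included, e.g.
;; (entity-history (db node) :some-entity-id :asc {:with-docs? true})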
(defn start-node
"NOTE: requires any dependencies on the classpath that the XTDB modules may need.
Accepts a map, or a JSON/EDN file or classpath resource.
See the XTDB documentation for details.
Returns a node which implements: DBProvider, PXtdb, PXtdbSubmitClient and java.io.Closeable.
Latter allows the node to be stopped by calling `(.close node)`.
Throws IndexVersionOutOfSyncException if the index needs rebuilding."
^java.io.Closeable [options]
(let [system (-> (sys/prep-system (into [{:xtdb/node 'xtdb.node/->node
:xtdb/index-store 'xtdb.kv.index-store/->kv-index-store
:xtdb/bus 'xtdb.bus/->bus
:xtdb.bus/bus-stop 'xtdb.bus/->bus-stop
:xtdb/tx-ingester 'xtdb.tx/->tx-ingester
:xtdb/tx-indexer 'xtdb.tx/->tx-indexer
:xtdb/document-store 'xtdb.kv.document-store/->document-store
:xtdb/tx-log 'xtdb.kv.tx-log/->tx-log
:xtdb/query-engine 'xtdb.query/->query-engine
:xtdb/secondary-indices 'xtdb.tx/->secondary-indices}]
(cond-> options (not (vector? options)) vector)))
(sys/start-system))]
(reset! (get-in system [:xtdb/node :!system]) system)
(-> (:xtdb/node system)
(assoc :close-fn #(.close ^AutoCloseable system)))))
(defn- ->RemoteClientOptions [{:keys [->jwt-token] :as opts}]
(RemoteClientOptions. (when ->jwt-token
(reify Supplier
(get [_] (->jwt-token))))))
(defn new-api-client
"Creates a new remote API client.
This implements: DBProvider, PXtdb, PXtdbSubmitClient and java.io.Closeable.
The remote client requires valid and transaction time to be specified for all calls to `db`.
NOTE: Requires either clj-http or http-kit on the classpath,
See the XTDB documentation for more information.
url the URL to an XTDB HTTP end-point.
(OPTIONAL) auth-supplier a supplier function which provides an auth token string for the XTDB HTTP end-point.
returns a remote API client."
(^java.io.Closeable [url]
(:node (IXtdb/newApiClient url)))
(^java.io.Closeable [url opts]
(:node (IXtdb/newApiClient url (->RemoteClientOptions opts)))))
(defn new-submit-client
"Starts a submit client for transacting into XTDB without running a full local node with index.
Accepts a map, or a JSON/EDN file or classpath resource.
See the XTDB documentation for details.
Returns a component that implements java.io.Closeable and PXtdbSubmitClient.
Latter allows the node to be stopped by calling `(.close node)`."
^java.io.Closeable [options]
(:client (IXtdbSubmitClient/newSubmitClient ^Map options)))
(defn conform-tx-ops [tx-ops]
(->> tx-ops
(mapv
(fn [tx-op]
(mapv #(if (instance? Map %) (into {} %) %)
tx-op)))))
(defprotocol DBProvider
(db
[node]
[node valid-time-or-basis]
^:deprecated [node valid-time tx-time]
"Returns a DB snapshot at the given time. The snapshot is not thread-safe.
db-basis: (optional map, all keys optional)
- `:xtdb.api/valid-time` (Date):
If provided, DB won't return any data with a valid-time greater than the given time.
Defaults to now.
- `:xtdb.api/tx` (Map):
If provided, DB will be a snapshot as of the given transaction.
Defaults to the latest completed transaction.
- `:xtdb.api/tx-time` (Date):
Shorthand for `{::tx {::tx-time <>}}`
Providing both `:xtdb.api/tx` and `:xtdb.api/tx-time` is undefined.
Arities passing dates directly (`node vt` and `node vt tt`) are deprecated and will be removed in a later release.
If the node hasn't yet indexed a transaction at or past the given transaction, this throws NodeOutOfSyncException")
(open-db
^java.io.Closeable [node]
^java.io.Closeable [node valid-time-or-basis]
^java.io.Closeable ^:deprecated [node valid-time tx-time]
"Opens a DB snapshot at the given time.
db-basis: (optional map, all keys optional)
- `:xtdb.api/valid-time` (Date):
If provided, DB won't return any data with a valid-time greater than the given time.
Defaults to now.
- `:xtdb.api/tx` (Map):
If provided, DB will be a snapshot as of the given transaction.
Defaults to the latest completed transaction.
- `:xtdb.api/tx-time` (Date):
Shorthand for `{::tx {::tx-time <>}}`
Providing both `:xtdb.api/tx` and `:xtdb.api/tx-time` is undefined.
Arities passing dates directly (`node vt` and `node vt tt`) are deprecated and will be removed in a later release.
If the node hasn't yet indexed a transaction at or past the given transaction, this throws NodeOutOfSyncException
This DB opens up shared resources to make multiple requests faster - it must be `.close`d when you've finished
using it (for example, in a `with-open` block). Be cautious with lazy evaluation that may attempt to make requests
after the DB is closed."))
(let [db-args '(^java.io.Closeable [node]
^java.io.Closeable [node db-basis]
^java.io.Closeable ^:deprecated [node valid-time]
^java.io.Closeable ^:deprecated [node valid-time tx-time])]
(alter-meta! #'db assoc :arglists db-args)
(alter-meta! #'open-db assoc :arglists db-args))
(defn q
"q[uery] an XTDB db.
query param is a datalog query in map, vector or string form.
This function will return a set of result tuples if you do not specify `:order-by`, `:limit` or `:offset`;
otherwise, it will return a vector of result tuples."
[db q & args]
(q* db q (object-array args)))
(defprotocol TransactionFnContext
(indexing-tx [tx-fn-ctx]))
(defn open-q
"lazily q[uery] an XTDB db.
query param is a datalog query in map, vector or string form.
This function returns a Cursor of result tuples - once you've consumed
as much of the sequence as you need to, you'll need to `.close` the sequence.
A common way to do this is using `with-open`:
(with-open [res (xt/open-q db '{:find [...]
:where [...]})]
(doseq [row (iterator-seq res)]
...))
Once the sequence is closed, attempting to consume it is undefined (e.g. may
cause a JVM segfault crash when using RocksDB). Therefore, be cautious with
lazy evaluation."
^xtdb.api.ICursor [db q & args]
(open-q* db q (object-array args)))
(let [arglists '(^xtdb.api.ICursor
[db eid sort-order]
^xtdb.api.ICursor
[db eid sort-order {:keys [with-docs?
with-corrections?
start-valid-time
end-valid-time]
{start-tt ::tx-time
start-tx-id ::tx-id} :start-tx
{end-tt ::tx-time
end-tx-id ::tx-id} :end-tx}])]
(alter-meta! #'entity-history assoc :arglists arglists)
(alter-meta! #'open-entity-history assoc :arglists arglists))
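;; Illustrative end-to-end sketch (not part of the original file), assuming an
;; in-process node started with default options:
;; (require '[xtdb.api :as xt])
;; (with-open [node (xt/start-node {})]
;;   (xt/submit-tx node [[::xt/put {:xt/id :hello :greeting "hi"}]])
;;   (xt/sync node)
;;   (xt/q (xt/db node) '{:find [g] :where [[_ :greeting g]]}))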
| null | https://raw.githubusercontent.com/xtdb/xtdb/f415b666f48e112271c7eeac3c4224cd7144999a/core/src/xtdb/api.clj | clojure |
|
a7c5941669b28fd9f3cf091792ef1e8bbcccaa6ca56c3479fb3879051edfa1d5 | 7theta/re-frame-via | msg_handler.clj | ;; Copyright (c) 7theta. All rights reserved.
;; The use and distribution terms for this software are covered by the
;; Eclipse Public License 1.0 (http://www.eclipse.org/legal/epl-v10.html)
;; which can be found in the LICENSE file at the root of this
;; distribution.
;;
;; By using this software in any fashion, you are agreeing to be bound by
;; the terms of this license.
;; You must not remove this notice, or any others, from this software.
(ns example.msg-handler
(:require [re-frame-via.authenticator :as auth]
[integrant.core :as ig]))
(defmulti msg-handler (fn [opts message] (:id message)))
(defmethod ig/init-key :example/msg-handler
[_ {:keys [authenticator] :as opts}]
(fn [message] (msg-handler opts message)))
(defmethod msg-handler :api.example/login
[{:keys [authenticator] :as opts} {:keys [?data ?reply-fn]}]
(when ?reply-fn
(?reply-fn (auth/create-token authenticator (:id ?data) (:password ?data)))))
(defmethod msg-handler :default
[_ {:keys [event ?reply-fn]}]
(when ?reply-fn (?reply-fn {:via/unhandled-event-echoed-from-the-server event})))
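;; Illustrative sketch (not part of the original file): the handler returned by
;; ig/init-key closes over its opts and can be called directly; here
;; `authenticator` stands for a started authenticator component (assumption):
;; ((ig/init-key :example/msg-handler {:authenticator authenticator})
;;  {:id :api.example/login
;;   :?data {:id "user" :password "secret"}
;;   :?reply-fn println})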
| null | https://raw.githubusercontent.com/7theta/re-frame-via/ae530337eff4098991e937d6e06aa413d8ad7b45/example/src/clj/example/msg_handler.clj | clojure |
|
d90a3f2d5d28f5d309428ebb0b200ede93d5c22cb7d934f43129ddb0e90c09e8 | kazu-yamamoto/http2 | Main.hs | {-# LANGUAGE BangPatterns #-}
module Main where
import Control.Concurrent.STM
import Gauge.Main
import Data.List (foldl')
import System.Random.MWC
import qualified RingOfQueuesSTM as A
import qualified RingOfQueues as AIO
import qualified BinaryHeapSTM as B
import qualified BinaryHeap as BIO
import qualified Heap as O
import qualified Network.HTTP2.Priority.PSQ as P
import qualified RandomSkewHeap as R
type Key = Int
type Weight = Int
numOfStreams :: Int
numOfStreams = 100
numOfTrials :: Int
numOfTrials = 10000
main :: IO ()
main = do
gen <- create
ws <- uniformRs (1,256) gen numOfStreams
let ks = [1,3..]
xs = zip ks ws
defaultMain [
bgroup "enqueue & dequeue" [
bench "Random Skew Heap" $ whnf enqdeqR xs
, bench "Skew Binomial Heap" $ whnf enqdeqO xs
, bench "Priority Search Queue" $ whnf enqdeqP xs
, bench "Binary Heap" $ nfIO (enqdeqBIO xs)
, bench "Binary Heap STM" $ nfIO (enqdeqB xs)
, bench "Ring of Queues" $ nfIO (enqdeqAIO xs)
, bench "Ring of Queues STM" $ nfIO (enqdeqA xs)
]
, bgroup "delete" [
bench "Random Skew Heap" $ whnf deleteR xs
, bench "Skew Binomial Heap" $ whnf deleteO xs
, bench "Priority Search Queue" $ whnf deleteP xs
, bench "Binary Heap" $ nfIO (deleteBIO xs)
, bench "Binary Heap STM" $ nfIO (deleteB xs)
, bench "Ring of Queues IO" $ nfIO (deleteAIO xs)
]
]
where
uniformRs range gen n = loop n []
where
loop 0 rs = return rs
loop i rs = do
r <- uniformR range gen
loop (i-1) (r:rs)
----------------------------------------------------------------
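-- Each group of benchmarks below follows the same pattern: build a queue with
-- numOfStreams (key, weight) entries, then either repeatedly dequeue and
-- re-enqueue (enqdeq*, numOfTrials rounds) or delete every inserted key
-- (delete*). The pure structures (R, O, P) thread the queue value explicitly,
-- while the STM/IO variants (A, AIO, B, BIO) mutate a shared queue.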
enqdeqR :: [(Key,Weight)] -> ()
enqdeqR xs = loop pq numOfTrials
where
!pq = createR xs R.empty
loop _ 0 = ()
loop q !n = case R.dequeue q of
Nothing -> error "enqdeqR"
Just (k,w,v,q') -> let !q'' = R.enqueue k w v q'
in loop q'' (n - 1)
deleteR :: [(Key,Weight)] -> R.PriorityQueue Int
deleteR xs = foldl' (\q k -> let (_,!q') = R.delete k q in q') pq ks
where
!pq = createR xs R.empty
(ks,_) = unzip xs
createR :: [(Key,Weight)] -> R.PriorityQueue Int -> R.PriorityQueue Int
createR [] !q = q
createR ((k,w):xs) !q = createR xs q'
where
!v = k
!q' = R.enqueue k w v q
----------------------------------------------------------------
enqdeqO :: [(Key,Weight)] -> O.PriorityQueue Int
enqdeqO xs = loop pq numOfTrials
where
!pq = createO xs O.empty
loop !q 0 = q
loop !q !n = case O.dequeue q of
Nothing -> error "enqdeqO"
Just (k,p,v,q') -> loop (O.enqueue k p v q') (n - 1)
deleteO :: [(Key,Weight)] -> O.PriorityQueue Int
deleteO xs = foldl' (\q k -> let (_,!q') = O.delete k q in q') pq ks
where
!pq = createO xs O.empty
(ks,_) = unzip xs
createO :: [(Key,Weight)] -> O.PriorityQueue Int -> O.PriorityQueue Int
createO [] !q = q
createO ((k,w):xs) !q = createO xs q'
where
!pre = O.newPrecedence w
!v = k
!q' = O.enqueue k pre v q
----------------------------------------------------------------
enqdeqP :: [(Key,Weight)] -> P.PriorityQueue Int
enqdeqP xs = loop pq numOfTrials
where
!pq = createP xs P.empty
loop !q 0 = q
loop !q !n = case P.dequeue q of
Nothing -> error "enqdeqP"
Just (k,pre,x,q') -> loop (P.enqueue k pre x q') (n - 1)
deleteP :: [(Key,Weight)] -> P.PriorityQueue Int
deleteP xs = foldl' (\q k -> let (_,!q') = P.delete k q in q') pq ks
where
!pq = createP xs P.empty
(ks,_) = unzip xs
createP :: [(Key,Weight)] -> P.PriorityQueue Int -> P.PriorityQueue Int
createP [] !q = q
createP ((k,w):xs) !q = createP xs q'
where
!pre = P.newPrecedence w
!v = k
!q' = P.enqueue k pre v q
----------------------------------------------------------------
enqdeqB :: [(Key,Weight)] -> IO ()
enqdeqB xs = do
q <- atomically (B.new numOfStreams)
_ <- createB xs q
loop q numOfTrials
where
loop _ 0 = return ()
loop q !n = do
ent <- atomically $ B.dequeue q
atomically $ B.enqueue ent q
loop q (n - 1)
deleteB :: [(Key,Weight)] -> IO ()
deleteB xs = do
q <- atomically $ B.new numOfStreams
ents <- createB xs q
mapM_ (\ent -> atomically $ B.delete ent q) ents
createB :: [(Key,Weight)] -> B.PriorityQueue Int -> IO [B.Entry Key]
createB [] _ = return []
createB ((k,w):xs) !q = do
ent <- atomically $ B.newEntry k w
atomically $ B.enqueue ent q
ents <- createB xs q
return $ ent:ents
----------------------------------------------------------------
enqdeqBIO :: [(Key,Weight)] -> IO ()
enqdeqBIO xs = do
q <- BIO.new numOfStreams
_ <- createBIO xs q
loop q numOfTrials
where
loop _ 0 = return ()
loop q !n = do
ent <- BIO.dequeue q
BIO.enqueue ent q
loop q (n - 1)
deleteBIO :: [(Key,Weight)] -> IO ()
deleteBIO xs = do
q <- BIO.new numOfStreams
ents <- createBIO xs q
mapM_ (\ent -> BIO.delete ent q) ents
createBIO :: [(Key,Weight)] -> BIO.PriorityQueue Int -> IO [BIO.Entry Key]
createBIO [] _ = return []
createBIO ((k,w):xs) !q = do
ent <- BIO.newEntry k w
BIO.enqueue ent q
ents <- createBIO xs q
return $ ent:ents
----------------------------------------------------------------
enqdeqA :: [(Key,Weight)] -> IO ()
enqdeqA ws = do
q <- atomically A.new
createA ws q
loop q numOfTrials
where
loop _ 0 = return ()
loop q !n = do
ent <- atomically $ A.dequeue q
atomically $ A.enqueue ent q
loop q (n - 1)
createA :: [(Key,Weight)] -> A.PriorityQueue Int -> IO ()
createA [] _ = return ()
createA ((k,w):xs) !q = do
let !ent = A.newEntry k w
atomically $ A.enqueue ent q
createA xs q
----------------------------------------------------------------
enqdeqAIO :: [(Key,Weight)] -> IO ()
enqdeqAIO xs = do
q <- AIO.new
_ <- createAIO xs q
loop q numOfTrials
where
loop _ 0 = return ()
loop q !n = do
Just ent <- AIO.dequeue q
_ <- AIO.enqueue ent q
loop q (n - 1)
deleteAIO :: [(Key,Weight)] -> IO ()
deleteAIO xs = do
q <- AIO.new
ns <- createAIO xs q
mapM_ AIO.delete ns
createAIO :: [(Key,Weight)] -> AIO.PriorityQueue Int -> IO [AIO.Node (AIO.Entry Weight)]
createAIO [] _ = return []
createAIO ((k,w):xs) !q = do
let !ent = AIO.newEntry k w
n <- AIO.enqueue ent q
ns <- createAIO xs q
return $ n : ns
----------------------------------------------------------------
| null | https://raw.githubusercontent.com/kazu-yamamoto/http2/3c29763be147a3d482eff28f427ad80f1d4df706/bench-priority/Main.hs | haskell |
6c5c6954dd792711be32f1f4632a4570eb2142a6531a4519a79581b3a5fa1dfc | keechma/keechma-toolbox | cursor.cljs | (ns keechma.toolbox.forms.cursor)
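;; Caret-position helpers for formatted text inputs: when an input's displayed
;; value is rewritten with formatting characters inserted or removed, these
;; functions work out where the caret should be restored.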
(defn set-selection! [el pos]
(set! (.-selectionStart el) pos)
(set! (.-selectionEnd el) pos))
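;; Scans forward from the caret over the old and new values, stepping past
;; formatting characters and freshly inserted characters, to find where the
;; caret should land after characters were added.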
(defn calc-caret-pos-add [format-chars caret-start old-value new-value]
(loop [start-pos caret-start
rest-old (drop caret-start old-value)
rest-new (drop caret-start new-value)]
(let [first-old (take 1 rest-old)
first-new (take 1 rest-new)
clean-old (remove format-chars rest-old)
clean-new (remove format-chars rest-new)
is-format-char? (contains? format-chars (first first-new))]
(cond
(empty? rest-new) start-pos
(= clean-new (take (count clean-new) clean-old)) start-pos
(and (= first-old first-new) is-format-char?) (recur (inc start-pos) (drop 1 rest-old) (drop 1 rest-new))
(and (not= rest-new rest-old)) (recur (inc start-pos) rest-old (drop 1 rest-new))
:else start-pos))))
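;; Walks backwards from the caret after a deletion, skipping any formatting
;; characters immediately before it, to find where the caret should land.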
(defn calc-caret-pos-remove [format-chars caret-start old-value new-value old-length new-length]
(let [diff (- old-length new-length)
start-pos (- caret-start diff)
prev-old (reverse (take start-pos old-value))
prev-new (reverse (take start-pos new-value))]
(loop [start-pos start-pos
prev-old prev-old
prev-new prev-new]
(let [first-old (take 1 prev-old)
first-new (take 1 prev-new)
is-format-char? (contains? format-chars (first first-new))]
(cond
(empty? prev-new) start-pos
(and (= first-old first-new) is-format-char?) (recur (dec start-pos) (drop 1 prev-old) (drop 1 prev-new))
:else start-pos)))))
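;; Writes new-value into the element and repositions the caret, compensating
;; for formatting characters added or removed before the original caret.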
(defn set-caret-pos! [el format-chars input-value new-value old-value caret-pos]
(let [old-length (count old-value)
new-length (count new-value)
input-length (count input-value)
caret-start (- caret-pos (- input-length old-length))]
(set! (.-value el) new-value)
(cond
(and (= new-value old-value) (< input-length old-length)) (set-selection! el (inc (- caret-pos (- old-length input-length))))
(= new-value old-value) (set-selection! el caret-start)
(>= new-length old-length) (set-selection! el (calc-caret-pos-add format-chars caret-start old-value new-value))
(< new-length old-length) (set-selection! el (calc-caret-pos-remove format-chars caret-start old-value new-value old-length new-length))
:else nil)))
| null | https://raw.githubusercontent.com/keechma/keechma-toolbox/61bf68cbffc1540b56b7b1925fef5e0aa12d60e7/src/cljs/keechma/toolbox/forms/cursor.cljs | clojure | (ns keechma.toolbox.forms.cursor)
(defn set-selection! [el pos]
(set! (.-selectionStart el) pos)
(set! (.-selectionEnd el) pos))
(defn calc-caret-pos-add [format-chars caret-start old-value new-value]
(loop [start-pos caret-start
rest-old (drop caret-start old-value)
rest-new (drop caret-start new-value)]
(let [first-old (take 1 rest-old)
first-new (take 1 rest-new)
clean-old (remove format-chars rest-old)
clean-new (remove format-chars rest-new)
is-format-char? (contains? format-chars (first first-new))]
(cond
(empty? rest-new) start-pos
(= clean-new (take (count clean-new) clean-old)) start-pos
(and (= first-old first-new) is-format-char?) (recur (inc start-pos) (drop 1 rest-old) (drop 1 rest-new))
(and (not= rest-new rest-old)) (recur (inc start-pos) rest-old (drop 1 rest-new))
:else start-pos))))
(defn calc-caret-pos-remove [format-chars caret-start old-value new-value old-length new-length]
(let [diff (- old-length new-length)
start-pos (- caret-start diff)
prev-old (reverse (take start-pos old-value))
prev-new (reverse (take start-pos new-value))]
(loop [start-pos start-pos
prev-old prev-old
prev-new prev-new]
(let [first-old (take 1 prev-old)
first-new (take 1 prev-new)
is-format-char? (contains? format-chars (first first-new))]
(cond
(empty? prev-new) start-pos
(and (= first-old first-new) is-format-char?) (recur (dec start-pos) (drop 1 prev-old) (drop 1 prev-new))
:else start-pos)))))
(defn set-caret-pos! [el format-chars input-value new-value old-value caret-pos]
(let [old-length (count old-value)
new-length (count new-value)
input-length (count input-value)
caret-start (- caret-pos (- input-length old-length))]
(set! (.-value el) new-value)
(cond
(and (= new-value old-value) (< input-length old-length)) (set-selection! el (inc (- caret-pos (- old-length input-length))))
(= new-value old-value) (set-selection! el caret-start)
(>= new-length old-length) (set-selection! el (calc-caret-pos-add format-chars caret-start old-value new-value))
(< new-length old-length) (set-selection! el (calc-caret-pos-remove format-chars caret-start old-value new-value old-length new-length))
:else nil)))
|
|
d850ed834ef3dee8bef1671678e3e1b41b47f026b72c97a6f71290d5a51bf95c | greglook/cljstyle | util.clj | (ns cljstyle.task.util
"Common utilities for output and option sharing."
(:require
[cljstyle.config :as config]
[clojure.java.io :as io]
[clojure.pprint :as pp]
[clojure.string :as str]))
;; ## Exit Handling
(def ^:dynamic *suppress-exit*
"Bind this to prevent tasks from exiting the system process."
false)
(defn exit!
"Exit a task with a status code."
[code]
(if *suppress-exit*
(throw (ex-info (str "Task exited with code " code)
{:code code}))
(System/exit code)))
;; ## Options
(def ^:dynamic *options*
"Runtime options."
{})
(defmacro with-options
"Evaluate the expressions in `body` with the print options bound to `opts`."
[opts & body]
`(binding [*options* ~opts]
~@body))
(defn option
"Return the value set for the given option, if any."
[k]
(get *options* k))
;; ## Coloring
(def ^:private ansi-codes
{:reset "[0m"
:red "[031m"
:green "[032m"
:cyan "[036m"})
(defn colorize
"Wrap the string in ANSI escape sequences to render the named color."
[s color]
{:pre [(ansi-codes color)]}
(str \u001b (ansi-codes color) s \u001b (ansi-codes :reset)))
;; ## Message Output
(defn printerr
"Print a message to standard error."
[& messages]
(binding [*out* *err*]
(print (str (str/join " " messages) "\n"))
(flush))
nil)
(defn printerrf
"Print a message to standard error with formatting."
[message & fmt-args]
(binding [*out* *err*]
(apply printf (str message "\n") fmt-args)
(flush))
nil)
(defn log
"Log a message which will only be printed when verbose output is enabled."
[& messages]
(when (option :verbose)
(apply printerr messages))
nil)
(defn logf
"Log a formatted message which will only be printed when verbose output is
enabled."
[message & fmt-args]
(when (option :verbose)
(apply printerrf message fmt-args))
nil)
;; ## Configuration
(defn search-roots
"Convert the list of paths into a collection of search roots. If the path
list is empty, uses the local directory as a single root."
[paths]
(mapv io/file (or (seq paths) ["."])))
(defn load-configs
"Load parent configuration files. Returns a merged configuration map."
[label file]
(let [configs (config/find-up file 25)]
(if (seq configs)
(logf "Using cljstyle configuration from %d sources for %s:\n%s"
(count configs)
label
(str/join "\n" (mapcat config/source-paths configs)))
(logf "Using default cljstyle configuration for %s"
label))
(apply config/merge-settings config/default-config configs)))
(defn warn-legacy-config
"Warn about legacy config files, if any are observed."
[]
(when-let [files (seq @config/legacy-files)]
(binding [*out* *err*]
(printf "WARNING: legacy configuration found in %d file%s:\n"
(count files)
(if (< 1 (count files)) "s" ""))
(run! (comp println str) files)
(println "Run the migrate command to update your configuration")
(flush))))
;; ## Reporting
(defn- write-stats!
"Write stats output to the named file."
[file-name stats]
(let [ext (last (str/split file-name #"\."))]
(case ext
"edn"
(spit file-name (prn-str stats))
"tsv"
(->> (:files stats)
(into (dissoc stats :files)
(map (fn [[k v]]
[(keyword "files" (name k)) v])))
(map (fn [[k v]] (str (subs (str k) 1) \tab v \newline)))
(str/join)
(spit file-name))
;; else
(printerrf "Unknown stats file extension '%s' - ignoring!" ext))))
(defn duration-str
"Format a duration in milliseconds for human consumption."
[elapsed]
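  ;; e.g. (duration-str 42) => "42.00 ms", (duration-str 90000) => "1:30"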
(cond
;; 100 ms
(< elapsed 100)
(format "%.2f ms" (double elapsed))
    ;; 1 second
(< elapsed 1000)
(format "%d ms" (int elapsed))
    ;; 1 minute
(< elapsed (* 60 1000))
(format "%.2f sec" (/ elapsed 1000.0))
;; any longer
:else
(let [elapsed-sec (/ elapsed 1000.0)
minutes (long (/ elapsed-sec 60))
seconds (long (rem elapsed-sec 60))]
(format "%d:%02d" minutes seconds))))
(defn- durations-table
"Returns a sequence of row maps suitable for rendering a table of duration
results."
[durations]
(let [total (apply + (vals durations))]
(->> durations
(sort-by val (comp - compare))
(mapv (fn duration-row
[[rule-key duration]]
{"rule" (namespace rule-key)
"subrule" (name rule-key)
"elapsed" (duration-str (/ duration 1e6))
"percent" (if (pos? total)
(format "%.1f%%" (* 100.0 (/ duration total)))
"--")})))))
(defn report-stats
"General result reporting logic."
[results]
(let [counts (:counts results)
elapsed (:elapsed results)
total-files (apply + (vals counts))
total-processed (count (:results results))
total-size (apply + (keep :size (vals (:results results))))
diff-lines (apply + (keep :diff-lines (vals (:results results))))
durations (->> (vals (:results results))
(keep :durations)
(apply merge-with +))
stats (cond-> {:files counts
:total total-files
:elapsed (:elapsed results)}
(pos? diff-lines)
(assoc :diff-lines diff-lines)
(seq durations)
(assoc :durations durations))]
(log (pr-str stats))
(when (or (option :report) (option :verbose))
(printf "Checked %d of %d files in %s (%.1f fps)\n"
total-processed
total-files
(if elapsed
(duration-str elapsed)
"some amount of time")
(* 1e3 (/ total-processed elapsed)))
(printf "Checked %.1f KB of source files (%.1f KBps)\n"
(/ total-size 1024.0)
(* 1e3 (/ total-size 1024 elapsed)))
(doseq [[type-key file-count] (sort-by val (comp - compare) (:files stats))]
(printf "%6d %s\n" file-count (name type-key)))
(when (pos? diff-lines)
(printf "Resulting diff has %d lines\n" diff-lines))
(when (and (option :report-timing) (seq durations))
(pp/print-table
["rule" "subrule" "elapsed" "percent"]
(durations-table durations)))
(flush))
(when-let [stats-file (option :stats)]
(write-stats! stats-file stats))))
| null | https://raw.githubusercontent.com/greglook/cljstyle/1f58e2e7af4c193aa77ad0695f6c2b9ac2c5c5ec/src/cljstyle/task/util.clj | clojure | ## Coloring
## Message Output
## Reporting
else
100 ms
any longer | (ns cljstyle.task.util
"Common utilities for output and option sharing."
(:require
[cljstyle.config :as config]
[clojure.java.io :as io]
[clojure.pprint :as pp]
[clojure.string :as str]))
# # Exit Handling
(def ^:dynamic *suppress-exit*
"Bind this to prevent tasks from exiting the system process."
false)
(defn exit!
"Exit a task with a status code."
[code]
(if *suppress-exit*
(throw (ex-info (str "Task exited with code " code)
{:code code}))
(System/exit code)))
# # Options
(def ^:dynamic *options*
"Runtime options."
{})
(defmacro with-options
"Evaluate the expressions in `body` with the print options bound to `opts`."
[opts & body]
`(binding [*options* ~opts]
~@body))
(defn option
"Return the value set for the given option, if any."
[k]
(get *options* k))
(def ^:private ansi-codes
{:reset "[0m"
:red "[031m"
:green "[032m"
:cyan "[036m"})
(defn colorize
"Wrap the string in ANSI escape sequences to render the named color."
[s color]
{:pre [(ansi-codes color)]}
(str \u001b (ansi-codes color) s \u001b (ansi-codes :reset)))
(defn printerr
"Print a message to standard error."
[& messages]
(binding [*out* *err*]
(print (str (str/join " " messages) "\n"))
(flush))
nil)
(defn printerrf
"Print a message to standard error with formatting."
[message & fmt-args]
(binding [*out* *err*]
(apply printf (str message "\n") fmt-args)
(flush))
nil)
(defn log
"Log a message which will only be printed when verbose output is enabled."
[& messages]
(when (option :verbose)
(apply printerr messages))
nil)
(defn logf
"Log a formatted message which will only be printed when verbose output is
enabled."
[message & fmt-args]
(when (option :verbose)
(apply printerrf message fmt-args))
nil)
# # Configuration
(defn search-roots
"Convert the list of paths into a collection of search roots. If the path
list is empty, uses the local directory as a single root."
[paths]
(mapv io/file (or (seq paths) ["."])))
(defn load-configs
"Load parent configuration files. Returns a merged configuration map."
[label file]
(let [configs (config/find-up file 25)]
(if (seq configs)
(logf "Using cljstyle configuration from %d sources for %s:\n%s"
(count configs)
label
(str/join "\n" (mapcat config/source-paths configs)))
(logf "Using default cljstyle configuration for %s"
label))
(apply config/merge-settings config/default-config configs)))
(defn warn-legacy-config
"Warn about legacy config files, if any are observed."
[]
(when-let [files (seq @config/legacy-files)]
(binding [*out* *err*]
(printf "WARNING: legacy configuration found in %d file%s:\n"
(count files)
(if (< 1 (count files)) "s" ""))
(run! (comp println str) files)
(println "Run the migrate command to update your configuration")
(flush))))
(defn- write-stats!
"Write stats output to the named file."
[file-name stats]
(let [ext (last (str/split file-name #"\."))]
(case ext
"edn"
(spit file-name (prn-str stats))
"tsv"
(->> (:files stats)
(into (dissoc stats :files)
(map (fn [[k v]]
[(keyword "files" (name k)) v])))
(map (fn [[k v]] (str (subs (str k) 1) \tab v \newline)))
(str/join)
(spit file-name))
(printerrf "Unknown stats file extension '%s' - ignoring!" ext))))
(defn duration-str
"Format a duration in milliseconds for human consumption."
[elapsed]
(cond
(< elapsed 100)
(format "%.2f ms" (double elapsed))
1 second
(< elapsed 1000)
(format "%d ms" (int elapsed))
1 minute
(< elapsed (* 60 1000))
(format "%.2f sec" (/ elapsed 1000.0))
:else
(let [elapsed-sec (/ elapsed 1000.0)
minutes (long (/ elapsed-sec 60))
seconds (long (rem elapsed-sec 60))]
(format "%d:%02d" minutes seconds))))
(defn- durations-table
"Returns a sequence of row maps suitable for rendering a table of duration
results."
[durations]
(let [total (apply + (vals durations))]
(->> durations
(sort-by val (comp - compare))
(mapv (fn duration-row
[[rule-key duration]]
{"rule" (namespace rule-key)
"subrule" (name rule-key)
"elapsed" (duration-str (/ duration 1e6))
"percent" (if (pos? total)
(format "%.1f%%" (* 100.0 (/ duration total)))
"--")})))))
(defn report-stats
"General result reporting logic."
[results]
(let [counts (:counts results)
elapsed (:elapsed results)
total-files (apply + (vals counts))
total-processed (count (:results results))
total-size (apply + (keep :size (vals (:results results))))
diff-lines (apply + (keep :diff-lines (vals (:results results))))
durations (->> (vals (:results results))
(keep :durations)
(apply merge-with +))
stats (cond-> {:files counts
:total total-files
:elapsed (:elapsed results)}
(pos? diff-lines)
(assoc :diff-lines diff-lines)
(seq durations)
(assoc :durations durations))]
(log (pr-str stats))
(when (or (option :report) (option :verbose))
(printf "Checked %d of %d files in %s (%.1f fps)\n"
total-processed
total-files
(if elapsed
(duration-str elapsed)
"some amount of time")
(* 1e3 (/ total-processed elapsed)))
(printf "Checked %.1f KB of source files (%.1f KBps)\n"
(/ total-size 1024.0)
(* 1e3 (/ total-size 1024 elapsed)))
(doseq [[type-key file-count] (sort-by val (comp - compare) (:files stats))]
(printf "%6d %s\n" file-count (name type-key)))
(when (pos? diff-lines)
(printf "Resulting diff has %d lines\n" diff-lines))
(when (and (option :report-timing) (seq durations))
(pp/print-table
["rule" "subrule" "elapsed" "percent"]
(durations-table durations)))
(flush))
(when-let [stats-file (option :stats)]
(write-stats! stats-file stats))))
|
6caf1dee977bd7c72c48c9f74fff8f15ce208fbcce74591eb9a31dca6ff3bfeb | electric-sql/vaxine | multiple_dcs_SUITE.erl | %% -------------------------------------------------------------------
%%
%% Copyright <2013-2018> <
%%  Technische Universität Kaiserslautern, Germany
%%  Université Pierre et Marie Curie / Sorbonne-Université, France
%%  Universidade NOVA de Lisboa, Portugal
%%  Université catholique de Louvain (UCL), Belgique
%%  INESC TEC, Portugal
%% >
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%%   http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either expressed or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% List of the contributors to the development of Antidote: see AUTHORS file.
%% Description and complete License: see LICENSE file.
%% -------------------------------------------------------------------
-module(multiple_dcs_SUITE).
%% common_test callbacks
-export([
init_per_suite/1,
end_per_suite/1,
init_per_testcase/2,
end_per_testcase/2,
all/0]).
-export([multiple_writes/4,
replicated_set_test/1,
simple_replication_test/1,
failure_test/1,
blocking_test/1,
parallel_writes_test/1
]).
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
-define(BUCKET, test_utils:bucket(multiple_dcs_bucket)).
init_per_suite(InitialConfig) ->
Config = test_utils:init_multi_dc(?MODULE, InitialConfig),
Nodes = proplists:get_value(nodes, Config),
Clusters = proplists:get_value(clusters, Config),
    %% Ensure that the clocksi protocol is used
test_utils:pmap(fun(Node) ->
rpc:call(Node, application, set_env,
[antidote, txn_prot, clocksi]) end, Nodes),
    %% Check that indeed clocksi is running
{ok, clocksi} = rpc:call(hd(hd(Clusters)), application, get_env, [antidote, txn_prot]),
Config.
end_per_suite(Config) ->
Config.
init_per_testcase(_Case, Config) ->
Config.
end_per_testcase(Name, _) ->
ct:print("[ OK ] ~p", [Name]),
ok.
all() -> [
simple_replication_test,
failure_test,
blocking_test,
parallel_writes_test,
replicated_set_test
].
simple_replication_test(Config) ->
Bucket = ?BUCKET,
Clusters = proplists:get_value(clusters, Config),
[Node1, Node2, Node3 | _Nodes] = [ hd(Cluster)|| Cluster <- Clusters ],
Key = simple_replication_test_dc,
Type = antidote_crdt_counter_pn,
update_counters(Node1, [Key], [1], ignore, static, Bucket),
update_counters(Node1, [Key], [1], ignore, static, Bucket),
{ok, CommitTime} = update_counters(Node1, [Key], [1], ignore, static, Bucket),
check_read_key(Node1, Key, Type, 3, CommitTime, static, Bucket),
ct:log("Done append in Node1"),
check_read_key(Node3, Key, Type, 3, CommitTime, static, Bucket),
check_read_key(Node2, Key, Type, 3, CommitTime, static, Bucket),
ct:log("Done first round of read, I am gonna append"),
{ok, CommitTime2} = update_counters(Node2, [Key], [1], CommitTime, static, Bucket),
{ok, CommitTime3} = update_counters(Node3, [Key], [1], CommitTime2, static, Bucket),
ct:log("Done append in Node3"),
ct:log("Done waiting, I am gonna read"),
SnapshotTime = CommitTime3,
check_read_key(Node1, Key, Type, 5, SnapshotTime, static, Bucket),
check_read_key(Node2, Key, Type, 5, SnapshotTime, static, Bucket),
check_read_key(Node3, Key, Type, 5, SnapshotTime, static, Bucket),
pass.
parallel_writes_test(Config) ->
Bucket = ?BUCKET,
Clusters = proplists:get_value(clusters, Config),
[Node1, Node2, Node3 | _Nodes] = [ hd(Cluster)|| Cluster <- Clusters ],
Key = parallel_writes_test,
Type = antidote_crdt_counter_pn,
Pid = self(),
spawn(?MODULE, multiple_writes, [Node1, Key, Pid, Bucket]),
spawn(?MODULE, multiple_writes, [Node2, Key, Pid, Bucket]),
spawn(?MODULE, multiple_writes, [Node3, Key, Pid, Bucket]),
Result = receive
{ok, CT1} ->
receive
{ok, CT2} ->
receive
{ok, CT3} ->
Time = vectorclock:max([CT3, CT1, CT2]),
check_read_key(Node1, Key, Type, 15, Time, static, Bucket),
check_read_key(Node2, Key, Type, 15, Time, static, Bucket),
check_read_key(Node3, Key, Type, 15, Time, static, Bucket),
ct:log("Parallel reads passed"),
pass
end
end
end,
?assertEqual(Result, pass),
pass.
multiple_writes(Node, Key, ReplyTo, Bucket) ->
update_counters(Node, [Key], [1], ignore, static, Bucket),
update_counters(Node, [Key], [1], ignore, static, Bucket),
update_counters(Node, [Key], [1], ignore, static, Bucket),
update_counters(Node, [Key], [1], ignore, static, Bucket),
{ok, CommitTime} = update_counters(Node, [Key], [1], ignore, static, Bucket),
ReplyTo ! {ok, CommitTime}.
%% Test: when a DC is disconnected for a while and connected back it should
%% be able to read the missing updates. This should not affect the causal
%% dependency protocol
failure_test(Config) ->
Bucket = ?BUCKET,
Clusters = proplists:get_value(clusters, Config),
[Node1, Node2, Node3 | _Nodes] = [ hd(Cluster)|| Cluster <- Clusters ],
case rpc:call(Node1, application, get_env, [antidote, enable_logging]) of
{ok, false} ->
pass;
_ ->
Type = antidote_crdt_counter_pn,
Key = multiplde_dc_failure_test,
update_counters(Node1, [Key], [1], ignore, static, Bucket),
            %% Simulate failure of NODE3 by stopping the receiver
{ok, D1} = rpc:call(Node1, inter_dc_manager, get_descriptor, []),
{ok, D2} = rpc:call(Node2, inter_dc_manager, get_descriptor, []),
ok = rpc:call(Node3, inter_dc_manager, forget_dcs, [[D1, D2]]),
update_counters(Node1, [Key], [1], ignore, static, Bucket),
%% Induce some delay
rpc:call(Node3, antidote, read_objects,
[ignore, [], [{Key, Type, ?BUCKET}]]),
{ok, CommitTime} = update_counters(Node1, [Key], [1], ignore, static, Bucket),
check_read_key(Node1, Key, Type, 3, CommitTime, static, Bucket),
ct:log("Done append in Node1"),
            %% NODE3 comes back
[ok, ok] = rpc:call(Node3, inter_dc_manager, observe_dcs_sync, [[D1, D2]]),
check_read_key(Node2, Key, Type, 3, CommitTime, static, Bucket),
ct:log("Done read from Node2"),
check_read_key(Node3, Key, Type, 3, CommitTime, static, Bucket),
ct:log("Done Read in Node3"),
pass
end.
%% This is to test a situation where interDC transactions
%% can be blocked depending on the timing of transactions
%% going between 3 DCs
blocking_test(Config) ->
Bucket = ?BUCKET,
Clusters = proplists:get_value(clusters, Config),
[Node1, Node2, Node3 | _Nodes] = [ hd(Cluster)|| Cluster <- Clusters ],
Type = antidote_crdt_counter_pn,
Key = blocking_test,
%% Drop the heartbeat messages at DC3, allowing its
%% stable time to get old
ok = rpc:call(Node3, inter_dc_manager, drop_ping, [true]),
timer:sleep(5000),
    %% Perform some transactions at DC1 and DC2
{ok, CommitTime1} = update_counters(Node1, [Key], [1], ignore, static, Bucket),
{ok, CommitTime2} = update_counters(Node2, [Key], [1], ignore, static, Bucket),
    %% Be sure you can read the updates at DC1 and DC2
CommitTime3 = vectorclock:max([CommitTime1, CommitTime2]),
check_read_key(Node1, Key, Type, 2, CommitTime3, static, Bucket),
check_read_key(Node2, Key, Type, 2, CommitTime3, static, Bucket),
timer:sleep(1000),
%% Allow heartbeat pings to be received at DC3 again
ok = rpc:call(Node3, inter_dc_manager, drop_ping, [false]),
timer:sleep(5000),
check_read_key(Node3, Key, Type, 2, CommitTime3, static, Bucket),
ct:log("Blocking test passed!").
replicated_set_test(Config) ->
Bucket = ?BUCKET,
Clusters = proplists:get_value(clusters, Config),
[Node1, Node2 | _Nodes] = [ hd(Cluster)|| Cluster <- Clusters ],
Key1 = replicated_set_test,
Type = antidote_crdt_set_aw,
ct:log("Writing 100 elements to set"),
    %% add 100 elements to the set on Node 1 while simultaneously reading on Node2
CommitTimes = lists:map(fun(N) ->
ct:log("Writing ~p to set", [N]),
{ok, CommitTime} = update_sets(Node1, [Key1], [{add, N}], ignore, Bucket),
timer:sleep(200),
CommitTime
end, lists:seq(1, 100)),
LastCommitTime = lists:last(CommitTimes),
ct:log("last commit time was ~p.", [LastCommitTime]),
%% now read on Node2
check_read_key(Node2, Key1, Type, lists:seq(1, 100), LastCommitTime, static, Bucket),
pass.
%% internal
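%% Read Key (of CRDT Type) on Node, either via a static transaction
%% (TxId =:= static) or inside the given transaction, and assert that the
%% returned value equals Expected.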
check_read_key(Node, Key, Type, Expected, Clock, TxId, Bucket) ->
check_read(Node, [{Key, Type, Bucket}], [Expected], Clock, TxId).
check_read(Node, Objects, Expected, Clock, TxId) ->
case TxId of
static ->
{ok, Res, CT} = rpc:call(Node, cure, read_objects, [Clock, [], Objects]),
?assertEqual(Expected, Res),
{ok, Res, CT};
_ ->
{ok, Res} = rpc:call(Node, cure, read_objects, [Objects, TxId]),
?assertEqual(Expected, Res),
{ok, Res}
end.
update_counters(Node, Keys, IncValues, Clock, TxId, Bucket) ->
Updates = lists:map(fun({Key, Inc}) ->
{{Key, antidote_crdt_counter_pn, Bucket}, increment, Inc}
end,
lists:zip(Keys, IncValues)
),
case TxId of
static ->
{ok, CT} = rpc:call(Node, cure, update_objects, [Clock, [], Updates]),
{ok, CT};
_->
ok = rpc:call(Node, cure, update_objects, [Updates, TxId]),
ok
end.
update_sets(Node, Keys, Ops, Clock, Bucket) ->
Updates = lists:map(fun({Key, {Op, Param}}) ->
{{Key, antidote_crdt_set_aw, Bucket}, Op, Param}
end,
lists:zip(Keys, Ops)
),
{ok, CT} = rpc:call(Node, antidote, update_objects, [Clock, [], Updates]),
{ok, CT}.
| null | https://raw.githubusercontent.com/electric-sql/vaxine/872a83ea8d4935a52c7b850bb17ab099ee9c346b/apps/antidote/test/multidc/multiple_dcs_SUITE.erl | erlang |
4ad04256a7e47a632fe0d340399201c741f9d31d4ffb1ff92265a3782f7b169c | rampion/tree-traversals | TreeLike.hs | {-# OPTIONS_GHC -Wno-name-shadowing #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE ScopedTypeVariables #-}
-- | By providing a 'TreeLike' instance, a functor can be traversed in several
-- orders:
--
-- ['inorder' / 'InOrder']
-- Viewing a 'TreeLike' functor as a sequence of values and subtrees, an
-- /__inorder__/ traversal iterates through this sequence visiting values and
-- traversing subtrees in the order they are given.
--
-- >>> printTree (label inorder exampleBinaryTree)
-- ┌──────6───┐
-- │ │
┌ ─ ─ 2 ┴ ─ ─ ─ ┐ ┌ 7 ─ ┴ ─ ─ ┐
-- │ │ │ │
┌ 0 ┴ ┐ ┌ ─ ┴ 5 ┐ ╵ ┌ ─ 9 ┴ ─ ┐
-- │ │ │ │ │ │
╵ ┌ 1 ┐ ┌ 3 ┴ ┐ ╵ ┌ 8 ┐ ┌ 10 ┐
-- │ │ │ │ │ │ │ │
-- ╵ ╵ ╵ ┌4┐ ╵ ╵ ╵ ╵
-- │ │
-- ╵ ╵
--
-- ['preorder' / 'PreOrder']
-- Viewing a 'TreeLike' functor as a sequence of values and subtrees, a
-- /__preorder__/ traversal visits all the values in the sequence before
-- traversing the subtrees.
--
-- >>> printTree (label preorder exampleBinaryTree)
-- ┌──────0───┐
-- │ │
┌ ─ ─ 1 ┴ ─ ─ ─ ┐ ┌ 7 ─ ┴ ─ ─ ┐
-- │ │ │ │
┌ 2 ┴ ┐ ┌ ─ ┴ 4 ┐ ╵ ┌ ─ 8 ┴ ─ ┐
-- │ │ │ │ │ │
╵ ┌ 3 ┐ ┌ 5 ┴ ┐ ╵ ┌ 9 ┐ ┌ 10 ┐
-- │ │ │ │ │ │ │ │
-- ╵ ╵ ╵ ┌6┐ ╵ ╵ ╵ ╵
-- │ │
-- ╵ ╵
--
-- ['postorder' / 'PostOrder']
-- Viewing a 'TreeLike' functor as a sequence of values and subtrees, a
-- /__postorder__/ traversal traverses all the subtrees in the sequence
-- before visiting the values in the sequence before
-- traversing the subtrees.
--
-- >>> printTree (label postorder exampleBinaryTree)
┌ ─ ─ ─ ─ ─ ─ 10 ─ ─ ─ ┐
-- │ │
┌ ─ ─ 5 ┴ ─ ─ ─ ┐ ┌ 9 ─ ┴ ─ ┐
-- │ │ │ │
┌ 1 ┴ ┐ ┌ ─ ┴ 4 ┐ ╵ ┌ ─ 8 ─ ┐
│ │ │ │
╵ ┌ 0 ┐ ┌ 3 ┴ ┐ ╵ ┌ 6 ┐ ┌ 7 ┐
│ │ │ │ │
-- ╵ ╵ ╵ ┌2┐ ╵ ╵ ╵ ╵
-- │ │
-- ╵ ╵
--
-- ['levelorder' / 'LevelOrder']
-- Similar to a preorder traversal, a /__levelorder__/ traversal first visits
-- all the values at the root level before traversing any of the subtrees.
-- Instead of traversing the subtrees one by one, though, a levelorder
-- traversal interweaves their traversals, next visiting all the values at the
-- root of each subtree, then visiting all the values at the roots of each
-- subtree's subtrees, and so on. This is also known as a breadth-first
-- traversal.
--
-- >>> printTree (label levelorder exampleBinaryTree)
-- ┌──────0───┐
-- │ │
┌ ─ ─ 1 ─ ┴ ─ ─ ─ ┐ ┌ 2 ─ ┴ ─ ┐
-- │ │ │ │
┌ 3 ┴ ┐ ┌ ─ ─ ┴ 4 ┐ ╵ ┌ ─ 5 ─ ┐
-- │ │ │ │ │ │
╵ ┌ 6 ┐ ┌ 7 ┴ ─ ┐ ╵ ┌ 8 ┐ ┌ 9 ┐
-- │ │ │ │ │ │ │ │
╵ ╵ ╵ ┌ 10 ┐ ╵ ╵ ╵ ╵
-- ╵ ╵
--
-- ['rlevelorder' / 'RLevelOrder']
-- Similar to a postorder traversal, a /__reversed levelorder__/ traversal
-- only visits all the values at the root level after traversing all of the
-- subtrees. Instead of traversing the subtrees one by one, though, a
-- reversed levelorder traversal interweaves their traversals, working
-- from the deepest level up, though still in left-to-right order.
--
-- >>> printTree (label rlevelorder exampleBinaryTree)
┌ ─ ─ ─ ─ ─ ─ 10 ─ ─ ─ ┐
-- │ │
┌ ─ ─ 8 ┴ ─ ─ ─ ┐ ┌ 9 ─ ┴ ─ ┐
-- │ │ │ │
┌ 5 ┴ ┐ ┌ ─ ┴ 6 ┐ ╵ ┌ ─ 7 ─ ┐
│ │ │ │
╵ ┌ 1 ┐ ┌ 2 ┴ ┐ ╵ ┌ 3 ┐ ┌ 4 ┐
│ │ │ │ │
╵ ╵ ╵ ┌ 0 ┐ ╵ ╵ ╵ ╵
-- │ │
-- ╵ ╵
--
module Data.Traversable.TreeLike
( TreeLike(..), treeFoldMap
-- | = TreeLike wrappers
-- These @newtype@s define 'TreeLike' instances for 'Traversable' types.
, Forest(..), Flat(..), List(..)
-- | = Traversals
-- Each 'TreeLike' type admits multiple traversal orders:
--
-- > inorder, preorder, postorder, levelorder, rlevelorder
-- > :: TreeLike tree => Traversal (tree a) (tree b) a b
--
-- Using the definition of 'Control.Lens.Traversal.Traversal' from
-- "Control.Lens.Traversal":
--
-- > type Traversal s t a b = forall f. Applicative f => (a -> f b) -> s -> f t
--
, inorder, preorder, postorder, levelorder, rlevelorder
-- | = Traversable wrappers
-- These @newtype@s define 'Traversable' instances for 'TreeLike' types.
, InOrder(..), PreOrder(..), PostOrder(..), LevelOrder(..), RLevelOrder(..)
-- | = Convenience functions
, showTree, printTree
) where
import Data.Functor.Compose (Compose(..))
import Data.Functor.Const (Const(..))
import Data.Functor.Product (Product(..))
import Data.Functor.Sum (Sum(..))
import Data.Traversable (foldMapDefault)
import Data.Tree hiding (Forest)
import Control.Applicative.Phases
import Data.BinaryTree
import Data.Monoid.TreeDiagram
-- | Render the tree as a string, using the 'TreeDiagram' monoid.
showTree :: (TreeLike tree, Show a) => tree a -> ShowS
showTree = showTreeDiagram . treeFoldMap singleton subtree
-- | Print the tree, using the 'TreeDiagram' monoid.
printTree :: (TreeLike tree, Show a) => tree a -> IO ()
printTree = putStrLn . ($ []) . showTree
-- | Notionally, functors are 'TreeLike' if any values and 'TreeLike'
-- substructure they contain can be traversed distinctly.
--
-- For example, given the 'TreeDiagram' monoid, one can use 'treeTraverse' with
-- the 'Const' applicative to recursively create a drawing of any tree,
-- rendering values inline with 'singleton' and dropping a line to drawings of
-- subtrees with 'subtree':
--
-- >>> :{
-- printTree :: (Show a, TreeLike tree) => tree a -> IO ()
-- printTree = printTreeDiagram . drawTree where
-- drawTree :: (Show a, TreeLike tree) => tree a -> TreeDiagram
-- drawTree = getConst . treeTraverse (Const . singleton) (Const . subtree . drawTree)
-- :}
--
-- This common pattern of mapping each element to a monoid and then modifying
-- each monoidal value generated from a subtree is captured by 'treeFoldMap', which
-- gives a slightly less verbose implementation of @printTree@.
--
-- >>> printTree = printTreeDiagram . treeFoldMap singleton subtree
--
-- Instances of 'TreeLike' are encouraged to avoid recursively defining
-- 'treeTraverse' in terms of itself, and to instead traverse subtrees
-- using the provided argument.
--
-- For example, given this definition for balanced binary trees:
--
-- >>> :{
-- data BBT a = Nil | a `Cons` BBT (a,a)
-- deriving Functor
-- infixr 4 `Cons`
-- :}
--
-- Its 'TreeLike' instance can be defined as:
--
-- >>> :{
-- instance TreeLike BBT where
-- treeTraverse f g = \t -> case t of
-- Nil -> pure Nil
-- a `Cons` at -> branch <$> g (fst <$> at) <*> f a <*> g (snd <$> at)
-- where
-- branch :: BBT b -> b -> BBT b -> BBT b
-- branch Nil b ~Nil = b `Cons` Nil
-- branch (x `Cons` xt) b ~(y `Cons` yt) = b `Cons` branch xt (x,y) yt
-- :}
--
-- This definition exposes the substructure in a way that can be used
-- by functions implemented in terms of 'treeTraverse', such as printTree:
--
-- >>> printTree $ 1 `Cons` (2,3) `Cons` ((4,5),(6,7)) `Cons` Nil
┌ ─ ─ ─ 1 ─ ─ ─ ┐
-- │ │
┌ ─ 2 ─ ┐ ┌ ─ 3 ─ ┐
-- │ │ │ │
┌ 4 ┐ ┌ 6 ┐ ┌ 5 ┐ ┌ 7 ┐
-- │ │ │ │ │ │ │ │
-- ╵ ╵ ╵ ╵ ╵ ╵ ╵ ╵
class Functor tree => TreeLike tree where
treeTraverse :: Applicative f
=> (a -> f b)
-> (forall subtree. TreeLike subtree => subtree a -> f (subtree b))
-> tree a -> f (tree b)
-- | Recursively fold a tree into a monoid, using the given functions to
-- transform values and folded subtrees.
--
-- For example, one can find the maximum depth of a tree:
--
-- >>> printTree exampleTree
-- []─┬─────┬───────────┬─────────────────────────────┐
-- │ │ │ │
[ 0 ] [ 1 ] ┴ ─ ┐ [ 2 ] ─ ─ ┬ ─ ┴ ─ ─ ─ ─ ─ ┐ [ 3 ] ─ ─ ┬ ─ ─ ─ ─ ─ ─ ─ ┬ ─ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐
-- │ │ │ │ │ │
[ 1,0 ] [ 2,0 ] [ 2,1 ] ─ ─ ─ ┐ [ 3,0 ] [ 3,1 ] ─ ─ ─ ┐ [ 3,2 ] ─ ─ ─ ┬ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ┐
-- │ │ │ │
[ 2,1,0 ] [ 3,1,0 ] [ 3,2,0 ] [ 3,2,1 ] ─ ─ ─ ─ ┐
-- │
[ 3,2,1,0 ]
-- >>> :set -XGeneralizedNewtypeDeriving
-- >>> import GHC.Natural
-- >>> import Data.Semigroup
-- >>> :{
-- newtype Max = Max { getMax :: Natural } deriving (Num, Enum)
-- instance Semigroup Max where
-- (<>) = mappend
-- instance Monoid Max where
-- mempty = Max 0
-- Max a `mappend` Max b = Max (a `max` b)
-- :}
--
-- >>> getMax $ treeFoldMap (const 0) succ exampleTree
-- 4
treeFoldMap :: (Monoid m, TreeLike tree) => (a -> m) -> (m -> m) -> tree a -> m
treeFoldMap f g = getConst . treeTraverse (Const . f) (Const . g . treeFoldMap f g)
instance TreeLike Tree where
treeTraverse f g (Node a as) = Node <$> f a <*> traverse g as
instance TreeLike BinaryTree where
treeTraverse _ _ Leaf = pure Leaf
treeTraverse f g (Branch l a r) = Branch <$> g l <*> f a <*> g r
-- |
-- Use 'Product' to combine a pair of 'TreeLike' values into a single tree.
--
-- >>> smallBinaryTree = Branch (Branch Leaf [0,1] Leaf) [0] (Branch Leaf [0,2] Leaf)
-- >>> smallRoseTree = Node [1] [Node [1,0] [], Node [1,1] [], Node [1,2] [], Node [1,3] []]
-- >>> printTree $ Pair smallBinaryTree smallRoseTree
┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐
-- │ │
┌ ─ ─ ─ [ 0 ] ─ ─ ─ ┐ [ 1 ] ─ ─ ┬ ─ ─ ─ ─ ─ ┬ ┴ ─ ─ ─ ─ ┬ ─ ─ ─ ─ ─ ┐
│ │ │ │ │ │
┌ [ 0,1 ] ┐ ┌ [ 0,2 ] ┐ [ 1,0 ] [ 1,1 ] [ 1,2 ] [ 1,3 ]
-- │ │ │ │
-- ╵ ╵ ╵ ╵
-- >>> visit a = StateT $ \e -> print a >> return (e, succ e)
-- >>> traversed <- postorder visit (Pair smallBinaryTree smallRoseTree) `evalStateT` 0
-- [0,1]
-- [0,2]
-- [0]
-- [1,0]
-- [1,1]
-- [1,2]
-- [1,3]
-- [1]
-- >>> printTree traversed
┌ ─ ─ ─ ─ ─ ─ ─ ┐
-- │ │
┌ ─ 2 ─ ┐ 7 ┬ ─ ┬ ┴ ┬ ─ ┐
│
┌ 0 ┐ ┌ 1 ┐ 3 4 5 6
-- │ │ │ │
-- ╵ ╵ ╵ ╵
instance (TreeLike fst, TreeLike snd) => TreeLike (Product fst snd) where
treeTraverse _ g (Pair x y) = Pair <$> g x <*> g y
-- | Use 'Sum' to unify two different types of trees into a single type.
--
-- >>> smallBinaryTree = Branch (Branch Leaf [0,1] Leaf) [0] (Branch Leaf [0,2] Leaf)
-- >>> smallRoseTree = Node [1] [Node [1,0] [], Node [1,1] [], Node [1,2] [], Node [1,3] []]
-- >>> someTree b = if not b then InL smallBinaryTree else InR smallRoseTree
-- >>> :t someTree
-- someTree :: Num a => Bool -> Sum BinaryTree Tree [a]
-- >>> printTree (someTree False)
-- ╷
-- │
┌ ─ ─ ─ [ 0 ] ─ ─ ─ ┐
-- │ │
┌ [ 0,1 ] ┐ ┌ [ 0,2 ] ┐
-- │ │ │ │
-- ╵ ╵ ╵ ╵
-- >>> printTree (someTree True)
-- ╷
-- │
-- [1]──┬─────┬┴────┬─────┐
-- │ │ │ │
-- [1,0] [1,1] [1,2] [1,3]
instance (TreeLike left, TreeLike right) => TreeLike (Sum left right) where
treeTraverse _ g (InL x) = InL <$> g x
treeTraverse _ g (InR y) = InR <$> g y
-- |
-- A newtype wrapper to allow traversing an entire traversable of trees
-- simultaneously.
--
-- >>> printTree $ Forest exampleTrees
-- ┌─────┬───────────┬─────────────────────────────┐
-- │ │ │ │
[ 0 ] [ 1 ] ┴ ─ ┐ [ 2 ] ─ ─ ┬ ─ ┴ ─ ─ ─ ─ ─ ┐ [ 3 ] ─ ─ ┬ ─ ─ ─ ─ ─ ─ ─ ┬ ─ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐
-- │ │ │ │ │ │
[ 1,0 ] [ 2,0 ] [ 2,1 ] ─ ─ ─ ┐ [ 3,0 ] [ 3,1 ] ─ ─ ─ ┐ [ 3,2 ] ─ ─ ─ ┬ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ┐
-- │ │ │ │
[ 2,1,0 ] [ 3,1,0 ] [ 3,2,0 ] [ 3,2,1 ] ─ ─ ─ ─ ┐
-- │
-- [3,2,1,0]
-- >>> visit a = StateT $ \e -> print a >> return (e, succ e)
-- >>> traversed <- levelorder visit (Forest exampleTrees) `evalStateT` 0
-- [0]
-- [1]
-- [2]
-- [3]
-- [1,0]
-- [2,0]
-- [2,1]
-- [3,0]
-- [3,1]
-- [3,2]
-- [2,1,0]
-- [3,1,0]
-- [3,2,0]
-- [3,2,1]
-- [3,2,1,0]
-- >>> printTree traversed
-- ┌──┬───┬────────┐
-- │ │ │ │
0 1 ┤ 2 ┬ ┴ ─ ┐ 3 ┬ ─ ─ ┬ ┴ ─ ─ ─ ─ ┐
│
4 5 6 ┴ ┐ 7 8 ┴ ┐ 9 ─ ┬ ┴ ─ ─ ┐
-- │ │ │ │
10 11 12 13 ┴
-- │
14
--
-- This is more of a convenience than a necessity, as @'Forest' t tree ~
-- 'Compose' ('Flat' t) tree@
--
-- >>> printTree . Compose $ Flat exampleTrees
-- ┌─────┬───────────┬─────────────────────────────┐
-- │ │ │ │
[ 0 ] [ 1 ] ┴ ─ ┐ [ 2 ] ─ ─ ┬ ─ ┴ ─ ─ ─ ─ ─ ┐ [ 3 ] ─ ─ ┬ ─ ─ ─ ─ ─ ─ ─ ┬ ─ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐
-- │ │ │ │ │ │
[ 1,0 ] [ 2,0 ] [ 2,1 ] ─ ─ ─ ┐ [ 3,0 ] [ 3,1 ] ─ ─ ─ ┐ [ 3,2 ] ─ ─ ─ ┬ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ┐
-- │ │ │ │
[ 2,1,0 ] [ 3,1,0 ] [ 3,2,0 ] [ 3,2,1 ] ─ ─ ─ ─ ┐
-- │
-- [3,2,1,0]
newtype Forest t tree a = Forest { getForest :: t (tree a) }
deriving Functor
instance (Traversable t, TreeLike tree) => TreeLike (Forest t tree) where
treeTraverse _ g = fmap Forest . traverse g . getForest
-- |
-- A newtype wrapper for @[a]@ whose `TreeLike` instance
-- treats each cons-cell as a tree containing one value and one subtree.
--
-- >>> printTree $ List [1..5]
1 ─ ┐
-- │
2 ┴
-- │
3 ┴
-- │
4 ┴
-- │
-- 5┤
-- │
-- ╵
-- >>> import Data.Foldable (toList)
-- >>> toList . PostOrder $ List [1..5]
-- [5,4,3,2,1]
--
-- Contrast with @'Flat' [] a@:
--
-- >>> printTree $ Flat [1..5]
-- 1─2─3─4─5
-- >>> toList . PostOrder $ Flat [1..5]
-- [1,2,3,4,5]
--
newtype List a = List { getList :: [a] }
deriving Functor
instance TreeLike List where
treeTraverse f g (List as) = List <$> case as of
[] -> pure []
a:as -> (:) <$> f a <*> (fmap getList . g .List) as
-- |
-- A newtype wraper for @t a@ whose `TreeLike` instance treats
-- the @t a@ as a flat structure with no subtrees.
--
-- >>> printTree $ Flat [1..5]
-- 1─2─3─4─5
-- >>> import Data.Foldable (toList)
-- >>> toList . PostOrder $ Flat [1..5]
-- [1,2,3,4,5]
newtype Flat t a = Flat { getFlat :: t a }
deriving Functor
instance Traversable t => TreeLike (Flat t) where
treeTraverse f _ (Flat ta) = Flat <$> traverse f ta
-- |
-- Treat subtrees and values of @outer (inner a)@ as subtrees of
-- @'Compose' outer inner a@.
--
-- For example
--
-- >>> :{
-- exampleCompose = Compose $
-- Branch
-- (Branch Leaf (Node 'a' [Node 'b' [], Node 'c' [], Node 'd' []]) Leaf)
-- (Node 'e' [Node 'f' [Node 'g' [], Node 'h' []]])
-- (Branch Leaf (Node 'i' [Node 'i' [Node 'j' [Node 'k' []]]]) Leaf)
-- :}
--
-- >>> printTree exampleCompose
┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┬ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐
-- │ │ │
┌ ─ ─ ─ ─ ─ ─ ─ ┼ ─ ─ ─ ─ ─ ─ ─ ┐ ' e ' ─ ┴ ─ ─ ┐ ┌ ─ ─ ─ ─ ┬ ─ ┴ ─ ─ ─ ─ ─ ─ ┐
-- │ │ │ │ │ │ │
╵ ' a ' ─ ┬ ─ ┴ ─ ┬ ─ ─ ─ ┐ ╵ ' f ' ─ ┼ ─ ─ ─ ╵ ' i ' ┴ ─ ─ ┐ ╵
-- │ │ │ │ │ │
-- 'b' 'c' 'd' 'g' 'h' 'i'┴─┐
-- │
-- 'j'─┐
-- │
-- 'k'
-- >>> treeFoldMap (const ["value"]) (const ["subtree"]) exampleCompose
-- ["subtree","subtree","subtree"]
instance (TreeLike outer, TreeLike inner) => TreeLike (Compose outer inner) where
treeTraverse _ g (Compose trees) = Compose <$> treeTraverse g (fmap getCompose . g . Compose) trees
-- | Traverse all the values in a tree in left-to-right order.
--
-- >>> printTree exampleBinaryTree
-- ┌──────────────────────[]────────┐
-- │ │
┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ [ L ] ─ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐ ┌ [ R ] ─ ─ ─ ─ ┴ ─ ─ ─ ─ ─ ─ ┐
-- │ │ │ │
┌ [ L , L ] ─ ─ ─ ─ ┐ ┌ ─ ─ ─ ─ ─ ─ ─ ─ ┴ ─ ─ [ L , R ] ┐ ╵ ┌ ─ ─ ─ ─ [ R , R ] ─ ─ ─ ─ ┐
│ │ │
╵ ┌ [ L , L , R ] ┐ ┌ [ L , R , L ] ─ ─ ─ ─ ─ ┐ ╵ ┌ [ R , R , L ] ┐ ┌ [ R , R , R ] ┐
-- │ │ │ │ │ │ │ │
╵ ╵ ╵ ┌ [ L , R , L , R ] ┐ ╵ ╵ ╵ ╵
-- │ │
-- ╵ ╵
-- >>> visit a = StateT $ \e -> print a >> return (e, succ e)
-- >>> traversed <- inorder visit exampleBinaryTree `evalStateT` 0
-- [L,L]
-- [L,L,R]
-- [L]
-- [L,R,L]
-- [L,R,L,R]
-- [L,R]
-- []
-- [R]
-- [R,R,L]
-- [R,R]
-- [R,R,R]
-- >>> printTree traversed
-- ┌──────6───┐
-- │ │
┌ ─ ─ 2 ┴ ─ ─ ─ ┐ ┌ 7 ─ ┴ ─ ─ ┐
-- │ │ │ │
┌ 0 ┴ ┐ ┌ ─ ┴ 5 ┐ ╵ ┌ ─ 9 ┴ ─ ┐
-- │ │ │ │ │ │
╵ ┌ 1 ┐ ┌ 3 ┴ ┐ ╵ ┌ 8 ┐ ┌ 10 ┐
-- │ │ │ │ │ │ │ │
-- ╵ ╵ ╵ ┌4┐ ╵ ╵ ╵ ╵
-- │ │
-- ╵ ╵
-- >>> printTree exampleTree
-- []─┬─────┬───────────┬─────────────────────────────┐
-- │ │ │ │
[ 0 ] [ 1 ] ┴ ─ ┐ [ 2 ] ─ ─ ┬ ─ ┴ ─ ─ ─ ─ ─ ┐ [ 3 ] ─ ─ ┬ ─ ─ ─ ─ ─ ─ ─ ┬ ─ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐
-- │ │ │ │ │ │
[ 1,0 ] [ 2,0 ] [ 2,1 ] ─ ─ ─ ┐ [ 3,0 ] [ 3,1 ] ─ ─ ─ ┐ [ 3,2 ] ─ ─ ─ ┬ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ┐
-- │ │ │ │
[ 2,1,0 ] [ 3,1,0 ] [ 3,2,0 ] [ 3,2,1 ] ─ ─ ─ ─ ┐
-- │
[ 3,2,1,0 ]
-- >>> traversed <- inorder visit exampleTree `evalStateT` 0
-- []
-- [0]
-- [1]
-- [1,0]
-- [2]
-- [2,0]
-- [2,1]
-- [2,1,0]
-- [3]
-- [3,0]
-- [3,1]
-- [3,1,0]
-- [3,2]
-- [3,2,0]
-- [3,2,1]
-- [3,2,1,0]
-- >>> printTree traversed
0 ┬ ─ ─ ┬ ─ ─ ─ ┬ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐
│ │
1 2 ┤ 4 ┬ ┴ ─ ┐ 8 ┬ ─ ─ ─ ┬ ┴ ─ ─ ─ ─ ─ ┐
-- │ │ │ │ │ │
3 5 6 ┤ 9 10 ┴ ┐ 12 ─ ┬ ┴ ─ ─ ┐
-- │ │ │ │
7 11 13 14 ┴
-- │
15
inorder :: (Applicative f, TreeLike tree) => (a -> f b) -> tree a -> f (tree b)
inorder f = treeTraverse f (inorder f)
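-- A small usage sketch (not part of the original API; the helper name
-- @labelInorder@ is made up here): numbering values in the order 'inorder'
-- visits them, using the 'Control.Monad.State' applicative.
--
-- > import Control.Monad.State (evalState, state)
-- >
-- > labelInorder :: TreeLike tree => tree a -> tree Int
-- > labelInorder t = inorder (\_ -> state (\n -> (n, n + 1))) t `evalState` 0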
-- | Traverse all the values of a node, then recurse into each of its subtrees
-- in left-to-right order.
--
-- >>> printTree exampleBinaryTree
-- ┌──────────────────────[]────────┐
-- │ │
┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ [ L ] ─ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐ ┌ [ R ] ─ ─ ─ ─ ┴ ─ ─ ─ ─ ─ ─ ┐
-- │ │ │ │
┌ [ L , L ] ─ ─ ─ ─ ┐ ┌ ─ ─ ─ ─ ─ ─ ─ ─ ┴ ─ ─ [ L , R ] ┐ ╵ ┌ ─ ─ ─ ─ [ R , R ] ─ ─ ─ ─ ┐
│ │ │
╵ ┌ [ L , L , R ] ┐ ┌ [ L , R , L ] ─ ─ ─ ─ ─ ┐ ╵ ┌ [ R , R , L ] ┐ ┌ [ R , R , R ] ┐
-- │ │ │ │ │ │ │ │
╵ ╵ ╵ ┌ [ L , R , L , R ] ┐ ╵ ╵ ╵ ╵
-- │ │
-- ╵ ╵
-- >>> visit a = StateT $ \e -> print a >> return (e, succ e)
-- >>> traversed <- preorder visit exampleBinaryTree `evalStateT` 0
-- []
-- [L]
-- [L,L]
-- [L,L,R]
-- [L,R]
-- [L,R,L]
-- [L,R,L,R]
-- [R]
-- [R,R]
-- [R,R,L]
-- [R,R,R]
-- >>> printTree traversed
-- ┌──────0───┐
-- │ │
┌ ─ ─ 1 ┴ ─ ─ ─ ┐ ┌ 7 ─ ┴ ─ ─ ┐
-- │ │ │ │
┌ 2 ┴ ┐ ┌ ─ ┴ 4 ┐ ╵ ┌ ─ 8 ┴ ─ ┐
-- │ │ │ │ │ │
╵ ┌ 3 ┐ ┌ 5 ┴ ┐ ╵ ┌ 9 ┐ ┌ 10 ┐
-- │ │ │ │ │ │ │ │
-- ╵ ╵ ╵ ┌6┐ ╵ ╵ ╵ ╵
-- │ │
-- ╵ ╵
-- >>> printTree exampleTree
-- []─┬─────┬───────────┬─────────────────────────────┐
-- │ │ │ │
[ 0 ] [ 1 ] ┴ ─ ┐ [ 2 ] ─ ─ ┬ ─ ┴ ─ ─ ─ ─ ─ ┐ [ 3 ] ─ ─ ┬ ─ ─ ─ ─ ─ ─ ─ ┬ ─ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐
-- │ │ │ │ │ │
[ 1,0 ] [ 2,0 ] [ 2,1 ] ─ ─ ─ ┐ [ 3,0 ] [ 3,1 ] ─ ─ ─ ┐ [ 3,2 ] ─ ─ ─ ┬ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ┐
-- │ │ │ │
[ 2,1,0 ] [ 3,1,0 ] [ 3,2,0 ] [ 3,2,1 ] ─ ─ ─ ─ ┐
-- │
[ 3,2,1,0 ]
-- >>> traversed <- inorder visit exampleTree `evalStateT` 0
-- []
-- [0]
-- [1]
-- [1,0]
-- [2]
-- [2,0]
-- [2,1]
-- [2,1,0]
-- [3]
-- [3,0]
-- [3,1]
-- [3,1,0]
-- [3,2]
-- [3,2,0]
-- [3,2,1]
-- [3,2,1,0]
-- >>> printTree traversed
0 ┬ ─ ─ ┬ ─ ─ ─ ┬ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐
│ │
1 2 ┤ 4 ┬ ┴ ─ ┐ 8 ┬ ─ ─ ─ ┬ ┴ ─ ─ ─ ─ ─ ┐
-- │ │ │ │ │ │
3 5 6 ┤ 9 10 ┴ ┐ 12 ─ ┬ ┴ ─ ─ ┐
-- │ │ │ │
7 11 13 14 ┴
-- │
15
preorder :: (Applicative f, TreeLike tree) => (a -> f b) -> tree a -> f (tree b)
preorder f = runPhasesForwards . treeTraverse (now . f) (later . preorder f)
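-- As a convenience sketch (names below are illustrative, not exported), the
-- 'PreOrder' wrapper defined later in this module already yields the
-- flattened preorder listing through its 'Foldable' instance:
--
-- > import Data.Foldable (toList)
-- >
-- > preorderValues :: TreeLike tree => tree a -> [a]
-- > preorderValues = toList . PreOrder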
-- | Traverse all the values of a node after recursing into each of its
-- subtrees in left-to-right order.
--
-- >>> printTree exampleBinaryTree
-- ┌──────────────────────[]────────┐
-- │ │
┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ [ L ] ─ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐ ┌ [ R ] ─ ─ ─ ─ ┴ ─ ─ ─ ─ ─ ─ ┐
-- │ │ │ │
┌ [ L , L ] ─ ─ ─ ─ ┐ ┌ ─ ─ ─ ─ ─ ─ ─ ─ ┴ ─ ─ [ L , R ] ┐ ╵ ┌ ─ ─ ─ ─ [ R , R ] ─ ─ ─ ─ ┐
│ │ │
╵ ┌ [ L , L , R ] ┐ ┌ [ L , R , L ] ─ ─ ─ ─ ─ ┐ ╵ ┌ [ R , R , L ] ┐ ┌ [ R , R , R ] ┐
-- │ │ │ │ │ │ │ │
╵ ╵ ╵ ┌ [ L , R , L , R ] ┐ ╵ ╵ ╵ ╵
-- │ │
-- ╵ ╵
-- >>> visit a = StateT $ \e -> print a >> return (e, succ e)
-- >>> traversed <- postorder visit exampleBinaryTree `evalStateT` 0
-- [L,L,R]
-- [L,L]
-- [L,R,L,R]
-- [L,R,L]
-- [L,R]
-- [L]
-- [R,R,L]
-- [R,R,R]
-- [R,R]
-- [R]
-- []
-- >>> printTree traversed
┌ ─ ─ ─ ─ ─ ─ 10 ─ ─ ─ ┐
-- │ │
┌ ─ ─ 5 ┴ ─ ─ ─ ┐ ┌ 9 ─ ┴ ─ ┐
-- │ │ │ │
┌ 1 ┴ ┐ ┌ ─ ┴ 4 ┐ ╵ ┌ ─ 8 ─ ┐
│ │ │ │
╵ ┌ 0 ┐ ┌ 3 ┴ ┐ ╵ ┌ 6 ┐ ┌ 7 ┐
-- │ │ │ │ │ │ │ │
-- ╵ ╵ ╵ ┌2┐ ╵ ╵ ╵ ╵
-- │ │
-- ╵ ╵
-- >>> printTree exampleTree
-- []─┬─────┬───────────┬─────────────────────────────┐
-- │ │ │ │
[ 0 ] [ 1 ] ┴ ─ ┐ [ 2 ] ─ ─ ┬ ─ ┴ ─ ─ ─ ─ ─ ┐ [ 3 ] ─ ─ ┬ ─ ─ ─ ─ ─ ─ ─ ┬ ─ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐
-- │ │ │ │ │ │
[ 1,0 ] [ 2,0 ] [ 2,1 ] ─ ─ ─ ┐ [ 3,0 ] [ 3,1 ] ─ ─ ─ ┐ [ 3,2 ] ─ ─ ─ ┬ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ┐
-- │ │ │ │
[ 2,1,0 ] [ 3,1,0 ] [ 3,2,0 ] [ 3,2,1 ] ─ ─ ─ ─ ┐
-- │
[ 3,2,1,0 ]
-- >>> traversed <- postorder visit exampleTree `evalStateT` 0
-- [0]
-- [1,0]
-- [1]
-- [2,0]
-- [2,1,0]
-- [2,1]
-- [2]
-- [3,0]
-- [3,1,0]
-- [3,1]
-- [3,2,0]
-- [3,2,1,0]
-- [3,2,1]
-- [3,2]
-- [3]
-- []
-- >>> printTree traversed
15 ┬ ─ ─ ┬ ─ ─ ─ ┬ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐
│ │
0 2 ┤ 6 ┬ ┴ ─ ┐ 14 ┬ ─ ─ ┬ ┴ ─ ─ ─ ─ ┐
│ │ │ │
1 3 5 ┤ 7 9 ┤ 13 ─ ┬ ┴ ─ ─ ┐
-- │ │ │ │
4 8 10 12 ┴
-- │
11
postorder :: (Applicative f, TreeLike tree) => (a -> f b) -> tree a -> f (tree b)
postorder f = runPhasesBackwards . treeTraverse (now . f) (later . postorder f)
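-- A property worth noting, sketched with an illustrative helper: labelling in
-- postorder gives every node a larger label than anything in its subtrees,
-- which is convenient for bottom-up passes.
--
-- > import Control.Monad.State (evalState, state)
-- >
-- > labelPostorder :: TreeLike tree => tree a -> tree Int
-- > labelPostorder t = postorder (\_ -> state (\n -> (n, n + 1))) t `evalState` 0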
-- | Traverse all the values of a tree in left-to-right breadth-first order.
-- (i.e. all nodes of depth @0@, then all nodes of depth @1@, then all nodes of
-- depth @2@, etc.)
--
-- >>> printTree exampleBinaryTree
-- ┌──────────────────────[]────────┐
-- │ │
┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ [ L ] ─ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐ ┌ [ R ] ─ ─ ─ ─ ┴ ─ ─ ─ ─ ─ ─ ┐
-- │ │ │ │
┌ [ L , L ] ─ ─ ─ ─ ┐ ┌ ─ ─ ─ ─ ─ ─ ─ ─ ┴ ─ ─ [ L , R ] ┐ ╵ ┌ ─ ─ ─ ─ [ R , R ] ─ ─ ─ ─ ┐
│ │ │
╵ ┌ [ L , L , R ] ┐ ┌ [ L , R , L ] ─ ─ ─ ─ ─ ┐ ╵ ┌ [ R , R , L ] ┐ ┌ [ R , R , R ] ┐
-- │ │ │ │ │ │ │ │
╵ ╵ ╵ ┌ [ L , R , L , R ] ┐ ╵ ╵ ╵ ╵
-- │ │
-- ╵ ╵
-- >>> visit a = StateT $ \e -> print a >> return (e, succ e)
-- >>> traversed <- levelorder visit exampleBinaryTree `evalStateT` 0
-- []
-- [L]
-- [R]
-- [L,L]
-- [L,R]
-- [R,R]
-- [L,L,R]
-- [L,R,L]
-- [R,R,L]
-- [R,R,R]
-- [L,R,L,R]
-- >>> printTree traversed
-- ┌──────0───┐
-- │ │
┌ ─ ─ 1 ─ ┴ ─ ─ ─ ┐ ┌ 2 ─ ┴ ─ ┐
-- │ │ │ │
┌ 3 ┴ ┐ ┌ ─ ─ ┴ 4 ┐ ╵ ┌ ─ 5 ─ ┐
-- │ │ │ │ │ │
╵ ┌ 6 ┐ ┌ 7 ┴ ─ ┐ ╵ ┌ 8 ┐ ┌ 9 ┐
-- │ │ │ │ │ │ │ │
╵ ╵ ╵ ┌ 10 ┐ ╵ ╵ ╵ ╵
-- ╵ ╵
-- >>> printTree exampleTree
-- []─┬─────┬───────────┬─────────────────────────────┐
-- │ │ │ │
[ 0 ] [ 1 ] ┴ ─ ┐ [ 2 ] ─ ─ ┬ ─ ┴ ─ ─ ─ ─ ─ ┐ [ 3 ] ─ ─ ┬ ─ ─ ─ ─ ─ ─ ─ ┬ ─ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐
-- │ │ │ │ │ │
[ 1,0 ] [ 2,0 ] [ 2,1 ] ─ ─ ─ ┐ [ 3,0 ] [ 3,1 ] ─ ─ ─ ┐ [ 3,2 ] ─ ─ ─ ┬ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ┐
-- │ │ │ │
[ 2,1,0 ] [ 3,1,0 ] [ 3,2,0 ] [ 3,2,1 ] ─ ─ ─ ─ ┐
-- │
[ 3,2,1,0 ]
-- >>> traversed <- levelorder visit exampleTree `evalStateT` 0
-- []
-- [0]
-- [1]
-- [2]
-- [3]
-- [1,0]
-- [2,0]
-- [2,1]
-- [3,0]
-- [3,1]
-- [3,2]
-- [2,1,0]
-- [3,1,0]
-- [3,2,0]
-- [3,2,1]
-- [3,2,1,0]
-- >>> printTree traversed
0 ┬ ─ ─ ┬ ─ ─ ─ ┬ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐
│ │
1 2 ┤ 3 ┬ ┴ ─ ┐ 4 ┬ ─ ─ ┬ ─ ┴ ─ ─ ─ ─ ┐
-- │ │ │ │ │ │
5 6 7 ┴ ┐ 8 9 ┴ ┐ 10 ─ ┬ ┴ ─ ─ ┐
-- │ │ │ │
11 12 13 14 ┴
-- │
15
levelorder :: (Applicative f, TreeLike tree) => (a -> f b) -> tree a -> f (tree b)
levelorder = \f -> runPhasesForwards . schedule f where
schedule :: (Applicative f, TreeLike tree) => (a -> f b) -> tree a -> Phases f (tree b)
schedule f = treeTraverse (now . f) (delay . schedule f)
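-- A breadth-first search sketch (the helper name is illustrative): because
-- the 'LevelOrder' wrapper defined later lists values shallowest-first, the
-- first match found is a shallowest, leftmost one.
--
-- > import Data.Foldable (toList)
-- > import Data.List (find)
-- >
-- > shallowestMatch :: TreeLike tree => (a -> Bool) -> tree a -> Maybe a
-- > shallowestMatch p = find p . toList . LevelOrder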
-- | Traverse all the values of a tree in left-to-right inverse breadth-first order.
-- (i.e. all nodes of depth @n@, then all nodes of depth @n-1@, then all nodes of
-- depth @n-2@, etc.)
--
-- >>> printTree exampleBinaryTree
-- ┌──────────────────────[]────────┐
-- │ │
┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ [ L ] ─ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐ ┌ [ R ] ─ ─ ─ ─ ┴ ─ ─ ─ ─ ─ ─ ┐
-- │ │ │ │
┌ [ L , L ] ─ ─ ─ ─ ┐ ┌ ─ ─ ─ ─ ─ ─ ─ ─ ┴ ─ ─ [ L , R ] ┐ ╵ ┌ ─ ─ ─ ─ [ R , R ] ─ ─ ─ ─ ┐
│ │ │
╵ ┌ [ L , L , R ] ┐ ┌ [ L , R , L ] ─ ─ ─ ─ ─ ┐ ╵ ┌ [ R , R , L ] ┐ ┌ [ R , R , R ] ┐
-- │ │ │ │ │ │ │ │
╵ ╵ ╵ ┌ [ L , R , L , R ] ┐ ╵ ╵ ╵ ╵
-- │ │
-- ╵ ╵
-- >>> visit a = StateT $ \e -> print a >> return (e, succ e)
-- >>> traversed <- rlevelorder visit exampleBinaryTree `evalStateT` 0
-- [L,R,L,R]
-- [L,L,R]
-- [L,R,L]
-- [R,R,L]
-- [R,R,R]
-- [L,L]
-- [L,R]
-- [R,R]
-- [L]
-- [R]
-- []
-- >>> printTree traversed
┌ ─ ─ ─ ─ ─ ─ 10 ─ ─ ─ ┐
-- │ │
┌ ─ ─ 8 ┴ ─ ─ ─ ┐ ┌ 9 ─ ┴ ─ ┐
-- │ │ │ │
┌ 5 ┴ ┐ ┌ ─ ┴ 6 ┐ ╵ ┌ ─ 7 ─ ┐
│ │ │ │
╵ ┌ 1 ┐ ┌ 2 ┴ ┐ ╵ ┌ 3 ┐ ┌ 4 ┐
-- │ │ │ │ │ │ │ │
╵ ╵ ╵ ┌ 0 ┐ ╵ ╵ ╵ ╵
-- │ │
-- ╵ ╵
-- >>> printTree exampleTree
-- []─┬─────┬───────────┬─────────────────────────────┐
-- │ │ │ │
[ 0 ] [ 1 ] ┴ ─ ┐ [ 2 ] ─ ─ ┬ ─ ┴ ─ ─ ─ ─ ─ ┐ [ 3 ] ─ ─ ┬ ─ ─ ─ ─ ─ ─ ─ ┬ ─ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐
-- │ │ │ │ │ │
[ 1,0 ] [ 2,0 ] [ 2,1 ] ─ ─ ─ ┐ [ 3,0 ] [ 3,1 ] ─ ─ ─ ┐ [ 3,2 ] ─ ─ ─ ┬ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ┐
-- │ │ │ │
[ 2,1,0 ] [ 3,1,0 ] [ 3,2,0 ] [ 3,2,1 ] ─ ─ ─ ─ ┐
-- │
[ 3,2,1,0 ]
-- >>> traversed <- rlevelorder visit exampleTree `evalStateT` 0
-- [3,2,1,0]
-- [2,1,0]
-- [3,1,0]
-- [3,2,0]
-- [3,2,1]
-- [1,0]
-- [2,0]
-- [2,1]
-- [3,0]
-- [3,1]
-- [3,2]
-- [0]
-- [1]
-- [2]
-- [3]
-- []
-- >>> printTree traversed
15 ─ ┬ ─ ─ ┬ ─ ─ ─ ─ ─ ┬ ─ ─ ─ ─ ─ ─ ─ ─ ┐
│ │
11 12 ┐ 13 ┬ ┴ ─ ┐ 14 ┬ ─ ─ ┼ ─ ─ ─ ─ ┐
│ │
5 6 7 ┤ 8 9 ┤ 10 ┬ ┴ ─ ┐
│ │
-- 1 2 3 4┤
-- │
-- 0
rlevelorder :: (Applicative f, TreeLike tree) => (a -> f b) -> tree a -> f (tree b)
rlevelorder = \f -> runPhasesBackwards . schedule f where
schedule :: (Applicative f, TreeLike tree) => (a -> f b) -> tree a -> Phases f (tree b)
schedule f = treeTraverse (now . f) (delay . schedule f)
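-- Dually, a deepest-level-first listing can be sketched with the
-- 'RLevelOrder' wrapper (illustrative helper name):
--
-- > import Data.Foldable (toList)
-- >
-- > deepestFirst :: TreeLike tree => tree a -> [a]
-- > deepestFirst = toList . RLevelOrder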
-- | 'Tree' wrapper to use 'inorder' traversal
--
-- >>> printTree exampleBinaryTree
-- ┌──────────────────────[]────────┐
-- │ │
┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ [ L ] ─ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐ ┌ [ R ] ─ ─ ─ ─ ┴ ─ ─ ─ ─ ─ ─ ┐
-- │ │ │ │
┌ [ L , L ] ─ ─ ─ ─ ┐ ┌ ─ ─ ─ ─ ─ ─ ─ ─ ┴ ─ ─ [ L , R ] ┐ ╵ ┌ ─ ─ ─ ─ [ R , R ] ─ ─ ─ ─ ┐
│ │ │
╵ ┌ [ L , L , R ] ┐ ┌ [ L , R , L ] ─ ─ ─ ─ ─ ┐ ╵ ┌ [ R , R , L ] ┐ ┌ [ R , R , R ] ┐
-- │ │ │ │ │ │ │ │
╵ ╵ ╵ ┌ [ L , R , L , R ] ┐ ╵ ╵ ╵ ╵
-- │ │
-- ╵ ╵
-- >>> _ <- traverse print $ InOrder exampleBinaryTree
-- [L,L]
-- [L,L,R]
-- [L]
-- [L,R,L]
-- [L,R,L,R]
-- [L,R]
-- []
-- [R]
-- [R,R,L]
-- [R,R]
-- [R,R,R]
newtype InOrder tree a = InOrder { getInOrder :: tree a }
deriving Functor
instance TreeLike tree => Foldable (InOrder tree) where
foldMap = foldMapDefault
instance TreeLike tree => Traversable (InOrder tree) where
traverse f = fmap InOrder . inorder f . getInOrder
-- | 'Tree' wrapper to use 'preorder' traversal
--
-- >>> printTree exampleBinaryTree
-- ┌──────────────────────[]────────┐
-- │ │
┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ [ L ] ─ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐ ┌ [ R ] ─ ─ ─ ─ ┴ ─ ─ ─ ─ ─ ─ ┐
-- │ │ │ │
┌ [ L , L ] ─ ─ ─ ─ ┐ ┌ ─ ─ ─ ─ ─ ─ ─ ─ ┴ ─ ─ [ L , R ] ┐ ╵ ┌ ─ ─ ─ ─ [ R , R ] ─ ─ ─ ─ ┐
│ │ │
╵ ┌ [ L , L , R ] ┐ ┌ [ L , R , L ] ─ ─ ─ ─ ─ ┐ ╵ ┌ [ R , R , L ] ┐ ┌ [ R , R , R ] ┐
-- │ │ │ │ │ │ │ │
╵ ╵ ╵ ┌ [ L , R , L , R ] ┐ ╵ ╵ ╵ ╵
-- │ │
-- ╵ ╵
-- >>> _ <- traverse print $ PreOrder exampleBinaryTree
-- []
-- [L]
-- [L,L]
-- [L,L,R]
-- [L,R]
-- [L,R,L]
-- [L,R,L,R]
-- [R]
-- [R,R]
-- [R,R,L]
-- [R,R,R]
newtype PreOrder tree a = PreOrder { getPreOrder :: tree a }
deriving Functor
instance TreeLike tree => Foldable (PreOrder tree) where
foldMap = foldMapDefault
instance TreeLike tree => Traversable (PreOrder tree) where
traverse f = fmap PreOrder . preorder f . getPreOrder
-- | 'Tree' wrapper to use 'postorder' traversal
--
-- >>> printTree exampleBinaryTree
-- ┌──────────────────────[]────────┐
-- │ │
┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ [ L ] ─ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐ ┌ [ R ] ─ ─ ─ ─ ┴ ─ ─ ─ ─ ─ ─ ┐
-- │ │ │ │
┌ [ L , L ] ─ ─ ─ ─ ┐ ┌ ─ ─ ─ ─ ─ ─ ─ ─ ┴ ─ ─ [ L , R ] ┐ ╵ ┌ ─ ─ ─ ─ [ R , R ] ─ ─ ─ ─ ┐
│ │ │
╵ ┌ [ L , L , R ] ┐ ┌ [ L , R , L ] ─ ─ ─ ─ ─ ┐ ╵ ┌ [ R , R , L ] ┐ ┌ [ R , R , R ] ┐
-- │ │ │ │ │ │ │ │
╵ ╵ ╵ ┌ [ L , R , L , R ] ┐ ╵ ╵ ╵ ╵
-- │ │
-- ╵ ╵
-- >>> _ <- traverse print $ PostOrder exampleBinaryTree
-- [L,L,R]
-- [L,L]
-- [L,R,L,R]
-- [L,R,L]
-- [L,R]
-- [L]
-- [R,R,L]
-- [R,R,R]
-- [R,R]
-- [R]
-- []
newtype PostOrder tree a = PostOrder { getPostOrder :: tree a }
deriving Functor
instance TreeLike tree => Foldable (PostOrder tree) where
foldMap = foldMapDefault
instance TreeLike tree => Traversable (PostOrder tree) where
traverse f = fmap PostOrder . postorder f . getPostOrder
-- | 'Tree' wrapper to use 'levelorder' traversal
--
-- >>> printTree exampleBinaryTree
-- ┌──────────────────────[]────────┐
-- │ │
┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ [ L ] ─ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐ ┌ [ R ] ─ ─ ─ ─ ┴ ─ ─ ─ ─ ─ ─ ┐
-- │ │ │ │
┌ [ L , L ] ─ ─ ─ ─ ┐ ┌ ─ ─ ─ ─ ─ ─ ─ ─ ┴ ─ ─ [ L , R ] ┐ ╵ ┌ ─ ─ ─ ─ [ R , R ] ─ ─ ─ ─ ┐
│ │ │
╵ ┌ [ L , L , R ] ┐ ┌ [ L , R , L ] ─ ─ ─ ─ ─ ┐ ╵ ┌ [ R , R , L ] ┐ ┌ [ R , R , R ] ┐
-- │ │ │ │ │ │ │ │
╵ ╵ ╵ ┌ [ L , R , L , R ] ┐ ╵ ╵ ╵ ╵
-- │ │
-- ╵ ╵
-- >>> _ <- traverse print $ LevelOrder exampleBinaryTree
-- []
-- [L]
-- [R]
-- [L,L]
-- [L,R]
-- [R,R]
-- [L,L,R]
-- [L,R,L]
-- [R,R,L]
-- [R,R,R]
-- [L,R,L,R]
newtype LevelOrder tree a = LevelOrder { getLevelOrder :: tree a }
deriving Functor
instance TreeLike tree => Foldable (LevelOrder tree) where
foldMap = foldMapDefault
instance TreeLike tree => Traversable (LevelOrder tree) where
traverse f = fmap LevelOrder . levelorder f . getLevelOrder
-- | 'Tree' wrapper to use 'rlevelorder' traversal
--
-- >>> printTree exampleBinaryTree
-- ┌──────────────────────[]────────┐
-- │ │
┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ [ L ] ─ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐ ┌ [ R ] ─ ─ ─ ─ ┴ ─ ─ ─ ─ ─ ─ ┐
-- │ │ │ │
┌ [ L , L ] ─ ─ ─ ─ ┐ ┌ ─ ─ ─ ─ ─ ─ ─ ─ ┴ ─ ─ [ L , R ] ┐ ╵ ┌ ─ ─ ─ ─ [ R , R ] ─ ─ ─ ─ ┐
│ │ │
╵ ┌ [ L , L , R ] ┐ ┌ [ L , R , L ] ─ ─ ─ ─ ─ ┐ ╵ ┌ [ R , R , L ] ┐ ┌ [ R , R , R ] ┐
-- │ │ │ │ │ │ │ │
╵ ╵ ╵ ┌ [ L , R , L , R ] ┐ ╵ ╵ ╵ ╵
-- │ │
-- ╵ ╵
-- >>> _ <- traverse print $ RLevelOrder exampleBinaryTree
-- [L,R,L,R]
-- [L,L,R]
-- [L,R,L]
-- [R,R,L]
-- [R,R,R]
-- [L,L]
-- [L,R]
-- [R,R]
-- [L]
-- [R]
-- []
newtype RLevelOrder tree a = RLevelOrder { getRLevelOrder :: tree a }
deriving Functor
instance TreeLike tree => Foldable (RLevelOrder tree) where
foldMap = foldMapDefault
instance TreeLike tree => Traversable (RLevelOrder tree) where
traverse f = fmap RLevelOrder . rlevelorder f . getRLevelOrder
-- $setup
-- >>> :set -XDeriveFunctor
-- >>> import Control.Monad.State
-- >>> data Direction = L | R deriving Show
-- >>> :{
-- next :: a -> State Int Int
-- next = const . state $ \n -> (n, n+1)
-- label :: ((a -> State Int Int) -> tree a -> State Int (tree Int)) -> tree a -> tree Int
-- label traversal tree = traversal next tree `evalState` (0 :: Int)
-- :}
--
-- >>> :{
-- exampleTrees :: [Tree [Int]]
-- exampleTrees =
--   [ Node [0] []
--   , Node [1] [Node [1,0] []]
--   , Node [2] [Node [2,0] [], Node [2,1] [Node [2,1,0] []]]
--   , Node [3]
--     [ Node [3,0] []
--     , Node [3,1] [Node [3,1,0] []]
--     , Node [3,2] [Node [3,2,0] [], Node [3,2,1] [Node [3,2,1,0] []]]
-- ]
-- ]
-- exampleTree :: Tree [Int]
-- exampleTree = Node [] exampleTrees
-- exampleBinaryTree :: BinaryTree [Direction]
-- exampleBinaryTree =
-- Branch
-- ( Branch
-- ( Branch
-- Leaf
-- [L,L]
-- (Branch Leaf [L,L,R] Leaf)
-- )
-- [L]
-- ( Branch
-- ( Branch
-- Leaf
-- [L,R,L]
-- (Branch Leaf [L,R,L,R] Leaf)
-- )
-- [L,R]
-- Leaf
-- )
-- )
-- []
-- ( Branch
-- Leaf
-- [R]
-- ( Branch
-- (Branch Leaf [R,R,L] Leaf)
-- [R,R]
-- (Branch Leaf [R,R,R] Leaf)
-- )
-- )
-- :}
| null | https://raw.githubusercontent.com/rampion/tree-traversals/f9304f532bca56c455463ea66e148a4a02e908d9/src/Data/Traversable/TreeLike.hs | haskell | # OPTIONS_GHC -Wno-name-shadowing #
# LANGUAGE RankNTypes #
# LANGUAGE GADTs #
| By providing a 'TreeLike' instance, a functor can be traversed in several
orders:
['inorder' / 'InOrder']
Viewing a 'TreeLike' functor as a sequence of values and subtrees, an
/__inorder__/ traversal iterates through this sequence visiting values and
traversing subtrees in the order they are given.
>>> printTree (label inorder exampleBinaryTree)
┌──────6───┐
│ │
│ │ │ │
│ │ │ │ │ │
│ │ │ │ │ │ │ │
╵ ╵ ╵ ┌4┐ ╵ ╵ ╵ ╵
│ │
╵ ╵
Viewing a 'TreeLike' functor as a sequence of values and subtrees, a
/__preorder__/ traversal visits all the values in the sequence before
traversing the subtrees.
┌──────0───┐
│ │
│ │ │ │
│ │ │ │ │ │
│ │ │ │ │ │ │ │
╵ ╵ ╵ ┌6┐ ╵ ╵ ╵ ╵
│ │
╵ ╵
['postorder' / 'PostOrder']
Viewing a 'TreeLike' functor as a sequence of values and subtrees, a
/__postorder__/ traversal traverses all the subtrees in the sequence
before visiting the values in the sequence before
traversing the subtrees.
>>> printTree (label postorder exampleBinaryTree)
│ │
│ │ │ │
╵ ╵ ╵ ┌2┐ ╵ ╵ ╵ ╵
│ │
╵ ╵
all the values at the root level before traversing any of the subtrees.
traversal interweaves their traversals, next visiting all the values at the
root of each subtree, then visiting all the values at the roots of each
subtree's subtrees, and so on. This is also known as a breadth-first
traversal.
┌──────0───┐
│ │
│ │ │ │
│ │ │ │ │ │
│ │ │ │ │ │ │ │
╵ ╵
['rlevelorder' / 'RLevelOrder']
only visits all the values at the root level after traversing all of the
from the deepest level up, though still in left-to-right order.
│ │
│ │ │ │
│ │
╵ ╵
| = TreeLike wrappers
Each 'TreeLike' type admits multiple traversal orders:
> :: TreeLike tree => Traversal (tree a) (tree b) a b
Using the definition of 'Control.Lens.Traversal.Traversal' from
"Control.Lens.Traversal":
| = Convenience functions
substructure they contain can be traversed distinctly.
the 'Const' applicative to recursively create a drawing of any tree,
rendering values inline with 'singleton' and dropping a line to drawings of
subtrees with 'subtree':
>>> :{
printTree :: (Show a, TreeLike tree) => tree a -> IO ()
printTree = printTreeDiagram . drawTree where
drawTree :: (Show a, TreeLike tree) => tree a -> TreeDiagram
:}
This common pattern of mapping each element to a monoid and then modifying
each monoidal value generated from a subtree is captured by 'treeFoldMap', which
gives a slightly less verbose implementation of @printTree@.
Instances of 'TreeLike' are encouraged to avoid recursively defining
using the provided argument.
For example, given this definition for balanced binary trees:
>>> :{
deriving Functor
:}
Its 'TreeLike' instance can be defined as:
>>> :{
instance TreeLike BBT where
Nil -> pure Nil
where
branch :: BBT b -> b -> BBT b -> BBT b
branch Nil b ~Nil = b `Cons` Nil
branch (x `Cons` xt) b ~(y `Cons` yt) = b `Cons` branch xt (x,y) yt
:}
This definition exposes the substructure in a way that can be used
│ │
│ │ │ │
│ │ │ │ │ │ │ │
╵ ╵ ╵ ╵ ╵ ╵ ╵ ╵
| Recursively fold a tree into a monoid, using the given functions to
transform values and folded subtrees.
For example, one can find the maximum depth of a tree:
>>> printTree exampleTree
[]─┬─────┬───────────┬─────────────────────────────┐
│ │ │ │
│ │ │ │ │ │
│ │ │ │
│
>>> :set -XGeneralizedNewtypeDeriving
>>> :{
(<>) = mappend
:}
>>> getMax $ treeFoldMap (const 0) succ exampleTree
|
Use 'Product' to combine a pair of 'TreeLike' values into a single tree.
>>> printTree $ Pair smallBinaryTree smallRoseTree
│ │
│ │ │ │
╵ ╵ ╵ ╵
>>> visit a = StateT $ \e -> print a >> return (e, succ e)
>>> traversed <- postorder visit (Pair smallBinaryTree smallRoseTree) `evalStateT` 0
[0]
[1,2]
[1,3]
>>> printTree traversed
│ │
│ │ │ │
╵ ╵ ╵ ╵
>>> :t someTree
>>> printTree (someTree False)
╷
│
│ │
│ │ │ │
╵ ╵ ╵ ╵
>>> printTree (someTree True)
╷
│
[1]──┬─────┬┴────┬─────┐
│ │ │ │
|
A newtype wrapper to allow traversing an entire traversable of trees
simultaneously.
>>> printTree $ Forest exampleTrees
┌─────┬───────────┬─────────────────────────────┐
│ │ │ │
│ │ │ │ │ │
│ │ │ │
│
[3,2,1,0]
>>> visit a = StateT $ \e -> print a >> return (e, succ e)
[0]
[2,1,0]
[3,2,0]
[3,2,1]
>>> printTree traversed
┌──┬───┬────────┐
│ │ │ │
│ │ │ │
│
This is more of a convenience than a necessity, as @'Forest' t tree ~
'Compose' ('Flat' t) tree@
>>> printTree . Compose $ Flat exampleTrees
┌─────┬───────────┬─────────────────────────────┐
│ │ │ │
│ │ │ │ │ │
│ │ │ │
│
[3,2,1,0]
|
│
│
│
│
5┤
│
╵
>>> import Data.Foldable (toList)
Contrast with @'Flat' [] a@:
|
A newtype wraper for @t a@ whose `TreeLike` instance treats
the @t a@ as a flat structure with no subtrees.
>>> import Data.Foldable (toList)
|
Treat subtrees and values of @outer (inner a)@ as subtrees of
@'Compose' outer inner a@.
For example
>>> :{
exampleCompose = Compose $
Branch
(Node 'e' [Node 'f' [Node 'g' [], Node 'h' []]])
(Branch Leaf (Node 'i' [Node 'i' [Node 'j' [Node 'k' []]]]) Leaf)
:}
>>> printTree exampleCompose
│ │ │
│ │ │ │ │ │ │
│ │ │ │ │ │
'b' 'c' 'd' 'g' 'h' 'i'┴─┐
│
'j'─┐
│
'k'
["subtree","subtree","subtree"]
| Traverse all the values in a tree in left-to-right order.
>>> printTree exampleBinaryTree
┌──────────────────────[]────────┐
│ │
│ │ │ │
│ │ │ │ │ │ │ │
│ │
╵ ╵
>>> visit a = StateT $ \e -> print a >> return (e, succ e)
>>> traversed <- inorder visit exampleBinaryTree `evalStateT` 0
[L,L]
[L,L,R]
[L]
[L,R,L]
[L,R,L,R]
[L,R]
[]
[R]
[R,R,L]
[R,R]
[R,R,R]
>>> printTree traversed
┌──────6───┐
│ │
│ │ │ │
│ │ │ │ │ │
│ │ │ │ │ │ │ │
╵ ╵ ╵ ┌4┐ ╵ ╵ ╵ ╵
│ │
╵ ╵
>>> printTree exampleTree
[]─┬─────┬───────────┬─────────────────────────────┐
│ │ │ │
│ │ │ │ │ │
│ │ │ │
│
>>> traversed <- inorder visit exampleTree `evalStateT` 0
[]
[0]
[2,1,0]
[3,2,0]
[3,2,1]
>>> printTree traversed
│ │ │ │ │ │
│ │ │ │
│
| Traverse all the values of a node, then recurse into each of its subtrees
in left-to-right order.
>>> printTree exampleBinaryTree
┌──────────────────────[]────────┐
│ │
│ │ │ │
│ │ │ │ │ │ │ │
│ │
╵ ╵
>>> visit a = StateT $ \e -> print a >> return (e, succ e)
>>> traversed <- preorder visit exampleBinaryTree `evalStateT` 0
[]
[L]
[L,L]
[L,L,R]
[L,R]
[L,R,L]
[L,R,L,R]
[R]
[R,R]
[R,R,L]
[R,R,R]
>>> printTree traversed
┌──────0───┐
│ │
│ │ │ │
│ │ │ │ │ │
│ │ │ │ │ │ │ │
╵ ╵ ╵ ┌6┐ ╵ ╵ ╵ ╵
│ │
╵ ╵
>>> printTree exampleTree
[]─┬─────┬───────────┬─────────────────────────────┐
│ │ │ │
│ │ │ │ │ │
│ │ │ │
│
>>> traversed <- inorder visit exampleTree `evalStateT` 0
[]
[0]
[2,1,0]
[3,2,0]
[3,2,1]
>>> printTree traversed
│ │ │ │ │ │
│ │ │ │
│
| Traverse all the values of a node after recursing into each of its
subtrees in left-to-right order.
>>> printTree exampleBinaryTree
┌──────────────────────[]────────┐
│ │
│ │ │ │
│ │ │ │ │ │ │ │
│ │
╵ ╵
>>> visit a = StateT $ \e -> print a >> return (e, succ e)
>>> traversed <- postorder visit exampleBinaryTree `evalStateT` 0
[L,L,R]
[L,L]
[L,R,L,R]
[L,R,L]
[L,R]
[L]
[R,R,L]
[R,R,R]
[R,R]
[R]
[]
>>> printTree traversed
│ │
│ │ │ │
│ │ │ │ │ │ │ │
╵ ╵ ╵ ┌2┐ ╵ ╵ ╵ ╵
│ │
╵ ╵
>>> printTree exampleTree
[]─┬─────┬───────────┬─────────────────────────────┐
│ │ │ │
│ │ │ │ │ │
│ │ │ │
│
>>> traversed <- postorder visit exampleTree `evalStateT` 0
[0]
[2,1,0]
[3,2,0]
[3,2,1]
[]
>>> printTree traversed
│ │ │ │
│
>>> printTree exampleBinaryTree
┌──────────────────────[]────────┐
│ │
│ │ │ │
│ │ │ │ │ │ │ │
│ │
╵ ╵
>>> visit a = StateT $ \e -> print a >> return (e, succ e)
>>> traversed <- levelorder visit exampleBinaryTree `evalStateT` 0
[]
[L]
[R]
[L,L]
[L,R]
[R,R]
[L,L,R]
[L,R,L]
[R,R,L]
[R,R,R]
[L,R,L,R]
>>> printTree traversed
┌──────0───┐
│ │
│ │ │ │
│ │ │ │ │ │
│ │ │ │ │ │ │ │
╵ ╵
>>> printTree exampleTree
[]─┬─────┬───────────┬─────────────────────────────┐
│ │ │ │
│ │ │ │ │ │
│ │ │ │
│
>>> traversed <- levelorder visit exampleTree `evalStateT` 0
[]
[0]
[2,1,0]
[3,2,0]
[3,2,1]
>>> printTree traversed
│ │ │ │ │ │
│ │ │ │
│
>>> printTree exampleBinaryTree
┌──────────────────────[]────────┐
│ │
│ │ │ │
│ │ │ │ │ │ │ │
│ │
╵ ╵
>>> visit a = StateT $ \e -> print a >> return (e, succ e)
>>> traversed <- rlevelorder visit exampleBinaryTree `evalStateT` 0
[L,R,L,R]
[L,L,R]
[L,R,L]
[R,R,L]
[R,R,R]
[L,L]
[L,R]
[R,R]
[L]
[R]
[]
>>> printTree traversed
│ │
│ │ │ │
│ │ │ │ │ │ │ │
│ │
╵ ╵
>>> printTree exampleTree
[]─┬─────┬───────────┬─────────────────────────────┐
│ │ │ │
│ │ │ │ │ │
│ │ │ │
│
>>> traversed <- rlevelorder visit exampleTree `evalStateT` 0
[2,1,0]
[3,2,0]
[3,2,1]
[0]
[]
>>> printTree traversed
1 2 3 4┤
│
0
| 'Tree' wrapper to use 'inorder' traversal
>>> printTree exampleBinaryTree
┌──────────────────────[]────────┐
│ │
│ │ │ │
│ │ │ │ │ │ │ │
│ │
╵ ╵
[L,L]
[L,L,R]
[L]
[L,R,L]
[L,R,L,R]
[L,R]
[]
[R]
[R,R,L]
[R,R]
[R,R,R]
| 'Tree' wrapper to use 'preorder' traversal
>>> printTree exampleBinaryTree
┌──────────────────────[]────────┐
│ │
│ │ │ │
│ │ │ │ │ │ │ │
│ │
╵ ╵
[]
[L]
[L,L]
[L,L,R]
[L,R]
[L,R,L]
[L,R,L,R]
[R]
[R,R]
[R,R,L]
[R,R,R]
| 'Tree' wrapper to use 'postorder' traversal
>>> printTree exampleBinaryTree
┌──────────────────────[]────────┐
│ │
│ │ │ │
│ │ │ │ │ │ │ │
│ │
╵ ╵
[L,L,R]
[L,L]
[L,R,L,R]
[L,R,L]
[L,R]
[L]
[R,R,L]
[R,R,R]
[R,R]
[R]
[]
| 'Tree' wrapper to use 'levelorder' traversal
>>> printTree exampleBinaryTree
┌──────────────────────[]────────┐
│ │
│ │ │ │
│ │ │ │ │ │ │ │
│ │
╵ ╵
>>> _ <- traverse print $ LevelOrder exampleBinaryTree
[]
[L]
[R]
[L,L]
[L,R]
[R,R]
[L,L,R]
[L,R,L]
[R,R,L]
[R,R,R]
[L,R,L,R]
| 'Tree' wrapper to use 'rlevelorder' traversal
>>> printTree exampleBinaryTree
┌──────────────────────[]────────┐
│ │
│ │ │ │
│ │ │ │ │ │ │ │
│ │
╵ ╵
[L,R,L,R]
[L,L,R]
[L,R,L]
[R,R,L]
[R,R,R]
[L,L]
[L,R]
[R,R]
[L]
[R]
[]
$setup
>>> :set -XDeriveFunctor
>>> data Direction = L | R deriving Show
>>> :{
next :: a -> State Int Int
label :: ((a -> State Int Int) -> tree a -> State Int (tree Int)) -> tree a -> tree Int
label traversal tree = traversal next tree `evalState` (0 :: Int)
:}
>>> :{
exampleTrees :: [Tree [Int]]
[ Node [0] []
]
]
exampleTree :: Tree [Int]
exampleTree = Node [] exampleTrees
exampleBinaryTree :: BinaryTree [Direction]
exampleBinaryTree =
Branch
( Branch
( Branch
Leaf
[L,L]
(Branch Leaf [L,L,R] Leaf)
)
[L]
( Branch
( Branch
Leaf
[L,R,L]
(Branch Leaf [L,R,L,R] Leaf)
)
[L,R]
Leaf
)
)
[]
( Branch
Leaf
[R]
( Branch
(Branch Leaf [R,R,L] Leaf)
[R,R]
(Branch Leaf [R,R,R] Leaf)
)
)
:} | # LANGUAGE ViewPatterns #
# LANGUAGE DataKinds #
# LANGUAGE TypeOperators #
# LANGUAGE DeriveFunctor #
# LANGUAGE ScopedTypeVariables #
┌ ─ ─ 2 ┴ ─ ─ ─ ┐ ┌ 7 ─ ┴ ─ ─ ┐
┌ 0 ┴ ┐ ┌ ─ ┴ 5 ┐ ╵ ┌ ─ 9 ┴ ─ ┐
╵ ┌ 1 ┐ ┌ 3 ┴ ┐ ╵ ┌ 8 ┐ ┌ 10 ┐
[ ' preorder ' / ' PreOrder ' ]
> > > printTree ( label preorder exampleBinaryTree )
┌ ─ ─ 1 ┴ ─ ─ ─ ┐ ┌ 7 ─ ┴ ─ ─ ┐
┌ 2 ┴ ┐ ┌ ─ ┴ 4 ┐ ╵ ┌ ─ 8 ┴ ─ ┐
╵ ┌ 3 ┐ ┌ 5 ┴ ┐ ╵ ┌ 9 ┐ ┌ 10 ┐
┌ ─ ─ ─ ─ ─ ─ 10 ─ ─ ─ ┐
┌ ─ ─ 5 ┴ ─ ─ ─ ┐ ┌ 9 ─ ┴ ─ ┐
┌ 1 ┴ ┐ ┌ ─ ┴ 4 ┐ ╵ ┌ ─ 8 ─ ┐
│ │ │ │
╵ ┌ 0 ┐ ┌ 3 ┴ ┐ ╵ ┌ 6 ┐ ┌ 7 ┐
│ │ │ │ │
[ ' levelorder ' / ' LevelOrder ' ]
Similar to a preorder traversal , a /__levelorder__/ traversal first visits
Instead of traversing the subtrees one by one , though , a levelorder
> > > printTree ( label levelorder exampleBinaryTree )
┌ ─ ─ 1 ─ ┴ ─ ─ ─ ┐ ┌ 2 ─ ┴ ─ ┐
┌ 3 ┴ ┐ ┌ ─ ─ ┴ 4 ┐ ╵ ┌ ─ 5 ─ ┐
╵ ┌ 6 ┐ ┌ 7 ┴ ─ ┐ ╵ ┌ 8 ┐ ┌ 9 ┐
╵ ╵ ╵ ┌ 10 ┐ ╵ ╵ ╵ ╵
Similar to a postlevel traversal , a /__reversed levelorder__/ traversal
subtrees . Instead of traversing the subtrees one by one , though , a
reversed levelorder traversal interweaves their traversals , working
> > > printTree ( label rlevelorder exampleBinaryTree )
┌ ─ ─ ─ ─ ─ ─ 10 ─ ─ ─ ┐
┌ ─ ─ 8 ┴ ─ ─ ─ ┐ ┌ 9 ─ ┴ ─ ┐
┌ 5 ┴ ┐ ┌ ─ ┴ 6 ┐ ╵ ┌ ─ 7 ─ ┐
│ │ │ │
╵ ┌ 1 ┐ ┌ 2 ┴ ┐ ╵ ┌ 3 ┐ ┌ 4 ┐
│ │ │ │ │
╵ ╵ ╵ ┌ 0 ┐ ╵ ╵ ╵ ╵
module Data.Traversable.TreeLike
( TreeLike(..), treeFoldMap
These @newtype@s define ' TreeLike ' instances for ' ' types .
, Forest(..), Flat(..), List(..)
| =
> inorder , preorder , postorder , levelorder , rlevelorder
> type Traversal s t a b = forall > ( a - > f b ) - > s - > f t
, inorder, preorder, postorder, levelorder, rlevelorder
| = wrappers
These @newtype@s define ' ' instances for ' TreeLike ' types .
, InOrder(..), PreOrder(..), PostOrder(..), LevelOrder(..), RLevelOrder(..)
, showTree, printTree
) where
import Data.Functor.Compose (Compose(..))
import Data.Functor.Const (Const(..))
import Data.Functor.Product (Product(..))
import Data.Functor.Sum (Sum(..))
import Data.Traversable (foldMapDefault)
import Data.Tree hiding (Forest)
import Control.Applicative.Phases
import Data.BinaryTree
import Data.Monoid.TreeDiagram
| Render the tree as a string , using the ' TreeDiagram ' monoid .
showTree :: (TreeLike tree, Show a) => tree a -> ShowS
showTree = showTreeDiagram . treeFoldMap singleton subtree
| Print the tree , using the ' TreeDiagram ' monoid .
printTree :: (TreeLike tree, Show a) => tree a -> IO ()
printTree = putStrLn . ($ []) . showTree
| , functors are ' TreeLike ' if any values and ' TreeLike '
For example , given the ' TreeDiagram ' monoid , one can use ' ' with
drawTree = getConst . ( Const . ) ( Const . subtree . drawTree )
> > > printTree = printTreeDiagram . singleton subtree
' ' in terms of itself , and to instead traverse subtrees
data BBT a = Nil | a ` Cons ` BBT ( a , a )
infixr 4 ` Cons `
= t - > case t of
a ` Cons ` at - > branch < $ > g ( fst < $ > at ) < * > f a < * > g ( snd < $ > at )
by functions implemented in terms of ' ' , such as :
> > > printTree $ 1 ` Cons ` ( 2,3 ) ` Cons ` ( ( 4,5),(6,7 ) ) ` Cons ` Nil
┌ ─ ─ ─ 1 ─ ─ ─ ┐
┌ ─ 2 ─ ┐ ┌ ─ 3 ─ ┐
┌ 4 ┐ ┌ 6 ┐ ┌ 5 ┐ ┌ 7 ┐
class Functor tree => TreeLike tree where
treeTraverse :: Applicative f
=> (a -> f b)
-> (forall subtree. TreeLike subtree => subtree a -> f (subtree b))
-> tree a -> f (tree b)
[ 0 ] [ 1 ] ┴ ─ ┐ [ 2 ] ─ ─ ┬ ─ ┴ ─ ─ ─ ─ ─ ┐ [ 3 ] ─ ─ ┬ ─ ─ ─ ─ ─ ─ ─ ┬ ─ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐
[ 1,0 ] [ 2,0 ] [ 2,1 ] ─ ─ ─ ┐ [ 3,0 ] [ 3,1 ] ─ ─ ─ ┐ [ 3,2 ] ─ ─ ─ ┬ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ┐
[ 2,1,0 ] [ 3,1,0 ] [ 3,2,0 ] [ 3,2,1 ] ─ ─ ─ ─ ┐
[ 3,2,1,0 ]
> > > import GHC.Natural
> > > import Data . Semigroup
newtype : : Natural } deriving ( , )
instance Semigroup where
instance where
= Max 0
a ` mappend ` Max b = a ` max ` b
4
treeFoldMap :: (Monoid m, TreeLike tree) => (a -> m) -> (m -> m) -> tree a -> m
treeFoldMap f g = getConst . treeTraverse (Const . f) (Const . g . treeFoldMap f g)
instance TreeLike Tree where
treeTraverse f g (Node a as) = Node <$> f a <*> traverse g as
instance TreeLike BinaryTree where
treeTraverse _ _ Leaf = pure Leaf
treeTraverse f g (Branch l a r) = Branch <$> g l <*> f a <*> g r
> > > smallBinaryTree = Branch ( Branch Leaf [ 0,1 ] Leaf ) [ 0 ] ( Branch Leaf [ 0,2 ] Leaf )
> > > smallRoseTree = Node [ 1 ] [ Node [ 1,0 ] [ ] , Node [ 1,1 ] [ ] , Node [ 1,2 ] [ ] , Node [ 1,3 ] [ ] ]
┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐
┌ ─ ─ ─ [ 0 ] ─ ─ ─ ┐ [ 1 ] ─ ─ ┬ ─ ─ ─ ─ ─ ┬ ┴ ─ ─ ─ ─ ┬ ─ ─ ─ ─ ─ ┐
│ │ │ │ │ │
┌ [ 0,1 ] ┐ ┌ [ 0,2 ] ┐ [ 1,0 ] [ 1,1 ] [ 1,2 ] [ 1,3 ]
[ 0,1 ]
[ 0,2 ]
[ 1,0 ]
[ 1,1 ]
[ 1 ]
┌ ─ ─ ─ ─ ─ ─ ─ ┐
┌ ─ 2 ─ ┐ 7 ┬ ─ ┬ ┴ ┬ ─ ┐
│
┌ 0 ┐ ┌ 1 ┐ 3 4 5 6
instance (TreeLike fst, TreeLike snd) => TreeLike (Product fst snd) where
treeTraverse _ g (Pair x y) = Pair <$> g x <*> g y
| Use ' Sum ' to unify two different types of trees into a single type .
> > > smallBinaryTree = Branch ( Branch Leaf [ 0,1 ] Leaf ) [ 0 ] ( Branch Leaf [ 0,2 ] Leaf )
> > > smallRoseTree = Node [ 1 ] [ Node [ 1,0 ] [ ] , Node [ 1,1 ] [ ] , Node [ 1,2 ] [ ] , Node [ 1,3 ] [ ] ]
> > > someTree b = if not b then InL smallBinaryTree else InR smallRoseTree
someTree : : a = > Bool - > Sum BinaryTree Tree [ a ]
┌ ─ ─ ─ [ 0 ] ─ ─ ─ ┐
┌ [ 0,1 ] ┐ ┌ [ 0,2 ] ┐
[ 1,0 ] [ 1,1 ] [ 1,2 ] [ 1,3 ]
instance (TreeLike left, TreeLike right) => TreeLike (Sum left right) where
treeTraverse _ g (InL x) = InL <$> g x
treeTraverse _ g (InR y) = InR <$> g y
[ 0 ] [ 1 ] ┴ ─ ┐ [ 2 ] ─ ─ ┬ ─ ┴ ─ ─ ─ ─ ─ ┐ [ 3 ] ─ ─ ┬ ─ ─ ─ ─ ─ ─ ─ ┬ ─ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐
[ 1,0 ] [ 2,0 ] [ 2,1 ] ─ ─ ─ ┐ [ 3,0 ] [ 3,1 ] ─ ─ ─ ┐ [ 3,2 ] ─ ─ ─ ┬ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ┐
[ 2,1,0 ] [ 3,1,0 ] [ 3,2,0 ] [ 3,2,1 ] ─ ─ ─ ─ ┐
> > > traversed < - levelorder visit ( Forest exampleTrees ) ` evalStateT ` 0
[ 1 ]
[ 2 ]
[ 3 ]
[ 1,0 ]
[ 2,0 ]
[ 2,1 ]
[ 3,0 ]
[ 3,1 ]
[ 3,2 ]
[ 3,1,0 ]
[ 3,2,1,0 ]
0 1 ┤ 2 ┬ ┴ ─ ┐ 3 ┬ ─ ─ ┬ ┴ ─ ─ ─ ─ ┐
│
4 5 6 ┴ ┐ 7 8 ┴ ┐ 9 ─ ┬ ┴ ─ ─ ┐
10 11 12 13 ┴
14
[ 0 ] [ 1 ] ┴ ─ ┐ [ 2 ] ─ ─ ┬ ─ ┴ ─ ─ ─ ─ ─ ┐ [ 3 ] ─ ─ ┬ ─ ─ ─ ─ ─ ─ ─ ┬ ─ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐
[ 1,0 ] [ 2,0 ] [ 2,1 ] ─ ─ ─ ┐ [ 3,0 ] [ 3,1 ] ─ ─ ─ ┐ [ 3,2 ] ─ ─ ─ ┬ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ┐
[ 2,1,0 ] [ 3,1,0 ] [ 3,2,0 ] [ 3,2,1 ] ─ ─ ─ ─ ┐
newtype Forest t tree a = Forest { getForest :: t (tree a) }
deriving Functor
instance (Traversable t, TreeLike tree) => TreeLike (Forest t tree) where
treeTraverse _ g = fmap Forest . traverse g . getForest
A newtype wrapper for @[a]@ whose ` TreeLike ` instance
treats each cons - cell as a tree containing one value and one subtree .
> > > printTree $ List [ 1 .. 5 ]
1 ─ ┐
2 ┴
3 ┴
4 ┴
> > > toList . PostOrder $ List [ 1 .. 5 ]
[ 5,4,3,2,1 ]
> > > printTree $ Flat [ 1 .. 5 ]
1 ─ 2 ─ 3 ─ 4 ─ 5
> > > toList . PostOrder $ Flat [ 1 .. 5 ]
[ 1,2,3,4,5 ]
newtype List a = List { getList :: [a] }
deriving Functor
instance TreeLike List where
treeTraverse f g (List as) = List <$> case as of
[] -> pure []
a:as -> (:) <$> f a <*> (fmap getList . g .List) as
> > > printTree $ Flat [ 1 .. 5 ]
1 ─ 2 ─ 3 ─ 4 ─ 5
> > > toList . PostOrder $ Flat [ 1 .. 5 ]
[ 1,2,3,4,5 ]
newtype Flat t a = Flat { getFlat :: t a }
deriving Functor
instance Traversable t => TreeLike (Flat t) where
treeTraverse f _ (Flat ta) = Flat <$> traverse f ta
( Branch Leaf ( Node ' a ' [ Node ' b ' [ ] , Node ' c ' [ ] , 'd ' [ ] ] ) Leaf )
┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┬ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐
┌ ─ ─ ─ ─ ─ ─ ─ ┼ ─ ─ ─ ─ ─ ─ ─ ┐ ' e ' ─ ┴ ─ ─ ┐ ┌ ─ ─ ─ ─ ┬ ─ ┴ ─ ─ ─ ─ ─ ─ ┐
╵ ' a ' ─ ┬ ─ ┴ ─ ┬ ─ ─ ─ ┐ ╵ ' f ' ─ ┼ ─ ─ ─ ╵ ' i ' ┴ ─ ─ ┐ ╵
> > > ( const [ " value " ] ) ( const [ " subtree " ] ) exampleCompose
instance (TreeLike outer, TreeLike inner) => TreeLike (Compose outer inner) where
treeTraverse _ g (Compose trees) = Compose <$> treeTraverse g (fmap getCompose . g . Compose) trees
┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ [ L ] ─ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐ ┌ [ R ] ─ ─ ─ ─ ┴ ─ ─ ─ ─ ─ ─ ┐
┌ [ L , L ] ─ ─ ─ ─ ┐ ┌ ─ ─ ─ ─ ─ ─ ─ ─ ┴ ─ ─ [ L , R ] ┐ ╵ ┌ ─ ─ ─ ─ [ R , R ] ─ ─ ─ ─ ┐
│ │ │
╵ ┌ [ L , L , R ] ┐ ┌ [ L , R , L ] ─ ─ ─ ─ ─ ┐ ╵ ┌ [ R , R , L ] ┐ ┌ [ R , R , R ] ┐
╵ ╵ ╵ ┌ [ L , R , L , R ] ┐ ╵ ╵ ╵ ╵
┌ ─ ─ 2 ┴ ─ ─ ─ ┐ ┌ 7 ─ ┴ ─ ─ ┐
┌ 0 ┴ ┐ ┌ ─ ┴ 5 ┐ ╵ ┌ ─ 9 ┴ ─ ┐
╵ ┌ 1 ┐ ┌ 3 ┴ ┐ ╵ ┌ 8 ┐ ┌ 10 ┐
[ 0 ] [ 1 ] ┴ ─ ┐ [ 2 ] ─ ─ ┬ ─ ┴ ─ ─ ─ ─ ─ ┐ [ 3 ] ─ ─ ┬ ─ ─ ─ ─ ─ ─ ─ ┬ ─ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐
[ 1,0 ] [ 2,0 ] [ 2,1 ] ─ ─ ─ ┐ [ 3,0 ] [ 3,1 ] ─ ─ ─ ┐ [ 3,2 ] ─ ─ ─ ┬ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ┐
[ 2,1,0 ] [ 3,1,0 ] [ 3,2,0 ] [ 3,2,1 ] ─ ─ ─ ─ ┐
[ 3,2,1,0 ]
[ 1 ]
[ 1,0 ]
[ 2 ]
[ 2,0 ]
[ 2,1 ]
[ 3 ]
[ 3,0 ]
[ 3,1 ]
[ 3,1,0 ]
[ 3,2 ]
[ 3,2,1,0 ]
0 ┬ ─ ─ ┬ ─ ─ ─ ┬ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐
│ │
1 2 ┤ 4 ┬ ┴ ─ ┐ 8 ┬ ─ ─ ─ ┬ ┴ ─ ─ ─ ─ ─ ┐
3 5 6 ┤ 9 10 ┴ ┐ 12 ─ ┬ ┴ ─ ─ ┐
7 11 13 14 ┴
15
inorder :: (Applicative f, TreeLike tree) => (a -> f b) -> tree a -> f (tree b)
inorder f = treeTraverse f (inorder f)
┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ [ L ] ─ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐ ┌ [ R ] ─ ─ ─ ─ ┴ ─ ─ ─ ─ ─ ─ ┐
┌ [ L , L ] ─ ─ ─ ─ ┐ ┌ ─ ─ ─ ─ ─ ─ ─ ─ ┴ ─ ─ [ L , R ] ┐ ╵ ┌ ─ ─ ─ ─ [ R , R ] ─ ─ ─ ─ ┐
│ │ │
╵ ┌ [ L , L , R ] ┐ ┌ [ L , R , L ] ─ ─ ─ ─ ─ ┐ ╵ ┌ [ R , R , L ] ┐ ┌ [ R , R , R ] ┐
╵ ╵ ╵ ┌ [ L , R , L , R ] ┐ ╵ ╵ ╵ ╵
┌ ─ ─ 1 ┴ ─ ─ ─ ┐ ┌ 7 ─ ┴ ─ ─ ┐
┌ 2 ┴ ┐ ┌ ─ ┴ 4 ┐ ╵ ┌ ─ 8 ┴ ─ ┐
╵ ┌ 3 ┐ ┌ 5 ┴ ┐ ╵ ┌ 9 ┐ ┌ 10 ┐
[ 0 ] [ 1 ] ┴ ─ ┐ [ 2 ] ─ ─ ┬ ─ ┴ ─ ─ ─ ─ ─ ┐ [ 3 ] ─ ─ ┬ ─ ─ ─ ─ ─ ─ ─ ┬ ─ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐
[ 1,0 ] [ 2,0 ] [ 2,1 ] ─ ─ ─ ┐ [ 3,0 ] [ 3,1 ] ─ ─ ─ ┐ [ 3,2 ] ─ ─ ─ ┬ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ┐
[ 2,1,0 ] [ 3,1,0 ] [ 3,2,0 ] [ 3,2,1 ] ─ ─ ─ ─ ┐
[ 3,2,1,0 ]
[ 1 ]
[ 1,0 ]
[ 2 ]
[ 2,0 ]
[ 2,1 ]
[ 3 ]
[ 3,0 ]
[ 3,1 ]
[ 3,1,0 ]
[ 3,2 ]
[ 3,2,1,0 ]
0 ┬ ─ ─ ┬ ─ ─ ─ ┬ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐
│ │
1 2 ┤ 4 ┬ ┴ ─ ┐ 8 ┬ ─ ─ ─ ┬ ┴ ─ ─ ─ ─ ─ ┐
3 5 6 ┤ 9 10 ┴ ┐ 12 ─ ┬ ┴ ─ ─ ┐
7 11 13 14 ┴
15
preorder :: (Applicative f, TreeLike tree) => (a -> f b) -> tree a -> f (tree b)
preorder f = runPhasesForwards . treeTraverse (now . f) (later . preorder f)
┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ [ L ] ─ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐ ┌ [ R ] ─ ─ ─ ─ ┴ ─ ─ ─ ─ ─ ─ ┐
┌ [ L , L ] ─ ─ ─ ─ ┐ ┌ ─ ─ ─ ─ ─ ─ ─ ─ ┴ ─ ─ [ L , R ] ┐ ╵ ┌ ─ ─ ─ ─ [ R , R ] ─ ─ ─ ─ ┐
│ │ │
╵ ┌ [ L , L , R ] ┐ ┌ [ L , R , L ] ─ ─ ─ ─ ─ ┐ ╵ ┌ [ R , R , L ] ┐ ┌ [ R , R , R ] ┐
╵ ╵ ╵ ┌ [ L , R , L , R ] ┐ ╵ ╵ ╵ ╵
┌ ─ ─ ─ ─ ─ ─ 10 ─ ─ ─ ┐
┌ ─ ─ 5 ┴ ─ ─ ─ ┐ ┌ 9 ─ ┴ ─ ┐
┌ 1 ┴ ┐ ┌ ─ ┴ 4 ┐ ╵ ┌ ─ 8 ─ ┐
│ │ │ │
╵ ┌ 0 ┐ ┌ 3 ┴ ┐ ╵ ┌ 6 ┐ ┌ 7 ┐
[ 0 ] [ 1 ] ┴ ─ ┐ [ 2 ] ─ ─ ┬ ─ ┴ ─ ─ ─ ─ ─ ┐ [ 3 ] ─ ─ ┬ ─ ─ ─ ─ ─ ─ ─ ┬ ─ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐
[ 1,0 ] [ 2,0 ] [ 2,1 ] ─ ─ ─ ┐ [ 3,0 ] [ 3,1 ] ─ ─ ─ ┐ [ 3,2 ] ─ ─ ─ ┬ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ┐
[ 2,1,0 ] [ 3,1,0 ] [ 3,2,0 ] [ 3,2,1 ] ─ ─ ─ ─ ┐
[ 3,2,1,0 ]
[ 1,0 ]
[ 1 ]
[ 2,0 ]
[ 2,1 ]
[ 2 ]
[ 3,0 ]
[ 3,1,0 ]
[ 3,1 ]
[ 3,2,1,0 ]
[ 3,2 ]
[ 3 ]
15 ┬ ─ ─ ┬ ─ ─ ─ ┬ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐
│ │
0 2 ┤ 6 ┬ ┴ ─ ┐ 14 ┬ ─ ─ ┬ ┴ ─ ─ ─ ─ ┐
│ │ │ │
1 3 5 ┤ 7 9 ┤ 13 ─ ┬ ┴ ─ ─ ┐
4 8 10 12 ┴
11
postorder :: (Applicative f, TreeLike tree) => (a -> f b) -> tree a -> f (tree b)
postorder f = runPhasesBackwards . treeTraverse (now . f) (later . postorder f)
| Traverse all the values of a tree in left - to - right breadth - first order .
( i.e. all nodes of depth @0@ , then all nodes of depth @1@ , then all nodes of
depth , etc . )
┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ [ L ] ─ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐ ┌ [ R ] ─ ─ ─ ─ ┴ ─ ─ ─ ─ ─ ─ ┐
┌ [ L , L ] ─ ─ ─ ─ ┐ ┌ ─ ─ ─ ─ ─ ─ ─ ─ ┴ ─ ─ [ L , R ] ┐ ╵ ┌ ─ ─ ─ ─ [ R , R ] ─ ─ ─ ─ ┐
│ │ │
╵ ┌ [ L , L , R ] ┐ ┌ [ L , R , L ] ─ ─ ─ ─ ─ ┐ ╵ ┌ [ R , R , L ] ┐ ┌ [ R , R , R ] ┐
╵ ╵ ╵ ┌ [ L , R , L , R ] ┐ ╵ ╵ ╵ ╵
┌ ─ ─ 1 ─ ┴ ─ ─ ─ ┐ ┌ 2 ─ ┴ ─ ┐
┌ 3 ┴ ┐ ┌ ─ ─ ┴ 4 ┐ ╵ ┌ ─ 5 ─ ┐
╵ ┌ 6 ┐ ┌ 7 ┴ ─ ┐ ╵ ┌ 8 ┐ ┌ 9 ┐
╵ ╵ ╵ ┌ 10 ┐ ╵ ╵ ╵ ╵
[ 0 ] [ 1 ] ┴ ─ ┐ [ 2 ] ─ ─ ┬ ─ ┴ ─ ─ ─ ─ ─ ┐ [ 3 ] ─ ─ ┬ ─ ─ ─ ─ ─ ─ ─ ┬ ─ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐
[ 1,0 ] [ 2,0 ] [ 2,1 ] ─ ─ ─ ┐ [ 3,0 ] [ 3,1 ] ─ ─ ─ ┐ [ 3,2 ] ─ ─ ─ ┬ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ┐
[ 2,1,0 ] [ 3,1,0 ] [ 3,2,0 ] [ 3,2,1 ] ─ ─ ─ ─ ┐
[ 3,2,1,0 ]
[ 1 ]
[ 2 ]
[ 3 ]
[ 1,0 ]
[ 2,0 ]
[ 2,1 ]
[ 3,0 ]
[ 3,1 ]
[ 3,2 ]
[ 3,1,0 ]
[ 3,2,1,0 ]
0 ┬ ─ ─ ┬ ─ ─ ─ ┬ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐
│ │
1 2 ┤ 3 ┬ ┴ ─ ┐ 4 ┬ ─ ─ ┬ ─ ┴ ─ ─ ─ ─ ┐
5 6 7 ┴ ┐ 8 9 ┴ ┐ 10 ─ ┬ ┴ ─ ─ ┐
11 12 13 14 ┴
15
levelorder :: (Applicative f, TreeLike tree) => (a -> f b) -> tree a -> f (tree b)
levelorder = \f -> runPhasesForwards . schedule f where
schedule :: (Applicative f, TreeLike tree) => (a -> f b) -> tree a -> Phases f (tree b)
schedule f = treeTraverse (now . f) (delay . schedule f)
| Traverse all the values of a tree in left - to - right inverse breadth - first order .
( i.e. all nodes of @n@ , then all nodes of depth @n-1@ , then all nodes of
depth @n-2@ , etc . )
┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ [ L ] ─ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐ ┌ [ R ] ─ ─ ─ ─ ┴ ─ ─ ─ ─ ─ ─ ┐
┌ [ L , L ] ─ ─ ─ ─ ┐ ┌ ─ ─ ─ ─ ─ ─ ─ ─ ┴ ─ ─ [ L , R ] ┐ ╵ ┌ ─ ─ ─ ─ [ R , R ] ─ ─ ─ ─ ┐
│ │ │
╵ ┌ [ L , L , R ] ┐ ┌ [ L , R , L ] ─ ─ ─ ─ ─ ┐ ╵ ┌ [ R , R , L ] ┐ ┌ [ R , R , R ] ┐
╵ ╵ ╵ ┌ [ L , R , L , R ] ┐ ╵ ╵ ╵ ╵
┌ ─ ─ ─ ─ ─ ─ 10 ─ ─ ─ ┐
┌ ─ ─ 8 ┴ ─ ─ ─ ┐ ┌ 9 ─ ┴ ─ ┐
┌ 5 ┴ ┐ ┌ ─ ┴ 6 ┐ ╵ ┌ ─ 7 ─ ┐
│ │ │ │
╵ ┌ 1 ┐ ┌ 2 ┴ ┐ ╵ ┌ 3 ┐ ┌ 4 ┐
╵ ╵ ╵ ┌ 0 ┐ ╵ ╵ ╵ ╵
[ 0 ] [ 1 ] ┴ ─ ┐ [ 2 ] ─ ─ ┬ ─ ┴ ─ ─ ─ ─ ─ ┐ [ 3 ] ─ ─ ┬ ─ ─ ─ ─ ─ ─ ─ ┬ ─ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐
[ 1,0 ] [ 2,0 ] [ 2,1 ] ─ ─ ─ ┐ [ 3,0 ] [ 3,1 ] ─ ─ ─ ┐ [ 3,2 ] ─ ─ ─ ┬ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ┐
[ 2,1,0 ] [ 3,1,0 ] [ 3,2,0 ] [ 3,2,1 ] ─ ─ ─ ─ ┐
[ 3,2,1,0 ]
[ 3,2,1,0 ]
[ 3,1,0 ]
[ 1,0 ]
[ 2,0 ]
[ 2,1 ]
[ 3,0 ]
[ 3,1 ]
[ 3,2 ]
[ 1 ]
[ 2 ]
[ 3 ]
15 ─ ┬ ─ ─ ┬ ─ ─ ─ ─ ─ ┬ ─ ─ ─ ─ ─ ─ ─ ─ ┐
│ │
11 12 ┐ 13 ┬ ┴ ─ ┐ 14 ┬ ─ ─ ┼ ─ ─ ─ ─ ┐
│ │
5 6 7 ┤ 8 9 ┤ 10 ┬ ┴ ─ ┐
│ │
rlevelorder :: (Applicative f, TreeLike tree) => (a -> f b) -> tree a -> f (tree b)
rlevelorder = \f -> runPhasesBackwards . schedule f where
schedule :: (Applicative f, TreeLike tree) => (a -> f b) -> tree a -> Phases f (tree b)
schedule f = treeTraverse (now . f) (delay . schedule f)
┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ [ L ] ─ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐ ┌ [ R ] ─ ─ ─ ─ ┴ ─ ─ ─ ─ ─ ─ ┐
┌ [ L , L ] ─ ─ ─ ─ ┐ ┌ ─ ─ ─ ─ ─ ─ ─ ─ ┴ ─ ─ [ L , R ] ┐ ╵ ┌ ─ ─ ─ ─ [ R , R ] ─ ─ ─ ─ ┐
│ │ │
╵ ┌ [ L , L , R ] ┐ ┌ [ L , R , L ] ─ ─ ─ ─ ─ ┐ ╵ ┌ [ R , R , L ] ┐ ┌ [ R , R , R ] ┐
╵ ╵ ╵ ┌ [ L , R , L , R ] ┐ ╵ ╵ ╵ ╵
> > > _ < - traverse print $ InOrder exampleBinaryTree
newtype InOrder tree a = InOrder { getInOrder :: tree a }
deriving Functor
instance TreeLike tree => Foldable (InOrder tree) where
foldMap = foldMapDefault
instance TreeLike tree => Traversable (InOrder tree) where
traverse f = fmap InOrder . inorder f . getInOrder
┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ [ L ] ─ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐ ┌ [ R ] ─ ─ ─ ─ ┴ ─ ─ ─ ─ ─ ─ ┐
┌ [ L , L ] ─ ─ ─ ─ ┐ ┌ ─ ─ ─ ─ ─ ─ ─ ─ ┴ ─ ─ [ L , R ] ┐ ╵ ┌ ─ ─ ─ ─ [ R , R ] ─ ─ ─ ─ ┐
│ │ │
╵ ┌ [ L , L , R ] ┐ ┌ [ L , R , L ] ─ ─ ─ ─ ─ ┐ ╵ ┌ [ R , R , L ] ┐ ┌ [ R , R , R ] ┐
╵ ╵ ╵ ┌ [ L , R , L , R ] ┐ ╵ ╵ ╵ ╵
> > > _ < - traverse print $ PreOrder exampleBinaryTree
newtype PreOrder tree a = PreOrder { getPreOrder :: tree a }
deriving Functor
instance TreeLike tree => Foldable (PreOrder tree) where
foldMap = foldMapDefault
instance TreeLike tree => Traversable (PreOrder tree) where
traverse f = fmap PreOrder . preorder f . getPreOrder
┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ [ L ] ─ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐ ┌ [ R ] ─ ─ ─ ─ ┴ ─ ─ ─ ─ ─ ─ ┐
┌ [ L , L ] ─ ─ ─ ─ ┐ ┌ ─ ─ ─ ─ ─ ─ ─ ─ ┴ ─ ─ [ L , R ] ┐ ╵ ┌ ─ ─ ─ ─ [ R , R ] ─ ─ ─ ─ ┐
│ │ │
╵ ┌ [ L , L , R ] ┐ ┌ [ L , R , L ] ─ ─ ─ ─ ─ ┐ ╵ ┌ [ R , R , L ] ┐ ┌ [ R , R , R ] ┐
╵ ╵ ╵ ┌ [ L , R , L , R ] ┐ ╵ ╵ ╵ ╵
> > > _ < - traverse print $ PostOrder exampleBinaryTree
newtype PostOrder tree a = PostOrder { getPostOrder :: tree a }
deriving Functor
instance TreeLike tree => Foldable (PostOrder tree) where
foldMap = foldMapDefault
instance TreeLike tree => Traversable (PostOrder tree) where
traverse f = fmap PostOrder . postorder f . getPostOrder
┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ [ L ] ─ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐ ┌ [ R ] ─ ─ ─ ─ ┴ ─ ─ ─ ─ ─ ─ ┐
┌ [ L , L ] ─ ─ ─ ─ ┐ ┌ ─ ─ ─ ─ ─ ─ ─ ─ ┴ ─ ─ [ L , R ] ┐ ╵ ┌ ─ ─ ─ ─ [ R , R ] ─ ─ ─ ─ ┐
│ │ │
╵ ┌ [ L , L , R ] ┐ ┌ [ L , R , L ] ─ ─ ─ ─ ─ ┐ ╵ ┌ [ R , R , L ] ┐ ┌ [ R , R , R ] ┐
╵ ╵ ╵ ┌ [ L , R , L , R ] ┐ ╵ ╵ ╵ ╵
newtype LevelOrder tree a = LevelOrder { getLevelOrder :: tree a }
deriving Functor
instance TreeLike tree => Foldable (LevelOrder tree) where
foldMap = foldMapDefault
instance TreeLike tree => Traversable (LevelOrder tree) where
traverse f = fmap LevelOrder . levelorder f . getLevelOrder
┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ [ L ] ─ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐ ┌ [ R ] ─ ─ ─ ─ ┴ ─ ─ ─ ─ ─ ─ ┐
┌ [ L , L ] ─ ─ ─ ─ ┐ ┌ ─ ─ ─ ─ ─ ─ ─ ─ ┴ ─ ─ [ L , R ] ┐ ╵ ┌ ─ ─ ─ ─ [ R , R ] ─ ─ ─ ─ ┐
│ │ │
╵ ┌ [ L , L , R ] ┐ ┌ [ L , R , L ] ─ ─ ─ ─ ─ ┐ ╵ ┌ [ R , R , L ] ┐ ┌ [ R , R , R ] ┐
╵ ╵ ╵ ┌ [ L , R , L , R ] ┐ ╵ ╵ ╵ ╵
> > > _ < - traverse print $ RLevelOrder exampleBinaryTree
newtype RLevelOrder tree a = RLevelOrder { getRLevelOrder :: tree a }
deriving Functor
instance TreeLike tree => Foldable (RLevelOrder tree) where
foldMap = foldMapDefault
instance TreeLike tree => Traversable (RLevelOrder tree) where
traverse f = fmap RLevelOrder . rlevelorder f . getRLevelOrder
> > > import Control . Monad . State
next = const . state $ \n - > ( n , n+1 )
exampleTrees =
, Node [ 1 ] [ Node [ 1,0 ] [ ] ]
, Node [ 2 ] [ Node [ 2,0 ] [ ] , Node [ 2,1 ] [ Node [ 2,1,0 ] [ ] ] ]
, Node [ 3 ]
[ Node [ 3,0 ] [ ]
, Node [ 3,1 ] [ Node [ 3,1,0 ] [ ] ]
, Node [ 3,2 ] [ Node [ 3,2,0 ] [ ] , Node [ 3,2,1 ] [ Node [ 3,2,1,0 ] [ ] ] ]
|
d2b5de776895d91b11bcf3cab43b4dc4712c621821413305f2b3850abb16123d | sunng87/slacker | bench.clj | (ns slacker.example.bench
(:require [slacker.client :refer :all])
(:import [java.util.concurrent Executors CountDownLatch]))
(def conn (slackerc "127.0.0.1:2104" :content-type :nippy))
(defn-remote conn slacker.example.api/rand-ints)
(defn -main [& args]
(let [task-count 500000
pool (Executors/newFixedThreadPool 500)
counter (CountDownLatch. task-count)
task (fn [] (rand-ints 40) (.countDown counter))
tasks (take task-count (repeat task))]
(time
(do
(.invokeAll pool tasks)
(.await counter)))
(shutdown-slacker-client-factory)
(shutdown-agents)))
| null | https://raw.githubusercontent.com/sunng87/slacker/60e5372782bed6fc58cb8ba55951516a6b971513/examples/slacker/example/bench.clj | clojure | (ns slacker.example.bench
(:require [slacker.client :refer :all])
(:import [java.util.concurrent Executors CountDownLatch]))
(def conn (slackerc "127.0.0.1:2104" :content-type :nippy))
(defn-remote conn slacker.example.api/rand-ints)
(defn -main [& args]
(let [task-count 500000
pool (Executors/newFixedThreadPool 500)
counter (CountDownLatch. task-count)
task (fn [] (rand-ints 40) (.countDown counter))
tasks (take task-count (repeat task))]
(time
(do
(.invokeAll pool tasks)
(.await counter)))
(shutdown-slacker-client-factory)
(shutdown-agents)))
|
|
6e5f4a5a2f2d9e96512323c0a9f392652c96bbb7cae8da81c41a4d809f48e1bf | nomeata/haskell-rec-def | P2.hs | # LANGUAGE LambdaCase #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE CPP #
| A propagator for the two - point lattice
--
module Data.Propagator.P2
( P2
, newP2
, newTopP2
, setTop
, whenTop
, implies
, isTop
)
where
import Data.Propagator.Class
I want to test this code with dejafu , without carrying it as a dependency
of the main library . So here is a bit of CPP to care for that .
#ifdef DEJAFU
#define Ctxt MonadConc m =>
#define MaybeTop_ (MaybeTop m)
#define P2_ (P2 m)
#define PBool_ PBool m
#define PDualBool_ PDualBool m
#define MVar_ MVar m
#define M m
import Control.Concurrent.Classy
#else
#define Ctxt
#define MaybeTop_ MaybeTop
#define P2_ P2
#define PBool_ PBool
#define PDualBool_ PDualBool
#define MVar_ MVar
#define M IO
import Control.Exception
import Control.Concurrent.MVar
#endif
data MaybeTop_
= StillBottom (M ()) -- ^ Just act: Still bottom, run act (once!) when triggered
| SurelyBottom -- ^ Definitely bottom
| SurelyTop -- ^ Definitely top
| A type for propagators for the two - point lattice , consisting of bottom and top
newtype P2_ = P2 (MVar_ MaybeTop_)
-- | A new propagator, initialized at bottom
newP2 :: Ctxt M P2_
newP2 = P2 <$> newMVar (StillBottom (pure()))
-- | A new propagator, already set to top
newTopP2 :: Ctxt M P2_
newTopP2 = P2 <$> newMVar SurelyTop
| @whenTop p act@ runs @act@ if @p@ is already top , or after @setTop p@ is run
whenTop :: Ctxt P2_ -> M () -> M ()
whenTop (P2 p1) act = takeMVar p1 >>= \case
SurelyTop -> putMVar p1 SurelyTop >> act
SurelyBottom -> putMVar p1 SurelyBottom
StillBottom act' -> putMVar p1 (StillBottom (act >> act'))
-- | Set a propagator to top.
--
-- If it was bottom before, runs the actions queued with 'whenTop'. It does so
-- /after/ setting the propagator to top, so that cycles are broken.
setTop :: Ctxt P2_ -> M ()
setTop (P2 p) = takeMVar p >>= \case
SurelyTop -> putMVar p SurelyTop
SurelyBottom -> throw WriteToFrozenPropagatorException
StillBottom act -> do
Do this first , this breaks cycles
putMVar p SurelyTop
-- Now notify the dependencies
act
| @p1 ` implies ` p2@ chains propagators : If @p1@ becomes top , then so does @p2@.
implies :: Ctxt P2_ -> P2_ -> M ()
implies p1 p2 = whenTop p1 (setTop p2)
-- | Queries the current state of the propagator. All related calls to @setTop@
-- that have executed so far are taken into account.
isTop :: Ctxt P2_ -> M Bool
isTop (P2 p) = readMVar p >>= \case
SurelyTop -> pure True
SurelyBottom -> pure False
StillBottom _ -> pure False
#ifndef DEJAFU
-- | Freezes the value. Drops references to watchers.
freeze :: Ctxt P2_ -> M ()
freeze (P2 p) = takeMVar p >>= \case
SurelyTop -> putMVar p SurelyTop
_ -> putMVar p SurelyBottom
instance Propagator P2_ Bool where
newProp = newP2
newConstProp False = newP2
newConstProp True = newTopP2
freezeProp = freeze
readProp = isTop
#endif
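-- A minimal usage sketch (assuming the plain IO build, i.e. DEJAFU not
-- defined); the helper name @demo@ is made up for illustration:
--
-- > demo :: IO Bool
-- > demo = do
-- >   p <- newP2
-- >   q <- newP2
-- >   p `implies` q
-- >   setTop p
-- >   isTop q   -- True: setting p to top propagated to q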
| null | https://raw.githubusercontent.com/nomeata/haskell-rec-def/3365379b6c29aa32a5170b8d0336c73413ca7434/Data/Propagator/P2.hs | haskell |
^ Just act: Still bottom, run act (once!) when triggered
^ Definitely bottom
^ Definitely top
| A new propagator, initialized at bottom
| A new propagator, already set to top
| Set a propagator to top.
If it was bottom before, runs the actions queued with 'whenTop'. It does so
/after/ setting the propagator to top, so that cycles are broken.
Now notify the dependencies
| Queries the current state of the propagator. All related calls to @setTop@
that have executed so far are taken into account.
| Freezes the value. Drops references to watchers. | # LANGUAGE LambdaCase #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE CPP #
| A propagator for the two - point lattice
module Data.Propagator.P2
( P2
, newP2
, newTopP2
, setTop
, whenTop
, implies
, isTop
)
where
import Data.Propagator.Class
I want to test this code with dejafu , without carrying it as a dependency
of the main library . So here is a bit of CPP to care for that .
#ifdef DEJAFU
#define Ctxt MonadConc m =>
#define MaybeTop_ (MaybeTop m)
#define P2_ (P2 m)
#define PBool_ PBool m
#define PDualBool_ PDualBool m
#define MVar_ MVar m
#define M m
import Control.Concurrent.Classy
#else
#define Ctxt
#define MaybeTop_ MaybeTop
#define P2_ P2
#define PBool_ PBool
#define PDualBool_ PDualBool
#define MVar_ MVar
#define M IO
import Control.Exception
import Control.Concurrent.MVar
#endif
data MaybeTop_
| A type for propagators for the two - point lattice , consisting of bottom and top
newtype P2_ = P2 (MVar_ MaybeTop_)
newP2 :: Ctxt M P2_
newP2 = P2 <$> newMVar (StillBottom (pure()))
newTopP2 :: Ctxt M P2_
newTopP2 = P2 <$> newMVar SurelyTop
| @whenTop p act@ runs @act@ if @p@ is already top , or after @setTop p@ is run
whenTop :: Ctxt P2_ -> M () -> M ()
whenTop (P2 p1) act = takeMVar p1 >>= \case
SurelyTop -> putMVar p1 SurelyTop >> act
SurelyBottom -> putMVar p1 SurelyBottom
StillBottom act' -> putMVar p1 (StillBottom (act >> act'))
setTop :: Ctxt P2_ -> M ()
setTop (P2 p) = takeMVar p >>= \case
SurelyTop -> putMVar p SurelyTop
SurelyBottom -> throw WriteToFrozenPropagatorException
StillBottom act -> do
Do this first , this breaks cycles
putMVar p SurelyTop
act
| @p1 ` implies ` p2@ chains propagators : If @p1@ becomes top , then so does @p2@.
implies :: Ctxt P2_ -> P2_ -> M ()
implies p1 p2 = whenTop p1 (setTop p2)
isTop :: Ctxt P2_ -> M Bool
isTop (P2 p) = readMVar p >>= \case
SurelyTop -> pure True
SurelyBottom -> pure False
StillBottom _ -> pure False
#ifndef DEJAFU
freeze :: Ctxt P2_ -> M ()
freeze (P2 p) = takeMVar p >>= \case
SurelyTop -> putMVar p SurelyTop
_ -> putMVar p SurelyBottom
instance Propagator P2_ Bool where
newProp = newP2
newConstProp False = newP2
newConstProp True = newTopP2
freezeProp = freeze
readProp = isTop
#endif
|
57738936b7367561dd199be5dd39ce5dfbb0eac491b2e5d1b9cf0a6d0d517187 | VisionsGlobalEmpowerment/webchange | cover_back.clj | (ns webchange.templates.library.flipbook.cover-back
(:require
[webchange.templates.library.flipbook.display-names :refer [get-text-display-name]]
[webchange.utils.scene-data :refer [generate-name rename-object]]))
(def page-name "page-cover-back")
(def resources ["/raw/img/flipbook/logo_2.png"])
(def template
{:page-cover-back {:type "group"
:transition "page-cover-back"
:children ["page-cover-back-background"
"page-cover-back-image"
"page-cover-back-license"]
:generated? true}
:page-cover-back-background {:type "rectangle"
:x 0
:y 0
:width "---"
:height "---"
:fill "---"}
:page-cover-back-image {:type "image"
:x "---"
:y 350
:src "/raw/img/flipbook/logo_2.png"
:origin {:type "center-center"}}
:page-cover-back-license {:type "text"
:word-wrap true
:vertical-align "top"
:font-size 24
:x "---"
:y 700
:width "---"
:fill "---"
:text "---"
:editable? {:select true
:drag true}
:placeholder "Add attributions"
:metadata {:display-name (get-text-display-name :cover-back "License")
:removable? true}}})
(defn- apply-page-size
[page-data {:keys [width height padding]}]
(let [page-center (/ width 2)]
(-> page-data
(assoc-in [:page-cover-back-background :width] width)
(assoc-in [:page-cover-back-background :height] height)
(assoc-in [:page-cover-back-image :x] page-center)
(assoc-in [:page-cover-back-license :x] (* 2 padding))
(assoc-in [:page-cover-back-license :width] (- width (* 4 padding))))))
(defn- set-content
[page-data {:keys [authors illustrators]}]
(let [text (cond->
"Written by __authors__.
Illustrated by __illustrators__."
true (clojure.string/replace-first #"__authors__" (clojure.string/join ", " authors))
(empty? illustrators) (clojure.string/replace-first #"Illustrated by __illustrators__." "")
(not (empty? illustrators)) (clojure.string/replace-first #"__illustrators__" (clojure.string/join ", " illustrators)))]
(-> page-data
(assoc-in [:page-cover-back-license :text] text))))
(defn- set-colors
[page-data {:keys [text-color background-color border-color]}]
(-> page-data
(assoc-in [:page-cover-back-background :fill] background-color)
(assoc-in [:page-cover-back-background-border :fill] border-color)
(assoc-in [:page-cover-back-license :fill] text-color)))
(defn create
[page-params content-params]
{:name page-name
:resources resources
:objects (-> template
(apply-page-size page-params)
(set-content content-params)
(set-colors page-params))})
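;; Illustrative invocation only (all parameter values below are made up; in the
;; real template they come from the flipbook page and content settings):
(comment
  (create {:width 960 :height 1080 :padding 20
           :text-color "#333333" :background-color "#ffffff" :border-color "#cccccc"}
          {:authors ["A. Author"] :illustrators ["I. Illustrator"]}))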
| null | https://raw.githubusercontent.com/VisionsGlobalEmpowerment/webchange/4be2784b170befe245d60e32271b88e6fae31c13/src/clj/webchange/templates/library/flipbook/cover_back.clj | clojure | (ns webchange.templates.library.flipbook.cover-back
(:require
[webchange.templates.library.flipbook.display-names :refer [get-text-display-name]]
[webchange.utils.scene-data :refer [generate-name rename-object]]))
(def page-name "page-cover-back")
(def resources ["/raw/img/flipbook/logo_2.png"])
(def template
{:page-cover-back {:type "group"
:transition "page-cover-back"
:children ["page-cover-back-background"
"page-cover-back-image"
"page-cover-back-license"]
:generated? true}
:page-cover-back-background {:type "rectangle"
:x 0
:y 0
:width "---"
:height "---"
:fill "---"}
:page-cover-back-image {:type "image"
:x "---"
:y 350
:src "/raw/img/flipbook/logo_2.png"
:origin {:type "center-center"}}
:page-cover-back-license {:type "text"
:word-wrap true
:vertical-align "top"
:font-size 24
:x "---"
:y 700
:width "---"
:fill "---"
:text "---"
:editable? {:select true
:drag true}
:placeholder "Add attributions"
:metadata {:display-name (get-text-display-name :cover-back "License")
:removable? true}}})
(defn- apply-page-size
[page-data {:keys [width height padding]}]
(let [page-center (/ width 2)]
(-> page-data
(assoc-in [:page-cover-back-background :width] width)
(assoc-in [:page-cover-back-background :height] height)
(assoc-in [:page-cover-back-image :x] page-center)
(assoc-in [:page-cover-back-license :x] (* 2 padding))
(assoc-in [:page-cover-back-license :width] (- width (* 4 padding))))))
(defn- set-content
[page-data {:keys [authors illustrators]}]
(let [text (cond->
"Written by __authors__.
Illustrated by __illustrators__."
true (clojure.string/replace-first #"__authors__" (clojure.string/join ", " authors))
(empty? illustrators) (clojure.string/replace-first #"Illustrated by __illustrators__." "")
(not (empty? illustrators)) (clojure.string/replace-first #"__illustrators__" (clojure.string/join ", " illustrators)))]
(-> page-data
(assoc-in [:page-cover-back-license :text] text))))
(defn- set-colors
[page-data {:keys [text-color background-color border-color]}]
(-> page-data
(assoc-in [:page-cover-back-background :fill] background-color)
(assoc-in [:page-cover-back-background-border :fill] border-color)
(assoc-in [:page-cover-back-license :fill] text-color)))
(defn create
[page-params content-params]
{:name page-name
:resources resources
:objects (-> template
(apply-page-size page-params)
(set-content content-params)
(set-colors page-params))})
|
|
0685a7af69d2d01f61531b0af3ffd3c15a4363b480db0e95ca65aa55e1b490b7 | rmloveland/scheme48-0.53 | jar-defrecord.scm | Copyright ( c ) 1993 - 1999 by and . See file COPYING .
; This knows about the implementation of records and creates the various
accessors , mutators , etc . directly instead of calling the procedures
; from the record structure. This is done to allow the optional auto-inlining
optimizer to inline the accessors , mutators , etc .
; LOOPHOLE is used to get a little compile-time type checking (in addition to
; the usual complete run-time checking).
(define-syntax define-record-type
(syntax-rules ()
((define-record-type ?id ?type
(?constructor ?arg ...)
(?field . ?field-stuff)
...)
(begin (define ?type (make-record-type '?id '(?field ...)))
(define-constructor ?constructor ?type
((?arg :value) ...)
(?field ...))
(define-accessors ?type (?field . ?field-stuff) ...)))
((define-record-type ?id ?type
(?constructor ?arg ...)
?pred
?more ...)
(begin (define-record-type ?id ?type
(?constructor ?arg ...)
?more ...)
(define ?pred
(lambda (x)
(and (record? x)
(eq? ?type (record-ref x 0)))))))))
; (define-constructor <id> <type> ((<arg> <arg-type>)*) (<field-name>*))
;
; Checks to see that there is an <arg> corresponding to every <field-name>.
(define-syntax define-constructor
(lambda (e r c)
(let ((%record (r 'record))
(%begin (r 'begin))
(%lambda (r 'lambda))
(%loophole (r 'loophole))
(%proc (r 'proc))
(%unspecific (r 'unspecific))
(name (cadr e))
(type (caddr e))
(args (map car (cadddr e)))
(arg-types (map cadr (cadddr e)))
(fields (caddr (cddr e))))
(define (mem? name list)
(cond ((null? list) #f)
((c name (car list)) #t)
(else
(mem? name (cdr list)))))
(define (every? pred list)
(cond ((null? list) #t)
((pred (car list))
(every? pred (cdr list)))
(else #f)))
(if (every? (lambda (arg)
(mem? arg fields))
args)
`(define ,name
(,%loophole (,%proc ,arg-types ,type)
(,%lambda ,args
(,%record ,type . ,(map (lambda (field)
(if (mem? field args)
field
(list %unspecific)))
fields)))))
e)))
(record begin lambda loophole proc unspecific))
(define-syntax define-accessors
(lambda (e r c)
(let ((%define-accessor (r 'define-accessor))
(%begin (r 'begin))
(type (cadr e))
(field-specs (cddr e)))
(do ((i 1 (+ i 1))
(field-specs field-specs (cdr field-specs))
(ds '()
(cons `(,%define-accessor ,type ,i ,@(cdar field-specs))
ds)))
((null? field-specs)
`(,%begin ,@ds)))))
(define-accessor begin))
(define-syntax define-accessor
(syntax-rules ()
((define-accessor ?type ?index ?accessor)
(define ?accessor
(loophole (proc (?type) :value)
(lambda (r)
(checked-record-ref (loophole :record r) ?type ?index)))))
((define-accessor ?type ?index ?accessor ?modifier)
(begin (define-accessor ?type ?index ?accessor)
(define ?modifier
(loophole (proc (?type :value) :unspecific)
(lambda (r new)
(checked-record-set! (loophole :record r) ?type ?index new))))))
((define-accessor ?type ?index)
(begin))))
| null | https://raw.githubusercontent.com/rmloveland/scheme48-0.53/1ae4531fac7150bd2af42d124da9b50dd1b89ec1/scheme/rts/jar-defrecord.scm | scheme | This knows about the implementation of records and creates the various
from the record structure. This is done to allow the optional auto-inlining
LOOPHOLE is used to get a little compile-time type checking (in addition to
the usual complete run-time checking).
(define-constructor <id> <type> ((<arg> <arg-type>)*) (<field-name>*))
Checks to see that there is an <arg> corresponding to every <field-name>. | Copyright ( c ) 1993 - 1999 by and . See file COPYING .
accessors , mutators , etc . directly instead of calling the procedures
optimizer to inline the accessors , mutators , etc .
(define-syntax define-record-type
(syntax-rules ()
((define-record-type ?id ?type
(?constructor ?arg ...)
(?field . ?field-stuff)
...)
(begin (define ?type (make-record-type '?id '(?field ...)))
(define-constructor ?constructor ?type
((?arg :value) ...)
(?field ...))
(define-accessors ?type (?field . ?field-stuff) ...)))
((define-record-type ?id ?type
(?constructor ?arg ...)
?pred
?more ...)
(begin (define-record-type ?id ?type
(?constructor ?arg ...)
?more ...)
(define ?pred
(lambda (x)
(and (record? x)
(eq? ?type (record-ref x 0)))))))))
(define-syntax define-constructor
(lambda (e r c)
(let ((%record (r 'record))
(%begin (r 'begin))
(%lambda (r 'lambda))
(%loophole (r 'loophole))
(%proc (r 'proc))
(%unspecific (r 'unspecific))
(name (cadr e))
(type (caddr e))
(args (map car (cadddr e)))
(arg-types (map cadr (cadddr e)))
(fields (caddr (cddr e))))
(define (mem? name list)
(cond ((null? list) #f)
((c name (car list)) #t)
(else
(mem? name (cdr list)))))
(define (every? pred list)
(cond ((null? list) #t)
((pred (car list))
(every? pred (cdr list)))
(else #f)))
(if (every? (lambda (arg)
(mem? arg fields))
args)
`(define ,name
(,%loophole (,%proc ,arg-types ,type)
(,%lambda ,args
(,%record ,type . ,(map (lambda (field)
(if (mem? field args)
field
(list %unspecific)))
fields)))))
e)))
(record begin lambda loophole proc unspecific))
(define-syntax define-accessors
(lambda (e r c)
(let ((%define-accessor (r 'define-accessor))
(%begin (r 'begin))
(type (cadr e))
(field-specs (cddr e)))
(do ((i 1 (+ i 1))
(field-specs field-specs (cdr field-specs))
(ds '()
(cons `(,%define-accessor ,type ,i ,@(cdar field-specs))
ds)))
((null? field-specs)
`(,%begin ,@ds)))))
(define-accessor begin))
(define-syntax define-accessor
(syntax-rules ()
((define-accessor ?type ?index ?accessor)
(define ?accessor
(loophole (proc (?type) :value)
(lambda (r)
(checked-record-ref (loophole :record r) ?type ?index)))))
((define-accessor ?type ?index ?accessor ?modifier)
(begin (define-accessor ?type ?index ?accessor)
(define ?modifier
(loophole (proc (?type :value) :unspecific)
(lambda (r new)
(checked-record-set! (loophole :record r) ?type ?index new))))))
((define-accessor ?type ?index)
(begin))))
|
884ec20dbe6e3072d6904dfd668b2d47c829cd16d2e7d33429d4a67a01aeb7d7 | haskellari/strict | Strict.hs | {-# LANGUAGE Safe #-}
-----------------------------------------------------------------------------
-- |
-- Module : System.IO.Strict
-- Copyright    : (c) 2007
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer :
-- Stability : stable
-- Portability : portable
--
-- The standard IO input functions using strict IO.
--
-----------------------------------------------------------------------------
module System.IO.Strict (
-- * Strict Handle IO
hGetContents, -- :: Handle -> IO [Char]
-- * Strict String IO wrappers
getContents, -- :: IO String
readFile, -- :: FilePath -> IO String
interact -- :: (String -> String) -> IO ()
) where
import Prelude ( String, (>>=), seq, return, (.), (=<<), FilePath, length)
import System.IO (IO)
import qualified System.IO as IO
-- -----------------------------------------------------------------------------
-- Strict hGetContents
-- | Computation 'hGetContents' @hdl@ returns the list of characters
-- corresponding to the unread portion of the channel or file managed
-- by @hdl@, which is immediately closed.
--
-- Items are read strictly from the input Handle.
--
-- This operation may fail with:
--
-- * 'isEOFError' if the end of file has been reached.
hGetContents :: IO.Handle -> IO.IO String
hGetContents h = IO.hGetContents h >>= \s -> length s `seq` return s
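-- Illustrative (not part of the original module): because the contents are
-- forced before returning, this strict version is safe to combine with
-- 'IO.withFile', which closes the handle as soon as the action returns.
-- The names @readWhole@ and @path@ below are hypothetical.
--
-- > readWhole :: FilePath -> IO String
-- > readWhole path = IO.withFile path IO.ReadMode hGetContents
--
-- With the lazy 'IO.hGetContents' the handle could be closed before the
-- string is ever demanded.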
-- -----------------------------------------------------------------------------
-- Standard IO
-- | The 'getContents' operation returns all user input as a single string,
-- which is read strictly (same as 'hGetContents' 'stdin').
getContents :: IO String
getContents = hGetContents IO.stdin
# INLINE getContents #
-- | The 'interact' function takes a function of type @String->String@
-- as its argument. The entire input from the standard input device is
-- passed to this function as its argument, and the resulting string is
-- output on the standard output device.
interact :: (String -> String) -> IO ()
interact f = IO.putStr . f =<< getContents
{-# INLINE interact #-}
-- | The 'readFile' function reads a file and
-- returns the contents of the file as a string.
-- The file is read strictly, as with 'hGetContents'.
readFile :: FilePath -> IO String
readFile name = IO.openFile name IO.ReadMode >>= hGetContents
{-# INLINE readFile #-}
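-- Illustrative (not part of the original module): since the whole file is
-- consumed before 'readFile' returns, reading and then rewriting the same
-- file is fine here, whereas the lazy 'Prelude.readFile' would typically
-- fail with a "resource busy (file is locked)" error.  @rewrite@ and @p@
-- are hypothetical names.
--
-- > rewrite :: FilePath -> IO ()
-- > rewrite p = readFile p >>= IO.writeFile p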
| null | https://raw.githubusercontent.com/haskellari/strict/4691acdfbe95dcd49725baec09127fa0077d93ba/strict/src/System/IO/Strict.hs | haskell | # LANGUAGE Safe #
---------------------------------------------------------------------------
|
Module : System.IO.Strict
License : BSD-style (see the file libraries/base/LICENSE)
Maintainer :
Stability : stable
Portability : portable
---------------------------------------------------------------------------
* Strict Handle IO
:: Handle -> IO [Char]
* Strict String IO wrappers
:: IO String
:: FilePath -> IO String
:: (String -> String) -> IO ()
-----------------------------------------------------------------------------
corresponding to the unread portion of the channel or file managed
Items are read strictly from the input Handle.
This operation may fail with:
* 'isEOFError' if the end of file has been reached.
-----------------------------------------------------------------------------
Standard IO
| The 'getContents' operation returns all user input as a single string,
| The 'interact' function takes a function of type @String->String@
as its argument. The entire input from the standard input device is
passed to this function as its argument, and the resulting string is
output on the standard output device.
| The 'readFile' function reads a file and
returns the contents of the file as a string. | Copyright : ( c ) 2007
The standard IO input functions using strict IO .
module System.IO.Strict (
) where
import Prelude ( String, (>>=), seq, return, (.), (=<<), FilePath, length)
import System.IO (IO)
import qualified System.IO as IO
Strict hGetContents
| Computation ' hGetContents ' @hdl@ returns the list of characters
by @hdl@ , which is immediate closed .
hGetContents :: IO.Handle -> IO.IO String
hGetContents h = IO.hGetContents h >>= \s -> length s `seq` return s
which is read stirctly ( same as ' hGetContents ' ' stdin ' ) .
getContents :: IO String
getContents = hGetContents IO.stdin
# INLINE getContents #
interact :: (String -> String) -> IO ()
interact f = IO.putStr . f =<< getContents
# INLINE interact #
The file is read strictly , as with ' ' .
readFile :: FilePath -> IO String
readFile name = IO.openFile name IO.ReadMode >>= hGetContents
# INLINE readFile #
|
501ff1cf4a5b387234a47e52edc3c4718fc4e2e24e30f5c513605e8e2f02e795 | nick8325/quickspec | Refinements.hs | import QuickSpec
import Test.QuickCheck hiding (NonZero)
{- The universe of types -}
{- type NonZero = { x : Int | x /= 0 } -}
newtype NonZero = NonZero Int deriving (Ord, Eq, Show)
instance Arbitrary NonZero where
arbitrary = NonZero <$> arbitrary `suchThat` (/= 0)
{- type Odd = { x : Int | odd x } -}
newtype Odd = Odd Int deriving (Ord, Eq, Show)
instance Arbitrary Odd where
arbitrary = Odd <$> arbitrary `suchThat` odd
{- NonZero <= Int -}
nonZeroInt :: NonZero -> Int
nonZeroInt (NonZero i) = i
{- Odd <= Int -}
oddInt :: Odd -> Int
oddInt (Odd i) = i
{- Odd <= NonZero -}
oddNonZero :: Odd -> NonZero
oddNonZero (Odd i) = NonZero i
{- The functions of interest -}
divide :: Int -> NonZero -> Int
divide i (NonZero j) = div i j
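-- Illustrative (not part of the original file): the refinements are only
-- maintained by the Arbitrary generators above, and `divide` is ordinary
-- integer division underneath, e.g.
--   divide 7 (NonZero 2)          == 3
--   divide 7 (oddNonZero (Odd 3)) == 2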
main = quickSpec [
withMaxTermSize 10,
monoTypeWithVars ["x", "y", "z"] (Proxy :: Proxy NonZero),
monoTypeWithVars ["x", "y", "z"] (Proxy :: Proxy Odd),
con "1" (1 :: Int),
con "1" (NonZero 1),
con "1" (Odd 1),
con "0" (0 :: Int),
con "nonZeroInt" nonZeroInt,
con "oddInt" oddInt,
con "oddNonZero" oddNonZero,
con "divide" divide ]
| null | https://raw.githubusercontent.com/nick8325/quickspec/cb61f719d3d667674431867037ff44dec22ca4f9/examples/tests/Refinements.hs | haskell | The universe of types
type Odd = { x : Int | odd x }
NonZero <= Int
Odd <= Int
The functions of interest | import QuickSpec
import Test.QuickCheck hiding (NonZero)
type NonZero = { x : Int | x /= 0 }
newtype NonZero = NonZero Int deriving (Ord, Eq, Show)
instance Arbitrary NonZero where
arbitrary = NonZero <$> arbitrary `suchThat` (/= 0)
newtype Odd = Odd Int deriving (Ord, Eq, Show)
instance Arbitrary Odd where
arbitrary = Odd <$> arbitrary `suchThat` odd
nonZeroInt :: NonZero -> Int
nonZeroInt (NonZero i) = i
oddInt :: Odd -> Int
oddInt (Odd i) = i
Odd < = NonZero
oddNonZero :: Odd -> NonZero
oddNonZero (Odd i) = NonZero i
divide :: Int -> NonZero -> Int
divide i (NonZero j) = div i j
main = quickSpec [
withMaxTermSize 10,
monoTypeWithVars ["x", "y", "z"] (Proxy :: Proxy NonZero),
monoTypeWithVars ["x", "y", "z"] (Proxy :: Proxy Odd),
con "1" (1 :: Int),
con "1" (NonZero 1),
con "1" (Odd 1),
con "0" (0 :: Int),
con "nonZeroInt" nonZeroInt,
con "oddInt" oddInt,
con "oddNonZero" oddNonZero,
con "divide" divide ]
|
567024d2e2c05c4936e09094e763060f8e06d25b763d89d157f82c123e38eb03 | ghcjs/ghcjs-dom | CSSImportRule.hs | {-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE ForeignFunctionInterface #-}
{-# LANGUAGE JavaScriptFFI #-}
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
module GHCJS.DOM.JSFFI.Generated.CSSImportRule
(js_getHref, getHref, getHrefUnsafe, getHrefUnchecked, js_getMedia,
getMedia, js_getStyleSheet, getStyleSheet, CSSImportRule(..),
gTypeCSSImportRule)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import qualified Prelude (error)
import Data.Typeable (Typeable)
import GHCJS.Types (JSVal(..), JSString)
import GHCJS.Foreign (jsNull, jsUndefined)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSVal(..), FromJSVal(..))
import GHCJS.Marshal.Pure (PToJSVal(..), PFromJSVal(..))
import Control.Monad (void)
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import Data.Maybe (fromJust)
import Data.Traversable (mapM)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import GHCJS.DOM.JSFFI.Generated.Enums
foreign import javascript unsafe "$1[\"href\"]" js_getHref ::
CSSImportRule -> IO (Nullable JSString)
| < -US/docs/Web/API/CSSImportRule.href Mozilla CSSImportRule.href documentation >
getHref ::
(MonadIO m, FromJSString result) =>
CSSImportRule -> m (Maybe result)
getHref self = liftIO (fromMaybeJSString <$> (js_getHref self))
| < -US/docs/Web/API/CSSImportRule.href Mozilla CSSImportRule.href documentation >
getHrefUnsafe ::
(MonadIO m, HasCallStack, FromJSString result) =>
CSSImportRule -> m result
getHrefUnsafe self
= liftIO
((fromMaybeJSString <$> (js_getHref self)) >>=
maybe (Prelude.error "Nothing to return") return)
| < -US/docs/Web/API/CSSImportRule.href Mozilla CSSImportRule.href documentation >
getHrefUnchecked ::
(MonadIO m, FromJSString result) => CSSImportRule -> m result
getHrefUnchecked self
= liftIO (fromJust . fromMaybeJSString <$> (js_getHref self))
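-- Illustrative note (not part of the generated binding): given some
-- @rule :: CSSImportRule@ obtained elsewhere, the three accessors above
-- differ only in how a null @href@ is reported:
--
-- > getHref rule          -- returns Nothing when the attribute is null
-- > getHrefUnsafe rule    -- errors with "Nothing to return" when null
-- > getHrefUnchecked rule -- uses fromJust, so null is a runtime crash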
foreign import javascript unsafe "$1[\"media\"]" js_getMedia ::
CSSImportRule -> IO MediaList
| < -US/docs/Web/API/CSSImportRule.media Mozilla CSSImportRule.media documentation >
getMedia :: (MonadIO m) => CSSImportRule -> m MediaList
getMedia self = liftIO (js_getMedia self)
foreign import javascript unsafe "$1[\"styleSheet\"]"
js_getStyleSheet :: CSSImportRule -> IO CSSStyleSheet
| < -US/docs/Web/API/CSSImportRule.styleSheet Mozilla CSSImportRule.styleSheet documentation >
getStyleSheet :: (MonadIO m) => CSSImportRule -> m CSSStyleSheet
getStyleSheet self = liftIO (js_getStyleSheet self) | null | https://raw.githubusercontent.com/ghcjs/ghcjs-dom/749963557d878d866be2d0184079836f367dd0ea/ghcjs-dom-jsffi/src/GHCJS/DOM/JSFFI/Generated/CSSImportRule.hs | haskell | For HasCallStack compatibility
# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures # | # LANGUAGE PatternSynonyms #
# LANGUAGE ForeignFunctionInterface #
# LANGUAGE JavaScriptFFI #
module GHCJS.DOM.JSFFI.Generated.CSSImportRule
(js_getHref, getHref, getHrefUnsafe, getHrefUnchecked, js_getMedia,
getMedia, js_getStyleSheet, getStyleSheet, CSSImportRule(..),
gTypeCSSImportRule)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import qualified Prelude (error)
import Data.Typeable (Typeable)
import GHCJS.Types (JSVal(..), JSString)
import GHCJS.Foreign (jsNull, jsUndefined)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSVal(..), FromJSVal(..))
import GHCJS.Marshal.Pure (PToJSVal(..), PFromJSVal(..))
import Control.Monad (void)
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import Data.Maybe (fromJust)
import Data.Traversable (mapM)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import GHCJS.DOM.JSFFI.Generated.Enums
foreign import javascript unsafe "$1[\"href\"]" js_getHref ::
CSSImportRule -> IO (Nullable JSString)
| < -US/docs/Web/API/CSSImportRule.href Mozilla CSSImportRule.href documentation >
getHref ::
(MonadIO m, FromJSString result) =>
CSSImportRule -> m (Maybe result)
getHref self = liftIO (fromMaybeJSString <$> (js_getHref self))
| < -US/docs/Web/API/CSSImportRule.href Mozilla CSSImportRule.href documentation >
getHrefUnsafe ::
(MonadIO m, HasCallStack, FromJSString result) =>
CSSImportRule -> m result
getHrefUnsafe self
= liftIO
((fromMaybeJSString <$> (js_getHref self)) >>=
maybe (Prelude.error "Nothing to return") return)
| < -US/docs/Web/API/CSSImportRule.href Mozilla CSSImportRule.href documentation >
getHrefUnchecked ::
(MonadIO m, FromJSString result) => CSSImportRule -> m result
getHrefUnchecked self
= liftIO (fromJust . fromMaybeJSString <$> (js_getHref self))
foreign import javascript unsafe "$1[\"media\"]" js_getMedia ::
CSSImportRule -> IO MediaList
| < -US/docs/Web/API/CSSImportRule.media Mozilla CSSImportRule.media documentation >
getMedia :: (MonadIO m) => CSSImportRule -> m MediaList
getMedia self = liftIO (js_getMedia self)
foreign import javascript unsafe "$1[\"styleSheet\"]"
js_getStyleSheet :: CSSImportRule -> IO CSSStyleSheet
| < -US/docs/Web/API/CSSImportRule.styleSheet Mozilla CSSImportRule.styleSheet documentation >
getStyleSheet :: (MonadIO m) => CSSImportRule -> m CSSStyleSheet
getStyleSheet self = liftIO (js_getStyleSheet self) |
11cee03f8ac2a82ce6f3ec3247dfa1c2d6dd3b94b51f65db8242af8522929e1d | metawilm/cl-python | mod-string-test.lisp | ;; -*- Mode: LISP; Syntax: COMMON-LISP; Package: CLPYTHON.TEST -*-
;;
This software is Copyright ( c ) Franz Inc. and .
Franz Inc. and grant you the rights to
;; distribute and use this software as governed by the terms
of the Lisp Lesser GNU Public License
;; (),
;; known as the LLGPL.
(in-package :clpython.test)
(defun run-mod-string-test ()
(with-subtest (:name "module String")
(run-no-error "
import string
t = string.maketrans('ab', 'xy')
string.translate('abcdxyz', t, 'd') == 'xycxyz'")
(run-no-error "
assert '%% %s' % (1,) == '% 1'"))) | null | https://raw.githubusercontent.com/metawilm/cl-python/bce7f80b0c67d3c9f514556f2d14d098efecdde8/test/mod-string-test.lisp | lisp | -*- Mode: LISP; Syntax: COMMON-LISP; Package: CLPYTHON.TEST -*-
distribute and use this software as governed by the terms
(),
known as the LLGPL. | This software is Copyright ( c ) Franz Inc. and .
Franz Inc. and grant you the rights to
of the Lisp Lesser GNU Public License
(in-package :clpython.test)
(defun run-mod-string-test ()
(with-subtest (:name "module String")
(run-no-error "
import string
t = string.maketrans('ab', 'xy')
string.translate('abcdxyz', t, 'd') == 'xycxyz'")
(run-no-error "
assert '%% %s' % (1,) == '% 1'"))) |
c9611e56f1bb28e28015e7047120f42054fe28830d288d623871fe955477b2e8 | unnohideyuki/bunny | sample247.hs | main = putStrLn $ concat ["abc", "", "xyz"]
| null | https://raw.githubusercontent.com/unnohideyuki/bunny/501856ff48f14b252b674585f25a2bf3801cb185/compiler/test/samples/sample247.hs | haskell | main = putStrLn $ concat ["abc", "", "xyz"]
|
|
47d4db21e0f30dba14e27aa3401db5dbc7568fcb2676a8b8e974c57a09bbc148 | kwanghoon/polyrpc | Prim.hs | {-# LANGUAGE DeriveDataTypeable, DeriveGeneric #-}
module Prim where
import GHC.Generics hiding (Prefix, Infix)
import Text.JSON.Generic
import Data.Text.Prettyprint.Doc hiding (Pretty)
import Data.Text.Prettyprint.Doc.Util
data PrimOp =
    NotPrimOp         --{l}. Bool -l-> Bool
  | OrPrimOp          --{l}. (Bool, Bool) -l-> Bool
  | AndPrimOp         --{l}. (Bool, Bool) -l-> Bool
  | EqPrimOp          --{l}. (?, ?) -l-> Bool (Overloaded)
  | EqStringPrimOp    --{l}. (String, String) -l-> Bool
  | EqBoolPrimOp      --{l}. (Bool, Bool) -l-> Bool
  | EqIntPrimOp       --{l}. (Int, Int) -l-> Bool
  | NeqPrimOp         --{l}. (?, ?) -l-> Bool (Overloaded)
  | NeqStringPrimOp   --{l}. (String, String) -l-> Bool
  | NeqBoolPrimOp     --{l}. (Bool, Bool) -l-> Bool
  | NeqIntPrimOp      --{l}. (Int, Int) -l-> Bool
  | LtPrimOp          --{l}. (Int, Int) -l-> Bool
  | LePrimOp          --{l}. (Int, Int) -l-> Bool
  | GtPrimOp          --{l}. (Int, Int) -l-> Bool
  | GePrimOp          --{l}. (Int, Int) -l-> Bool
  | AddPrimOp         --{l}. (Int, Int) -l-> Int
  | SubPrimOp         --{l}. (Int, Int) -l-> Int
  | MulPrimOp         --{l}. (Int, Int) -l-> Int
  | DivPrimOp         --{l}. (Int, Int) -l-> Int
  | NegPrimOp         --{l}. Int -l-> Int
  -- For basic libraries
  | PrimReadOp
  | PrimPrintOp
  | PrimIntToStringOp
  | PrimConcatOp
  | PrimRefCreateOp
  | PrimRefReadOp
  | PrimRefWriteOp
  -- For creating recursive closures
  -- | MkRecOp -- MkRecOp closure f
  -- For
  --  deriving (Show, Eq, Generic)
  deriving (Eq, Read, Show, Typeable, Data, Generic)
data Fixity = Prefix | Infix | Postfix deriving Show
fixity_info = [
(NotPrimOp, Prefix)
, (OrPrimOp, Infix)
, (AndPrimOp, Infix)
, (EqPrimOp, Infix)
, (EqStringPrimOp, Infix)
, (EqBoolPrimOp, Infix)
, (NeqPrimOp, Infix)
, (NeqStringPrimOp, Infix)
, (NeqBoolPrimOp, Infix)
, (NeqIntPrimOp, Infix)
, (LtPrimOp, Infix)
, (LePrimOp, Infix)
, (GtPrimOp, Infix)
, (GePrimOp, Infix)
, (AddPrimOp, Infix)
, (SubPrimOp, Infix)
, (MulPrimOp, Infix)
, (DivPrimOp, Infix)
, (NegPrimOp, Prefix)
]
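-- Illustrative (not part of the original file): fixity_info is an
-- association list, so a plain lookup recovers an operator's fixity, e.g.
--   lookup NegPrimOp fixity_info == Just Prefix
--   lookup AddPrimOp fixity_info == Just Infix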
-- Predefined type names
unitType = "Unit"
intType = "Int"
boolType = "Bool"
stringType = "String"
refType = "Ref"
--
-- Todo: to implement ppPrim
ppPrim (NotPrimOp) = pretty "!"
ppPrim (OrPrimOp) = pretty "or"
ppPrim (AndPrimOp) = pretty "and"
ppPrim (EqPrimOp) = pretty "=="
ppPrim (EqStringPrimOp) = pretty "=="
ppPrim (EqBoolPrimOp) = pretty "=="
ppPrim (EqIntPrimOp) = pretty "=="
ppPrim (NeqPrimOp) = pretty "!="
ppPrim (NeqStringPrimOp) = pretty "!="
ppPrim (NeqBoolPrimOp) = pretty "!="
ppPrim (NeqIntPrimOp) = pretty "!="
ppPrim (LtPrimOp) = pretty "<"
ppPrim (LePrimOp) = pretty "<="
ppPrim (GtPrimOp) = pretty ">"
ppPrim (GePrimOp) = pretty ">="
ppPrim (AddPrimOp) = pretty "+"
ppPrim (SubPrimOp) = pretty "-"
ppPrim (MulPrimOp) = pretty "*"
ppPrim (DivPrimOp) = pretty "/"
ppPrim (NegPrimOp) = pretty "-"
| null | https://raw.githubusercontent.com/kwanghoon/polyrpc/49ba773bd3f1b22dce1ad64cda44683553d27c89/app/ast/Prim.hs | haskell | {l}. (?, ?) -l-> Bool (Overloaded)
{l}. (?, ?) -l-> Bool (Overloaded)
{l}. Int -l-> Int
For basic libraries
For creating recursive closures
| MkRecOp -- MkRecOp closure f
Predefined type names
Todo: to implement ppPrim | # LANGUAGE DeriveDataTypeable , DeriveGeneric #
module Prim where
import GHC.Generics hiding (Prefix, Infix)
import Text.JSON.Generic
import Data.Text.Prettyprint.Doc hiding (Pretty)
import Data.Text.Prettyprint.Doc.Util
data PrimOp =
{ l } . -l- >
{ l } . ( , ) -l- >
{ l } . ( , ) -l- >
{ l } . ( String , String ) -l- >
{ l } . ( , ) -l- >
{ l } . ( Int , Int ) -l- >
{ l } . ( String , String ) -l- >
{ l } . ( , ) -l- >
{ l } . ( Int , Int ) -l- >
{ l } . ( Int , Int ) -l- >
{ l } . ( Int , Int ) -l- >
{ l } . ( Int , Int ) -l- >
{ l } . ( Int , Int ) -l- >
{ l } . ( Int , Int ) -l- > Int
{ l } . ( Int , Int ) -l- > Int
{ l } . ( Int , Int ) -l- > Int
{ l } . ( Int , Int ) -l- > Int
| PrimReadOp
| PrimPrintOp
| PrimIntToStringOp
| PrimConcatOp
| PrimRefCreateOp
| PrimRefReadOp
| PrimRefWriteOp
For
deriving ( Show , Eq , Generic )
deriving (Eq, Read, Show, Typeable, Data, Generic)
data Fixity = Prefix | Infix | Postfix deriving Show
fixity_info = [
(NotPrimOp, Prefix)
, (OrPrimOp, Infix)
, (AndPrimOp, Infix)
, (EqPrimOp, Infix)
, (EqStringPrimOp, Infix)
, (EqBoolPrimOp, Infix)
, (NeqPrimOp, Infix)
, (NeqStringPrimOp, Infix)
, (NeqBoolPrimOp, Infix)
, (NeqIntPrimOp, Infix)
, (LtPrimOp, Infix)
, (LePrimOp, Infix)
, (GtPrimOp, Infix)
, (GePrimOp, Infix)
, (AddPrimOp, Infix)
, (SubPrimOp, Infix)
, (MulPrimOp, Infix)
, (DivPrimOp, Infix)
, (NegPrimOp, Prefix)
]
unitType = "Unit"
intType = "Int"
boolType = "Bool"
stringType = "String"
refType = "Ref"
ppPrim (NotPrimOp) = pretty "!"
ppPrim (OrPrimOp) = pretty "or"
ppPrim (AndPrimOp) = pretty "and"
ppPrim (EqPrimOp) = pretty "=="
ppPrim (EqStringPrimOp) = pretty "=="
ppPrim (EqBoolPrimOp) = pretty "=="
ppPrim (EqIntPrimOp) = pretty "=="
ppPrim (NeqPrimOp) = pretty "!="
ppPrim (NeqStringPrimOp) = pretty "!="
ppPrim (NeqBoolPrimOp) = pretty "!="
ppPrim (NeqIntPrimOp) = pretty "!="
ppPrim (LtPrimOp) = pretty "<"
ppPrim (LePrimOp) = pretty "<="
ppPrim (GtPrimOp) = pretty ">"
ppPrim (GePrimOp) = pretty ">="
ppPrim (AddPrimOp) = pretty "+"
ppPrim (SubPrimOp) = pretty "-"
ppPrim (MulPrimOp) = pretty "*"
ppPrim (DivPrimOp) = pretty "/"
ppPrim (NegPrimOp) = pretty "-"
|
e7ff36e23da51ab720a3c070f468d19c6fb70ed09302ea07d06428979fc5e51a | kushidesign/kushi | state.cljs | (ns kushi.playground.state
(:require
[kushi.ui.dom :as dom]
[applied-science.js-interop :as j]
[reagent.core :as r]))
(def *state (r/atom {
;; :init-focused-component "button"
:components-expanded? false
:snippet-by-component {}
:dev {:show-focused-section? false}}))
(def *focused-component (r/atom nil))
(def *focused-section (r/atom :kushi-components))
(def *expanded-sections (r/atom #{}))
(def *visible-sections (r/atom {}))
(defn focused? [fname] (= @*focused-component fname))
(defn section-focused? [x] (= @*focused-section x))
#_(defn initial-focus? [fname]
(when (= (:init-focused-component @*state) fname)
(swap! *state assoc :focused-component fname)
(swap! *state assoc :init-focused-component nil)
true))
(defn set-focused-component! [x]
#_(js/console.log :set-focused-component! x)
(j/call js/history :pushState #js {} "" (str "#" x))
(reset! *focused-component x) )
(defn nav! [x]
#_(js/console.log :nav!)
(let [el (dom/el-by-id x)
expanded? (dom/has-class? el "kushi-collapse-expanded")]
(swap! *expanded-sections (if expanded? conj disj) x)
#_(js/console.log @*expanded-sections))
(when (when-not (focused? x) x)
(set-focused-component! x)))
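;; Illustrative (not part of the original namespace): (nav! "button") assumes
;; a DOM element with id "button"; it adds or removes that id in
;; *expanded-sections depending on its current kushi-collapse-expanded class
;; and, if the component is not already focused, pushes "#button" onto the
;; browser history and resets *focused-component.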
| null | https://raw.githubusercontent.com/kushidesign/kushi/6a50dc0336a8e7fe8ddd776fcf41689f9110e590/src/kushi/playground/state.cljs | clojure | :init-focused-component "button" | (ns kushi.playground.state
(:require
[kushi.ui.dom :as dom]
[applied-science.js-interop :as j]
[reagent.core :as r]))
(def *state (r/atom {
:components-expanded? false
:snippet-by-component {}
:dev {:show-focused-section? false}}))
(def *focused-component (r/atom nil))
(def *focused-section (r/atom :kushi-components))
(def *expanded-sections (r/atom #{}))
(def *visible-sections (r/atom {}))
(defn focused? [fname] (= @*focused-component fname))
(defn section-focused? [x] (= @*focused-section x))
#_(defn initial-focus? [fname]
(when (= (:init-focused-component @*state) fname)
(swap! *state assoc :focused-component fname)
(swap! *state assoc :init-focused-component nil)
true))
(defn set-focused-component! [x]
#_(js/console.log :set-focused-component! x)
(j/call js/history :pushState #js {} "" (str "#" x))
(reset! *focused-component x) )
(defn nav! [x]
#_(js/console.log :nav!)
(let [el (dom/el-by-id x)
expanded? (dom/has-class? el "kushi-collapse-expanded")]
(swap! *expanded-sections (if expanded? conj disj) x)
#_(js/console.log @*expanded-sections))
(when (when-not (focused? x) x)
(set-focused-component! x)))
|
fa7ba016a0378c5d08e6cfe0df824e388f547a54754b4202909847510ed11aa1 | W-Net-AI/LISP-CV | contrib.lisp | ;;;; -*- mode: lisp; indent-tabs: nil -*-
;;;; contrib.lisp
;;;; OpenCV bindings
;;;; Contributed/Experimental Stuff
(in-package :lisp-cv)
;;; ColorMaps in OpenCV
;; void applyColorMap(InputArray src, OutputArray dst, )
;; void cv_applyColorMap(Mat * src, dst, )
(defcfun ("cv_applyColorMap" apply-color-map) :void
(src mat)
(dest mat)
(colormap :int))
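;; Illustrative usage sketch (not part of the original file): SRC and DEST are
;; assumed to be MAT objects created elsewhere; the last argument is one of
;; OpenCV's COLORMAP_* enum values, passed here as a plain integer.
;; (apply-color-map src dest 2)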
| null | https://raw.githubusercontent.com/W-Net-AI/LISP-CV/10d5c7c1a6fa026de488ca89a28e8a5c519ff8f2/contrib.lisp | lisp | -*- mode: lisp; indent-tabs: nil -*-
contrib.lisp
OpenCV bindings
Contributed/Experimental Stuff
ColorMaps in OpenCV |
(in-package :lisp-cv)
void applyColorMap(InputArray src , OutputArray dst , )
void cv_applyColorMap(Mat * src , dst , )
(defcfun ("cv_applyColorMap" apply-color-map) :void
(src mat)
(dest mat)
(colormap :int))
|
755e95c1ea2425c78525f99e27926966d7561dfe48d397170734adfd12fdb722 | UU-ComputerScience/uhc | NameIntro1.hs | {- ----------------------------------------------------------------------------------------
what : name introduction
expected: ok, for function binding
---------------------------------------------------------------------------------------- -}
module Main where
-- ok
fok x = '4'
fok x = '5'
main :: IO ()
main = putStr "Dummy"
| null | https://raw.githubusercontent.com/UU-ComputerScience/uhc/f2b94a90d26e2093d84044b3832a9a3e3c36b129/EHC/test/regress/99/NameIntro1.hs | haskell | ----------------------------------------------------------------------------------------
what : name introduction
expected: ok, for function binding
----------------------------------------------------------------------------------------
ok |
module Main where
fok x = '4'
fok x = '5'
main :: IO ()
main = putStr "Dummy"
|
defc6b453a0566623eef57de2be2aee1df1a2c873180a3da9d32ed66ca4f78cb | alexbs01/OCaml | command.ml |
open Context;;
open Arith;;
exception End_of_program;;
type command =
Eval of arith
| Var_def of string * arith
| Quit;;
let rec run ctx = function
Eval e ->
let f = eval ctx e in
let _ = print_endline (string_of_float f) in
ctx
| Var_def (name, value) ->
let v = eval ctx value in
let _ = print_endline (name ^ " = " ^ (string_of_float v)) in
add_binding ctx name v
| Quit ->
raise (End_of_program);;
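(* Illustrative (not part of the original file): [run] threads the context
   through commands. For some arith expression [e], [run ctx (Var_def ("x", e))]
   prints "x = <value>" and returns [ctx] extended via [add_binding], while
   [run ctx Quit] raises [End_of_program]. *)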
| null | https://raw.githubusercontent.com/alexbs01/OCaml/92a28522a8467d8ed87ef380b6175f1c21616f85/p12/command.ml | ocaml |
open Context;;
open Arith;;
exception End_of_program;;
type command =
Eval of arith
| Var_def of string * arith
| Quit;;
let rec run ctx = function
Eval e ->
let f = eval ctx e in
let _ = print_endline (string_of_float f) in
ctx
| Var_def (name, value) ->
let v = eval ctx value in
let _ = print_endline (name ^ " = " ^ (string_of_float v)) in
add_binding ctx name v
| Quit ->
raise (End_of_program);;
|
|
695cb792a1d659b6f91d14a696f970523dc6e1250de3eb1e35c3b05d0437780c | sirherrbatka/vellum | types.lisp | (cl:in-package #:vellum.table)
(defclass fundamental-table (cl-ds:traversable)
())
(defclass standard-table (fundamental-table)
((%header :reader header
:initarg :header)
(%columns :reader read-columns
:writer write-columns
:initarg :columns
:type vector)))
(defstruct standard-transformation
(dropped nil :type boolean)
marker-column
table
(enable-restarts *enable-restarts* :type boolean)
(wrap-errors *wrap-errors* :type boolean)
(in-place nil :type boolean)
(start 0 :type integer)
row
iterator
(columns #() :type simple-vector)
(column-count 0 :type fixnum)
(count 0 :type fixnum)
bind-row-closure
aggregation-results)
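;; Illustrative (not part of the original file): DEFSTRUCT generates the usual
;; keyword constructor, so a transformation can be built like
;;   (make-standard-transformation :table some-table
;;                                 :columns (vector)
;;                                 :bind-row-closure some-closure)
;; where SOME-TABLE and SOME-CLOSURE are placeholders for values created
;; elsewhere; the remaining slots fall back to their declared defaults.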
(cl-ds.utils:define-list-of-slots standard-transformation ()
(column-count standard-transformation-column-count)
(dropped standard-transformation-dropped)
(marker-column standard-transformation-marker-column)
(enable-restarts standard-transformation-enable-restarts)
(wrap-errors standard-transformation-wrap-errors)
(table standard-transformation-table)
(in-place standard-transformation-in-place)
(start standard-transformation-start)
(row standard-transformation-row)
(iterator standard-transformation-iterator)
(columns standard-transformation-columns)
(aggregation-results standard-transformation-aggregation-results)
(count standard-transformation-count))
(defmethod cl-ds.utils:cloning-information append ((table standard-table))
'((:header header)
(:columns read-columns)))
(defstruct table-row iterator)
(declaim (inline setfable-table-row-iterator))
(declaim (inline table-row-iterator))
(defstruct (setfable-table-row (:include table-row)))
(defclass standard-table-range (cl-ds:fundamental-forward-range)
((%table-row :initarg :table-row
:reader read-table-row)
(%header :initarg :header
:reader read-header
:reader header)
(%row-count :initarg :row-count
:type fixnum
:reader read-row-count)))
(defmethod cl-ds.utils:cloning-information append
((range standard-table-range))
'((:table-row read-table-row)
(:header read-header)
(:row-count read-row-count)))
(defclass bind-row ()
((%optimized-closure :initarg :optimized-closure
:reader optimized-closure))
(:metaclass closer-mop:funcallable-standard-class))
| null | https://raw.githubusercontent.com/sirherrbatka/vellum/92e133e0d2592b51a6cd5683761d37bbc663b17b/src/table/types.lisp | lisp | (cl:in-package #:vellum.table)
(defclass fundamental-table (cl-ds:traversable)
())
(defclass standard-table (fundamental-table)
((%header :reader header
:initarg :header)
(%columns :reader read-columns
:writer write-columns
:initarg :columns
:type vector)))
(defstruct standard-transformation
(dropped nil :type boolean)
marker-column
table
(enable-restarts *enable-restarts* :type boolean)
(wrap-errors *wrap-errors* :type boolean)
(in-place nil :type boolean)
(start 0 :type integer)
row
iterator
(columns #() :type simple-vector)
(column-count 0 :type fixnum)
(count 0 :type fixnum)
bind-row-closure
aggregation-results)
(cl-ds.utils:define-list-of-slots standard-transformation ()
(column-count standard-transformation-column-count)
(dropped standard-transformation-dropped)
(marker-column standard-transformation-marker-column)
(enable-restarts standard-transformation-enable-restarts)
(wrap-errors standard-transformation-wrap-errors)
(table standard-transformation-table)
(in-place standard-transformation-in-place)
(start standard-transformation-start)
(row standard-transformation-row)
(iterator standard-transformation-iterator)
(columns standard-transformation-columns)
(aggregation-results standard-transformation-aggregation-results)
(count standard-transformation-count))
(defmethod cl-ds.utils:cloning-information append ((table standard-table))
'((:header header)
(:columns read-columns)))
(defstruct table-row iterator)
(declaim (inline setfable-table-row-iterator))
(declaim (inline table-row-iterator))
(defstruct (setfable-table-row (:include table-row)))
(defclass standard-table-range (cl-ds:fundamental-forward-range)
((%table-row :initarg :table-row
:reader read-table-row)
(%header :initarg :header
:reader read-header
:reader header)
(%row-count :initarg :row-count
:type fixnum
:reader read-row-count)))
(defmethod cl-ds.utils:cloning-information append
((range standard-table-range))
'((:table-row read-table-row)
(:header read-header)
(:row-count read-row-count)))
(defclass bind-row ()
((%optimized-closure :initarg :optimized-closure
:reader optimized-closure))
(:metaclass closer-mop:funcallable-standard-class))
|
|
a8c05c4feb8c28bc6089bdd3219386b51e4558a4b7c645e14c4cfabd16a976a9 | charlieg/Sparser | head-of.lisp | ;;; -*- Mode:LISP; Syntax:Common-Lisp; Package:(CTI-source LISP) -*-
;;; copyright (c) 1991 Content Technologies Inc. -- all rights reserved
;;;
;;; File: "head of"
;;; Module: "model;sl:whos news:posts:"
;;; version: April 1991 system version 1.8.4
;;; initiated 5/9
(in-package :CTI-source)
;; 5/26 --this is the only rule that mentions "head" literally
;; it should get flushed in favor of the verb
(def-cfr title ("head" ThisCo)
:referent (:composite head+company
left-edge right-edge))
;;;-------------------------------------------
;;; rules specific to "head" taken as a title
;;;-------------------------------------------
(def-cfr title (title of-company-activity)
:referent (:merge find-or-make/title-of-company-activity
left-edge right-edge))
(defun find-or-make/title-of-company-activity (title activity)
activity)
| null | https://raw.githubusercontent.com/charlieg/Sparser/b9bb7d01d2e40f783f3214fc104062db3d15e608/Sparser/code/s/grammar/model/core/titles/head-of.lisp | lisp | -*- Mode:LISP; Syntax:Common-Lisp; Package:(CTI-source LISP) -*-
File: "head of"
it should get flushed in favor of the verb
-------------------------------------------
rules specific to "head" taken as a title
------------------------------------------- | copyright ( c ) 1991 Content Technologies Inc. -- all rights reserved
Module : " model;sl : whos news : posts : "
version : April 1991 system version 1.8.4
initiated 5/9
(in-package :CTI-source)
5/26 --this is the only rule that mentions " head " literally
(def-cfr title ("head" ThisCo)
:referent (:composite head+company
left-edge right-edge))
(def-cfr title (title of-company-activity)
:referent (:merge find-or-make/title-of-company-activity
left-edge right-edge))
(defun find-or-make/title-of-company-activity (title activity)
activity)
|
b77edc20b57030339f0c9bc9a7b44d2870b7b59cfd71dae25d7534d5959d8fed | greglook/alphabase | bytes_test.cljc | (ns alphabase.bytes-test
(:require
[alphabase.bytes :as b]
[clojure.test :refer [deftest is are testing]]))
(deftest bytes-tests
(testing "bytes?"
(is (not (b/bytes? nil)))
(is (not (b/bytes? "foo")))
(is (b/bytes? (b/byte-array 0)))
(is (b/bytes? (b/byte-array 1))))
(testing "bytes="
(is (not (b/bytes= "foo" (b/init-bytes [0 1 2]))))
(is (not (b/bytes= (b/init-bytes [0 1 2]) nil)))
(is (not (b/bytes= (b/init-bytes [0 1 2])
(b/init-bytes [0 1 2 0]))))
(is (not (b/bytes= (b/init-bytes [0 1 2])
(b/init-bytes [0 1 3]))))
(is (b/bytes= (b/init-bytes [0 1 2])
(b/init-bytes [0 1 2])))))
(deftest array-manipulation
(let [bs (b/byte-array 3)]
(is (= 0 (b/get-byte bs 0)))
(b/set-byte bs 0 64)
(b/set-byte bs 1 128)
(b/set-byte bs 2 255)
(is (= 64 (b/get-byte bs 0)))
(is (= 128 (b/get-byte bs 1)))
(is (= 255 (b/get-byte bs 2)))))
(deftest array-copying
(testing "full copy"
(let [a (b/init-bytes [0 1 2 3 4])
b (b/copy a)]
(is (b/bytes? b))
(is (not (identical? a b)))
(is (b/bytes= a b))))
(testing "full write"
(let [a (b/init-bytes [0 1 2 3])
b (b/init-bytes [100 110 120 130 140 150 160])]
(is (= 4 (b/copy a b 2)))
(is (b/bytes= (b/init-bytes [100 110 0 1 2 3 160]) b))))
(testing "slice write"
(let [a (b/init-bytes [1 2 3 4 5 6 7 8 9 10])
b (b/init-bytes [100 110 120 130 140 150])]
(is (= 3 (b/copy a 3 b 2 3)))
(is (b/bytes= (b/init-bytes [100 110 4 5 6 150]) b)))))
(deftest array-sorting
(is (zero? (b/compare
(b/init-bytes [])
(b/init-bytes []))))
(is (zero? (b/compare
(b/init-bytes [0 1 2])
(b/init-bytes [0 1 2]))))
(is (neg? (b/compare
(b/init-bytes [0 1 2])
(b/init-bytes [0 1 3]))))
(is (pos? (b/compare
(b/init-bytes [0 2 2])
(b/init-bytes [0 1 3]))))
(is (pos? (b/compare
(b/init-bytes [0 1 2 0])
(b/init-bytes [0 1 2]))))
(is (neg? (b/compare
(b/init-bytes [0 1 2 0])
(b/init-bytes [0 1 2 0 0])))))
(deftest copy-slice
(are [bs offset len]
(b/bytes= (b/init-bytes (take len (drop offset bs)))
(-> bs b/init-bytes (b/copy-slice offset len)))
[1 2 3 4 5 6 7 8 9 10] 5 3
[0 1 2 0] 0 2
[0 1 2 0] 0 0
[0 1 2 0] 2 1)
(are [bs offset]
(= (drop offset bs)
(-> bs b/init-bytes (b/copy-slice offset) b/byte-seq))
[1 2 3 4 5 6 7 8 9 10] 5
[0 1 2 0] 0
[0 1 2 0] 0
[0 1 2 0] 2))
(deftest concat-arrays
(are [arrs expected]
(b/bytes= (b/init-bytes expected)
(->> arrs (map b/init-bytes) (apply b/concat)))
[[0 1] [2 3] [3 4]] [0 1 2 3 3 4]
[[0 1] [0 1 2]] [0 1 0 1 2]
[[0 1]] [0 1]
[[0] nil [1]] [0 1]
[nil nil] []
[[]] []
[] []))
| null | https://raw.githubusercontent.com/greglook/alphabase/21f03acd09520e298e2c42dc767bd2b63a718e6e/test/alphabase/bytes_test.cljc | clojure | (ns alphabase.bytes-test
(:require
[alphabase.bytes :as b]
[clojure.test :refer [deftest is are testing]]))
(deftest bytes-tests
(testing "bytes?"
(is (not (b/bytes? nil)))
(is (not (b/bytes? "foo")))
(is (b/bytes? (b/byte-array 0)))
(is (b/bytes? (b/byte-array 1))))
(testing "bytes="
(is (not (b/bytes= "foo" (b/init-bytes [0 1 2]))))
(is (not (b/bytes= (b/init-bytes [0 1 2]) nil)))
(is (not (b/bytes= (b/init-bytes [0 1 2])
(b/init-bytes [0 1 2 0]))))
(is (not (b/bytes= (b/init-bytes [0 1 2])
(b/init-bytes [0 1 3]))))
(is (b/bytes= (b/init-bytes [0 1 2])
(b/init-bytes [0 1 2])))))
(deftest array-manipulation
(let [bs (b/byte-array 3)]
(is (= 0 (b/get-byte bs 0)))
(b/set-byte bs 0 64)
(b/set-byte bs 1 128)
(b/set-byte bs 2 255)
(is (= 64 (b/get-byte bs 0)))
(is (= 128 (b/get-byte bs 1)))
(is (= 255 (b/get-byte bs 2)))))
(deftest array-copying
(testing "full copy"
(let [a (b/init-bytes [0 1 2 3 4])
b (b/copy a)]
(is (b/bytes? b))
(is (not (identical? a b)))
(is (b/bytes= a b))))
(testing "full write"
(let [a (b/init-bytes [0 1 2 3])
b (b/init-bytes [100 110 120 130 140 150 160])]
(is (= 4 (b/copy a b 2)))
(is (b/bytes= (b/init-bytes [100 110 0 1 2 3 160]) b))))
(testing "slice write"
(let [a (b/init-bytes [1 2 3 4 5 6 7 8 9 10])
b (b/init-bytes [100 110 120 130 140 150])]
(is (= 3 (b/copy a 3 b 2 3)))
(is (b/bytes= (b/init-bytes [100 110 4 5 6 150]) b)))))
(deftest array-sorting
(is (zero? (b/compare
(b/init-bytes [])
(b/init-bytes []))))
(is (zero? (b/compare
(b/init-bytes [0 1 2])
(b/init-bytes [0 1 2]))))
(is (neg? (b/compare
(b/init-bytes [0 1 2])
(b/init-bytes [0 1 3]))))
(is (pos? (b/compare
(b/init-bytes [0 2 2])
(b/init-bytes [0 1 3]))))
(is (pos? (b/compare
(b/init-bytes [0 1 2 0])
(b/init-bytes [0 1 2]))))
(is (neg? (b/compare
(b/init-bytes [0 1 2 0])
(b/init-bytes [0 1 2 0 0])))))
(deftest copy-slice
(are [bs offset len]
(b/bytes= (b/init-bytes (take len (drop offset bs)))
(-> bs b/init-bytes (b/copy-slice offset len)))
[1 2 3 4 5 6 7 8 9 10] 5 3
[0 1 2 0] 0 2
[0 1 2 0] 0 0
[0 1 2 0] 2 1)
(are [bs offset]
(= (drop offset bs)
(-> bs b/init-bytes (b/copy-slice offset) b/byte-seq))
[1 2 3 4 5 6 7 8 9 10] 5
[0 1 2 0] 0
[0 1 2 0] 0
[0 1 2 0] 2))
(deftest concat-arrays
(are [arrs expected]
(b/bytes= (b/init-bytes expected)
(->> arrs (map b/init-bytes) (apply b/concat)))
[[0 1] [2 3] [3 4]] [0 1 2 3 3 4]
[[0 1] [0 1 2]] [0 1 0 1 2]
[[0 1]] [0 1]
[[0] nil [1]] [0 1]
[nil nil] []
[[]] []
[] []))
|
|
e4599b6e2a182d1d3ad648b4e8e8615deb2702fa4265b88832df79464bb8207a | finnishtransportagency/harja | toimenpidekilometrit_test.clj | (ns harja.palvelin.raportointi.toimenpidekilometrit-test
(:require [clojure.test :refer :all]
[harja.palvelin.komponentit.tietokanta :as tietokanta]
[harja.palvelin.palvelut.toimenpidekoodit :refer :all]
[harja.palvelin.palvelut.urakat :refer :all]
[harja.testi :refer :all]
[com.stuartsierra.component :as component]
[clj-time.core :as t]
[clj-time.coerce :as c]
[harja.palvelin.komponentit.pdf-vienti :as pdf-vienti]
[harja.palvelin.raportointi :as raportointi]
[harja.palvelin.raportointi.testiapurit :as apurit]
[harja.palvelin.palvelut.raportit :as raportit]))
(defn jarjestelma-fixture [testit]
(alter-var-root #'jarjestelma
(fn [_]
(component/start
(component/system-map
:db (tietokanta/luo-tietokanta testitietokanta)
:http-palvelin (testi-http-palvelin)
:pdf-vienti (component/using
(pdf-vienti/luo-pdf-vienti)
[:http-palvelin])
:raportointi (component/using
(raportointi/luo-raportointi)
[:db :pdf-vienti])
:raportit (component/using
(raportit/->Raportit)
[:http-palvelin :db :raportointi :pdf-vienti])))))
(testit)
(alter-var-root #'jarjestelma component/stop))
(use-fixtures :once (compose-fixtures
jarjestelma-fixture
urakkatieto-fixture))
(deftest raportin-suoritus-urakalle-toimii
(let [vastaus (kutsu-palvelua (:http-palvelin jarjestelma)
:suorita-raportti
+kayttaja-jvh+
{:nimi :toimenpidekilometrit
:konteksti "urakka"
:urakka-id (hae-oulun-alueurakan-2014-2019-id)
:parametrit {:alkupvm (c/to-date (t/local-date 2014 10 1))
:loppupvm (c/to-date (t/local-date 2015 10 1))
:hoitoluokat #{1 2 3 4 5 6 7 9 10}
:urakkatyyppi :hoito}})
taulukko (apurit/taulukko-otsikolla vastaus "Oulun alueurakka 2014-2019, Toimenpidekilometrit ajalta 01.10.2014 - 01.10.2015")]
(is (vector? vastaus))
(apurit/tarkista-raportti vastaus "Toimenpidekilometrit")
(apurit/tarkista-taulukko-sarakkeet
taulukko
{:otsikko "Hoitoluokka"}
{:fmt :numero
:otsikko "IsE"
:tasaa :oikea}
{:fmt :numero
:otsikko "Is"
:tasaa :oikea}
{:fmt :numero
:otsikko "I"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ib"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ic"
:tasaa :oikea}
{:fmt :numero
:otsikko "II"
:tasaa :oikea}
{:fmt :numero
:otsikko "III"
:tasaa :oikea}
{:fmt :numero
:otsikko "K1"
:tasaa :oikea}
{:fmt :numero
:otsikko "K2"
:tasaa :oikea})
(apurit/tarkista-taulukko-kaikki-rivit
taulukko
(fn [r]
(let [rivi (if (map? r) (:rivi r) r)
hoitoluokka (first rivi)
toimenpidekilometrit (rest rivi)]
(and
(and (string? hoitoluokka)
(not-empty hoitoluokka))
(every?
(fn [solu]
(or (and (number? solu)
(<= 0 solu))
(nil? solu)))
toimenpidekilometrit)))))))
(deftest raportin-suoritus-hallintayksikolle-toimii-usean-vuoden-aikavalilla
(let [vastaus (kutsu-palvelua (:http-palvelin jarjestelma)
:suorita-raportti
+kayttaja-jvh+
{:nimi :toimenpidekilometrit
:konteksti "hallintayksikko"
:hallintayksikko-id (hae-pohjois-pohjanmaan-hallintayksikon-id)
:parametrit {:alkupvm (c/to-date (t/local-date 2014 10 1))
:loppupvm (c/to-date (t/local-date 2015 10 1))
:hoitoluokat #{1 2 3 4 5 6 7 9 10}
:urakkatyyppi :hoito}})
taulukko (apurit/taulukko-otsikolla vastaus "Pohjois-Pohjanmaa, Toimenpidekilometrit ajalta 01.10.2014 - 01.10.2015")]
(is (vector? vastaus))
(apurit/tarkista-raportti vastaus "Toimenpidekilometrit")
(apurit/tarkista-taulukko-kaikki-rivit
taulukko
(fn [r]
(let [rivi (if (map? r) (:rivi r) r)
hoitoluokka (first rivi)
toimenpidekilometrit (rest rivi)]
(and
(and (string? hoitoluokka)
(not-empty hoitoluokka))
(every?
(fn [solu]
(or (and (number? solu)
(<= 0 solu))
(nil? solu)))
toimenpidekilometrit)))))
(apurit/tarkista-taulukko-sarakkeet
taulukko
{:otsikko "Hoitoluokka"}
{:fmt :numero
:otsikko "IsE"
:tasaa :oikea}
{:fmt :numero
:otsikko "Is"
:tasaa :oikea}
{:fmt :numero
:otsikko "I"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ib"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ic"
:tasaa :oikea}
{:fmt :numero
:otsikko "II"
:tasaa :oikea}
{:fmt :numero
:otsikko "III"
:tasaa :oikea}
{:fmt :numero
:otsikko "K1"
:tasaa :oikea}
{:fmt :numero
:otsikko "K2"
:tasaa :oikea}
{:otsikko "IsE"
:tasaa :oikea}
{:fmt :numero
:otsikko "Is"
:tasaa :oikea}
{:fmt :numero
:otsikko "I"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ib"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ic"
:tasaa :oikea}
{:fmt :numero
:otsikko "II"
:tasaa :oikea}
{:fmt :numero
:otsikko "III"
:tasaa :oikea}
{:fmt :numero
:otsikko "K1"
:tasaa :oikea}
{:fmt :numero
:otsikko "K2"
:tasaa :oikea})))
(deftest raportin-suoritus-koko-maalle-toimii-usean-vuoden-aikavalilla
(let [vastaus (kutsu-palvelua (:http-palvelin jarjestelma)
:suorita-raportti
+kayttaja-jvh+
{:nimi :toimenpidekilometrit
:konteksti "koko maa"
:parametrit {:alkupvm (c/to-date (t/local-date 2014 1 1))
:loppupvm (c/to-date (t/local-date 2015 12 31))
:hoitoluokat #{1 2 3 4 5 6 7 9 10}
:urakkatyyppi :hoito}})
taulukko (apurit/taulukko-otsikolla vastaus "KOKO MAA, Toimenpidekilometrit ajalta 01.01.2014 - 31.12.2015")]
(is (vector? vastaus))
(apurit/tarkista-raportti vastaus "Toimenpidekilometrit")
(apurit/tarkista-taulukko-kaikki-rivit
taulukko
(fn [r]
(let [rivi (if (map? r) (:rivi r) r)
hoitoluokka (first rivi)
toimenpidekilometrit (rest rivi)]
(and
(and (string? hoitoluokka)
(not-empty hoitoluokka))
(every?
(fn [solu]
(or (and (number? solu)
(<= 0 solu))
(nil? solu)))
toimenpidekilometrit)))))
(apurit/tarkista-taulukko-sarakkeet
taulukko
{:otsikko "Hoitoluokka"}
{:fmt :numero
:otsikko "IsE"
:tasaa :oikea}
{:fmt :numero
:otsikko "Is"
:tasaa :oikea}
{:fmt :numero
:otsikko "I"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ib"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ic"
:tasaa :oikea}
{:fmt :numero
:otsikko "II"
:tasaa :oikea}
{:fmt :numero
:otsikko "III"
:tasaa :oikea}
{:fmt :numero
:otsikko "K1"
:tasaa :oikea}
{:fmt :numero
:otsikko "K2"
:tasaa :oikea}
{:fmt :numero
:otsikko "IsE"
:tasaa :oikea}
{:fmt :numero
:otsikko "Is"
:tasaa :oikea}
{:fmt :numero
:otsikko "I"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ib"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ic"
:tasaa :oikea}
{:fmt :numero
:otsikko "II"
:tasaa :oikea}
{:fmt :numero
:otsikko "III"
:tasaa :oikea}
{:fmt :numero
:otsikko "K1"
:tasaa :oikea}
{:fmt :numero
:otsikko "K2"
:tasaa :oikea}
{:fmt :numero
:otsikko "IsE"
:tasaa :oikea}
{:fmt :numero
:otsikko "Is"
:tasaa :oikea}
{:fmt :numero
:otsikko "I"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ib"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ic"
:tasaa :oikea}
{:fmt :numero
:otsikko "II"
:tasaa :oikea}
{:fmt :numero
:otsikko "III"
:tasaa :oikea}
{:fmt :numero
:otsikko "K1"
:tasaa :oikea}
{:fmt :numero
:otsikko "K2"
:tasaa :oikea}
{:fmt :numero
:otsikko "IsE"
:tasaa :oikea}
{:fmt :numero
:otsikko "Is"
:tasaa :oikea}
{:fmt :numero
:otsikko "I"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ib"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ic"
:tasaa :oikea}
{:fmt :numero
:otsikko "II"
:tasaa :oikea}
{:fmt :numero
:otsikko "III"
:tasaa :oikea}
{:fmt :numero
:otsikko "K1"
:tasaa :oikea}
{:fmt :numero
:otsikko "K2"
:tasaa :oikea}
{:fmt :numero
:otsikko "IsE"
:tasaa :oikea}
{:fmt :numero
:otsikko "Is"
:tasaa :oikea}
{:fmt :numero
:otsikko "I"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ib"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ic"
:tasaa :oikea}
{:fmt :numero
:otsikko "II"
:tasaa :oikea}
{:fmt :numero
:otsikko "III"
:tasaa :oikea}
{:fmt :numero
:otsikko "K1"
:tasaa :oikea}
{:fmt :numero
:otsikko "K2"
:tasaa :oikea}
{:fmt :numero
:otsikko "IsE"
:tasaa :oikea}
{:fmt :numero
:otsikko "Is"
:tasaa :oikea}
{:fmt :numero
:otsikko "I"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ib"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ic"
:tasaa :oikea}
{:fmt :numero
:otsikko "II"
:tasaa :oikea}
{:fmt :numero
:otsikko "III"
:tasaa :oikea}
{:fmt :numero
:otsikko "K1"
:tasaa :oikea}
{:fmt :numero
:otsikko "K2"
:tasaa :oikea}
{:fmt :numero
:otsikko "IsE"
:tasaa :oikea}
{:fmt :numero
:otsikko "Is"
:tasaa :oikea}
{:fmt :numero
:otsikko "I"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ib"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ic"
:tasaa :oikea}
{:fmt :numero
:otsikko "II"
:tasaa :oikea}
{:fmt :numero
:otsikko "III"
:tasaa :oikea}
{:fmt :numero
:otsikko "K1"
:tasaa :oikea}
{:fmt :numero
:otsikko "K2"
:tasaa :oikea}
{:fmt :numero
:otsikko "IsE"
:tasaa :oikea}
{:fmt :numero
:otsikko "Is"
:tasaa :oikea}
{:fmt :numero
:otsikko "I"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ib"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ic"
:tasaa :oikea}
{:fmt :numero
:otsikko "II"
:tasaa :oikea}
{:fmt :numero
:otsikko "III"
:tasaa :oikea}
{:fmt :numero
:otsikko "K1"
:tasaa :oikea}
{:fmt :numero
:otsikko "K2"
:tasaa :oikea}
{:fmt :numero
:otsikko "IsE"
:tasaa :oikea}
{:fmt :numero
:otsikko "Is"
:tasaa :oikea}
{:fmt :numero
:otsikko "I"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ib"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ic"
:tasaa :oikea}
{:fmt :numero
:otsikko "II"
:tasaa :oikea}
{:fmt :numero
:otsikko "III"
:tasaa :oikea}
{:fmt :numero
:otsikko "K1"
:tasaa :oikea}
{:fmt :numero
:otsikko "K2"
:tasaa :oikea})))
| null | https://raw.githubusercontent.com/finnishtransportagency/harja/cf1afd011f3db3ba41e20971b11add36f5a6ab97/test/clj/harja/palvelin/raportointi/toimenpidekilometrit_test.clj | clojure | (ns harja.palvelin.raportointi.toimenpidekilometrit-test
(:require [clojure.test :refer :all]
[harja.palvelin.komponentit.tietokanta :as tietokanta]
[harja.palvelin.palvelut.toimenpidekoodit :refer :all]
[harja.palvelin.palvelut.urakat :refer :all]
[harja.testi :refer :all]
[com.stuartsierra.component :as component]
[clj-time.core :as t]
[clj-time.coerce :as c]
[harja.palvelin.komponentit.pdf-vienti :as pdf-vienti]
[harja.palvelin.raportointi :as raportointi]
[harja.palvelin.raportointi.testiapurit :as apurit]
[harja.palvelin.palvelut.raportit :as raportit]))
(defn jarjestelma-fixture [testit]
(alter-var-root #'jarjestelma
(fn [_]
(component/start
(component/system-map
:db (tietokanta/luo-tietokanta testitietokanta)
:http-palvelin (testi-http-palvelin)
:pdf-vienti (component/using
(pdf-vienti/luo-pdf-vienti)
[:http-palvelin])
:raportointi (component/using
(raportointi/luo-raportointi)
[:db :pdf-vienti])
:raportit (component/using
(raportit/->Raportit)
[:http-palvelin :db :raportointi :pdf-vienti])))))
(testit)
(alter-var-root #'jarjestelma component/stop))
(use-fixtures :once (compose-fixtures
jarjestelma-fixture
urakkatieto-fixture))
(deftest raportin-suoritus-urakalle-toimii
(let [vastaus (kutsu-palvelua (:http-palvelin jarjestelma)
:suorita-raportti
+kayttaja-jvh+
{:nimi :toimenpidekilometrit
:konteksti "urakka"
:urakka-id (hae-oulun-alueurakan-2014-2019-id)
:parametrit {:alkupvm (c/to-date (t/local-date 2014 10 1))
:loppupvm (c/to-date (t/local-date 2015 10 1))
:hoitoluokat #{1 2 3 4 5 6 7 9 10}
:urakkatyyppi :hoito}})
taulukko (apurit/taulukko-otsikolla vastaus "Oulun alueurakka 2014-2019, Toimenpidekilometrit ajalta 01.10.2014 - 01.10.2015")]
(is (vector? vastaus))
(apurit/tarkista-raportti vastaus "Toimenpidekilometrit")
(apurit/tarkista-taulukko-sarakkeet
taulukko
{:otsikko "Hoitoluokka"}
{:fmt :numero
:otsikko "IsE"
:tasaa :oikea}
{:fmt :numero
:otsikko "Is"
:tasaa :oikea}
{:fmt :numero
:otsikko "I"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ib"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ic"
:tasaa :oikea}
{:fmt :numero
:otsikko "II"
:tasaa :oikea}
{:fmt :numero
:otsikko "III"
:tasaa :oikea}
{:fmt :numero
:otsikko "K1"
:tasaa :oikea}
{:fmt :numero
:otsikko "K2"
:tasaa :oikea})
(apurit/tarkista-taulukko-kaikki-rivit
taulukko
(fn [r]
(let [rivi (if (map? r) (:rivi r) r)
hoitoluokka (first rivi)
toimenpidekilometrit (rest rivi)]
(and
(and (string? hoitoluokka)
(not-empty hoitoluokka))
(every?
(fn [solu]
(or (and (number? solu)
(<= 0 solu))
(nil? solu)))
toimenpidekilometrit)))))))
(deftest raportin-suoritus-hallintayksikolle-toimii-usean-vuoden-aikavalilla
(let [vastaus (kutsu-palvelua (:http-palvelin jarjestelma)
:suorita-raportti
+kayttaja-jvh+
{:nimi :toimenpidekilometrit
:konteksti "hallintayksikko"
:hallintayksikko-id (hae-pohjois-pohjanmaan-hallintayksikon-id)
:parametrit {:alkupvm (c/to-date (t/local-date 2014 10 1))
:loppupvm (c/to-date (t/local-date 2015 10 1))
:hoitoluokat #{1 2 3 4 5 6 7 9 10}
:urakkatyyppi :hoito}})
taulukko (apurit/taulukko-otsikolla vastaus "Pohjois-Pohjanmaa, Toimenpidekilometrit ajalta 01.10.2014 - 01.10.2015")]
(is (vector? vastaus))
(apurit/tarkista-raportti vastaus "Toimenpidekilometrit")
(apurit/tarkista-taulukko-kaikki-rivit
taulukko
(fn [r]
(let [rivi (if (map? r) (:rivi r) r)
hoitoluokka (first rivi)
toimenpidekilometrit (rest rivi)]
(and
(and (string? hoitoluokka)
(not-empty hoitoluokka))
(every?
(fn [solu]
(or (and (number? solu)
(<= 0 solu))
(nil? solu)))
toimenpidekilometrit)))))
(apurit/tarkista-taulukko-sarakkeet
taulukko
{:otsikko "Hoitoluokka"}
{:fmt :numero
:otsikko "IsE"
:tasaa :oikea}
{:fmt :numero
:otsikko "Is"
:tasaa :oikea}
{:fmt :numero
:otsikko "I"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ib"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ic"
:tasaa :oikea}
{:fmt :numero
:otsikko "II"
:tasaa :oikea}
{:fmt :numero
:otsikko "III"
:tasaa :oikea}
{:fmt :numero
:otsikko "K1"
:tasaa :oikea}
{:fmt :numero
:otsikko "K2"
:tasaa :oikea}
{:otsikko "IsE"
:tasaa :oikea}
{:fmt :numero
:otsikko "Is"
:tasaa :oikea}
{:fmt :numero
:otsikko "I"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ib"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ic"
:tasaa :oikea}
{:fmt :numero
:otsikko "II"
:tasaa :oikea}
{:fmt :numero
:otsikko "III"
:tasaa :oikea}
{:fmt :numero
:otsikko "K1"
:tasaa :oikea}
{:fmt :numero
:otsikko "K2"
:tasaa :oikea})))
(deftest raportin-suoritus-hallintayksikolle-toimii-usean-vuoden-aikavalilla
(let [vastaus (kutsu-palvelua (:http-palvelin jarjestelma)
:suorita-raportti
+kayttaja-jvh+
{:nimi :toimenpidekilometrit
:konteksti "koko maa"
:parametrit {:alkupvm (c/to-date (t/local-date 2014 1 1))
:loppupvm (c/to-date (t/local-date 2015 12 31))
:hoitoluokat #{1 2 3 4 5 6 7 9 10}
:urakkatyyppi :hoito}})
taulukko (apurit/taulukko-otsikolla vastaus "KOKO MAA, Toimenpidekilometrit ajalta 01.01.2014 - 31.12.2015")]
(is (vector? vastaus))
(apurit/tarkista-raportti vastaus "Toimenpidekilometrit")
(apurit/tarkista-taulukko-kaikki-rivit
taulukko
(fn [r]
(let [rivi (if (map? r) (:rivi r) r)
hoitoluokka (first rivi)
toimenpidekilometrit (rest rivi)]
(and
(and (string? hoitoluokka)
(not-empty hoitoluokka))
(every?
(fn [solu]
(or (and (number? solu)
(<= 0 solu))
(nil? solu)))
toimenpidekilometrit)))))
(apurit/tarkista-taulukko-sarakkeet
taulukko
{:otsikko "Hoitoluokka"}
{:fmt :numero
:otsikko "IsE"
:tasaa :oikea}
{:fmt :numero
:otsikko "Is"
:tasaa :oikea}
{:fmt :numero
:otsikko "I"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ib"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ic"
:tasaa :oikea}
{:fmt :numero
:otsikko "II"
:tasaa :oikea}
{:fmt :numero
:otsikko "III"
:tasaa :oikea}
{:fmt :numero
:otsikko "K1"
:tasaa :oikea}
{:fmt :numero
:otsikko "K2"
:tasaa :oikea}
{:fmt :numero
:otsikko "IsE"
:tasaa :oikea}
{:fmt :numero
:otsikko "Is"
:tasaa :oikea}
{:fmt :numero
:otsikko "I"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ib"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ic"
:tasaa :oikea}
{:fmt :numero
:otsikko "II"
:tasaa :oikea}
{:fmt :numero
:otsikko "III"
:tasaa :oikea}
{:fmt :numero
:otsikko "K1"
:tasaa :oikea}
{:fmt :numero
:otsikko "K2"
:tasaa :oikea}
{:fmt :numero
:otsikko "IsE"
:tasaa :oikea}
{:fmt :numero
:otsikko "Is"
:tasaa :oikea}
{:fmt :numero
:otsikko "I"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ib"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ic"
:tasaa :oikea}
{:fmt :numero
:otsikko "II"
:tasaa :oikea}
{:fmt :numero
:otsikko "III"
:tasaa :oikea}
{:fmt :numero
:otsikko "K1"
:tasaa :oikea}
{:fmt :numero
:otsikko "K2"
:tasaa :oikea}
{:fmt :numero
:otsikko "IsE"
:tasaa :oikea}
{:fmt :numero
:otsikko "Is"
:tasaa :oikea}
{:fmt :numero
:otsikko "I"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ib"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ic"
:tasaa :oikea}
{:fmt :numero
:otsikko "II"
:tasaa :oikea}
{:fmt :numero
:otsikko "III"
:tasaa :oikea}
{:fmt :numero
:otsikko "K1"
:tasaa :oikea}
{:fmt :numero
:otsikko "K2"
:tasaa :oikea}
{:fmt :numero
:otsikko "IsE"
:tasaa :oikea}
{:fmt :numero
:otsikko "Is"
:tasaa :oikea}
{:fmt :numero
:otsikko "I"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ib"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ic"
:tasaa :oikea}
{:fmt :numero
:otsikko "II"
:tasaa :oikea}
{:fmt :numero
:otsikko "III"
:tasaa :oikea}
{:fmt :numero
:otsikko "K1"
:tasaa :oikea}
{:fmt :numero
:otsikko "K2"
:tasaa :oikea}
{:fmt :numero
:otsikko "IsE"
:tasaa :oikea}
{:fmt :numero
:otsikko "Is"
:tasaa :oikea}
{:fmt :numero
:otsikko "I"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ib"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ic"
:tasaa :oikea}
{:fmt :numero
:otsikko "II"
:tasaa :oikea}
{:fmt :numero
:otsikko "III"
:tasaa :oikea}
{:fmt :numero
:otsikko "K1"
:tasaa :oikea}
{:fmt :numero
:otsikko "K2"
:tasaa :oikea}
{:fmt :numero
:otsikko "IsE"
:tasaa :oikea}
{:fmt :numero
:otsikko "Is"
:tasaa :oikea}
{:fmt :numero
:otsikko "I"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ib"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ic"
:tasaa :oikea}
{:fmt :numero
:otsikko "II"
:tasaa :oikea}
{:fmt :numero
:otsikko "III"
:tasaa :oikea}
{:fmt :numero
:otsikko "K1"
:tasaa :oikea}
{:fmt :numero
:otsikko "K2"
:tasaa :oikea}
{:fmt :numero
:otsikko "IsE"
:tasaa :oikea}
{:fmt :numero
:otsikko "Is"
:tasaa :oikea}
{:fmt :numero
:otsikko "I"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ib"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ic"
:tasaa :oikea}
{:fmt :numero
:otsikko "II"
:tasaa :oikea}
{:fmt :numero
:otsikko "III"
:tasaa :oikea}
{:fmt :numero
:otsikko "K1"
:tasaa :oikea}
{:fmt :numero
:otsikko "K2"
:tasaa :oikea}
{:fmt :numero
:otsikko "IsE"
:tasaa :oikea}
{:fmt :numero
:otsikko "Is"
:tasaa :oikea}
{:fmt :numero
:otsikko "I"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ib"
:tasaa :oikea}
{:fmt :numero
:otsikko "Ic"
:tasaa :oikea}
{:fmt :numero
:otsikko "II"
:tasaa :oikea}
{:fmt :numero
:otsikko "III"
:tasaa :oikea}
{:fmt :numero
:otsikko "K1"
:tasaa :oikea}
{:fmt :numero
:otsikko "K2"
:tasaa :oikea})))
|
|
5b5e4f36a966a35e11f00b7f22e1c3b07077fa778c2a31e158fd972f88e8f6b3 | esl/MongooseIM | run_common_test.erl | %% During dev you would use something similar to:
%% TEST_HOSTS="mim" ./tools/test.sh -c false -s false -p odbc_mssql_mnesia
%%
%% If you also want to start just mim1 node use:
%% DEV_NODES="mim1" TEST_HOSTS="mim" ./tools/test.sh -c false -s false -p odbc_mssql_mnesia
%%
%% TEST_HOSTS variable contains host names from hosts in big_tests/test.config.
%% DEV_NODES variable contains release names from profiles in rebar.config.
%% Release names are also used to name directories in the _build directory.
%%
%% Valid TEST_HOSTS are mim, mim2, mim3, fed, reg.
%% Valid DEV_NODES are mim1, mim2, mim3, fed1, reg1.
%%
%% Example with two nodes:
%% DEV_NODES="mim1 mim2" TEST_HOSTS="mim mim2" ./tools/test.sh -c false -s false -p odbc_mssql_mnesia
%%
%% Environment variable PRESET_ENABLED is true by default.
%% PRESET_ENABLED=false disables preset application and forces to run
%% one preset.
-module(run_common_test).
-export([init/0, main/1, analyze/2]).
-define(CT_DIR, filename:join([".", "tests"])).
-define(CT_REPORT, filename:join([".", "ct_report"])).
%% DEBUG: compile time settings
-define(PRINT_ERRORS, false).
-define(PRINT_STATS, false).
%%
%% Entry
%%
-record(opts, {test,
spec,
cover,
preset = all,
hooks}).
%% Accepted options formatted as:
%% {opt_name, opt_index_in_opts_record, fun value_sanitizer/1}.
%% -spec value_sanitizer(string()) -> NewValue :: any().
opts() ->
[{test, #opts.test, fun quick_or_full/1},
{spec, #opts.spec, fun list_to_atom/1},
{cover, #opts.cover, fun bool_or_module_list/1},
{preset, #opts.preset, fun preset/1},
{hooks, #opts.hooks, fun module_list/1}].
%% Raw args are 'key=val' atoms.
%% Args are {key :: atom(), value :: string()} pairs.
%% "=" is an invalid character in option name or value.
main(RawArgs) ->
init(),
Args = [raw_to_arg(Raw) || Raw <- RawArgs],
Opts = apply_preset_enabled(args_to_opts(Args)),
try
CTRunDirsBeforeRun = ct_run_dirs(),
Results = run(Opts),
%% Waiting for messages to be flushed
timer:sleep(50),
CTRunDirsAfterRun = ct_run_dirs(),
ExitStatusByGroups = exit_status_by_groups(CTRunDirsBeforeRun, CTRunDirsAfterRun, Results),
ExitStatusByTestCases = process_results(Results),
case ExitStatusByGroups of
undefined ->
io:format("Exiting by test cases summary: ~p~n", [ExitStatusByTestCases]),
init:stop(ExitStatusByTestCases);
_ when is_integer(ExitStatusByGroups) ->
io:format("Exiting by groups summary: ~p~n", [ExitStatusByGroups]),
init:stop(ExitStatusByGroups)
end
catch Type:Reason:StackTrace ->
io:format("TEST CRASHED~n Error type: ~p~n Reason: ~p~n Stacktrace:~n~p~n",
[Type, Reason, StackTrace]),
error_logger:error_msg("TEST CRASHED~n Error type: ~p~n Reason: ~p~n Stacktrace:~n~p~n",
[Type, Reason, StackTrace]),
%% Waiting for messages to be flushed
timer:sleep(5000),
init:stop("run_common_test:main/1 crashed")
end.
init() ->
{ok, _} = application:ensure_all_started(jid).
run(#opts{test = quick, cover = Cover, spec = Spec}) ->
do_run_quick_test(tests_to_run(Spec), Cover);
run(#opts{test = full, spec = Spec, preset = Preset, cover = Cover, hooks = HookModules}) ->
run_test(tests_to_run(Spec) ++ ct_hooks(HookModules),
case Preset of
all -> all;
undefined -> all;
_ when is_list(Preset) -> Preset;
_ -> [Preset]
end, Cover).
apply_preset_enabled(#opts{} = Opts) ->
case os:getenv("PRESET_ENABLED") of
"false" ->
io:format("PRESET_ENABLED is set to false, enabling quick mode~n"),
Opts#opts{test = quick};
_ ->
Opts
end.
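%% Illustrative sketch (not part of the original module): PRESET_ENABLED=false
%% downgrades a full run to a quick one, no matter what the caller asked for.
%% Left unexported; it only documents the behaviour of apply_preset_enabled/1.
example_apply_preset_enabled() ->
    true = os:putenv("PRESET_ENABLED", "false"),
    #opts{test = quick} = apply_preset_enabled(#opts{test = full}),
    true = os:unsetenv("PRESET_ENABLED"),
    ok.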
%%
%% Helpers
%%
repo_dir() ->
case os:getenv("REPO_DIR") of
false ->
init:stop("Environment variable REPO_DIR is undefined");
Value ->
Value
end.
args_to_opts(Args) ->
{Args, Opts} = lists:foldl(fun set_opt/2, {Args, #opts{}}, opts()),
Opts.
raw_to_arg(RawArg) ->
ArgVal = atom_to_list(RawArg),
case string:tokens(ArgVal, "=") of
[Arg, Val] ->
{list_to_atom(Arg), Val};
[Arg] ->
{list_to_atom(Arg), ""}
end.
set_opt({Opt, Index, Sanitizer}, {Args, Opts}) ->
Value = Sanitizer(proplists:get_value(Opt, Args)),
{Args, setelement(Index, Opts, Value)}.
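%% Illustrative sketch (not part of the original module): how 'key=val' atoms
%% from the command line end up in an #opts{} record. The spec and preset
%% names below are made-up placeholders, not required values.
example_args_to_opts() ->
    Opts = args_to_opts([raw_to_arg('test=full'),
                         raw_to_arg('spec=default.spec'),
                         raw_to_arg('preset=internal_mnesia')]),
    full = Opts#opts.test,
    'default.spec' = Opts#opts.spec,
    [internal_mnesia] = Opts#opts.preset,
    ok.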
quick_or_full("quick") -> quick;
quick_or_full("full") -> full.
preset(undefined) -> undefined;
preset(PresetList) ->
[list_to_atom(Preset) || Preset <- string:tokens(PresetList, " ")].
read_file(ConfigFile) when is_list(ConfigFile) ->
{ok, CWD} = file:get_cwd(),
filename:join([CWD, ConfigFile]),
{ok, Props} = handle_file_error(ConfigFile, file:consult(ConfigFile)),
Props.
tests_to_run(TestSpec) ->
TestSpecFile = atom_to_list(TestSpec),
[
{spec, TestSpecFile}
] ++ ct_opts().
ct_hooks([]) ->
[];
ct_hooks(HookModules) ->
[{ct_hooks, HookModules}].
save_count(Test, Configs) ->
Repeat = case proplists:get_value(repeat, Test) of
undefined -> 1;
Other -> Other
end,
Times = case length(Configs) of
0 -> 1;
N -> N
end,
file:write_file("/tmp/ct_count", integer_to_list(Repeat*Times)).
run_test(Test, PresetsToRun, CoverOpts) ->
{ConfigFiles, Props} = get_ct_config(Test),
prepare_cover(Props, CoverOpts),
error_logger:info_msg("Presets to run ~p", [PresetsToRun]),
case get_presets(Props) of
{ok, Presets} ->
Presets1 = case PresetsToRun of
all ->
Presets;
_ ->
assert_all_presets_present(PresetsToRun, Presets),
error_logger:info_msg("Skip presets ~p",
[ preset_names(Presets) -- PresetsToRun ]),
lists:filter(fun({Preset,_}) ->
lists:member(Preset, PresetsToRun)
end, Presets)
end,
Length = length(Presets1),
Names = preset_names(Presets1),
error_logger:info_msg("Starting test of ~p configurations: ~n~p~n",
[Length, Names]),
Zip = lists:zip(lists:seq(1, Length), Presets1),
R = [ run_config_test(Props, Preset, Test, N, Length) || {N, Preset} <- Zip ],
save_count(Test, Presets1),
analyze_coverage(Props, CoverOpts),
R;
{error, not_found} ->
error_logger:info_msg("Presets were not found in the config files ~ts",
[ConfigFiles]),
R = do_run_quick_test(Test, CoverOpts),
analyze_coverage(Props, CoverOpts),
R
end.
get_presets(Props) ->
case proplists:lookup(presets, Props) of
{presets, Presets} ->
case proplists:lookup(toml, Presets) of
{toml, Preset} ->
{ok, Preset};
_ ->
{error, not_found}
end;
_ ->
{error, not_found}
end.
get_ct_config(Opts) ->
Spec = proplists:get_value(spec, Opts),
Props = read_file(Spec),
ConfigFiles = proplists:get_value(config, Props, ["test.config"]),
% Apply the files in reverse, like ct will do when running the tests
ConfigProps = merge_vars([read_file(File) || File <- lists:reverse(ConfigFiles)]),
{ConfigFiles, ConfigProps}.
preset_names(Presets) ->
[Preset||{Preset, _} <- Presets].
do_run_quick_test(Test, CoverOpts) ->
prepare_cover(Test, CoverOpts),
load_test_modules(Test),
Result = ct:run_test(Test),
case Result of
{error, Reason} ->
throw({ct_error, Reason});
{Ok, Failed, {UserSkipped, AutoSkipped}} ->
analyze_coverage(Test, CoverOpts),
save_count(Test, []),
[{ok, {Ok, Failed, UserSkipped, AutoSkipped}}]
end.
run_config_test(Props, {Name, Variables}, Test, N, Tests) ->
enable_preset(Props, Name, Variables, N, Tests),
load_test_modules(Test),
Result = ct:run_test([{label, Name} | Test]),
case Result of
{error, Reason} ->
throw({ct_error, Reason});
{Ok, Failed, {UserSkipped, AutoSkipped}} ->
{ok, {Ok, Failed, UserSkipped, AutoSkipped}}
end.
enable_preset(Props, Name, PresetVars, N, Tests) ->
%% TODO: Do this with a multicall, otherwise it's not as fast as possible (not parallelized).
%% A multicall requires the function to be defined on the other side, though.
Rs = [ maybe_enable_preset_on_node(host_node(H), PresetVars,
host_vars(H), host_name(H))
|| H <- get_hosts_to_enable_preset(Props) ],
[ok] = lists:usort(Rs),
error_logger:info_msg("Configuration ~p of ~p: ~p started.~n",
[N, Tests, Name]).
%% Specify just some nodes to run the tests on:
TEST_HOSTS="mim " ./tools / test.sh -p
maybe_enable_preset_on_node(Node, PresetVars, HostVars, HostName) ->
case is_test_host_enabled(HostName) of
true ->
enable_preset_on_node(Node, PresetVars, HostVars);
false ->
error_logger:info_msg("Skip enable_preset_on_node for node=~p host=~p",
[Node, HostName]),
ok
end.
%% Check that the node is listed in the TEST_HOSTS list (if TEST_HOSTS is set).
is_test_host_enabled(HostName) ->
case os:getenv("TEST_HOSTS") of
false ->
true; %% By default all hosts are enabled
        EnvValue ->
            %% EnvValue examples are "mim" or "mim mim2"
BinHosts = binary:split(iolist_to_binary(EnvValue), <<" ">>, [global]),
lists:member(atom_to_binary(HostName, utf8), BinHosts)
end.
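%% Illustrative sketch (not part of the original module): with TEST_HOSTS unset
%% every host is enabled; with TEST_HOSTS="mim mim2" only those host names pass.
%% It mutates the process environment, so it is only a demo.
example_is_test_host_enabled() ->
    true = os:unsetenv("TEST_HOSTS"),
    true = is_test_host_enabled(mim3),
    true = os:putenv("TEST_HOSTS", "mim mim2"),
    true = is_test_host_enabled(mim),
    false = is_test_host_enabled(mim3),
    true = os:unsetenv("TEST_HOSTS"),
    ok.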
enable_preset_on_node(Node, PresetVars, HostVarsFilePrefix) ->
{ok, Cwd} = call(Node, file, get_cwd, []),
TemplatePath = filename:join([repo_dir(), "rel", "files", "mongooseim.toml"]),
NodeVarsPath = filename:join([repo_dir(), "rel", HostVarsFilePrefix ++ ".vars-toml.config"]),
{ok, Template} = handle_file_error(TemplatePath, file:read_file(TemplatePath)),
NodeVars = read_vars(NodeVarsPath),
TemplatedConfig = template_config(Template, NodeVars ++ PresetVars),
CfgPath = filename:join([Cwd, "etc", "mongooseim.toml"]),
ok = call(Node, file, write_file, [CfgPath, TemplatedConfig]),
call(Node, application, stop, [mongooseim]),
call(Node, application, start, [mongooseim]),
ok.
template_config(Template, RawVars) ->
MergedVars = ensure_binary_strings(maps:from_list(RawVars)),
%% Render twice to replace variables in variables
Tmp = bbmustache:render(Template, MergedVars, [{key_type, atom}]),
bbmustache:render(Tmp, MergedVars, [{key_type, atom}]).
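%% Illustrative sketch (not part of the original module): why template_config/2
%% renders twice. A variable may expand to text that itself contains a mustache
%% variable; the second pass resolves it. The variable names here are made up.
example_template_config() ->
    Vars = [{hosts, "{{h1}}"}, {h1, "mim1@localhost"}],
    <<"hosts = mim1@localhost">> =
        template_config(<<"hosts = {{hosts}}">>, Vars),
    ok.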
merge_vars([Vars1, Vars2|Rest]) ->
Vars = lists:foldl(fun ({Var, Val}, Acc) ->
lists:keystore(Var, 1, Acc, {Var, Val})
end, Vars1, Vars2),
merge_vars([Vars|Rest]);
merge_vars([Vars]) -> Vars.
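%% Illustrative sketch (not part of the original module): later var lists win on
%% key clashes, which is why get_ct_config/1 folds the config files in reverse.
example_merge_vars() ->
    [{a, 1}, {b, 3}, {c, 4}] =
        lists:sort(merge_vars([[{a, 1}, {b, 2}], [{b, 3}, {c, 4}]])),
    ok.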
read_vars(File) ->
{ok, Terms} = handle_file_error(File, file:consult(File)),
lists:flatmap(fun({Key, Val}) ->
[{Key, Val}];
(IncludedFile) when is_list(IncludedFile) ->
Path = filename:join(filename:dirname(File), IncludedFile),
read_vars(Path)
end, Terms).
%% bbmustache tries to iterate over lists, so we need to make them binaries
ensure_binary_strings(Vars) ->
maps:map(fun(dbs, V) -> V;
(_K, []) -> <<"\n">>; % empty binary is considered falsey in conditions
(_K, V) when is_list(V) -> list_to_binary(V);
(_K, V) -> V
end, Vars).
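%% Illustrative sketch (not part of the original module): plain strings become
%% binaries so bbmustache does not iterate over them, and empty strings become
%% <<"\n">> so they still count as truthy in {{#var}} sections.
example_ensure_binary_strings() ->
    #{name := <<"mim1">>, extra := <<"\n">>, port := 5222} =
        ensure_binary_strings(#{name => "mim1", extra => "", port => 5222}),
    ok.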
call(Node, M, F, A) ->
case rpc:call(Node, M, F, A) of
{badrpc, Reason} ->
error_logger:error_msg("RPC call ~p:~p/~p to node ~p failed because ~p",
[M, F, length(A), Node, Reason]),
{badrpc, Reason};
Result ->
Result
end.
prepare_cover(Props, true) ->
io:format("Preparing cover~n"),
prepare(Props);
prepare_cover(_, _) ->
ok.
analyze_coverage(Props, true) ->
analyze(Props, true);
analyze_coverage(Props, ModuleList) when is_list(ModuleList) ->
analyze(Props, ModuleList);
analyze_coverage(_, _) ->
ok.
prepare(Props) ->
Nodes = get_mongoose_nodes(Props),
maybe_compile_cover(Nodes).
maybe_compile_cover([]) ->
io:format("cover: skip cover compilation~n", []),
ok;
maybe_compile_cover(Nodes) ->
io:format("cover: compiling modules for nodes ~p~n", [Nodes]),
import_code_paths(hd(Nodes)),
cover:start(Nodes),
Dir = call(hd(Nodes), code, lib_dir, [mongooseim, ebin]),
    %% Time is in microseconds
{Time, Compiled} = timer:tc(fun() ->
Results = cover:compile_beam_directory(Dir),
Ok = [X || X = {ok, _} <- Results],
NotOk = Results -- Ok,
#{ok => length(Ok), failed => NotOk}
end),
github_actions_fold("cover compiled output", fun() ->
io:format("cover: compiled ~p~n", [Compiled])
end),
report_progress("~nCover compilation took ~ts~n", [microseconds_to_string(Time)]),
ok.
analyze(Props, CoverOpts) ->
io:format("Coverage analyzing~n"),
Nodes = get_mongoose_nodes(Props),
analyze(Props, CoverOpts, Nodes).
analyze(_Props, _CoverOpts, []) ->
ok;
analyze(_Props, CoverOpts, Nodes) ->
deduplicate_cover_server_console_prints(),
%% Import small tests cover
Files = filelib:wildcard(repo_dir() ++ "/_build/**/cover/*.coverdata"),
io:format("Files: ~p", [Files]),
report_time("Import cover data into run_common_test node", fun() ->
[cover:import(File) || File <- Files]
end),
report_time("Export merged cover data", fun() ->
cover:export("/tmp/mongoose_combined.coverdata")
end),
case os:getenv("GITHUB_RUN_ID") of
false ->
make_html(modules_to_analyze(CoverOpts));
_ ->
ok
end,
case os:getenv("KEEP_COVER_RUNNING") of
"1" ->
io:format("Skip stopping cover~n"),
ok;
_ ->
report_time("Stopping cover on MongooseIM nodes", fun() ->
cover:stop([node()|Nodes])
end)
end.
make_html(Modules) ->
{ok, Root} = file:get_cwd(),
SortScript = Root ++ "/priv/sorttable.js",
os:cmd("cp " ++ SortScript ++ " " ++ ?CT_REPORT),
FilePath = case file:read_file(?CT_REPORT++"/index.html") of
{ok, IndexFileData} ->
R = re:replace(IndexFileData, "<a href=\"all_runs.html\">ALL RUNS</a>", "& <a href=\"cover.html\" style=\"margin-right:5px\">COVER</a>"),
file:write_file(?CT_REPORT++"/index.html", R),
?CT_REPORT++"/cover.html";
_ -> skip
end,
CoverageDir = filename:dirname(FilePath)++"/coverage",
file:make_dir(CoverageDir),
{ok, File} = file:open(FilePath, [write]),
file:write(File, get_cover_header()),
Fun = fun(Module, {CAcc, NCAcc}) ->
FileName = lists:flatten(io_lib:format("~s.COVER.html",[Module])),
                  %% We assume that import_code_paths/1 was called earlier
case cover:analyse(Module, module) of
{ok, {Module, {C, NC}}} ->
file:write(File, row(atom_to_list(Module), C, NC, percent(C,NC),"coverage/"++FileName)),
FilePathC = filename:join([CoverageDir, FileName]),
catch cover:analyse_to_file(Module, FilePathC, [html]),
{CAcc + C, NCAcc + NC};
Reason ->
error_logger:error_msg("issue=cover_analyse_failed module=~p reason=~p",
[Module, Reason]),
{CAcc, NCAcc}
end
end,
{CSum, NCSum} = lists:foldl(Fun, {0, 0}, Modules),
file:write(File, row("Summary", CSum, NCSum, percent(CSum, NCSum), "#")),
file:close(File).
get_hosts_to_enable_preset(Props) ->
[Host || Host <- get_all_hosts(Props), should_enable_preset(host_cluster(Host))].
should_enable_preset(mim) -> true;
should_enable_preset(reg) -> true;
should_enable_preset(_) -> false.
get_all_hosts(Props) ->
{hosts, Hosts} = lists:keyfind(hosts, 1, Props),
Hosts.
get_mongoose_nodes(Props) ->
[ host_node(H) || H <- get_all_hosts(Props), is_test_host_enabled(host_name(H)) ].
percent(0, _) -> 0;
percent(C, NC) when C /= 0; NC /= 0 -> round(C / (NC+C) * 100);
percent(_, _) -> 100.
row(Row, C, NC, Percent, Path) ->
[
"<tr>",
"<td><a href='", Path, "'>", Row, "</a></td>",
"<td>", integer_to_list(Percent), "%</td>",
"<td>", integer_to_list(C), "</td>",
"<td>", integer_to_list(NC), "</td>",
"<td>", integer_to_list(C+NC), "</td>",
"</tr>\n"
].
get_cover_header() ->
"<html>\n<head></head>\n<body bgcolor=\"white\" text=\"black\" link=\"blue\" vlink=\"purple\" alink=\"red\">\n"
"<head><script src='sorttable.js'></script></head>"
"<h1>Coverage for application 'MongooseIM'</h1>\n"
"<table class='sortable' border=3 cellpadding=5>\n"
"<tr><th>Module</th><th>Covered (%)</th><th>Covered (Lines)</th><th>Not covered (Lines)</th><th>Total (Lines)</th></tr>".
bool_or_module_list("true") ->
true;
bool_or_module_list("false") ->
false;
bool_or_module_list(undefined) ->
false;
bool_or_module_list(ModuleList) when is_list(ModuleList) ->
module_list(ModuleList).
module_list(undefined) ->
[];
module_list(ModuleList) ->
[ list_to_atom(L) || L <- string:tokens(ModuleList, ", ") ].
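%% Illustrative sketch (not part of the original module): the cover option is
%% either a boolean string or a comma/space separated module list. The module
%% names below are arbitrary examples.
example_bool_or_module_list() ->
    true = bool_or_module_list("true"),
    false = bool_or_module_list(undefined),
    [mod_foo, mod_bar] = bool_or_module_list("mod_foo, mod_bar"),
    ok.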
modules_to_analyze(true) ->
lists:usort(cover:imported_modules() ++ cover:modules());
modules_to_analyze(ModuleList) when is_list(ModuleList) ->
ModuleList.
add({X1, X2, X3, X4},
{Y1, Y2, Y3, Y4}) ->
{X1 + Y1,
X2 + Y2,
X3 + Y3,
X4 + Y4}.
process_results(CTResults) ->
Ok = 0,
Failed = 0,
UserSkipped = 0,
AutoSkipped = 0,
Errors = [],
process_results(CTResults, {{Ok, Failed, UserSkipped, AutoSkipped}, Errors}).
process_results([], {StatsAcc, Errors}) ->
write_stats_into_vars_file(StatsAcc),
print_errors(Errors),
print_stats(StatsAcc),
exit_code(StatsAcc);
process_results([ {ok, RunStats} | T ], {StatsAcc, Errors}) ->
process_results(T, {add(RunStats, StatsAcc), Errors});
process_results([ Error | T ], {StatsAcc, Errors}) ->
process_results(T, {StatsAcc, [Error | Errors]}).
print_errors(Errors) ->
?PRINT_ERRORS andalso [ print(standard_error, "~p~n", [E]) || E <- Errors ].
print_stats(Stats) ->
?PRINT_STATS andalso do_print_stats(Stats).
do_print_stats({Ok, Failed, _UserSkipped, AutoSkipped}) when Ok == 0;
Failed > 0;
AutoSkipped > 0 ->
print(standard_error, "Tests:~n", []),
Ok == 0 andalso print(standard_error, " ok : ~b~n", [Ok]),
Failed > 0 andalso print(standard_error, " failed : ~b~n", [Failed]),
AutoSkipped > 0 andalso print(standard_error, " auto-skipped: ~b~n", [AutoSkipped]).
write_stats_into_vars_file(Stats) ->
file:write_file("/tmp/ct_stats_vars", [format_stats_as_vars(Stats)]).
format_stats_as_vars({Ok, Failed, UserSkipped, AutoSkipped}) ->
io_lib:format("CT_COUNTER_OK=~p~n"
"CT_COUNTER_FAILED=~p~n"
"CT_COUNTER_USER_SKIPPED=~p~n"
"CT_COUNTER_AUTO_SKIPPED=~p~n",
[Ok, Failed, UserSkipped, AutoSkipped]).
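%% Illustrative sketch (not part of the original module): the counters land in
%% /tmp/ct_stats_vars as shell-style assignments, one per line.
example_format_stats_as_vars() ->
    "CT_COUNTER_OK=120\nCT_COUNTER_FAILED=0\n"
    "CT_COUNTER_USER_SKIPPED=2\nCT_COUNTER_AUTO_SKIPPED=0\n" =
        lists:flatten(format_stats_as_vars({120, 0, 2, 0})),
    ok.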
%% Fail if there are failed test cases, auto skipped cases,
%% or the number of passed tests is 0 (which is also strange - a misconfiguration?).
%% StatsAcc is similar (Skipped are not a tuple) to the success result from ct:run_test/1:
%%
%% {Ok, Failed, UserSkipped, AutoSkipped}
%%
exit_code({Ok, Failed, _UserSkipped, AutoSkipped})
when Ok == 0; Failed > 0; AutoSkipped > 0 ->
1;
exit_code({_, _, _, _}) ->
0.
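%% Illustrative sketch (not part of the original module): the exit status is
%% non-zero when anything failed or was auto-skipped, or when nothing ran at all.
example_exit_code() ->
    0 = exit_code({120, 0, 2, 0}),  %% user-skipped cases alone are fine
    1 = exit_code({120, 1, 0, 0}),  %% failures
    1 = exit_code({120, 0, 0, 3}),  %% auto-skipped cases
    1 = exit_code({0, 0, 0, 0}),    %% nothing ran - looks like a misconfiguration
    ok.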
print(Handle, Fmt, Args) ->
io:format(Handle, Fmt, Args).
host_cluster(Host) -> host_param(cluster, Host).
host_node(Host) -> host_param(node, Host).
host_vars(Host) -> host_param(vars, Host).
host_name({HostName,_}) -> HostName.
host_param(Name, {_, Params}) ->
{Name, Param} = lists:keyfind(Name, 1, Params),
Param.
report_time(Description, Fun) ->
report_progress("~nExecuting ~ts~n", [Description]),
Start = os:timestamp(),
try
Fun()
after
Microseconds = timer:now_diff(os:timestamp(), Start),
Time = microseconds_to_string(Microseconds),
report_progress("~ts took ~ts~n", [Description, Time])
end.
microseconds_to_string(Microseconds) ->
Milliseconds = Microseconds div 1000,
SecondsFloat = Milliseconds / 1000,
io_lib:format("~.3f seconds", [SecondsFloat]).
%% Writes onto GitHub actions console directly
report_progress(Format, Args) ->
Message = io_lib:format(Format, Args),
file:write_file("/tmp/progress", Message, [append]).
github_actions_fold(Description, Fun) ->
case os:getenv("GITHUB_RUN_ID") of
false ->
Fun();
_ ->
io:format("github_actions_fold:start:~ts~n", [Description]),
Result = Fun(),
io:format("github_actions_fold:end:~ts~n", [Description]),
Result
end.
%% Import code paths from a running node.
%% It allows cover:analyse/2 to find source file by calling
%% Module:module_info(compiled).
import_code_paths(FromNode) when is_atom(FromNode) ->
Paths = call(FromNode, code, get_path, []),
code:add_paths(Paths).
%% Gets result of file operation and prints filename, if we have any issues.
handle_file_error(FileName, {error, Reason}) ->
error_logger:error_msg("issue=file_operation_error filename=~p reason=~p",
[FileName, Reason]),
{error, Reason};
handle_file_error(_FileName, Other) ->
Other.
%% ------------------------------------------------------------------
%% cover_server process is using io:format too much.
%% This code removes duplicate io:formats.
%%
%% Example of a message we want to write only once:
" Analysis includes data from imported files " from cover.erl in Erlang / R19
deduplicate_cover_server_console_prints() ->
%% Set a new group leader for cover_server
CoverPid = whereis(cover_server),
dedup_proxy_group_leader:start_proxy_group_leader_for(CoverPid).
ct_run_dirs() ->
filelib:wildcard("ct_report/ct_run*").
exit_status_by_groups(CTRunDirsBeforeRun, CTRunDirsAfterRun, Results) ->
NewCTRunDirs = CTRunDirsAfterRun -- CTRunDirsBeforeRun,
case NewCTRunDirs of
[] ->
io:format("WARNING: ct_run directory has not been created~nResults ~p~n", [Results]),
undefined;
[_] ->
anaylyze_groups_runs(hd(NewCTRunDirs))
end.
anaylyze_groups_runs(LatestCTRun) ->
case file:consult(LatestCTRun ++ "/all_groups.summary") of
{ok, Terms} ->
proplists:get_value(total_failed, Terms, undefined);
{error, Error} ->
error_logger:error_msg("Error reading all_groups.summary: ~p~n", [Error]),
undefined
end.
ct_opts() ->
%% Tell Common Tests that it should not compile any more files
%% (they are compiled by rebar)
case os:getenv("SKIP_AUTO_COMPILE") of
"true" ->
[{auto_compile, false}];
_ ->
[]
end.
%% Common Tests does not try to use code autoloading feature
%% for loading suites.
%% So, if we want to use it, we need to put tests where Common Tests
%% can find it. Or we can load modules instead (what we do here).
load_test_modules(Opts) ->
Spec = proplists:get_value(spec, Opts),
%% Read test spec properties
Props = read_file(Spec),
Modules = lists:usort(test_modules(Props)),
[try_load_module(M) || M <- Modules].
test_modules([H|T]) when is_tuple(H) ->
test_modules_list(tuple_to_list(H)) ++ test_modules(T);
test_modules([_|T]) ->
test_modules(T);
test_modules([]) ->
[].
test_modules_list([suites, _, Suite|_]) ->
[Suite];
test_modules_list([groups, _, Suite|_]) ->
[Suite];
test_modules_list([cases, _, Suite|_]) ->
[Suite];
test_modules_list(_) ->
[].
try_load_module(Module) ->
case code:is_loaded(Module) of
true -> already_loaded;
_ -> code:load_file(Module)
end.
assert_all_presets_present(PresetsToCheck, PresetConfs) ->
lists:foreach(fun(Preset) ->
assert_preset_present(Preset, PresetConfs)
end, PresetsToCheck).
assert_preset_present(small_tests, _PresetConfs) ->
ok;
assert_preset_present(Preset, PresetConfs) ->
case lists:keymember(Preset, 1, PresetConfs) of
true -> ok;
false ->
error_logger:error_msg("Preset not found ~p~n", [Preset]),
error({preset_not_found, Preset})
end.
| null | https://raw.githubusercontent.com/esl/MongooseIM/25757b1b433afd54f35baa13877a138b5a243bb6/big_tests/run_common_test.erl | erlang | During dev you would use something similar to:
TEST_HOSTS="mim" ./tools/test.sh -c false -s false -p odbc_mssql_mnesia
If you also want to start just mim1 node use:
DEV_NODES="mim1" TEST_HOSTS="mim" ./tools/test.sh -c false -s false -p odbc_mssql_mnesia
Release names are also used to name directories in the _build directory.
DEV_NODES="mim1 mim2" TEST_HOSTS="mim mim2" ./tools/test.sh -c false -s false -p odbc_mssql_mnesia
Environment variable PRESET_ENABLED is true by default.
PRESET_ENABLED=false disables preset application and forces to run
one preset.
DEBUG: compile time settings
Entry
Accepted options formatted as:
"=" is an invalid character in option name or value.
Waiting for messages to be flushed
Waiting for messages to be flushed
Helpers
Apply the files in reverse, like ct will do when running the tests
TODO: Do this with a multicall, otherwise it's not as fast as possible (not parallelized).
A multicall requires the function to be defined on the other side, though.
Specify just some nodes to run the tests on:
By default all hosts are enabled
Render twice to replace variables in variables
bbmustache tries to iterate over lists, so we need to make them binaries
empty binary is considered falsey in conditions
Import small tests cover
Fail if there are failed test cases, auto skipped cases,
or the number of passed tests is 0 (which is also strange - a misconfiguration?).
Import code paths from a running node.
It allows cover:analyse/2 to find source file by calling
Module:module_info(compiled).
Gets result of file operation and prints filename, if we have any issues.
------------------------------------------------------------------
cover_server process is using io:format too much.
This code removes duplicate io:formats.
Example of a message we want to write only once:
Set a new group leader for cover_server
Tell Common Tests that it should not compile any more files
(they are compiled by rebar)
Common Tests does not try to use code autoloading feature
for loading suites.
can find it. Or we can load modules instead (what we do here).
Read test spec properties | TEST_HOSTS variable contains host names from hosts in big_tests / test.config .
DEV_NODES variable contains release names from profiles in rebar.config .
Valid TEST_HOSTS are , , mim3 , fed , reg .
Valid DEV_NODES are mim1 , , mim3 , fed1 , reg1 .
Example with two nodes :
-module(run_common_test).
-export([init/0, main/1, analyze/2]).
-define(CT_DIR, filename:join([".", "tests"])).
-define(CT_REPORT, filename:join([".", "ct_report"])).
-define(PRINT_ERRORS, false).
-define(PRINT_STATS, false).
-record(opts, {test,
spec,
cover,
preset = all,
hooks}).
{ opt_name , , fun value_sanitizer/1 } .
-spec value_sanitizer(string ( ) ) - > NewValue : : any ( ) .
opts() ->
[{test, #opts.test, fun quick_or_full/1},
{spec, #opts.spec, fun list_to_atom/1},
{cover, #opts.cover, fun bool_or_module_list/1},
{preset, #opts.preset, fun preset/1},
{hooks, #opts.hooks, fun module_list/1}].
Raw args are ' key = val ' atoms .
are { key : : atom ( ) , : : string ( ) } pairs .
main(RawArgs) ->
init(),
Args = [raw_to_arg(Raw) || Raw <- RawArgs],
Opts = apply_preset_enabled(args_to_opts(Args)),
try
CTRunDirsBeforeRun = ct_run_dirs(),
Results = run(Opts),
timer:sleep(50),
CTRunDirsAfterRun = ct_run_dirs(),
ExitStatusByGroups = exit_status_by_groups(CTRunDirsBeforeRun, CTRunDirsAfterRun, Results),
ExitStatusByTestCases = process_results(Results),
case ExitStatusByGroups of
undefined ->
io:format("Exiting by test cases summary: ~p~n", [ExitStatusByTestCases]),
init:stop(ExitStatusByTestCases);
_ when is_integer(ExitStatusByGroups) ->
io:format("Exiting by groups summary: ~p~n", [ExitStatusByGroups]),
init:stop(ExitStatusByGroups)
end
catch Type:Reason:StackTrace ->
io:format("TEST CRASHED~n Error type: ~p~n Reason: ~p~n Stacktrace:~n~p~n",
[Type, Reason, StackTrace]),
error_logger:error_msg("TEST CRASHED~n Error type: ~p~n Reason: ~p~n Stacktrace:~n~p~n",
[Type, Reason, StackTrace]),
timer:sleep(5000),
init:stop("run_common_test:main/1 crashed")
end.
init() ->
{ok, _} = application:ensure_all_started(jid).
run(#opts{test = quick, cover = Cover, spec = Spec}) ->
do_run_quick_test(tests_to_run(Spec), Cover);
run(#opts{test = full, spec = Spec, preset = Preset, cover = Cover, hooks = HookModules}) ->
run_test(tests_to_run(Spec) ++ ct_hooks(HookModules),
case Preset of
all -> all;
undefined -> all;
_ when is_list(Preset) -> Preset;
_ -> [Preset]
end, Cover).
apply_preset_enabled(#opts{} = Opts) ->
case os:getenv("PRESET_ENABLED") of
"false" ->
io:format("PRESET_ENABLED is set to false, enabling quick mode~n"),
Opts#opts{test = quick};
_ ->
Opts
end.
repo_dir() ->
case os:getenv("REPO_DIR") of
false ->
init:stop("Environment variable REPO_DIR is undefined");
Value ->
Value
end.
args_to_opts(Args) ->
{Args, Opts} = lists:foldl(fun set_opt/2, {Args, #opts{}}, opts()),
Opts.
raw_to_arg(RawArg) ->
ArgVal = atom_to_list(RawArg),
case string:tokens(ArgVal, "=") of
[Arg, Val] ->
{list_to_atom(Arg), Val};
[Arg] ->
{list_to_atom(Arg), ""}
end.
set_opt({Opt, Index, Sanitizer}, {Args, Opts}) ->
Value = Sanitizer(proplists:get_value(Opt, Args)),
{Args, setelement(Index, Opts, Value)}.
quick_or_full("quick") -> quick;
quick_or_full("full") -> full.
preset(undefined) -> undefined;
preset(PresetList) ->
[list_to_atom(Preset) || Preset <- string:tokens(PresetList, " ")].
read_file(ConfigFile) when is_list(ConfigFile) ->
{ok, CWD} = file:get_cwd(),
filename:join([CWD, ConfigFile]),
{ok, Props} = handle_file_error(ConfigFile, file:consult(ConfigFile)),
Props.
tests_to_run(TestSpec) ->
TestSpecFile = atom_to_list(TestSpec),
[
{spec, TestSpecFile}
] ++ ct_opts().
ct_hooks([]) ->
[];
ct_hooks(HookModules) ->
[{ct_hooks, HookModules}].
save_count(Test, Configs) ->
Repeat = case proplists:get_value(repeat, Test) of
undefined -> 1;
Other -> Other
end,
Times = case length(Configs) of
0 -> 1;
N -> N
end,
file:write_file("/tmp/ct_count", integer_to_list(Repeat*Times)).
run_test(Test, PresetsToRun, CoverOpts) ->
{ConfigFiles, Props} = get_ct_config(Test),
prepare_cover(Props, CoverOpts),
error_logger:info_msg("Presets to run ~p", [PresetsToRun]),
case get_presets(Props) of
{ok, Presets} ->
Presets1 = case PresetsToRun of
all ->
Presets;
_ ->
assert_all_presets_present(PresetsToRun, Presets),
error_logger:info_msg("Skip presets ~p",
[ preset_names(Presets) -- PresetsToRun ]),
lists:filter(fun({Preset,_}) ->
lists:member(Preset, PresetsToRun)
end, Presets)
end,
Length = length(Presets1),
Names = preset_names(Presets1),
error_logger:info_msg("Starting test of ~p configurations: ~n~p~n",
[Length, Names]),
Zip = lists:zip(lists:seq(1, Length), Presets1),
R = [ run_config_test(Props, Preset, Test, N, Length) || {N, Preset} <- Zip ],
save_count(Test, Presets1),
analyze_coverage(Props, CoverOpts),
R;
{error, not_found} ->
error_logger:info_msg("Presets were not found in the config files ~ts",
[ConfigFiles]),
R = do_run_quick_test(Test, CoverOpts),
analyze_coverage(Props, CoverOpts),
R
end.
get_presets(Props) ->
case proplists:lookup(presets, Props) of
{presets, Presets} ->
case proplists:lookup(toml, Presets) of
{toml, Preset} ->
{ok, Preset};
_ ->
{error, not_found}
end;
_ ->
{error, not_found}
end.
get_ct_config(Opts) ->
Spec = proplists:get_value(spec, Opts),
Props = read_file(Spec),
ConfigFiles = proplists:get_value(config, Props, ["test.config"]),
ConfigProps = merge_vars([read_file(File) || File <- lists:reverse(ConfigFiles)]),
{ConfigFiles, ConfigProps}.
preset_names(Presets) ->
[Preset||{Preset, _} <- Presets].
do_run_quick_test(Test, CoverOpts) ->
prepare_cover(Test, CoverOpts),
load_test_modules(Test),
Result = ct:run_test(Test),
case Result of
{error, Reason} ->
throw({ct_error, Reason});
{Ok, Failed, {UserSkipped, AutoSkipped}} ->
analyze_coverage(Test, CoverOpts),
save_count(Test, []),
[{ok, {Ok, Failed, UserSkipped, AutoSkipped}}]
end.
run_config_test(Props, {Name, Variables}, Test, N, Tests) ->
enable_preset(Props, Name, Variables, N, Tests),
load_test_modules(Test),
Result = ct:run_test([{label, Name} | Test]),
case Result of
{error, Reason} ->
throw({ct_error, Reason});
{Ok, Failed, {UserSkipped, AutoSkipped}} ->
{ok, {Ok, Failed, UserSkipped, AutoSkipped}}
end.
enable_preset(Props, Name, PresetVars, N, Tests) ->
Rs = [ maybe_enable_preset_on_node(host_node(H), PresetVars,
host_vars(H), host_name(H))
|| H <- get_hosts_to_enable_preset(Props) ],
[ok] = lists:usort(Rs),
error_logger:info_msg("Configuration ~p of ~p: ~p started.~n",
[N, Tests, Name]).
TEST_HOSTS="mim " ./tools / test.sh -p
maybe_enable_preset_on_node(Node, PresetVars, HostVars, HostName) ->
case is_test_host_enabled(HostName) of
true ->
enable_preset_on_node(Node, PresetVars, HostVars);
false ->
error_logger:info_msg("Skip enable_preset_on_node for node=~p host=~p",
[Node, HostName]),
ok
end.
Check , that node is listed in TEST_HOSTS list ( if is set ) .
is_test_host_enabled(HostName) ->
case os:getenv("TEST_HOSTS") of
false ->
EnvValue examples are " " or " mim2 "
BinHosts = binary:split(iolist_to_binary(EnvValue), <<" ">>, [global]),
lists:member(atom_to_binary(HostName, utf8), BinHosts)
end.
enable_preset_on_node(Node, PresetVars, HostVarsFilePrefix) ->
{ok, Cwd} = call(Node, file, get_cwd, []),
TemplatePath = filename:join([repo_dir(), "rel", "files", "mongooseim.toml"]),
NodeVarsPath = filename:join([repo_dir(), "rel", HostVarsFilePrefix ++ ".vars-toml.config"]),
{ok, Template} = handle_file_error(TemplatePath, file:read_file(TemplatePath)),
NodeVars = read_vars(NodeVarsPath),
TemplatedConfig = template_config(Template, NodeVars ++ PresetVars),
CfgPath = filename:join([Cwd, "etc", "mongooseim.toml"]),
ok = call(Node, file, write_file, [CfgPath, TemplatedConfig]),
call(Node, application, stop, [mongooseim]),
call(Node, application, start, [mongooseim]),
ok.
template_config(Template, RawVars) ->
MergedVars = ensure_binary_strings(maps:from_list(RawVars)),
Tmp = bbmustache:render(Template, MergedVars, [{key_type, atom}]),
bbmustache:render(Tmp, MergedVars, [{key_type, atom}]).
merge_vars([Vars1, Vars2|Rest]) ->
Vars = lists:foldl(fun ({Var, Val}, Acc) ->
lists:keystore(Var, 1, Acc, {Var, Val})
end, Vars1, Vars2),
merge_vars([Vars|Rest]);
merge_vars([Vars]) -> Vars.
read_vars(File) ->
{ok, Terms} = handle_file_error(File, file:consult(File)),
lists:flatmap(fun({Key, Val}) ->
[{Key, Val}];
(IncludedFile) when is_list(IncludedFile) ->
Path = filename:join(filename:dirname(File), IncludedFile),
read_vars(Path)
end, Terms).
ensure_binary_strings(Vars) ->
maps:map(fun(dbs, V) -> V;
(_K, V) when is_list(V) -> list_to_binary(V);
(_K, V) -> V
end, Vars).
call(Node, M, F, A) ->
case rpc:call(Node, M, F, A) of
{badrpc, Reason} ->
error_logger:error_msg("RPC call ~p:~p/~p to node ~p failed because ~p",
[M, F, length(A), Node, Reason]),
{badrpc, Reason};
Result ->
Result
end.
prepare_cover(Props, true) ->
io:format("Preparing cover~n"),
prepare(Props);
prepare_cover(_, _) ->
ok.
analyze_coverage(Props, true) ->
analyze(Props, true);
analyze_coverage(Props, ModuleList) when is_list(ModuleList) ->
analyze(Props, ModuleList);
analyze_coverage(_, _) ->
ok.
prepare(Props) ->
Nodes = get_mongoose_nodes(Props),
maybe_compile_cover(Nodes).
maybe_compile_cover([]) ->
io:format("cover: skip cover compilation~n", []),
ok;
maybe_compile_cover(Nodes) ->
io:format("cover: compiling modules for nodes ~p~n", [Nodes]),
import_code_paths(hd(Nodes)),
cover:start(Nodes),
Dir = call(hd(Nodes), code, lib_dir, [mongooseim, ebin]),
Time is in microseconds
{Time, Compiled} = timer:tc(fun() ->
Results = cover:compile_beam_directory(Dir),
Ok = [X || X = {ok, _} <- Results],
NotOk = Results -- Ok,
#{ok => length(Ok), failed => NotOk}
end),
github_actions_fold("cover compiled output", fun() ->
io:format("cover: compiled ~p~n", [Compiled])
end),
report_progress("~nCover compilation took ~ts~n", [microseconds_to_string(Time)]),
ok.
analyze(Props, CoverOpts) ->
io:format("Coverage analyzing~n"),
Nodes = get_mongoose_nodes(Props),
analyze(Props, CoverOpts, Nodes).
analyze(_Props, _CoverOpts, []) ->
ok;
analyze(_Props, CoverOpts, Nodes) ->
deduplicate_cover_server_console_prints(),
Files = filelib:wildcard(repo_dir() ++ "/_build/**/cover/*.coverdata"),
io:format("Files: ~p", [Files]),
report_time("Import cover data into run_common_test node", fun() ->
[cover:import(File) || File <- Files]
end),
report_time("Export merged cover data", fun() ->
cover:export("/tmp/mongoose_combined.coverdata")
end),
case os:getenv("GITHUB_RUN_ID") of
false ->
make_html(modules_to_analyze(CoverOpts));
_ ->
ok
end,
case os:getenv("KEEP_COVER_RUNNING") of
"1" ->
io:format("Skip stopping cover~n"),
ok;
_ ->
report_time("Stopping cover on MongooseIM nodes", fun() ->
cover:stop([node()|Nodes])
end)
end.
make_html(Modules) ->
{ok, Root} = file:get_cwd(),
SortScript = Root ++ "/priv/sorttable.js",
os:cmd("cp " ++ SortScript ++ " " ++ ?CT_REPORT),
FilePath = case file:read_file(?CT_REPORT++"/index.html") of
{ok, IndexFileData} ->
R = re:replace(IndexFileData, "<a href=\"all_runs.html\">ALL RUNS</a>", "& <a href=\"cover.html\" style=\"margin-right:5px\">COVER</a>"),
file:write_file(?CT_REPORT++"/index.html", R),
?CT_REPORT++"/cover.html";
_ -> skip
end,
CoverageDir = filename:dirname(FilePath)++"/coverage",
file:make_dir(CoverageDir),
{ok, File} = file:open(FilePath, [write]),
file:write(File, get_cover_header()),
Fun = fun(Module, {CAcc, NCAcc}) ->
FileName = lists:flatten(io_lib:format("~s.COVER.html",[Module])),
We assume that import_code_paths/1 was called earlier
case cover:analyse(Module, module) of
{ok, {Module, {C, NC}}} ->
file:write(File, row(atom_to_list(Module), C, NC, percent(C,NC),"coverage/"++FileName)),
FilePathC = filename:join([CoverageDir, FileName]),
catch cover:analyse_to_file(Module, FilePathC, [html]),
{CAcc + C, NCAcc + NC};
Reason ->
error_logger:error_msg("issue=cover_analyse_failed module=~p reason=~p",
[Module, Reason]),
{CAcc, NCAcc}
end
end,
{CSum, NCSum} = lists:foldl(Fun, {0, 0}, Modules),
file:write(File, row("Summary", CSum, NCSum, percent(CSum, NCSum), "#")),
file:close(File).
get_hosts_to_enable_preset(Props) ->
[Host || Host <- get_all_hosts(Props), should_enable_preset(host_cluster(Host))].
should_enable_preset(mim) -> true;
should_enable_preset(reg) -> true;
should_enable_preset(_) -> false.
get_all_hosts(Props) ->
{hosts, Hosts} = lists:keyfind(hosts, 1, Props),
Hosts.
get_mongoose_nodes(Props) ->
[ host_node(H) || H <- get_all_hosts(Props), is_test_host_enabled(host_name(H)) ].
percent(0, _) -> 0;
percent(C, NC) when C /= 0; NC /= 0 -> round(C / (NC+C) * 100);
percent(_, _) -> 100.
row(Row, C, NC, Percent, Path) ->
[
"<tr>",
"<td><a href='", Path, "'>", Row, "</a></td>",
"<td>", integer_to_list(Percent), "%</td>",
"<td>", integer_to_list(C), "</td>",
"<td>", integer_to_list(NC), "</td>",
"<td>", integer_to_list(C+NC), "</td>",
"</tr>\n"
].
get_cover_header() ->
"<html>\n<head></head>\n<body bgcolor=\"white\" text=\"black\" link=\"blue\" vlink=\"purple\" alink=\"red\">\n"
"<head><script src='sorttable.js'></script></head>"
"<h1>Coverage for application 'MongooseIM'</h1>\n"
"<table class='sortable' border=3 cellpadding=5>\n"
"<tr><th>Module</th><th>Covered (%)</th><th>Covered (Lines)</th><th>Not covered (Lines)</th><th>Total (Lines)</th></tr>".
bool_or_module_list("true") ->
true;
bool_or_module_list("false") ->
false;
bool_or_module_list(undefined) ->
false;
bool_or_module_list(ModuleList) when is_list(ModuleList) ->
module_list(ModuleList).
module_list(undefined) ->
[];
module_list(ModuleList) ->
[ list_to_atom(L) || L <- string:tokens(ModuleList, ", ") ].
modules_to_analyze(true) ->
lists:usort(cover:imported_modules() ++ cover:modules());
modules_to_analyze(ModuleList) when is_list(ModuleList) ->
ModuleList.
add({X1, X2, X3, X4},
{Y1, Y2, Y3, Y4}) ->
{X1 + Y1,
X2 + Y2,
X3 + Y3,
X4 + Y4}.
process_results(CTResults) ->
Ok = 0,
Failed = 0,
UserSkipped = 0,
AutoSkipped = 0,
Errors = [],
process_results(CTResults, {{Ok, Failed, UserSkipped, AutoSkipped}, Errors}).
process_results([], {StatsAcc, Errors}) ->
write_stats_into_vars_file(StatsAcc),
print_errors(Errors),
print_stats(StatsAcc),
exit_code(StatsAcc);
process_results([ {ok, RunStats} | T ], {StatsAcc, Errors}) ->
process_results(T, {add(RunStats, StatsAcc), Errors});
process_results([ Error | T ], {StatsAcc, Errors}) ->
process_results(T, {StatsAcc, [Error | Errors]}).
print_errors(Errors) ->
?PRINT_ERRORS andalso [ print(standard_error, "~p~n", [E]) || E <- Errors ].
print_stats(Stats) ->
?PRINT_STATS andalso do_print_stats(Stats).
do_print_stats({Ok, Failed, _UserSkipped, AutoSkipped}) when Ok == 0;
Failed > 0;
AutoSkipped > 0 ->
print(standard_error, "Tests:~n", []),
Ok == 0 andalso print(standard_error, " ok : ~b~n", [Ok]),
Failed > 0 andalso print(standard_error, " failed : ~b~n", [Failed]),
AutoSkipped > 0 andalso print(standard_error, " auto-skipped: ~b~n", [AutoSkipped]).
write_stats_into_vars_file(Stats) ->
file:write_file("/tmp/ct_stats_vars", [format_stats_as_vars(Stats)]).
format_stats_as_vars({Ok, Failed, UserSkipped, AutoSkipped}) ->
io_lib:format("CT_COUNTER_OK=~p~n"
"CT_COUNTER_FAILED=~p~n"
"CT_COUNTER_USER_SKIPPED=~p~n"
"CT_COUNTER_AUTO_SKIPPED=~p~n",
[Ok, Failed, UserSkipped, AutoSkipped]).
StatsAcc is similar ( Skipped are not a tuple ) to the success result from ct : run_test/1 :
{ Ok , Failed , UserSkipped , AutoSkipped }
exit_code({Ok, Failed, _UserSkipped, AutoSkipped})
when Ok == 0; Failed > 0; AutoSkipped > 0 ->
1;
exit_code({_, _, _, _}) ->
0.
print(Handle, Fmt, Args) ->
io:format(Handle, Fmt, Args).
host_cluster(Host) -> host_param(cluster, Host).
host_node(Host) -> host_param(node, Host).
host_vars(Host) -> host_param(vars, Host).
host_name({HostName,_}) -> HostName.
host_param(Name, {_, Params}) ->
{Name, Param} = lists:keyfind(Name, 1, Params),
Param.
report_time(Description, Fun) ->
report_progress("~nExecuting ~ts~n", [Description]),
Start = os:timestamp(),
try
Fun()
after
Microseconds = timer:now_diff(os:timestamp(), Start),
Time = microseconds_to_string(Microseconds),
report_progress("~ts took ~ts~n", [Description, Time])
end.
microseconds_to_string(Microseconds) ->
Milliseconds = Microseconds div 1000,
SecondsFloat = Milliseconds / 1000,
io_lib:format("~.3f seconds", [SecondsFloat]).
Writes onto GitHub actions console directly
report_progress(Format, Args) ->
Message = io_lib:format(Format, Args),
file:write_file("/tmp/progress", Message, [append]).
github_actions_fold(Description, Fun) ->
case os:getenv("GITHUB_RUN_ID") of
false ->
Fun();
_ ->
io:format("github_actions_fold:start:~ts~n", [Description]),
Result = Fun(),
io:format("github_actions_fold:end:~ts~n", [Description]),
Result
end.
import_code_paths(FromNode) when is_atom(FromNode) ->
Paths = call(FromNode, code, get_path, []),
code:add_paths(Paths).
handle_file_error(FileName, {error, Reason}) ->
error_logger:error_msg("issue=file_operation_error filename=~p reason=~p",
[FileName, Reason]),
{error, Reason};
handle_file_error(_FileName, Other) ->
Other.
" Analysis includes data from imported files " from cover.erl in Erlang / R19
deduplicate_cover_server_console_prints() ->
CoverPid = whereis(cover_server),
dedup_proxy_group_leader:start_proxy_group_leader_for(CoverPid).
ct_run_dirs() ->
filelib:wildcard("ct_report/ct_run*").
exit_status_by_groups(CTRunDirsBeforeRun, CTRunDirsAfterRun, Results) ->
NewCTRunDirs = CTRunDirsAfterRun -- CTRunDirsBeforeRun,
case NewCTRunDirs of
[] ->
io:format("WARNING: ct_run directory has not been created~nResults ~p~n", [Results]),
undefined;
[_] ->
anaylyze_groups_runs(hd(NewCTRunDirs))
end.
anaylyze_groups_runs(LatestCTRun) ->
case file:consult(LatestCTRun ++ "/all_groups.summary") of
{ok, Terms} ->
proplists:get_value(total_failed, Terms, undefined);
{error, Error} ->
error_logger:error_msg("Error reading all_groups.summary: ~p~n", [Error]),
undefined
end.
ct_opts() ->
case os:getenv("SKIP_AUTO_COMPILE") of
"true" ->
[{auto_compile, false}];
_ ->
[]
end.
So , if we want to use , we need to put tests , where Common tests
load_test_modules(Opts) ->
Spec = proplists:get_value(spec, Opts),
Props = read_file(Spec),
Modules = lists:usort(test_modules(Props)),
[try_load_module(M) || M <- Modules].
test_modules([H|T]) when is_tuple(H) ->
test_modules_list(tuple_to_list(H)) ++ test_modules(T);
test_modules([_|T]) ->
test_modules(T);
test_modules([]) ->
[].
test_modules_list([suites, _, Suite|_]) ->
[Suite];
test_modules_list([groups, _, Suite|_]) ->
[Suite];
test_modules_list([cases, _, Suite|_]) ->
[Suite];
test_modules_list(_) ->
[].
try_load_module(Module) ->
case code:is_loaded(Module) of
true -> already_loaded;
_ -> code:load_file(Module)
end.
assert_all_presets_present(PresetsToCheck, PresetConfs) ->
lists:foreach(fun(Preset) ->
assert_preset_present(Preset, PresetConfs)
end, PresetsToCheck).
assert_preset_present(small_tests, _PresetConfs) ->
ok;
assert_preset_present(Preset, PresetConfs) ->
case lists:keymember(Preset, 1, PresetConfs) of
true -> ok;
false ->
error_logger:error_msg("Preset not found ~p~n", [Preset]),
error({preset_not_found, Preset})
end.
|
c64fdf37cbe5b5dd351d52848bca0b2c4a86309cf3124b9ef3061fa85ce8d3a6 | mbenke/zpf2013 | PlusMinusParsec.hs | import Text.ParserCombinators.Parsec
import Data.Char(digitToInt)
import Criterion.Main
gen 0 = "1"
gen n = ('1':'+':'1':'-':gen (n-1))
pNum :: Parser Int
pNum = fmap digitToInt digit
pExp = pNum `chainl1` addop
addop = do{ char '+'; return (+) }
<|> do{ char '-'; return (-) }
test n = parse pExp "gen" (gen n)
main = defaultMain
bench " gen 1e4 " $ whnf test 10000
,
, -} bench "gen 1e5" $ whnf test 100000
] | null | https://raw.githubusercontent.com/mbenke/zpf2013/85f32747e17f07a74e1c3cb064b1d6acaca3f2f0/Code/Parse1/PlusMinusParsec.hs | haskell | import Text.ParserCombinators.Parsec
import Data.Char(digitToInt)
import Criterion.Main
gen 0 = "1"
gen n = ('1':'+':'1':'-':gen (n-1))
pNum :: Parser Int
pNum = fmap digitToInt digit
pExp = pNum `chainl1` addop
addop = do{ char '+'; return (+) }
<|> do{ char '-'; return (-) }
test n = parse pExp "gen" (gen n)
main = defaultMain
bench " gen 1e4 " $ whnf test 10000
,
, -} bench "gen 1e5" $ whnf test 100000
] |
|
a9f27ef1bb6b8d3ac7b3db89dc52674febf9264bed04d5c3543ba8e1bcebba7d | OCamlPro/freeton_wallet | commandAccountState.ml | (**************************************************************************)
(* *)
(* Copyright (c) 2021 OCamlPro SAS *)
(* *)
(* All rights reserved. *)
(* This file is distributed under the terms of the GNU Lesser General *)
(* Public License version 2.1, with the special exception on linking *)
(* described in the LICENSE.md file in the root directory. *)
(* *)
(* *)
(**************************************************************************)
open Ezcmd.V2
open EZCMD.TYPES
open Types
let get_address_info config address =
let addr = Misc.raw_address address in
let open Ton_sdk in
let level = if !Globals.verbosity > 1 then 3 else 1 in
match
Utils.post config
( REQUEST.account ~level ( ADDRESS.to_string addr ))
with
| [] -> None
| [ account ] ->
if !Globals.verbosity > 1 then
Format.printf "%s@."
(EzEncoding.construct ~compact:false ENCODING.accounts_enc [account]
);
begin
match account.acc_code_hash with
| None -> ()
| Some code_hash ->
match address with
| RawAddress _ -> ()
| Account acc ->
match acc.acc_contract with
| Some _ -> ()
| None ->
match Misc.contract_of_code_hash ~code_hash with
| None -> ()
| Some contract ->
Printf.eprintf "Setting contract %S for %s\n%!"
contract ( ADDRESS.to_string acc.acc_address );
acc.acc_contract <- Some contract;
config.modified <- true
end;
Some account
| _ -> assert false
let get_account_info config ~name ~address =
match get_address_info config address with
| None ->
Printf.printf "Account %S: not yet created (empty balance)\n%!" name
| Some account ->
Printf.printf "Account %S: %s\n%!" name
(match account.acc_balance with
| None -> "no balance"
| Some n ->
Printf.sprintf "%s TONs (%s)"
(Misc.string_of_nanoton (Z.to_int64 n))
(match account.acc_type_name with
| None -> "Non Exists"
| Some s ->
match address with
| Account { acc_contract = Some contract; _ } ->
Printf.sprintf "%s: %s" contract s
| _ -> s
))
let get_account_info accounts =
let config = Config.config () in
let net = Config.current_network config in
match accounts with
| [] ->
List.iter (fun key ->
match key.key_account with
| None -> ()
| Some account ->
get_account_info config
~address:( Account account ) ~name:key.key_name
) net.net_keys
| _ ->
List.iter (fun account ->
let address = Utils.address_of_account net account in
get_account_info config ~address ~name:account
) accounts
let get_live accounts =
let config = Config.config () in
let net = Config.current_network config in
let host = match net.net_name with
| "mainnet" -> "ton.live"
| "testnet" -> "net.ton.live"
| _ -> assert false
in
List.iter (fun account ->
let address = Utils.address_of_account net account in
let addr = Misc.raw_address address in
let url = Printf.sprintf
"" host
( ADDRESS.to_string addr ) in
Misc.call [ "xdg-open" ; url ]
) accounts
let action accounts ~live =
if live then
get_live accounts
else
get_account_info accounts
let cmd =
let accounts = ref [] in
let live = ref false in
Misc.cmd
["account" ; "state"]
(fun () -> action !accounts ~live:!live
)
~args:
[ [],
Arg.Anons (fun args -> accounts := args),
EZCMD.info "Name of account" ;
[ "live" ],
Arg.Set live,
EZCMD.info "Open block explorer on address";
]
~man:[
`S "DESCRIPTION";
`Blocks [
`P "This command displays the current states of the given accounts from the blockchain";
`P "Examples:";
`Pre {|ft account state MY-ACCOUNT|};
`Pre {|ft account state MY-ACCOUNT -v|}
];
]
~doc:
"Get account info (local or from blockchain)."
| null | https://raw.githubusercontent.com/OCamlPro/freeton_wallet/b97877379e51d96cb3544141d386d502348cfca9/src/freeton_wallet_lib/commandAccountState.ml | ocaml | ************************************************************************
All rights reserved.
This file is distributed under the terms of the GNU Lesser General
described in the LICENSE.md file in the root directory.
************************************************************************ | Copyright ( c ) 2021 OCamlPro SAS
Public License version 2.1 , with the special exception on linking
open Ezcmd.V2
open EZCMD.TYPES
open Types
let get_address_info config address =
let addr = Misc.raw_address address in
let open Ton_sdk in
let level = if !Globals.verbosity > 1 then 3 else 1 in
match
Utils.post config
( REQUEST.account ~level ( ADDRESS.to_string addr ))
with
| [] -> None
| [ account ] ->
if !Globals.verbosity > 1 then
Format.printf "%s@."
(EzEncoding.construct ~compact:false ENCODING.accounts_enc [account]
);
begin
match account.acc_code_hash with
| None -> ()
| Some code_hash ->
match address with
| RawAddress _ -> ()
| Account acc ->
match acc.acc_contract with
| Some _ -> ()
| None ->
match Misc.contract_of_code_hash ~code_hash with
| None -> ()
| Some contract ->
Printf.eprintf "Setting contract %S for %s\n%!"
contract ( ADDRESS.to_string acc.acc_address );
acc.acc_contract <- Some contract;
config.modified <- true
end;
Some account
| _ -> assert false
let get_account_info config ~name ~address =
match get_address_info config address with
| None ->
Printf.printf "Account %S: not yet created (empty balance)\n%!" name
| Some account ->
Printf.printf "Account %S: %s\n%!" name
(match account.acc_balance with
| None -> "no balance"
| Some n ->
Printf.sprintf "%s TONs (%s)"
(Misc.string_of_nanoton (Z.to_int64 n))
(match account.acc_type_name with
| None -> "Non Exists"
| Some s ->
match address with
| Account { acc_contract = Some contract; _ } ->
Printf.sprintf "%s: %s" contract s
| _ -> s
))
let get_account_info accounts =
let config = Config.config () in
let net = Config.current_network config in
match accounts with
| [] ->
List.iter (fun key ->
match key.key_account with
| None -> ()
| Some account ->
get_account_info config
~address:( Account account ) ~name:key.key_name
) net.net_keys
| _ ->
List.iter (fun account ->
let address = Utils.address_of_account net account in
get_account_info config ~address ~name:account
) accounts
let get_live accounts =
let config = Config.config () in
let net = Config.current_network config in
let host = match net.net_name with
| "mainnet" -> "ton.live"
| "testnet" -> "net.ton.live"
| _ -> assert false
in
List.iter (fun account ->
let address = Utils.address_of_account net account in
let addr = Misc.raw_address address in
let url = Printf.sprintf
"" host
( ADDRESS.to_string addr ) in
Misc.call [ "xdg-open" ; url ]
) accounts
let action accounts ~live =
if live then
get_live accounts
else
get_account_info accounts
let cmd =
let accounts = ref [] in
let live = ref false in
Misc.cmd
["account" ; "state"]
(fun () -> action !accounts ~live:!live
)
~args:
[ [],
Arg.Anons (fun args -> accounts := args),
EZCMD.info "Name of account" ;
[ "live" ],
Arg.Set live,
EZCMD.info "Open block explorer on address";
]
~man:[
`S "DESCRIPTION";
`Blocks [
`P "This command displays the current states of the given accounts from the blockchain";
`P "Examples:";
`Pre {|ft account state MY-ACCOUNT|};
`Pre {|ft account state MY-ACCOUNT -v|}
];
]
~doc:
"Get account info (local or from blockchain)."
|
c276c58e86abba94fa2ccc04365fd0777385b163fea49b0cce1bf36a07ac3604 | jumarko/clojure-experiments | tracer.clj | # 👩 🏻 💻 Show the code
^{:nextjournal.clerk/visibility #{:hide-ns}}
(ns tracer
(:require [nextjournal.clerk :as clerk]
[clojure.walk :as walk]))
# # Tracer
;; This is a minimal implementation of the code to trace a quoted form
;; and record the result from evaluating each sub-expresison. It
;; currently returns the data in a somewhat awkward shape. How would
;; you improve the API?
(def ^:dynamic *trace-accumulator* nil)
(defn add-trace [id form result]
(swap! *trace-accumulator* conj [id [form result]])
result)
(defn debug-expression [quoted-expr]
(let [trace (atom [])]
(binding [*trace-accumulator* trace]
(eval
(walk/postwalk
(fn [form]
(if (list? form)
`(add-trace '~(gensym) '~form (try ~form (catch Exception ~(symbol "e") {:exception (.getMessage ~(symbol "e"))})))
form))
quoted-expr)))
@trace))
(defn icon [kind]
[:div.bg-slate-500.rounded-full.text-white.text-xs.w-5.h-5.flex.items-center.justify-center.flex-shrink-0
{:style {:font-size 10}}
kind])
;; ## Display
;; This section provides a recursive function to show the nested the
;; code with results (currently in a visually unappealing way, but it
;; should be enough to get you started. 😊)
(declare show-element)
(defn show-let [lookup depth result-id elem]
[:div.rounded-md.p-2.pt-0
{:class (if (even? depth) "bg-slate-200 " "bg-slate-300 ")}
[:span.font-bold.inline-block.mt-2 "let"]
(let [depth (inc depth)]
[:div.rounded-md.p-2.pt-0.mt-2
{:class (if (even? depth) "bg-slate-200 " "bg-slate-300 ")}
[:div.flex
[:div.mt-2 (icon "[]")]
(into [:div.ml-2 {:class "-mt-2"}]
(map
(fn [[k v]]
[:div.flex.mt-2
[:div.mr-2.font-bold.flex-shrink-0
(show-element lookup (inc depth) nil k)]
[:div {:class (when (coll? v) "-mt-2")}
(show-element lookup (inc depth) nil v)]])
(->> elem second (partition 2))))]])
(into [:div]
(map
(fn [el]
(show-element lookup (inc depth) nil el))
(drop 2 elem)))])
(defn show-coll [lookup depth result-id elem]
[:div.rounded-md.p-2.pt-0.flex.mt-2
{:class (str (if (even? depth) "bg-slate-200 " "bg-slate-300 "))}
[:div.mt-2 (icon (cond (set? elem) "#{}"
(map? elem) "{}"
(vector? elem) "[]"))]
[:div.flex-auto.ml-2
(if (map? elem)
(into [:div]
(map
(fn [[k v]]
[:div.flex
[:div.mr-2.font-bold.flex-shrink-0
(show-element lookup (inc depth) nil k)]
[:div {:class (when (coll? v) "-mt-2 -mb-2")}
(show-element lookup (inc depth) nil v)]])
elem))
(into [:div] (mapv (partial show-element lookup (inc depth) nil) elem)))]])
(defn show-seq [lookup depth result-id elem]
[:div.rounded-md.p-2.pt-0.flex.mt-2
{:class (str (if (even? depth) "bg-slate-200 " "bg-slate-300 "))}
[:div
[:span.font-bold (show-element lookup (inc depth) nil (first elem))]
(into [:<>] (mapv (partial show-element lookup (inc depth) nil) (rest elem)))
(when result-id
(let [result (get lookup result-id)]
(if-let [e (:exception result)]
[:div.rounded.border-2.border-red-500.bg-red-100.text-red-500.p-2.font-bold.text-xs.mt-2 e]
[:span.text-slate-500.float-right.ml-2.mt-2
(str "→ " (pr-str result))])))]])
(defn show-element [lookup depth result-id elem]
(println elem)
(cond (and (list? elem) (= (first elem) 'add-trace)) (show-element lookup depth (second (second elem)) (second (nth elem 2)))
(and (list? elem) (= (first elem) 'let)) (show-let lookup depth result-id elem)
(and (not (list? elem)) (coll? elem)) (show-coll lookup depth result-id elem)
(sequential? elem) (show-seq lookup depth result-id elem)
:else
[:span.inline-block.mt-2.mr-2
(str " "
(if (string? elem) "\"")
elem
(if (string? elem) "\""))]))
^::clerk/no-cache
(clerk/html
[:div.text-sm {:class "font-mono"}
;; boring arithmetic example form
(let [t (debug-expression '(let [x 10
y (/ 20 0)
vs [3 1 4 1 5]
tab {:a 4
:b (+ 3 3)
:c 8}
ordered #{5 1 2 :eight}
a-fn (fn [] (println "Ohai 👋"))
another-fn #(+ %1 %2)]
(+ (* x 5)
(apply + vs)
(:a tab)
(- (/ y 2)
(:b tab)
3))))]
(show-element
;; result id -> value lookup table
(reduce (fn [m [k [_ v]]] (assoc m k v)) {} t)
;; initial display depth
0
;; id of top level form's result
(first (last t))
;; the top level form itself
(first (second (last t)))))])
| null | https://raw.githubusercontent.com/jumarko/clojure-experiments/a87098fe69044ad65813a68cb870d824c2c2d18f/src/clojure_experiments/visualizations/clerk/notebooks/tracer.clj | clojure | This is a minimal implementation of the code to trace a quoted form
and record the result from evaluating each sub-expression. It
currently returns the data in a somewhat awkward shape. How would
you improve the API?
This section provides a recursive function to show the nested
code with results (currently in a visually unappealing way, but it
should be enough to get you started. 😊)
boring arithmetic example form
result id -> value lookup table
initial display depth
id of top level form's result
the top level form itself | # 👩 🏻 💻 Show the code
^{:nextjournal.clerk/visibility #{:hide-ns}}
(ns tracer
(:require [nextjournal.clerk :as clerk]
[clojure.walk :as walk]))
;; ## Tracer
(def ^:dynamic *trace-accumulator* nil)
(defn add-trace [id form result]
(swap! *trace-accumulator* conj [id [form result]])
result)
(defn debug-expression [quoted-expr]
(let [trace (atom [])]
(binding [*trace-accumulator* trace]
(eval
(walk/postwalk
(fn [form]
(if (list? form)
`(add-trace '~(gensym) '~form (try ~form (catch Exception ~(symbol "e") {:exception (.getMessage ~(symbol "e"))})))
form))
quoted-expr)))
@trace))
(defn icon [kind]
[:div.bg-slate-500.rounded-full.text-white.text-xs.w-5.h-5.flex.items-center.justify-center.flex-shrink-0
{:style {:font-size 10}}
kind])
;; ## Display
(declare show-element)
(defn show-let [lookup depth result-id elem]
[:div.rounded-md.p-2.pt-0
{:class (if (even? depth) "bg-slate-200 " "bg-slate-300 ")}
[:span.font-bold.inline-block.mt-2 "let"]
(let [depth (inc depth)]
[:div.rounded-md.p-2.pt-0.mt-2
{:class (if (even? depth) "bg-slate-200 " "bg-slate-300 ")}
[:div.flex
[:div.mt-2 (icon "[]")]
(into [:div.ml-2 {:class "-mt-2"}]
(map
(fn [[k v]]
[:div.flex.mt-2
[:div.mr-2.font-bold.flex-shrink-0
(show-element lookup (inc depth) nil k)]
[:div {:class (when (coll? v) "-mt-2")}
(show-element lookup (inc depth) nil v)]])
(->> elem second (partition 2))))]])
(into [:div]
(map
(fn [el]
(show-element lookup (inc depth) nil el))
(drop 2 elem)))])
(defn show-coll [lookup depth result-id elem]
[:div.rounded-md.p-2.pt-0.flex.mt-2
{:class (str (if (even? depth) "bg-slate-200 " "bg-slate-300 "))}
[:div.mt-2 (icon (cond (set? elem) "#{}"
(map? elem) "{}"
(vector? elem) "[]"))]
[:div.flex-auto.ml-2
(if (map? elem)
(into [:div]
(map
(fn [[k v]]
[:div.flex
[:div.mr-2.font-bold.flex-shrink-0
(show-element lookup (inc depth) nil k)]
[:div {:class (when (coll? v) "-mt-2 -mb-2")}
(show-element lookup (inc depth) nil v)]])
elem))
(into [:div] (mapv (partial show-element lookup (inc depth) nil) elem)))]])
(defn show-seq [lookup depth result-id elem]
[:div.rounded-md.p-2.pt-0.flex.mt-2
{:class (str (if (even? depth) "bg-slate-200 " "bg-slate-300 "))}
[:div
[:span.font-bold (show-element lookup (inc depth) nil (first elem))]
(into [:<>] (mapv (partial show-element lookup (inc depth) nil) (rest elem)))
(when result-id
(let [result (get lookup result-id)]
(if-let [e (:exception result)]
[:div.rounded.border-2.border-red-500.bg-red-100.text-red-500.p-2.font-bold.text-xs.mt-2 e]
[:span.text-slate-500.float-right.ml-2.mt-2
(str "→ " (pr-str result))])))]])
(defn show-element [lookup depth result-id elem]
(println elem)
(cond (and (list? elem) (= (first elem) 'add-trace)) (show-element lookup depth (second (second elem)) (second (nth elem 2)))
(and (list? elem) (= (first elem) 'let)) (show-let lookup depth result-id elem)
(and (not (list? elem)) (coll? elem)) (show-coll lookup depth result-id elem)
(sequential? elem) (show-seq lookup depth result-id elem)
:else
[:span.inline-block.mt-2.mr-2
(str " "
(if (string? elem) "\"")
elem
(if (string? elem) "\""))]))
^::clerk/no-cache
(clerk/html
[:div.text-sm {:class "font-mono"}
(let [t (debug-expression '(let [x 10
y (/ 20 0)
vs [3 1 4 1 5]
tab {:a 4
:b (+ 3 3)
:c 8}
ordered #{5 1 2 :eight}
a-fn (fn [] (println "Ohai 👋"))
another-fn #(+ %1 %2)]
(+ (* x 5)
(apply + vs)
(:a tab)
(- (/ y 2)
(:b tab)
3))))]
(show-element
(reduce (fn [m [k [_ v]]] (assoc m k v)) {} t)
0
(first (last t))
(first (second (last t)))))])
|
21413ef0e33c23985e81dc90580d743537e7b1f6ad094f7dcd402212853ef59c | softwarelanguageslab/maf | R5RS_scp1_organigram-2.scm | ; Changes:
; * removed: 0
; * added: 3
; * swaps: 0
; * negated predicates: 0
; * swapped branches: 1
; * calls to id fun: 1
(letrec ((organigram (__toplevel_cons
'directeur
(__toplevel_cons
(__toplevel_cons
'hoofd-verkoop
(__toplevel_cons
(__toplevel_cons 'verkoopsleider-vlaanderen ())
(__toplevel_cons (__toplevel_cons 'verkoopsleider-brussel ()) ())))
(__toplevel_cons
(__toplevel_cons
'hoofd-productie
(__toplevel_cons
(__toplevel_cons
'hoofd-inkoop
(__toplevel_cons
(__toplevel_cons 'bediende1 ())
(__toplevel_cons
(__toplevel_cons 'bediende2 ())
(__toplevel_cons (__toplevel_cons 'bediende3 ()) ()))))
(__toplevel_cons (__toplevel_cons 'hoofd-fakturen ()) ())))
(__toplevel_cons
(__toplevel_cons
'hoofd-administratie
(__toplevel_cons
(__toplevel_cons 'hoofd-personeel ())
(__toplevel_cons (__toplevel_cons 'hoofd-boekhouding ()) ())))
())))))
(baas (lambda (organigram)
(car organigram)))
(sub-organigrammen (lambda (organigram)
(<change>
(cdr organigram)
((lambda (x) x) (cdr organigram)))))
(hierarchisch? (lambda (p1 p2 organigram)
(letrec ((hierarchisch?-in (lambda (path organigrammen)
(if (null? organigrammen)
#f
(let ((__or_res (hierarchisch? path (car organigrammen))))
(if __or_res
__or_res
(hierarchisch?-in path (cdr organigrammen)))))))
(hierarchisch? (lambda (path organigram)
(<change>
()
(display sub-organigrammen))
(<change>
()
(display p1))
(if (if (eq? p1 (baas organigram)) (member p2 path) #f)
#t
(if (if (eq? p2 (baas organigram)) (member p1 path) #f)
(<change>
#t
(hierarchisch?-in (cons (baas organigram) path) (sub-organigrammen organigram)))
(<change>
(hierarchisch?-in (cons (baas organigram) path) (sub-organigrammen organigram))
#t))))))
(hierarchisch? () organigram))))
(collegas (lambda (p organigram)
(letrec ((collegas-in (lambda (oversten organigrammen)
(if (null? organigrammen)
#f
(let ((__or_res (collegas oversten (car organigrammen))))
(if __or_res
__or_res
(collegas-in oversten (cdr organigrammen)))))))
(werknemers-in (lambda (organigrammen)
(if (null? organigrammen)
()
(append (werknemers (car organigrammen)) (werknemers-in (cdr organigrammen))))))
(werknemers (lambda (organigram)
(cons (baas organigram) (werknemers-in (sub-organigrammen organigram)))))
(collegas (lambda (oversten organigram)
(<change>
()
organigram)
(if (eq? p (baas organigram))
(append oversten (werknemers-in (sub-organigrammen organigram)))
(collegas-in (cons (baas organigram) oversten) (sub-organigrammen organigram))))))
(collegas () organigram)))))
(if (hierarchisch? 'directeur 'verkoopsleider-brussel organigram)
(if (hierarchisch? 'bediende1 'hoofd-productie organigram)
(if (not (hierarchisch? 'hoofd-personeel 'bediende3 organigram))
(equal?
(collegas 'hoofd-inkoop organigram)
(__toplevel_cons
'hoofd-productie
(__toplevel_cons
'directeur
(__toplevel_cons 'bediende1 (__toplevel_cons 'bediende2 (__toplevel_cons 'bediende3 ()))))))
#f)
#f)
#f)) | null | https://raw.githubusercontent.com/softwarelanguageslab/maf/11acedf56b9bf0c8e55ddb6aea754b6766d8bb40/test/changes/scheme/generated/R5RS_scp1_organigram-2.scm | scheme | Changes:
* negated predicates: 0 | * removed : 0
; * added: 3
; * swaps: 0
; * swapped branches: 1
; * calls to id fun: 1
(letrec ((organigram (__toplevel_cons
'directeur
(__toplevel_cons
(__toplevel_cons
'hoofd-verkoop
(__toplevel_cons
(__toplevel_cons 'verkoopsleider-vlaanderen ())
(__toplevel_cons (__toplevel_cons 'verkoopsleider-brussel ()) ())))
(__toplevel_cons
(__toplevel_cons
'hoofd-productie
(__toplevel_cons
(__toplevel_cons
'hoofd-inkoop
(__toplevel_cons
(__toplevel_cons 'bediende1 ())
(__toplevel_cons
(__toplevel_cons 'bediende2 ())
(__toplevel_cons (__toplevel_cons 'bediende3 ()) ()))))
(__toplevel_cons (__toplevel_cons 'hoofd-fakturen ()) ())))
(__toplevel_cons
(__toplevel_cons
'hoofd-administratie
(__toplevel_cons
(__toplevel_cons 'hoofd-personeel ())
(__toplevel_cons (__toplevel_cons 'hoofd-boekhouding ()) ())))
())))))
(baas (lambda (organigram)
(car organigram)))
(sub-organigrammen (lambda (organigram)
(<change>
(cdr organigram)
((lambda (x) x) (cdr organigram)))))
(hierarchisch? (lambda (p1 p2 organigram)
(letrec ((hierarchisch?-in (lambda (path organigrammen)
(if (null? organigrammen)
#f
(let ((__or_res (hierarchisch? path (car organigrammen))))
(if __or_res
__or_res
(hierarchisch?-in path (cdr organigrammen)))))))
(hierarchisch? (lambda (path organigram)
(<change>
()
(display sub-organigrammen))
(<change>
()
(display p1))
(if (if (eq? p1 (baas organigram)) (member p2 path) #f)
#t
(if (if (eq? p2 (baas organigram)) (member p1 path) #f)
(<change>
#t
(hierarchisch?-in (cons (baas organigram) path) (sub-organigrammen organigram)))
(<change>
(hierarchisch?-in (cons (baas organigram) path) (sub-organigrammen organigram))
#t))))))
(hierarchisch? () organigram))))
(collegas (lambda (p organigram)
(letrec ((collegas-in (lambda (oversten organigrammen)
(if (null? organigrammen)
#f
(let ((__or_res (collegas oversten (car organigrammen))))
(if __or_res
__or_res
(collegas-in oversten (cdr organigrammen)))))))
(werknemers-in (lambda (organigrammen)
(if (null? organigrammen)
()
(append (werknemers (car organigrammen)) (werknemers-in (cdr organigrammen))))))
(werknemers (lambda (organigram)
(cons (baas organigram) (werknemers-in (sub-organigrammen organigram)))))
(collegas (lambda (oversten organigram)
(<change>
()
organigram)
(if (eq? p (baas organigram))
(append oversten (werknemers-in (sub-organigrammen organigram)))
(collegas-in (cons (baas organigram) oversten) (sub-organigrammen organigram))))))
(collegas () organigram)))))
(if (hierarchisch? 'directeur 'verkoopsleider-brussel organigram)
(if (hierarchisch? 'bediende1 'hoofd-productie organigram)
(if (not (hierarchisch? 'hoofd-personeel 'bediende3 organigram))
(equal?
(collegas 'hoofd-inkoop organigram)
(__toplevel_cons
'hoofd-productie
(__toplevel_cons
'directeur
(__toplevel_cons 'bediende1 (__toplevel_cons 'bediende2 (__toplevel_cons 'bediende3 ()))))))
#f)
#f)
#f)) |
d1c7ef4d8025e75815d396c5fbb020b1ce127162bcb5fea3a8f9159f3809d06b | haskell/cabal | LibV09.hs | {-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE RankNTypes #-}
module Distribution.Simple.Test.LibV09
( runTest
-- Test stub
, simpleTestStub
, stubFilePath, stubMain, stubName, stubWriteLog
, writeSimpleTestStub
) where
import Prelude ()
import Distribution.Compat.Prelude
import Distribution.Types.UnqualComponentName
import Distribution.Compat.Environment
import Distribution.Compat.Internal.TempFile
import Distribution.ModuleName
import qualified Distribution.PackageDescription as PD
import Distribution.Simple.Build.PathsModule
import Distribution.Simple.BuildPaths
import Distribution.Simple.Compiler
import Distribution.Simple.Hpc
import Distribution.Simple.InstallDirs
import qualified Distribution.Simple.LocalBuildInfo as LBI
import qualified Distribution.Types.LocalBuildInfo as LBI
import Distribution.Simple.Flag ( Flag(NoFlag, Flag), fromFlag )
import Distribution.Simple.Setup.Test
import Distribution.Simple.Test.Log
import Distribution.Simple.Utils
import Distribution.System
import Distribution.TestSuite
import Distribution.Pretty
import Distribution.Verbosity
import qualified Control.Exception as CE
import qualified Data.ByteString.Lazy as LBS
import System.Directory
( createDirectoryIfMissing, canonicalizePath
, doesDirectoryExist, doesFileExist
, getCurrentDirectory, removeDirectoryRecursive, removeFile
, setCurrentDirectory )
import System.FilePath ( (</>), (<.>) )
import System.IO ( hClose, hPutStr )
import Distribution.Compat.Process (proc)
import qualified System.Process as Process
runTest :: PD.PackageDescription
-> LBI.LocalBuildInfo
-> LBI.ComponentLocalBuildInfo
-> TestFlags
-> PD.TestSuite
-> IO TestSuiteLog
runTest pkg_descr lbi clbi flags suite = do
let isCoverageEnabled = LBI.testCoverage lbi
way = guessWay lbi
pwd <- getCurrentDirectory
existingEnv <- getEnvironment
let cmd = LBI.buildDir lbi </> stubName suite
</> stubName suite <.> exeExtension (LBI.hostPlatform lbi)
-- Check that the test executable exists.
exists <- doesFileExist cmd
unless exists $
die' verbosity $ "Could not find test program \"" ++ cmd
++ "\". Did you build the package first?"
-- Remove old .tix files if appropriate.
unless (fromFlag $ testKeepTix flags) $ do
let tDir = tixDir distPref way testName'
exists' <- doesDirectoryExist tDir
when exists' $ removeDirectoryRecursive tDir
-- Create directory for HPC files.
createDirectoryIfMissing True $ tixDir distPref way testName'
-- Write summary notices indicating start of test suite
notice verbosity $ summarizeSuiteStart testName'
suiteLog <- CE.bracket openCabalTemp deleteIfExists $ \tempLog -> do
-- Run test executable
let opts = map (testOption pkg_descr lbi suite) $ testOptions flags
dataDirPath = pwd </> PD.dataDir pkg_descr
tixFile = pwd </> tixFilePath distPref way testName'
pkgPathEnv = (pkgPathEnvVar pkg_descr "datadir", dataDirPath)
: existingEnv
shellEnv = [("HPCTIXFILE", tixFile) | isCoverageEnabled]
++ pkgPathEnv
-- Add (DY)LD_LIBRARY_PATH if needed
shellEnv' <-
if LBI.withDynExe lbi
then do
let (Platform _ os) = LBI.hostPlatform lbi
paths <- LBI.depLibraryPaths True False lbi clbi
cpath <- canonicalizePath $ LBI.componentBuildDir lbi clbi
return (addLibraryPath os (cpath : paths) shellEnv)
else return shellEnv
let (cmd', opts') = case testWrapper flags of
Flag path -> (path, cmd:opts)
NoFlag -> (cmd, opts)
-- TODO: this setup is broken,
-- if the test output is too big, we will deadlock.
(rOut, wOut) <- Process.createPipe
(exitcode, logText) <- rawSystemProcAction verbosity
(proc cmd' opts') { Process.env = Just shellEnv'
, Process.std_in = Process.CreatePipe
, Process.std_out = Process.UseHandle wOut
, Process.std_err = Process.UseHandle wOut
} $ \mIn _ _ -> do
let wIn = fromCreatePipe mIn
hPutStr wIn $ show (tempLog, PD.testName suite)
hClose wIn
-- Append contents of temporary log file to the final human-
-- readable log file
logText <- LBS.hGetContents rOut
-- Force the IO manager to drain the test output pipe
_ <- evaluate (force logText)
return logText
unless (exitcode == ExitSuccess) $
debug verbosity $ cmd ++ " returned " ++ show exitcode
-- Generate final log file name
let finalLogName l = testLogDir
</> testSuiteLogPath
(fromFlag $ testHumanLog flags) pkg_descr lbi
(unUnqualComponentName $ testSuiteName l) (testLogs l)
-- Generate TestSuiteLog from executable exit code and a machine-
-- readable test log
suiteLog <- fmap (\s -> (\l -> l { logFile = finalLogName l })
. fromMaybe (error $ "panic! read @TestSuiteLog " ++ show s) $ readMaybe s) -- TODO: eradicateNoParse
$ readFile tempLog
-- Write summary notice to log file indicating start of test suite
appendFile (logFile suiteLog) $ summarizeSuiteStart testName'
LBS.appendFile (logFile suiteLog) logText
-- Write end-of-suite summary notice to log file
appendFile (logFile suiteLog) $ summarizeSuiteFinish suiteLog
-- Show the contents of the human-readable log file on the terminal
-- if there is a failure and/or detailed output is requested
let details = fromFlag $ testShowDetails flags
whenPrinting = when $ (details > Never)
&& (not (suitePassed $ testLogs suiteLog) || details == Always)
&& verbosity >= normal
whenPrinting $ do
LBS.putStr logText
putChar '\n'
return suiteLog
-- Write summary notice to terminal indicating end of test suite
notice verbosity $ summarizeSuiteFinish suiteLog
when isCoverageEnabled $
case PD.library pkg_descr of
Nothing ->
die' verbosity "Test coverage is only supported for packages with a library component."
Just library ->
markupTest verbosity lbi distPref (prettyShow $ PD.package pkg_descr) suite library
return suiteLog
where
testName' = unUnqualComponentName $ PD.testName suite
deleteIfExists file = do
exists <- doesFileExist file
when exists $ removeFile file
testLogDir = distPref </> "test"
openCabalTemp = do
(f, h) <- openTempFile testLogDir $ "cabal-test-" <.> "log"
hClose h >> return f
distPref = fromFlag $ testDistPref flags
verbosity = fromFlag $ testVerbosity flags
-- TODO: This is abusing the notion of a 'PathTemplate'. The result isn't
-- necessarily a path.
testOption :: PD.PackageDescription
-> LBI.LocalBuildInfo
-> PD.TestSuite
-> PathTemplate
-> String
testOption pkg_descr lbi suite template =
fromPathTemplate $ substPathTemplate env template
where
env = initialPathTemplateEnv
(PD.package pkg_descr) (LBI.localUnitId lbi)
(compilerInfo $ LBI.compiler lbi) (LBI.hostPlatform lbi) ++
[(TestSuiteNameVar, toPathTemplate $ unUnqualComponentName $ PD.testName suite)]
-- Test stub ----------
-- | The name of the stub executable associated with a library 'TestSuite'.
stubName :: PD.TestSuite -> FilePath
stubName t = unUnqualComponentName (PD.testName t) ++ "Stub"
-- | The filename of the source file for the stub executable associated with a
-- library 'TestSuite'.
stubFilePath :: PD.TestSuite -> FilePath
stubFilePath t = stubName t <.> "hs"
-- | Write the source file for a library 'TestSuite' stub executable.
writeSimpleTestStub :: PD.TestSuite -- ^ library 'TestSuite' for which a stub
-- is being created
-> FilePath -- ^ path to directory where stub source
-- should be located
-> IO ()
writeSimpleTestStub t dir = do
createDirectoryIfMissing True dir
let filename = dir </> stubFilePath t
m = case PD.testInterface t of
PD.TestSuiteLibV09 _ m' -> m'
_ -> error "writeSimpleTestStub: invalid TestSuite passed"
writeFile filename $ simpleTestStub m
-- | Source code for library test suite stub executable
simpleTestStub :: ModuleName -> String
simpleTestStub m = unlines
[ "module Main ( main ) where"
, "import Distribution.Simple.Test.LibV09 ( stubMain )"
, "import " ++ show (pretty m) ++ " ( tests )"
, "main :: IO ()"
, "main = stubMain tests"
]
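-- An illustrative sketch only: for a hypothetical detailed-0.9 test module
-- @Foo.Tests@ exporting @tests :: IO [Test]@, the stub source generated by
-- 'simpleTestStub' would read roughly:
--
-- > module Main ( main ) where
-- > import Distribution.Simple.Test.LibV09 ( stubMain )
-- > import Foo.Tests ( tests )
-- > main :: IO ()
-- > main = stubMain tests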
-- | Main function for test stubs. Once, it was written directly into the stub,
-- but minimizing the amount of code actually in the stub maximizes the number
-- of detectable errors when Cabal is compiled.
stubMain :: IO [Test] -> IO ()
stubMain tests = do
(f, n) <- fmap (\s -> fromMaybe (error $ "panic! read " ++ show s) $ readMaybe s) getContents -- TODO: eradicateNoParse
dir <- getCurrentDirectory
results <- (tests >>= stubRunTests) `CE.catch` errHandler
setCurrentDirectory dir
stubWriteLog f n results
where
errHandler :: CE.SomeException -> IO TestLogs
errHandler e = case CE.fromException e of
Just CE.UserInterrupt -> CE.throwIO e
_ -> return $ TestLog { testName = "Cabal test suite exception",
testOptionsReturned = [],
testResult = Error $ show e }
-- | The test runner used in library "TestSuite" stub executables. Runs a list
-- of 'Test's. An executable calling this function is meant to be invoked as
-- the child of a Cabal process during @./setup test@. A 'TestSuiteLog',
-- provided by Cabal, is read from the standard input; it supplies the name of
-- the test suite and the location of the machine-readable test suite log file.
-- Human-readable log information is written to the standard output for capture
-- by the calling Cabal process.
stubRunTests :: [Test] -> IO TestLogs
stubRunTests tests = do
logs <- traverse stubRunTests' tests
return $ GroupLogs "Default" logs
where
stubRunTests' (Test t) = do
l <- run t >>= finish
summarizeTest normal Always l
return l
where
finish (Finished result) =
return TestLog
{ testName = name t
, testOptionsReturned = defaultOptions t
, testResult = result
}
finish (Progress _ next) = next >>= finish
stubRunTests' g@(Group {}) = do
logs <- traverse stubRunTests' $ groupTests g
return $ GroupLogs (groupName g) logs
stubRunTests' (ExtraOptions _ t) = stubRunTests' t
maybeDefaultOption opt =
maybe Nothing (\d -> Just (optionName opt, d)) $ optionDefault opt
defaultOptions testInst = mapMaybe maybeDefaultOption $ options testInst
-- | From a test stub, write the 'TestSuiteLog' to temporary file for the calling
-- Cabal process to read.
stubWriteLog :: FilePath -> UnqualComponentName -> TestLogs -> IO ()
stubWriteLog f n logs = do
let testLog = TestSuiteLog { testSuiteName = n, testLogs = logs, logFile = f }
writeFile (logFile testLog) $ show testLog
when (suiteError logs) $ exitWith $ ExitFailure 2
when (suiteFailed logs) $ exitWith $ ExitFailure 1
exitSuccess
| null | https://raw.githubusercontent.com/haskell/cabal/ab24689731e9fb45efa6277f290624622a6c214f/Cabal/src/Distribution/Simple/Test/LibV09.hs | haskell | # LANGUAGE RankNTypes #
Test stub
Check that the test executable exists.
Remove old .tix files if appropriate.
Write summary notices indicating start of test suite
Run test executable
Add (DY)LD_LIBRARY_PATH if needed
TODO: this setup is broken,
if the test output is too big, we will deadlock.
Append contents of temporary log file to the final human-
readable log file
Generate final log file name
Generate TestSuiteLog from executable exit code and a machine-
readable test log
TODO: eradicateNoParse
Write summary notice to log file indicating start of test suite
Write end-of-suite summary notice to log file
Show the contents of the human-readable log file on the terminal
if there is a failure and/or detailed output is requested
Write summary notice to terminal indicating end of test suite
necessarily a path.
Test stub ----------
| The name of the stub executable associated with a library 'TestSuite'.
| The filename of the source file for the stub executable associated with a
library 'TestSuite'.
is being created
^ path to directory where stub source
should be located
| Source code for library test suite stub executable
| Main function for test stubs. Once, it was written directly into the stub,
but minimizing the amount of code actually in the stub maximizes the number
TODO: eradicateNoParse
of 'Test's. An executable calling this function is meant to be invoked as
the test suite and the location of the machine-readable test suite log file.
Human-readable log information is written to the standard output for capture
| From a test stub, write the 'TestSuiteLog' to temporary file for the calling | # LANGUAGE FlexibleContexts #
module Distribution.Simple.Test.LibV09
( runTest
, simpleTestStub
, stubFilePath, stubMain, stubName, stubWriteLog
, writeSimpleTestStub
) where
import Prelude ()
import Distribution.Compat.Prelude
import Distribution.Types.UnqualComponentName
import Distribution.Compat.Environment
import Distribution.Compat.Internal.TempFile
import Distribution.ModuleName
import qualified Distribution.PackageDescription as PD
import Distribution.Simple.Build.PathsModule
import Distribution.Simple.BuildPaths
import Distribution.Simple.Compiler
import Distribution.Simple.Hpc
import Distribution.Simple.InstallDirs
import qualified Distribution.Simple.LocalBuildInfo as LBI
import qualified Distribution.Types.LocalBuildInfo as LBI
import Distribution.Simple.Flag ( Flag(NoFlag, Flag), fromFlag )
import Distribution.Simple.Setup.Test
import Distribution.Simple.Test.Log
import Distribution.Simple.Utils
import Distribution.System
import Distribution.TestSuite
import Distribution.Pretty
import Distribution.Verbosity
import qualified Control.Exception as CE
import qualified Data.ByteString.Lazy as LBS
import System.Directory
( createDirectoryIfMissing, canonicalizePath
, doesDirectoryExist, doesFileExist
, getCurrentDirectory, removeDirectoryRecursive, removeFile
, setCurrentDirectory )
import System.FilePath ( (</>), (<.>) )
import System.IO ( hClose, hPutStr )
import Distribution.Compat.Process (proc)
import qualified System.Process as Process
runTest :: PD.PackageDescription
-> LBI.LocalBuildInfo
-> LBI.ComponentLocalBuildInfo
-> TestFlags
-> PD.TestSuite
-> IO TestSuiteLog
runTest pkg_descr lbi clbi flags suite = do
let isCoverageEnabled = LBI.testCoverage lbi
way = guessWay lbi
pwd <- getCurrentDirectory
existingEnv <- getEnvironment
let cmd = LBI.buildDir lbi </> stubName suite
</> stubName suite <.> exeExtension (LBI.hostPlatform lbi)
exists <- doesFileExist cmd
unless exists $
die' verbosity $ "Could not find test program \"" ++ cmd
++ "\". Did you build the package first?"
unless (fromFlag $ testKeepTix flags) $ do
let tDir = tixDir distPref way testName'
exists' <- doesDirectoryExist tDir
when exists' $ removeDirectoryRecursive tDir
-- Create directory for HPC files.
createDirectoryIfMissing True $ tixDir distPref way testName'
notice verbosity $ summarizeSuiteStart testName'
suiteLog <- CE.bracket openCabalTemp deleteIfExists $ \tempLog -> do
let opts = map (testOption pkg_descr lbi suite) $ testOptions flags
dataDirPath = pwd </> PD.dataDir pkg_descr
tixFile = pwd </> tixFilePath distPref way testName'
pkgPathEnv = (pkgPathEnvVar pkg_descr "datadir", dataDirPath)
: existingEnv
shellEnv = [("HPCTIXFILE", tixFile) | isCoverageEnabled]
++ pkgPathEnv
shellEnv' <-
if LBI.withDynExe lbi
then do
let (Platform _ os) = LBI.hostPlatform lbi
paths <- LBI.depLibraryPaths True False lbi clbi
cpath <- canonicalizePath $ LBI.componentBuildDir lbi clbi
return (addLibraryPath os (cpath : paths) shellEnv)
else return shellEnv
let (cmd', opts') = case testWrapper flags of
Flag path -> (path, cmd:opts)
NoFlag -> (cmd, opts)
(rOut, wOut) <- Process.createPipe
(exitcode, logText) <- rawSystemProcAction verbosity
(proc cmd' opts') { Process.env = Just shellEnv'
, Process.std_in = Process.CreatePipe
, Process.std_out = Process.UseHandle wOut
, Process.std_err = Process.UseHandle wOut
} $ \mIn _ _ -> do
let wIn = fromCreatePipe mIn
hPutStr wIn $ show (tempLog, PD.testName suite)
hClose wIn
logText <- LBS.hGetContents rOut
-- Force the IO manager to drain the test output pipe
_ <- evaluate (force logText)
return logText
unless (exitcode == ExitSuccess) $
debug verbosity $ cmd ++ " returned " ++ show exitcode
let finalLogName l = testLogDir
</> testSuiteLogPath
(fromFlag $ testHumanLog flags) pkg_descr lbi
(unUnqualComponentName $ testSuiteName l) (testLogs l)
suiteLog <- fmap (\s -> (\l -> l { logFile = finalLogName l })
$ readFile tempLog
appendFile (logFile suiteLog) $ summarizeSuiteStart testName'
LBS.appendFile (logFile suiteLog) logText
appendFile (logFile suiteLog) $ summarizeSuiteFinish suiteLog
let details = fromFlag $ testShowDetails flags
whenPrinting = when $ (details > Never)
&& (not (suitePassed $ testLogs suiteLog) || details == Always)
&& verbosity >= normal
whenPrinting $ do
LBS.putStr logText
putChar '\n'
return suiteLog
notice verbosity $ summarizeSuiteFinish suiteLog
when isCoverageEnabled $
case PD.library pkg_descr of
Nothing ->
die' verbosity "Test coverage is only supported for packages with a library component."
Just library ->
markupTest verbosity lbi distPref (prettyShow $ PD.package pkg_descr) suite library
return suiteLog
where
testName' = unUnqualComponentName $ PD.testName suite
deleteIfExists file = do
exists <- doesFileExist file
when exists $ removeFile file
testLogDir = distPref </> "test"
openCabalTemp = do
(f, h) <- openTempFile testLogDir $ "cabal-test-" <.> "log"
hClose h >> return f
distPref = fromFlag $ testDistPref flags
verbosity = fromFlag $ testVerbosity flags
-- TODO: This is abusing the notion of a 'PathTemplate'. The result isn't
testOption :: PD.PackageDescription
-> LBI.LocalBuildInfo
-> PD.TestSuite
-> PathTemplate
-> String
testOption pkg_descr lbi suite template =
fromPathTemplate $ substPathTemplate env template
where
env = initialPathTemplateEnv
(PD.package pkg_descr) (LBI.localUnitId lbi)
(compilerInfo $ LBI.compiler lbi) (LBI.hostPlatform lbi) ++
[(TestSuiteNameVar, toPathTemplate $ unUnqualComponentName $ PD.testName suite)]
stubName :: PD.TestSuite -> FilePath
stubName t = unUnqualComponentName (PD.testName t) ++ "Stub"
stubFilePath :: PD.TestSuite -> FilePath
stubFilePath t = stubName t <.> "hs"
-- | Write the source file for a library 'TestSuite' stub executable.
writeSimpleTestStub :: PD.TestSuite -- ^ library 'TestSuite' for which a stub
-> FilePath -- ^ path to directory where stub source
-> IO ()
writeSimpleTestStub t dir = do
createDirectoryIfMissing True dir
let filename = dir </> stubFilePath t
m = case PD.testInterface t of
PD.TestSuiteLibV09 _ m' -> m'
_ -> error "writeSimpleTestStub: invalid TestSuite passed"
writeFile filename $ simpleTestStub m
simpleTestStub :: ModuleName -> String
simpleTestStub m = unlines
[ "module Main ( main ) where"
, "import Distribution.Simple.Test.LibV09 ( stubMain )"
, "import " ++ show (pretty m) ++ " ( tests )"
, "main :: IO ()"
, "main = stubMain tests"
]
-- of detectable errors when Cabal is compiled.
stubMain :: IO [Test] -> IO ()
stubMain tests = do
dir <- getCurrentDirectory
results <- (tests >>= stubRunTests) `CE.catch` errHandler
setCurrentDirectory dir
stubWriteLog f n results
where
errHandler :: CE.SomeException -> IO TestLogs
errHandler e = case CE.fromException e of
Just CE.UserInterrupt -> CE.throwIO e
_ -> return $ TestLog { testName = "Cabal test suite exception",
testOptionsReturned = [],
testResult = Error $ show e }
-- | The test runner used in library "TestSuite" stub executables. Runs a list
-- the child of a Cabal process during @./setup test@. A 'TestSuiteLog',
-- provided by Cabal, is read from the standard input; it supplies the name of
-- by the calling Cabal process.
stubRunTests :: [Test] -> IO TestLogs
stubRunTests tests = do
logs <- traverse stubRunTests' tests
return $ GroupLogs "Default" logs
where
stubRunTests' (Test t) = do
l <- run t >>= finish
summarizeTest normal Always l
return l
where
finish (Finished result) =
return TestLog
{ testName = name t
, testOptionsReturned = defaultOptions t
, testResult = result
}
finish (Progress _ next) = next >>= finish
stubRunTests' g@(Group {}) = do
logs <- traverse stubRunTests' $ groupTests g
return $ GroupLogs (groupName g) logs
stubRunTests' (ExtraOptions _ t) = stubRunTests' t
maybeDefaultOption opt =
maybe Nothing (\d -> Just (optionName opt, d)) $ optionDefault opt
defaultOptions testInst = mapMaybe maybeDefaultOption $ options testInst
-- Cabal process to read.
stubWriteLog :: FilePath -> UnqualComponentName -> TestLogs -> IO ()
stubWriteLog f n logs = do
let testLog = TestSuiteLog { testSuiteName = n, testLogs = logs, logFile = f }
writeFile (logFile testLog) $ show testLog
when (suiteError logs) $ exitWith $ ExitFailure 2
when (suiteFailed logs) $ exitWith $ ExitFailure 1
exitSuccess
|
d5eb901837a208d3e0384a6bc3fa1bd09247b811838d0adef5ba313e3c4e1b7b | con-kitty/categorifier | BuildDictionary.hs | {-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
-- |
-- Module : ConCat.BuildDictionary
-- Copyright : (c) 2016 Conal Elliott
-- License : BSD3
--
-- Maintainer :
-- Stability : experimental
--
-- Adaptation of HERMIT's buildDictionaryT via ConCat's BuildDictonary
module Categorifier.Core.BuildDictionary (buildDictionary) where
import qualified Categorifier.Core.Benchmark as Bench
import Categorifier.Core.Trace (pprTrace')
import Categorifier.Core.Types
( CategoryState (..),
DictCacheEntry (..),
DictCacheKey,
DictionaryFailure (..),
DictionaryStack,
writerT,
)
import Categorifier.Duoidal ((<\*))
import qualified Categorifier.GHC.Core as Plugins
import qualified Categorifier.GHC.Data as Plugins
import qualified Categorifier.GHC.Driver as Plugins
import qualified Categorifier.GHC.HsToCore as Plugins
import qualified Categorifier.GHC.Runtime as Plugins
import qualified Categorifier.GHC.Tc as Typechecker
import qualified Categorifier.GHC.Types as Plugins
import qualified Categorifier.GHC.Unit as Plugins
import qualified Categorifier.GHC.Utils as Plugins
import Control.Arrow (Arrow (..))
import Control.Monad ((<=<))
import Control.Monad.Extra (filterM)
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.Except (ExceptT (..), throwE)
import Control.Monad.Trans.RWS.Strict (gets, modify)
import Data.Data (Data)
import Data.Foldable (traverse_)
import Data.Generics (everything, mkQ)
import Data.List.Extra (isPrefixOf)
import Data.List.NonEmpty (NonEmpty (..), nonEmpty)
import qualified Data.List.NonEmpty as NonEmpty
import qualified Data.Map.Strict as Map
import Data.Monoid (Any (..))
import PyF (fmt)
import Yaya.Functor (hmap)
uniqSetToList :: Plugins.UniqSet a -> [a]
uniqSetToList = Plugins.nonDetEltsUniqSet
traceTcS' :: String -> Plugins.SDoc -> Typechecker.TcS ()
traceTcS' str doc = pprTrace' str doc (pure ())
traceTc' :: String -> Plugins.SDoc -> Typechecker.TcRn ()
traceTc' str doc = pprTrace' str doc (pure ())
-- | The result type is a slight improvement of the situation handed to us by
-- `Typechecker.runTcInteractive`, so not too much we can do about it other than hide it a bit and
-- try to keep it from leaking all over everything.
runTcRn ::
Plugins.Outputable a =>
Plugins.HscEnv ->
Plugins.ModGuts ->
Typechecker.TcRn a ->
IO (Either (NonEmpty DictionaryFailure) a, Plugins.WarningMessages)
runTcRn env0 guts m = do
-- Remove hidden modules from dep_orphans
orphans <-
filterM (fmap isFound . flip (Plugins.findExposedPackageModule env0) Nothing)
. fmap Plugins.moduleName
. Plugins.dep_orphs
$ Plugins.mg_deps guts
((warns, errs), mr) <- Typechecker.runTcInteractive (env orphans) m
pure (handleResult errs mr, warns)
where
isFound :: Plugins.FindResult -> Bool
isFound (Plugins.Found _ _) = True
isFound _ = False
handleResult errors =
maybe
(Left . pure $ TypecheckFailure errors)
(if Plugins.isEmptyBag errors then pure else Left . pure . ErroneousTypecheckSuccess errors)
imports0 = Plugins.ic_imports (Plugins.hsc_IC env0)
env :: [Plugins.ModuleName] -> Plugins.HscEnv
env extraModuleNames =
env0
{ Plugins.hsc_IC =
(Plugins.hsc_IC env0)
{ Plugins.ic_imports = fmap Plugins.IIModule extraModuleNames <> imports0,
Plugins.ic_rn_gbl_env = Plugins.mg_rdr_env guts,
Plugins.ic_instances = (Plugins.mg_insts guts, Plugins.mg_fam_insts guts)
}
}
-- | Build a dictionary for the given id.
buildDictionary' ::
Plugins.VarSet -> Plugins.Id -> Typechecker.TcRn [Plugins.CoreBind]
buildDictionary' evIds evar = do
bs <- do
loc <- Typechecker.getCtLocM (Typechecker.GivenOrigin Typechecker.UnkSkol) Nothing
let givens = Typechecker.mkGivens loc (uniqSetToList evIds)
predTy = Plugins.varType evar
nonC =
Typechecker.mkNonCanonical $
Typechecker.CtWanted
{ Typechecker.ctev_pred = predTy,
Typechecker.ctev_dest = Typechecker.EvVarDest evar,
Typechecker.ctev_nosh = Typechecker.WOnly,
Typechecker.ctev_loc = loc
}
wCs = Typechecker.mkSimpleWC [Typechecker.cc_ev nonC]
-- TODO: Make sure solveWanteds is the right function to call.
traceTc' "buildDictionary': givens" (Plugins.ppr givens)
(wCs', bnds0) <-
second Typechecker.evBindMapBinds
<$> Typechecker.runTcS
( do
_ <- Typechecker.solveSimpleGivens givens
traceTcS' "buildDictionary' back from solveSimpleGivens" Plugins.empty
z <- Typechecker.solveWanteds wCs
traceTcS' "buildDictionary' back from solveWanteds" (Plugins.ppr z)
pure z
)
traceTc' "buildDictionary' back from runTcS" (Plugins.ppr bnds0)
ez <- Typechecker.emptyZonkEnv
-- Use the newly exported zonkEvBinds. <>
(_env', bnds) <- Typechecker.zonkEvBinds ez bnds0
traceTc " ' wCs ' " ( Plugins.ppr wCs ' )
traceTc' "buildDictionary' zonked" (Plugins.ppr bnds)
Typechecker.reportAllUnsolved wCs'
pure bnds
Plugins.initDsTc $ Plugins.dsEvBinds bs
-- TODO: "use TcMType.newWanted to make your CtWanted. As it
-- stands, if predTy is an equality constraint, your CtWanted will be
-- ill-formed, as all equality constraints should have HoleDests, not
-- EvVarDests. Using TcMType.newWanted will simplify and improve your code."
-- | This attempts to build a dictionary representing a type class instance. The `CoreSyn.Type` is
-- the constraint to satisfy.
buildDictionary ::
Plugins.HscEnv ->
Plugins.ModGuts ->
Plugins.InScopeEnv ->
Plugins.Type ->
DictionaryStack Plugins.CoreExpr
buildDictionary env guts inScope goalTy =
pprTrace' "\nbuildDictionary" (Plugins.ppr goalTy)
. pprTrace'
"buildDictionary in-scope evidence"
(Plugins.ppr (Plugins.WithType . Plugins.Var <$> uniqSetToList scopedDicts))
-- TODO: replace the hardcoded @True@.
. Bench.billTo True Bench.BuildDictionary
$ getCachedDict goalTy >>= \case
Just cachedDict -> pure cachedDict
Nothing -> do
dict <-
hmap lift . reassemble
<=< ExceptT . writerT . runTcRn env guts
$ buildDictionary' scopedDicts binder
cacheDict goalTy dict
pure dict
where
binder = Plugins.localId inScope name goalTy
name = "cccDict"
scopedDicts = Plugins.filterVarSet keepVar (Plugins.getInScopeVars (fst inScope))
-- This /should/ return `True` when @v@'s an applicable instance related to our @goalTy@,
-- however it has run into some issues. Here is a bit of reconstructed history:
--
-- * 05b2df0 - removed @&& not (isEmptyVarSet (tyCoVarsOfType goalTy `intersectVarSet`
-- tyCoVarsOfType (Plugins.varType v)))@, because that was filtering out the
-- instances in the module being compiled
--
-- * cee7466 - changed to @keepVar = const False@ because of an occasional
" StgCmmEnv : variable not found " error . Included a comment , " See 2018 - 01 - 23
-- journal notes". This change also included a commented out
-- @&& not (isDeadBinder v)@, which presumably was hoped would eliminate the error,
-- but apparently didn't work.
--
-- We've currently restored it to just keep evidence (per 05b2df0) and haven't come across
-- StgCmmEnv errors yet, so maybe things have improved. If not, we'll try to identify the
-- problem, as this is a useful feature to keep.
keepVar v =
let varName = Plugins.occNameString . Plugins.nameOccName $ Plugins.varName v
in Plugins.isEvVar v
&&
-- Here we remove all the "cccDict" vars from the `inScope`. Why? Because when there
-- are multiple functions (say `foo` and `bar`) being categorified in parallel, a
-- dictionary var `cccDict_...` created during categorifying `foo` may show up in the
-- `inScope` when categorifying `bar`. We don't want to use this var when building
-- dictionaries for `bar`, because it is out of scope in the result of `bar`.
not (name `isPrefixOf` varName)
&&
-- TODO: I'm not quite sure why this is needed.
not ("$d" `isPrefixOf` varName)
reassemble :: [Plugins.CoreBind] -> ExceptT (NonEmpty DictionaryFailure) IO Plugins.CoreExpr
reassemble =
maybe
(throwE (pure NoBindings))
( uncurry (<\*)
. ( uncurry (<\*)
-- __NB__: The `simplifyExpr` here and the one in Conal's ConCat only differ in
-- terms of the `Plugins.CompilerPhase` they run in (this is
-- `Plugins.InitialPhase` vs @`Plugins.Phase` 0@ in Conal's. AFAICT, that
-- shouldn't matter, but if it does, come back here.
. ( lift . Plugins.simplifyExpr env
&&& ExceptT
. pure
. traverse_ (Left . pure . FreeIds)
. nonEmpty
. freeIdTys
)
. dict
&&& ExceptT
. pure
. traverse_ (Left . pure . CoercionHoles)
. nonEmpty
. NonEmpty.filter hasCoercionHole
)
)
. nonEmpty
where
dict = \case
-- Common case with single non-recursive let
(Plugins.NonRec v e :| []) | binder == v -> e
(h :| t) -> Plugins.mkCoreLets (h : t) (Plugins.varToCoreExpr binder)
-- Sometimes buildDictionary' constructs bogus dictionaries with free
-- identifiers. Hence check that freeIds is empty. Allow for free *type*
-- variables, however, since there may be some in the given type as
-- parameters. Alternatively, check that there are no free variables (val or
-- type) in the resulting dictionary that were not in the original type.
freeIds dictionary =
Plugins.filterVarSet Plugins.isId (Plugins.exprFreeVars dictionary)
`Plugins.minusVarSet` scopedDicts
freeIdTys = fmap (id &&& Plugins.varType) . uniqSetToList . freeIds
hasCoercionHole :: Data t => t -> Bool
hasCoercionHole = getAny . everything (<>) (mkQ mempty (Any . isHole))
where
isHole :: Plugins.CoercionHole -> Bool
isHole = const True
cacheKey :: Plugins.Type -> DictCacheKey
cacheKey ty = [fmt|{modu}.{Plugins.showSDocUnsafe $ Plugins.ppr ty}|]
where
tyCon = fst $ Plugins.splitTyConApp ty
name = Plugins.tyConName tyCon
modu = maybe "" (Plugins.moduleNameString . Plugins.moduleName) (Plugins.nameModule_maybe name)
getCachedDict :: Plugins.Type -> DictionaryStack (Maybe Plugins.CoreExpr)
getCachedDict goalTy = do
lift (gets csDictCache)
>>= ( \case
Just cached
| Plugins.eqType goalTy (dceType cached) -> pure . Just $ Plugins.Var (dceVar cached)
_ -> pure Nothing
)
. Map.lookup (cacheKey goalTy)
cacheDict :: Plugins.Type -> Plugins.CoreExpr -> DictionaryStack ()
cacheDict goalTy dict = lift . modify $ \(CategoryState uniqS idx cache) -> case dict of
Plugins.Var v ->
CategoryState uniqS idx $
Map.insert (cacheKey goalTy) (DictCacheEntry goalTy v dict Nothing) cache
_ ->
let (u, uniqS') = Plugins.takeUniqFromSupply uniqS
name =
Plugins.mkInternalName u (Plugins.mkVarOcc "cccDict") $
Plugins.mkGeneralSrcSpan "oops"
v = Plugins.mkLocalVar (Plugins.DFunId False) name goalTy Plugins.vanillaIdInfo
in CategoryState uniqS' (idx + 1) $
Map.insert (cacheKey goalTy) (DictCacheEntry goalTy v dict (Just idx)) cache
| null | https://raw.githubusercontent.com/con-kitty/categorifier/7a829e6b1e700c13089d39c52912870ca79b12e6/plugin/Categorifier/Core/BuildDictionary.hs | haskell | # LANGUAGE OverloadedStrings #
|
License : BSD3
Maintainer :
Stability : experimental
| The result type is a slight improvement of the situation handed to us by
`Typechecker.runTcInteractive`, so not too much we can do about it other than hide it a bit and
try to keep it from leaking all over everything.
| Build a dictionary for the given id.
TODO: Make sure solveWanteds is the right function to call.
Use the newly exported zonkEvBinds. <>
stands, if predTy is an equality constraint, your CtWanted will be
the constraint to satisfy.
TODO: replace the hardcoded @True@.
however it has run into some issues. Here is a bit of reconstructed history:
tyCoVarsOfType (Plugins.varType v)))@, because that was filtering out the
instances in the module being compiled
* cee7466 - changed to @keepVar = const False@ because of an occasional
journal notes". This change also included a commented out
@&& not (isDeadBinder v)@, which presumably was hoped would eliminate the error,
but apparently didn't work.
problem, as this is a useful feature to keep.
Here we remove all the "cccDict" vars from the `inScope`. Why? Because when there
are multiple functions (say `foo` and `bar`) being categorified in parallel, a
dictionary var `cccDict_...` created during categorifying `foo` may show up in the
`inScope` when categorifying `bar`. We don't want to use this var when building
dictionaries for `bar`, because it is out of scope in the result of `bar`.
shouldn't matter, but if it does, come back here.
Common case with single non-recursive let
identifiers. Hence check that freeIds is empty. Allow for free *type*
variables, however, since there may be some in the given type as
parameters. Alternatively, check that there are no free variables (val or
type) in the resulting dictionary that were not in the original type. | # LANGUAGE ExistentialQuantification #
# LANGUAGE QuasiQuotes #
Module : ConCat . BuildDictionary
Copyright : ( c ) 2016 Conal Elliott
Adaptation of HERMIT 's buildDictionaryT via ConCat 's BuildDictonary
module Categorifier.Core.BuildDictionary (buildDictionary) where
import qualified Categorifier.Core.Benchmark as Bench
import Categorifier.Core.Trace (pprTrace')
import Categorifier.Core.Types
( CategoryState (..),
DictCacheEntry (..),
DictCacheKey,
DictionaryFailure (..),
DictionaryStack,
writerT,
)
import Categorifier.Duoidal ((<\*))
import qualified Categorifier.GHC.Core as Plugins
import qualified Categorifier.GHC.Data as Plugins
import qualified Categorifier.GHC.Driver as Plugins
import qualified Categorifier.GHC.HsToCore as Plugins
import qualified Categorifier.GHC.Runtime as Plugins
import qualified Categorifier.GHC.Tc as Typechecker
import qualified Categorifier.GHC.Types as Plugins
import qualified Categorifier.GHC.Unit as Plugins
import qualified Categorifier.GHC.Utils as Plugins
import Control.Arrow (Arrow (..))
import Control.Monad ((<=<))
import Control.Monad.Extra (filterM)
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.Except (ExceptT (..), throwE)
import Control.Monad.Trans.RWS.Strict (gets, modify)
import Data.Data (Data)
import Data.Foldable (traverse_)
import Data.Generics (everything, mkQ)
import Data.List.Extra (isPrefixOf)
import Data.List.NonEmpty (NonEmpty (..), nonEmpty)
import qualified Data.List.NonEmpty as NonEmpty
import qualified Data.Map.Strict as Map
import Data.Monoid (Any (..))
import PyF (fmt)
import Yaya.Functor (hmap)
uniqSetToList :: Plugins.UniqSet a -> [a]
uniqSetToList = Plugins.nonDetEltsUniqSet
traceTcS' :: String -> Plugins.SDoc -> Typechecker.TcS ()
traceTcS' str doc = pprTrace' str doc (pure ())
traceTc' :: String -> Plugins.SDoc -> Typechecker.TcRn ()
traceTc' str doc = pprTrace' str doc (pure ())
runTcRn ::
Plugins.Outputable a =>
Plugins.HscEnv ->
Plugins.ModGuts ->
Typechecker.TcRn a ->
IO (Either (NonEmpty DictionaryFailure) a, Plugins.WarningMessages)
runTcRn env0 guts m = do
-- Remove hidden modules from dep_orphans
orphans <-
filterM (fmap isFound . flip (Plugins.findExposedPackageModule env0) Nothing)
. fmap Plugins.moduleName
. Plugins.dep_orphs
$ Plugins.mg_deps guts
((warns, errs), mr) <- Typechecker.runTcInteractive (env orphans) m
pure (handleResult errs mr, warns)
where
isFound :: Plugins.FindResult -> Bool
isFound (Plugins.Found _ _) = True
isFound _ = False
handleResult errors =
maybe
(Left . pure $ TypecheckFailure errors)
(if Plugins.isEmptyBag errors then pure else Left . pure . ErroneousTypecheckSuccess errors)
imports0 = Plugins.ic_imports (Plugins.hsc_IC env0)
env :: [Plugins.ModuleName] -> Plugins.HscEnv
env extraModuleNames =
env0
{ Plugins.hsc_IC =
(Plugins.hsc_IC env0)
{ Plugins.ic_imports = fmap Plugins.IIModule extraModuleNames <> imports0,
Plugins.ic_rn_gbl_env = Plugins.mg_rdr_env guts,
Plugins.ic_instances = (Plugins.mg_insts guts, Plugins.mg_fam_insts guts)
}
}
buildDictionary' ::
Plugins.VarSet -> Plugins.Id -> Typechecker.TcRn [Plugins.CoreBind]
buildDictionary' evIds evar = do
bs <- do
loc <- Typechecker.getCtLocM (Typechecker.GivenOrigin Typechecker.UnkSkol) Nothing
let givens = Typechecker.mkGivens loc (uniqSetToList evIds)
predTy = Plugins.varType evar
nonC =
Typechecker.mkNonCanonical $
Typechecker.CtWanted
{ Typechecker.ctev_pred = predTy,
Typechecker.ctev_dest = Typechecker.EvVarDest evar,
Typechecker.ctev_nosh = Typechecker.WOnly,
Typechecker.ctev_loc = loc
}
wCs = Typechecker.mkSimpleWC [Typechecker.cc_ev nonC]
traceTc' "buildDictionary': givens" (Plugins.ppr givens)
(wCs', bnds0) <-
second Typechecker.evBindMapBinds
<$> Typechecker.runTcS
( do
_ <- Typechecker.solveSimpleGivens givens
traceTcS' "buildDictionary' back from solveSimpleGivens" Plugins.empty
z <- Typechecker.solveWanteds wCs
traceTcS' "buildDictionary' back from solveWanteds" (Plugins.ppr z)
pure z
)
traceTc' "buildDictionary' back from runTcS" (Plugins.ppr bnds0)
ez <- Typechecker.emptyZonkEnv
(_env', bnds) <- Typechecker.zonkEvBinds ez bnds0
traceTc " ' wCs ' " ( Plugins.ppr wCs ' )
traceTc' "buildDictionary' zonked" (Plugins.ppr bnds)
Typechecker.reportAllUnsolved wCs'
pure bnds
Plugins.initDsTc $ Plugins.dsEvBinds bs
-- TODO: "use TcMType.newWanted to make your CtWanted. As it
-- ill-formed, as all equality constraints should have HoleDests, not
-- EvVarDests. Using TcMType.newWanted will simplify and improve your code."
-- | This attempts to build a dictionary representing a type class instance. The `CoreSyn.Type` is
buildDictionary ::
Plugins.HscEnv ->
Plugins.ModGuts ->
Plugins.InScopeEnv ->
Plugins.Type ->
DictionaryStack Plugins.CoreExpr
buildDictionary env guts inScope goalTy =
pprTrace' "\nbuildDictionary" (Plugins.ppr goalTy)
. pprTrace'
"buildDictionary in-scope evidence"
(Plugins.ppr (Plugins.WithType . Plugins.Var <$> uniqSetToList scopedDicts))
. Bench.billTo True Bench.BuildDictionary
$ getCachedDict goalTy >>= \case
Just cachedDict -> pure cachedDict
Nothing -> do
dict <-
hmap lift . reassemble
<=< ExceptT . writerT . runTcRn env guts
$ buildDictionary' scopedDicts binder
cacheDict goalTy dict
pure dict
where
binder = Plugins.localId inScope name goalTy
name = "cccDict"
scopedDicts = Plugins.filterVarSet keepVar (Plugins.getInScopeVars (fst inScope))
-- This /should/ return `True` when @v@'s an applicable instance related to our @goalTy@,
-- * 05b2df0 - removed @&& not (isEmptyVarSet (tyCoVarsOfType goalTy `intersectVarSet`
-- "StgCmmEnv: variable not found" error. Included a comment, "See 2018-01-23
-- We've currently restored it to just keep evidence (per 05b2df0) and haven't come across
-- StgCmmEnv errors yet, so maybe things have improved. If not, we'll try to identify the
keepVar v =
let varName = Plugins.occNameString . Plugins.nameOccName $ Plugins.varName v
in Plugins.isEvVar v
&&
not (name `isPrefixOf` varName)
&&
-- TODO: I'm not quite sure why this is needed.
not ("$d" `isPrefixOf` varName)
reassemble :: [Plugins.CoreBind] -> ExceptT (NonEmpty DictionaryFailure) IO Plugins.CoreExpr
reassemble =
maybe
(throwE (pure NoBindings))
( uncurry (<\*)
. ( uncurry (<\*)
-- __NB__: The `simplifyExpr` here and the one in Conal's ConCat only differ in
-- terms of the `Plugins.CompilerPhase` they run in (this is
-- `Plugins.InitialPhase` vs @`Plugins.Phase` 0@ in Conal's. AFAICT, that
. ( lift . Plugins.simplifyExpr env
&&& ExceptT
. pure
. traverse_ (Left . pure . FreeIds)
. nonEmpty
. freeIdTys
)
. dict
&&& ExceptT
. pure
. traverse_ (Left . pure . CoercionHoles)
. nonEmpty
. NonEmpty.filter hasCoercionHole
)
)
. nonEmpty
where
dict = \case
(Plugins.NonRec v e :| []) | binder == v -> e
(h :| t) -> Plugins.mkCoreLets (h : t) (Plugins.varToCoreExpr binder)
-- Sometimes buildDictionary' constructs bogus dictionaries with free
freeIds dictionary =
Plugins.filterVarSet Plugins.isId (Plugins.exprFreeVars dictionary)
`Plugins.minusVarSet` scopedDicts
freeIdTys = fmap (id &&& Plugins.varType) . uniqSetToList . freeIds
hasCoercionHole :: Data t => t -> Bool
hasCoercionHole = getAny . everything (<>) (mkQ mempty (Any . isHole))
where
isHole :: Plugins.CoercionHole -> Bool
isHole = const True
cacheKey :: Plugins.Type -> DictCacheKey
cacheKey ty = [fmt|{modu}.{Plugins.showSDocUnsafe $ Plugins.ppr ty}|]
where
tyCon = fst $ Plugins.splitTyConApp ty
name = Plugins.tyConName tyCon
modu = maybe "" (Plugins.moduleNameString . Plugins.moduleName) (Plugins.nameModule_maybe name)
getCachedDict :: Plugins.Type -> DictionaryStack (Maybe Plugins.CoreExpr)
getCachedDict goalTy = do
lift (gets csDictCache)
>>= ( \case
Just cached
| Plugins.eqType goalTy (dceType cached) -> pure . Just $ Plugins.Var (dceVar cached)
_ -> pure Nothing
)
. Map.lookup (cacheKey goalTy)
cacheDict :: Plugins.Type -> Plugins.CoreExpr -> DictionaryStack ()
cacheDict goalTy dict = lift . modify $ \(CategoryState uniqS idx cache) -> case dict of
Plugins.Var v ->
CategoryState uniqS idx $
Map.insert (cacheKey goalTy) (DictCacheEntry goalTy v dict Nothing) cache
_ ->
let (u, uniqS') = Plugins.takeUniqFromSupply uniqS
name =
Plugins.mkInternalName u (Plugins.mkVarOcc "cccDict") $
Plugins.mkGeneralSrcSpan "oops"
v = Plugins.mkLocalVar (Plugins.DFunId False) name goalTy Plugins.vanillaIdInfo
in CategoryState uniqS' (idx + 1) $
Map.insert (cacheKey goalTy) (DictCacheEntry goalTy v dict (Just idx)) cache
|
aa84aaa727b1eebbf7cf51675cc0fc2d8ad32f3ee0a8736872e1e05ecae6b50c | MLstate/opalang | listMap.ml |
Copyright © 2011 MLstate
This file is part of Opa.
Opa is free software: you can redistribute it and/or modify it under the
terms of the GNU Affero General Public License, version 3, as published by
the Free Software Foundation.
Opa is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for
more details.
You should have received a copy of the GNU Affero General Public License
along with Opa. If not, see </>.
*)
module Make (Ord: OrderedTypeSig.S) =
struct
module M = BaseMap.Make (Ord)
include M
let append k v m =
add k (match find_opt k m with
| Some l -> v :: l
| _ -> [v]
) m
let fold_elt f acc m =
fold (fun k l acc -> List.fold_left (fun acc v -> f acc k v) acc l) m acc
let append_left m1 m2 =
fold_elt (fun m k v -> append k v m) m1 m2
end
| null | https://raw.githubusercontent.com/MLstate/opalang/424b369160ce693406cece6ac033d75d85f5df4f/ocamllib/libbase/listMap.ml | ocaml |
Copyright © 2011 MLstate
This file is part of Opa.
Opa is free software: you can redistribute it and/or modify it under the
terms of the GNU Affero General Public License, version 3, as published by
the Free Software Foundation.
Opa is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for
more details.
You should have received a copy of the GNU Affero General Public License
along with Opa. If not, see </>.
*)
module Make (Ord: OrderedTypeSig.S) =
struct
module M = BaseMap.Make (Ord)
include M
let append k v m =
add k (match find_opt k m with
| Some l -> v :: l
| _ -> [v]
) m
let fold_elt f acc m =
fold (fun k l acc -> List.fold_left (fun acc v -> f acc k v) acc l) m acc
let append_left m1 m2 =
fold_elt (fun m k v -> append k v m) m1 m2
end
|
|
9b3383fbbf65625e324589e04bda8c343713dd2d9d8ca7d2d8d2e014267d695a | 2600hz/kazoo | pqc_httpd.erl | %%%-----------------------------------------------------------------------------
%%% @copyright (C) 2018-2020, 2600Hz
%%% @doc
%%% This Source Code Form is subject to the terms of the Mozilla Public
%%% License, v. 2.0. If a copy of the MPL was not distributed with this
%%% file, You can obtain one at /.
%%%
%%% @end
%%%-----------------------------------------------------------------------------
-module(pqc_httpd).
-behaviour(cowboy_handler).
-behaviour(gen_server).
-export([fetch_req/1, fetch_req/2
,get_req/1
,wait_for_req/1, wait_for_req/2
,update_req/2
,base_url/0
,status/0
,stop/0
]).
%% gen_server
-export([start_link/0, start_link/1
,init/1
,handle_call/3
,handle_cast/2
,handle_info/2
,code_change/3
,terminate/2
]).
%% Cowboy callbacks
-export([init/2
,handle/2
,terminate/3
]).
-include("kazoo_proper.hrl").
-define(LISTENER, 'kazoo_proper_httpd').
%% {{Pid, MRef}, TRef, JSONPath | {JSONPath, sender function}}
-type wait_path() :: kz_json:path() | {kz_json:path(), fun()}.
-type wait() :: {kz_term:pid_ref(), reference(), wait_path()}.
-type waits() :: [wait()].
-record(state, {requests = kz_json:new() :: kz_json:object()
,waits = [] :: waits()
}).
-type state() :: #state{}.
-spec start_link() -> {'ok', pid()}.
start_link() ->
start_link(kz_binary:rand_hex(5)).
-spec start_link(kz_term:ne_binary()) -> {'ok', pid()}.
start_link(LogId) ->
gen_server:start_link({'local', ?MODULE}, ?MODULE, [LogId], []).
-spec status() -> kz_json:object().
status() ->
gen_server:call(?MODULE, 'status').
-spec stop() -> 'ok'.
stop() ->
case whereis(?MODULE) of
'undefined' -> stop_listener();
Pid -> gen_server:stop(Pid)
end.
-spec stop_listener() -> 'ok'.
stop_listener() ->
cowboy:stop_listener(?LISTENER).
%% @doc fetches the value and removes it from the state if found
-spec fetch_req(kz_json:path()) -> kz_json:api_json_term().
fetch_req(Path) ->
fetch_req(Path, 'undefined').
%% @doc fetches the value and removes it from the state if found
-spec fetch_req(kz_json:path(), kz_term:api_pos_integer()) -> kz_json:api_json_term().
fetch_req(Path, TimeoutMs) ->
gen_server:call(?MODULE, {'fetch_req', Path, TimeoutMs}, timeout_or_default(TimeoutMs)).
-spec timeout_or_default(kz_term:api_pos_integer()) -> pos_integer().
timeout_or_default('undefined') -> 5 * ?MILLISECONDS_IN_SECOND;
timeout_or_default(TimeoutMs) when is_integer(TimeoutMs), TimeoutMs > 0 -> TimeoutMs + 100.
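%% Illustrative sketch (editorial addition, not part of the original module): with an
%% explicit timeout, fetch_req/2 lets the server wait up to TimeoutMs for the path to
%% appear, while timeout_or_default/1 pads the gen_server:call timeout by 100ms so the
%% server side replies (or times out) first. The path below is hypothetical.
-spec example_fetch_with_timeout(kz_term:ne_binary()) ->
          kz_json:api_json_term() | {'error', 'timeout'}.
example_fetch_with_timeout(AccountId) ->
    fetch_req([<<"accounts">>, AccountId], 2 * ?MILLISECONDS_IN_SECOND).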
%% @doc reads the value and leaves it in the state if found
-spec get_req(kz_json:path()) -> kz_json:api_json_term().
get_req(Path) ->
gen_server:call(?MODULE, {'get_req', Path}).
%% @doc waits until the request can be fulfilled then returns the value, leaving in state
-spec wait_for_req(kz_json:path()) -> kz_json:api_json_term() |
{'error', 'timeout'}.
wait_for_req(Path) ->
wait_for_req(Path, 5 * ?MILLISECONDS_IN_SECOND).
%% @doc waits until the request can be fulfilled then returns the value, leaving in state
-spec wait_for_req(kz_json:path(), pos_integer()) ->
kz_json:api_json_term() |
{'error', 'timeout'}.
wait_for_req([_|_]=Path, TimeoutMs) when is_integer(TimeoutMs), TimeoutMs > 0 ->
gen_server:call(?MODULE, {'wait_for_req', Path, TimeoutMs}, TimeoutMs + 100).
%% @doc updates the state to store Content at the Path location
-spec update_req(kz_json:path(), binary()) -> 'ok'.
update_req(Path, <<Content/binary>>) ->
Store = try base64:decode(Content) of
Decoded -> Decoded
catch
'error':_ -> Content
end,
lager:info("trying to store ~p: ~s", [Path, Store]),
gen_server:call(?MODULE, {'req', Path, Store}).
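%% Illustrative sketch (editorial addition, not part of the original module):
%% update_req/2 attempts a base64 decode before storing, so both calls below end up
%% storing the same decoded JSON binary under the (hypothetical) path.
-spec example_store_body() -> 'ok'.
example_store_body() ->
    Path = [<<"some">>, <<"doc">>],
    'ok' = update_req(Path, base64:encode(<<"{\"ok\":true}">>)),
    update_req(Path, <<"{\"ok\":true}">>).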
log_meta(LogId) ->
kz_log:put_callid(LogId),
lager:md([{'request_id', LogId}]),
put('start_time', kz_time:start_time()),
'ok'.
-spec init(list()) -> {'ok', state()}.
init([LogId]) ->
log_meta(LogId),
io:format("starting HTTPD with ~p~n", [LogId]),
Dispatch = cowboy_router:compile(routes(LogId)),
{'ok', _Pid} = start_plaintext(Dispatch),
lager:info("started HTTPD(~p) at ~s", [_Pid, base_url()]),
{'ok', #state{}}.
-spec routes(kz_term:ne_binary()) -> cowboy_router:routes().
routes(LogId) -> [{'_', paths_list(LogId)}].
paths_list(LogId) ->
[default_path(LogId)].
default_path(LogId) ->
{'_', 'pqc_httpd', [{'log_id', LogId}]}.
start_plaintext(Dispatch) ->
cowboy:start_clear(?LISTENER
,#{'num_acceptors' => 5}
,#{'env' => #{'dispatch' => Dispatch}}
).
-spec base_url() -> kz_term:ne_binary().
base_url() ->
Port = ranch:get_port(?LISTENER),
Host = kz_network_utils:get_hostname(),
kz_term:to_binary(["http://", Host, $:, integer_to_list(Port), $/]).
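%% Illustrative sketch (editorial addition, not part of the original module):
%% base_url/0 already ends in a slash, so request paths are appended without a
%% leading "/".
-spec example_request_url(kz_term:ne_binary()) -> kz_term:ne_binary().
example_request_url(Path) ->
    list_to_binary([base_url(), Path]).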
-spec init(cowboy_req:req(), kz_term:proplist()) ->
{'ok', cowboy_req:req(), 'undefined'}.
init(Req, HandlerOpts) ->
log_meta(props:get_value('log_id', HandlerOpts)),
handle(Req, HandlerOpts).
-spec handle(cowboy_req:req(), State) -> {'ok', cowboy_req:req(), State}.
handle(Req, State) ->
put('start_time', kz_time:start_time()),
handle(Req, State, cowboy_req:method(Req)).
handle(Req, State, <<"POST">>) ->
add_req_to_state(Req, State);
handle(Req, State, <<"PUT">>) ->
add_req_to_state(Req, State);
handle(Req, State, <<"GET">>) ->
get_from_state(Req, State).
get_from_state(Req, State) ->
Path = cowboy_req:path(Req), % <<"/foo/bar/baz">>
PathParts = tl(binary:split(Path, <<"/">>, ['global'])),
{RespCode, Body} =
case get_req(PathParts) of
'undefined' -> {404, <<>>};
Value -> {200, Value}
end,
lager:info("GET req ~s: ~p", [Path, RespCode]),
Req1 = cowboy_req:reply(RespCode, #{}, Body, Req),
{'ok', Req1, State}.
add_req_to_state(Req, State) ->
Path = cowboy_req:path(Req), % <<"/foo/bar/baz">>
PathParts = tl(binary:split(Path, <<"/">>, ['global'])),
{Req1, ReqBody} = maybe_handle_multipart(Req),
RespCode = case get_req(PathParts) of
'undefined' -> 201;
_Value -> 200
end,
lager:info("PUT req ~s: ~p: ~s", [Path, RespCode, ReqBody]),
update_req(PathParts, iolist_to_binary(ReqBody)),
Headers = #{<<"content-type">> => <<"application/json">>},
Req2 = cowboy_req:reply(RespCode, Headers, <<"{}">>, Req1),
{'ok', Req2, State}.
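%% Editorial note: the first PUT/POST to a path replies 201 and stores the body under
%% the split path parts; a repeated write to the same path replies 200 because
%% get_req/1 then finds an existing value.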
-spec read_body({'ok', binary(), cowboy_req:req()} |
{'more', binary(), cowboy_req:req()}
) -> {cowboy_req:req(), iodata()}.
read_body({'ok', BodyPart, Req}) ->
{Req, BodyPart};
read_body({'more', BodyPart, Req}) ->
{Req1, Rest} = read_body(cowboy_req:read_body(Req)),
{Req1, [BodyPart, Rest]}.
-spec maybe_handle_multipart(cowboy_req:req()) -> {cowboy_req:req(), iodata()}.
maybe_handle_multipart(Req) ->
maybe_handle_multipart(Req, cowboy_req:parse_header(<<"content-type">>, Req)).
maybe_handle_multipart(Req, {<<"multipart">>, <<"form-data">>, _Boundary}) ->
lager:info("handle multipart body with boundary: ~p", [_Boundary]),
handle_multipart(Req);
maybe_handle_multipart(Req, _CT) ->
lager:info("req has content-type: ~p", [_CT]),
read_body(cowboy_req:read_body(Req)).
handle_multipart(Req0) ->
case cowboy_req:read_part(Req0) of
{'ok', Headers, Req1} ->
lager:info("recv part headers: ~p", [Headers]),
handle_part_headers(Req1, Headers);
{'done', Req1} ->
lager:info("finished reading parts, no body"),
{Req1, <<>>}
end.
handle_part_headers(Req, #{<<"content-type">> := <<"application/json">>}) ->
lager:info("skipping JSON metadata"),
handle_multipart(Req);
handle_part_headers(Req, Headers) ->
case cow_multipart:form_data(Headers) of
{'data', Field} ->
lager:info("field: ~p", [Field]),
{'ok', Body, Req1} = cowboy_req:read_part_body(Req),
lager:info("body: ~p", [Body]),
{Req1, Body};
{'file', _FieldName, _Filename, _CType} ->
lager:info("file ~p: ~p: ~p", [_FieldName, _Filename, _CType]),
{'ok', Body, Req1} = cowboy_req:read_part_body(Req),
lager:info("body: ~p", [Body]),
{Req1, Body}
end.
-spec terminate(any(), cowboy_req:req(), any()) -> 'ok'.
terminate(_Reason, _Req, _State) ->
lager:info("finished req ~p", [kz_time:elapsed_ms(get('start_time'))]).
-spec terminate(any(), state()) -> 'ok'.
terminate(_Reason, _State) ->
stop_listener(),
lager:debug("terminating: ~p", [_Reason]),
lager:debug("state: ~p", [_State]).
-spec code_change(any(), state(), any()) -> {'ok', state()}.
code_change(_OldVsn, State, _Extra) ->
{'ok', State}.
-spec handle_call(any(), kz_term:pid_ref(), state()) ->
{'noreply', state()} |
{'reply', kz_json:api_json_term(), state()}.
handle_call({'wait_for_req', Path, TimeoutMs}
,From
,#state{requests=Requests
,waits=Waits
}=State
) ->
case kz_json:get_value(Path, Requests) of
'undefined' ->
{'noreply', State#state{waits=[new_wait(From, Path, TimeoutMs) | Waits]}};
Value ->
{'reply', Value, State}
end;
handle_call('status', _From, State) ->
{'reply', State, State};
handle_call({'fetch_req', Path, 'undefined'}, _From, #state{requests=Requests}=State) ->
case kz_json:take_value(Path, Requests) of
'false' ->
lager:info("failed to fetch ~p", [Path]),
{'reply', 'undefined', State};
{'value', Value, NewRequests} ->
lager:info("fetched ~p: ~s", [Path, Value]),
{'reply', Value, State#state{requests=NewRequests}}
end;
handle_call({'fetch_req', Path, TimeoutMs}
,From
,#state{requests=Requests
,waits=Waits
}=State
) when is_integer(TimeoutMs) ->
case kz_json:take_value(Path, Requests) of
'false' ->
{'noreply', State#state{waits=[new_wait(From, {Path, fun fetch_req/1}, TimeoutMs) | Waits]}};
{'value', Value, NewRequests} ->
?INFO("fetched ~p: ~s", [Path, Value]),
{'reply', Value, State#state{requests=NewRequests}}
end;
handle_call({'get_req', Path}, _From, #state{requests=Requests}=State) ->
lager:info("getting ~p", [Path]),
{'reply', kz_json:get_value(Path, Requests), State};
handle_call({'req', PathInfo, ReqBody}, _From, State) ->
NewState = handle_req_update(PathInfo, ReqBody, State),
{'reply', 'ok', NewState};
handle_call(_Req, _From, State) ->
{'noreply', State}.
handle_req_update(PathInfo, ReqBody, #state{requests=Requests
,waits=Waits
}=State) ->
?INFO("storing to ~p: ~s", [PathInfo, ReqBody]),
UpdatedReqs = kz_json:set_value(PathInfo, ReqBody, Requests),
{Relays, StillWaiting} =
lists:splitwith(fun({_F, _T, {P, _Fun}}) -> lists:prefix(P, PathInfo);
({_F, _T, P}) -> lists:prefix(P, PathInfo)
end
,Waits
),
_ = relay(Relays, UpdatedReqs),
State#state{requests=UpdatedReqs
,waits=StillWaiting
}.
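%% Editorial note: handle_req_update/3 splits the waits list with lists:splitwith/2,
%% relaying the leading run of waiters whose registered path is a prefix of the stored
%% PathInfo (e.g. a wait on [<<"accounts">>] matches a write to [<<"accounts">>, <<"a1">>]).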
-spec handle_cast(any(), state()) -> {'noreply', state()}.
handle_cast({'req', PathInfo, ReqBody}, State) ->
NewState = handle_req_update(PathInfo, ReqBody, State),
{'noreply', NewState};
handle_cast(_Msg, State) ->
{'noreply', State}.
-spec handle_info(any(), state()) -> {'noreply', state()}.
handle_info({'DOWN', MRef, 'process', Pid, _Reason}
,#state{waits=Waits}=State
) ->
{'noreply', State#state{waits=[Wait || {{P, R}, _, _}=Wait <- Waits, P =/= Pid, R =/= MRef]}};
handle_info({'EXIT', Pid, _Reason}
,#state{waits=Waits}=State
) ->
{'noreply', State#state{waits=[Wait || {{P, _R}, _, _}=Wait <- Waits, P =/= Pid]}};
handle_info({'timeout', TRef, {From, Path}}
,#state{waits=Waits}=State
) ->
{Relays, StillWaiting}
= lists:splitwith(fun({F, T, P}) -> F =:= From
andalso T =:= TRef
andalso P =:= Path
end
,Waits
),
_ = relay(Relays, {'error', 'timeout'}),
{'noreply', State#state{waits=StillWaiting}};
handle_info(_Msg, State) ->
{'noreply', State}.
-spec relay(waits(), kz_json:object() | {'error', 'timeout'}) -> ['ok' | pid()].
relay(Relays, {'error', _}=Msg) ->
[gen_server:reply(From, Msg) || {From, _, _} <- Relays];
relay(Relays, Requests) ->
[begin
_ = erlang:cancel_timer(TRef),
reply(From, Path, Requests)
end
|| {From, TRef, Path} <- Relays
].
reply(From, {Path, Fun}, _Requests) ->
%% spawn a function that asks the server for the payload and replies to the waiting caller
_ = spawn(fun() -> gen_server:reply(From, Fun(Path)) end);
reply(From, Path, Requests) ->
gen_server:reply(From, kz_json:get_value(Path, Requests)).
-spec new_wait(kz_term:pid_ref(), kz_json:path() | {kz_json:path(), fun()}, pos_integer()) -> wait().
new_wait(From, Path, TimeoutMs) ->
TRef = erlang:start_timer(TimeoutMs, self(), {From, Path}),
{From, TRef, Path}.
| null | https://raw.githubusercontent.com/2600hz/kazoo/24519b9af9792caa67f7c09bbb9d27e2418f7ad6/core/kazoo_proper/src/pqc_httpd.erl | erlang | -----------------------------------------------------------------------------
spawn a function that asks the server for the payload and replies to the waiting caller | ( C ) 2018 - 2020 , 2600Hz
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at /.
-module(pqc_httpd).
-behaviour(cowboy_handler).
-behaviour(gen_server).
-export([fetch_req/1, fetch_req/2
,get_req/1
,wait_for_req/1, wait_for_req/2
,update_req/2
,base_url/0
,status/0
,stop/0
]).
-export([start_link/0, start_link/1
,init/1
,handle_call/3
,handle_cast/2
,handle_info/2
,code_change/3
,terminate/2
]).
-export([init/2
,handle/2
,terminate/3
]).
-include("kazoo_proper.hrl").
-define(LISTENER, 'kazoo_proper_httpd').
{ { Pid , MRef } , TRef , | { JSONPath , sender function } }
-type wait_path() :: kz_json:path() | {kz_json:path(), fun()}.
-type wait() :: {kz_term:pid_ref(), reference(), wait_path()}.
-type waits() :: [wait()].
-record(state, {requests = kz_json:new() :: kz_json:object()
,waits = [] :: waits()
}).
-type state() :: #state{}.
-spec start_link() -> {'ok', pid()}.
start_link() ->
start_link(kz_binary:rand_hex(5)).
-spec start_link(kz_term:ne_binary()) -> {'ok', pid()}.
start_link(LogId) ->
gen_server:start_link({'local', ?MODULE}, ?MODULE, [LogId], []).
-spec status() -> kz_json:object().
status() ->
gen_server:call(?MODULE, 'status').
-spec stop() -> 'ok'.
stop() ->
case whereis(?MODULE) of
'undefined' -> stop_listener();
Pid -> gen_server:stop(Pid)
end.
-spec stop_listener() -> 'ok'.
stop_listener() ->
cowboy:stop_listener(?LISTENER).
-spec fetch_req(kz_json:path()) -> kz_json:api_json_term().
fetch_req(Path) ->
fetch_req(Path, 'undefined').
-spec fetch_req(kz_json:path(), kz_term:api_pos_integer()) -> kz_json:api_json_term().
fetch_req(Path, TimeoutMs) ->
gen_server:call(?MODULE, {'fetch_req', Path, TimeoutMs}, timeout_or_default(TimeoutMs)).
-spec timeout_or_default(kz_term:api_pos_integer()) -> pos_integer().
timeout_or_default('undefined') -> 5 * ?MILLISECONDS_IN_SECOND;
timeout_or_default(TimeoutMs) when is_integer(TimeoutMs), TimeoutMs > 0 -> TimeoutMs + 100.
-spec get_req(kz_json:path()) -> kz_json:api_json_term().
get_req(Path) ->
gen_server:call(?MODULE, {'get_req', Path}).
-spec wait_for_req(kz_json:path()) -> kz_json:api_json_term() |
{'error', 'timeout'}.
wait_for_req(Path) ->
wait_for_req(Path, 5 * ?MILLISECONDS_IN_SECOND).
-spec wait_for_req(kz_json:path(), pos_integer()) ->
kz_json:api_json_term() |
{'error', 'timeout'}.
wait_for_req([_|_]=Path, TimeoutMs) when is_integer(TimeoutMs), TimeoutMs > 0 ->
gen_server:call(?MODULE, {'wait_for_req', Path, TimeoutMs}, TimeoutMs + 100).
@doc updates the state to store Content at the Path location
-spec update_req(kz_json:path(), binary()) -> 'ok'.
update_req(Path, <<Content/binary>>) ->
Store = try base64:decode(Content) of
Decoded -> Decoded
catch
'error':_ -> Content
end,
lager:info("trying to store ~p: ~s", [Path, Store]),
gen_server:call(?MODULE, {'req', Path, Store}).
log_meta(LogId) ->
kz_log:put_callid(LogId),
lager:md([{'request_id', LogId}]),
put('start_time', kz_time:start_time()),
'ok'.
-spec init(list()) -> {'ok', state()}.
init([LogId]) ->
log_meta(LogId),
io:format("starting HTTPD with ~p~n", [LogId]),
Dispatch = cowboy_router:compile(routes(LogId)),
{'ok', _Pid} = start_plaintext(Dispatch),
lager:info("started HTTPD(~p) at ~s", [_Pid, base_url()]),
{'ok', #state{}}.
-spec routes(kz_term:ne_binary()) -> cowboy_router:routes().
routes(LogId) -> [{'_', paths_list(LogId)}].
paths_list(LogId) ->
[default_path(LogId)].
default_path(LogId) ->
{'_', 'pqc_httpd', [{'log_id', LogId}]}.
start_plaintext(Dispatch) ->
cowboy:start_clear(?LISTENER
,#{'num_acceptors' => 5}
,#{'env' => #{'dispatch' => Dispatch}}
).
-spec base_url() -> kz_term:ne_binary().
base_url() ->
Port = ranch:get_port(?LISTENER),
Host = kz_network_utils:get_hostname(),
kz_term:to_binary(["http://", Host, $:, integer_to_list(Port), $/]).
-spec init(cowboy_req:req(), kz_term:proplist()) ->
{'ok', cowboy_req:req(), 'undefined'}.
init(Req, HandlerOpts) ->
log_meta(props:get_value('log_id', HandlerOpts)),
handle(Req, HandlerOpts).
-spec handle(cowboy_req:req(), State) -> {'ok', cowboy_req:req(), State}.
handle(Req, State) ->
put('start_time', kz_time:start_time()),
handle(Req, State, cowboy_req:method(Req)).
handle(Req, State, <<"POST">>) ->
add_req_to_state(Req, State);
handle(Req, State, <<"PUT">>) ->
add_req_to_state(Req, State);
handle(Req, State, <<"GET">>) ->
get_from_state(Req, State).
get_from_state(Req, State) ->
PathParts = tl(binary:split(Path, <<"/">>, ['global'])),
{RespCode, Body} =
case get_req(PathParts) of
'undefined' -> {404, <<>>};
Value -> {200, Value}
end,
lager:info("GET req ~s: ~p", [Path, RespCode]),
Req1 = cowboy_req:reply(RespCode, #{}, Body, Req),
{'ok', Req1, State}.
add_req_to_state(Req, State) ->
PathParts = tl(binary:split(Path, <<"/">>, ['global'])),
{Req1, ReqBody} = maybe_handle_multipart(Req),
RespCode = case get_req(PathParts) of
'undefined' -> 201;
_Value -> 200
end,
lager:info("PUT req ~s: ~p: ~s", [Path, RespCode, ReqBody]),
update_req(PathParts, iolist_to_binary(ReqBody)),
Headers = #{<<"content-type">> => <<"application/json">>},
Req2 = cowboy_req:reply(RespCode, Headers, <<"{}">>, Req1),
{'ok', Req2, State}.
-spec read_body({'ok', binary(), cowboy_req:req()} |
{'more', binary(), cowboy_req:req()}
) -> {cowboy_req:req(), iodata()}.
read_body({'ok', BodyPart, Req}) ->
{Req, BodyPart};
read_body({'more', BodyPart, Req}) ->
{Req1, Rest} = read_body(cowboy_req:read_body(Req)),
{Req1, [BodyPart, Rest]}.
-spec maybe_handle_multipart(cowboy_req:req()) -> {cowboy_req:req(), iodata()}.
maybe_handle_multipart(Req) ->
maybe_handle_multipart(Req, cowboy_req:parse_header(<<"content-type">>, Req)).
maybe_handle_multipart(Req, {<<"multipart">>, <<"form-data">>, _Boundary}) ->
lager:info("handle multipart body with boundary: ~p", [_Boundary]),
handle_multipart(Req);
maybe_handle_multipart(Req, _CT) ->
lager:info("req has content-type: ~p", [_CT]),
read_body(cowboy_req:read_body(Req)).
handle_multipart(Req0) ->
case cowboy_req:read_part(Req0) of
{'ok', Headers, Req1} ->
lager:info("recv part headers: ~p", [Headers]),
handle_part_headers(Req1, Headers);
{'done', Req1} ->
lager:info("finished reading parts, no body"),
{Req1, <<>>}
end.
handle_part_headers(Req, #{<<"content-type">> := <<"application/json">>}) ->
lager:info("skipping JSON metadata"),
handle_multipart(Req);
handle_part_headers(Req, Headers) ->
case cow_multipart:form_data(Headers) of
{'data', Field} ->
lager:info("field: ~p", [Field]),
{'ok', Body, Req1} = cowboy_req:read_part_body(Req),
lager:info("body: ~p", [Body]),
{Req1, Body};
{'file', _FieldName, _Filename, _CType} ->
lager:info("file ~p: ~p: ~p", [_FieldName, _Filename, _CType]),
{'ok', Body, Req1} = cowboy_req:read_part_body(Req),
lager:info("body: ~p", [Body]),
{Req1, Body}
end.
-spec terminate(any(), cowboy_req:req(), any()) -> 'ok'.
terminate(_Reason, _Req, _State) ->
lager:info("finished req ~p", [kz_time:elapsed_ms(get('start_time'))]).
-spec terminate(any(), state()) -> 'ok'.
terminate(_Reason, _State) ->
stop_listener(),
lager:debug("terminating: ~p", [_Reason]),
lager:debug("state: ~p", [_State]).
-spec code_change(any(), state(), any()) -> {'ok', state()}.
code_change(_OldVsn, State, _Extra) ->
{'ok', State}.
-spec handle_call(any(), kz_term:pid_ref(), state()) ->
{'noreply', state()} |
{'reply', kz_json:api_json_term(), state()}.
handle_call({'wait_for_req', Path, TimeoutMs}
,From
,#state{requests=Requests
,waits=Waits
}=State
) ->
case kz_json:get_value(Path, Requests) of
'undefined' ->
{'noreply', State#state{waits=[new_wait(From, Path, TimeoutMs) | Waits]}};
Value ->
{'reply', Value, State}
end;
handle_call('status', _From, State) ->
{'reply', State, State};
handle_call({'fetch_req', Path, 'undefined'}, _From, #state{requests=Requests}=State) ->
case kz_json:take_value(Path, Requests) of
'false' ->
lager:info("failed to fetch ~p", [Path]),
{'reply', 'undefined', State};
{'value', Value, NewRequests} ->
lager:info("fetched ~p: ~s", [Path, Value]),
{'reply', Value, State#state{requests=NewRequests}}
end;
handle_call({'fetch_req', Path, TimeoutMs}
,From
,#state{requests=Requests
,waits=Waits
}=State
) when is_integer(TimeoutMs) ->
case kz_json:take_value(Path, Requests) of
'false' ->
{'noreply', State#state{waits=[new_wait(From, {Path, fun fetch_req/1}, TimeoutMs) | Waits]}};
{'value', Value, NewRequests} ->
?INFO("fetched ~p: ~s", [Path, Value]),
{'reply', Value, State#state{requests=NewRequests}}
end;
handle_call({'get_req', Path}, _From, #state{requests=Requests}=State) ->
lager:info("getting ~p", [Path]),
{'reply', kz_json:get_value(Path, Requests), State};
handle_call({'req', PathInfo, ReqBody}, _From, State) ->
NewState = handle_req_update(PathInfo, ReqBody, State),
{'reply', 'ok', NewState};
handle_call(_Req, _From, State) ->
{'noreply', State}.
handle_req_update(PathInfo, ReqBody, #state{requests=Requests
,waits=Waits
}=State) ->
?INFO("storing to ~p: ~s", [PathInfo, ReqBody]),
UpdatedReqs = kz_json:set_value(PathInfo, ReqBody, Requests),
{Relays, StillWaiting} =
lists:splitwith(fun({_F, _T, {P, _Fun}}) -> lists:prefix(P, PathInfo);
({_F, _T, P}) -> lists:prefix(P, PathInfo)
end
,Waits
),
_ = relay(Relays, UpdatedReqs),
State#state{requests=UpdatedReqs
,waits=StillWaiting
}.
-spec handle_cast(any(), state()) -> {'noreply', state()}.
handle_cast({'req', PathInfo, ReqBody}, State) ->
NewState = handle_req_update(PathInfo, ReqBody, State),
{'noreply', NewState};
handle_cast(_Msg, State) ->
{'noreply', State}.
-spec handle_info(any(), state()) -> {'noreply', state()}.
handle_info({'DOWN', MRef, 'process', Pid, _Reason}
,#state{waits=Waits}=State
) ->
{'noreply', State#state{waits=[Wait || {{P, R}, _, _}=Wait <- Waits, P =/= Pid, R =/= MRef]}};
handle_info({'EXIT', Pid, _Reason}
,#state{waits=Waits}=State
) ->
{'noreply', State#state{waits=[Wait || {{P, _R}, _, _}=Wait <- Waits, P =/= Pid]}};
handle_info({'timeout', TRef, {From, Path}}
,#state{waits=Waits}=State
) ->
{Relays, StillWaiting}
= lists:splitwith(fun({F, T, P}) -> F =:= From
andalso T =:= TRef
andalso P =:= Path
end
,Waits
),
_ = relay(Relays, {'error', 'timeout'}),
{'noreply', State#state{waits=StillWaiting}};
handle_info(_Msg, State) ->
{'noreply', State}.
-spec relay(waits(), kz_json:object() | {'error', 'timeout'}) -> ['ok' | pid()].
relay(Relays, {'error', _}=Msg) ->
[gen_server:reply(From, Msg) || {From, _, _} <- Relays];
relay(Relays, Requests) ->
[begin
_ = erlang:cancel_timer(TRef),
reply(From, Path, Requests)
end
|| {From, TRef, Path} <- Relays
].
reply(From, {Path, Fun}, _Requests) ->
_ = spawn(fun() -> gen_server:reply(From, Fun(Path)) end);
reply(From, Path, Requests) ->
gen_server:reply(From, kz_json:get_value(Path, Requests)).
-spec new_wait(kz_term:pid_ref(), kz_json:path() | {kz_json:path(), fun()}, pos_integer()) -> wait().
new_wait(From, Path, TimeoutMs) ->
TRef = erlang:start_timer(TimeoutMs, self(), {From, Path}),
{From, TRef, Path}.
|
fe87e58bcc36a77fb5c842e7fbb2626a01df1562d58384879d78aa5371510e09 | racket/redex | ls-typed-gen.rkt | #lang racket
(define the-error "no error")
(require redex/reduction-semantics
racket/list
racket/match)
(provide (all-defined-out))
(define-language list-machine
(a nil
(cons a a))
(v variable-not-otherwise-mentioned)
(r empty
(r v ↦ a))
(l variable-not-otherwise-mentioned)
(ι (jump l)
(branch-if-nil v l)
(fetch-field v 0 v)
(fetch-field v 1 v)
(cons v v v)
halt
(begin ι ι))
(p (l : ι p)
end))
(define-judgment-form list-machine
#:contract (var-lookup r v a)
#:mode (var-lookup I I O)
[-----
(var-lookup (r v ↦ a) v a)]
[(where #t (different v_1 v_2))
(var-lookup r v_2 a_2)
-----
(var-lookup (r v_1 ↦ a_1) v_2 a_2)])
(define-judgment-form list-machine
#:contract (var-set r v a r)
#:mode (var-set I I I O)
[-----
(var-set (r v ↦ a) v a_2 (r v ↦ a_2))]
[(where #t (different v v_2))
(var-set r v_2 a_2 r_2)
-----
(var-set (r v ↦ a) v_2 a_2 (r_2 v ↦ a))]
[-----
(var-set empty v a (empty v ↦ a))])
(define-judgment-form list-machine
#:contract (program-lookup p l ι)
#:mode (program-lookup I I O)
[-----
(program-lookup (l : ι p) l ι)]
[(where #t (different l l_2))
(program-lookup p l_2 ι_2)
-----
(program-lookup (l : ι p) l_2 ι_2)])
(define red
(reduction-relation
list-machine
#:domain (p r ι)
(--> (p r (begin (begin ι_1 ι_2) ι_3))
(p r (begin ι_1 (begin ι_2 ι_3)))
"step-seq")
(--> (p r (begin (fetch-field v 0 v_2) ι))
(p r_2 ι)
"step-fetch-field-0"
(judgment-holds (var-lookup r v (cons a_0 a_1)))
(judgment-holds (var-set r v_2 a_0 r_2)))
(--> (p r (begin (fetch-field v 1 v_2) ι))
(p r_2 ι)
"step-fetch-field-1"
(judgment-holds (var-lookup r v (cons a_0 a_1)))
(judgment-holds (var-set r v_2 a_1 r_2)))
(--> (p r (begin (cons v_0 v_1 v_2) ι))
(p r_2 ι)
"step-cons"
(judgment-holds (var-lookup r v_0 a_0))
(judgment-holds (var-lookup r v_1 a_1))
(judgment-holds (var-set r v_2 (cons a_0 a_1) r_2)))
(--> (p r (begin (branch-if-nil v l) ι))
(p r ι)
"step-branch-not-taken"
(judgment-holds (var-lookup r v (cons a_0 a_1))))
(--> (p r (begin (branch-if-nil v l) ι))
(p r ι_2)
"step-branch-taken"
(judgment-holds (var-lookup r v nil))
(judgment-holds (program-lookup p l ι_2)))
(--> (p r (jump l))
(p r ι_2)
"step-jump"
(judgment-holds (program-lookup p l ι_2)))))
(define (run-prog p)
(define r_0 (term (empty v0 ↦ nil)))
(define ι_0 (car (judgment-holds (program-lookup ,p l0 ι) ι)))
(apply-reduction-relation* red `(,p ,r_0 ,ι_0)))
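;; Illustrative sketch (editorial addition, not part of the original file): a minimal
;; program that halts immediately; run-prog starts at label l0 with v0 bound to nil.
;; (run-prog (term (l0 : halt end)))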
(define (check-progress p)
(define r_0 (term (empty v0 ↦ nil)))
(define ι_0 (car (judgment-holds (program-lookup ,p l0 ι) ι)))
(or (equal? ι_0 'halt)
(and
(= 1
(length (apply-reduction-relation
red
`(,p ,r_0 ,ι_0))))
(let ([closure (apply-reduction-relation*
red
`(,p ,r_0 ,ι_0)
#:stop-when
(let ([count 0])
(λ (_)
(begin0
(count . > . 1000)
(set! count (add1 count))))))])
if reduction terminates in less than 1000 steps , check it ends with halt
;; (if the #:stop-when condition is true, we get back an empty list,
;; and the same thing for a reduction cycle)
(or (empty? closure)
(and (= 1 (length closure))
(match (car closure)
[`(,p ,r ,ι)
(equal? ι 'halt)])))))))
(define (check p)
(or (not p)
(check-progress p)))
(define-metafunction list-machine
different : any any -> any
[(different any_1 any_1)
#f]
[(different any_1 any_2)
#t])
(define-extended-language list-machine-typing list-machine
(τ nil (list τ) (listcons τ))
(Γ empty (v : τ Γ))
(Π empty (l : Γ Π)))
(define-judgment-form list-machine-typing
#:contract (check-program p Π)
#:mode (check-program I I)
[(:lookup-Π Π l0 (v0 : nil empty))
(labels-distinct (l0 : ι p))
;; note : changed from l-⊂
(d= Π (l0 : ι p))
(labels-distinct-Π Π)
(check-blocks Π (l0 : ι p))
-----
(check-program (l0 : ι p) Π)])
(define-judgment-form list-machine-typing
#:contract (Γ-⊂ Γ Γ)
#:mode (Γ-⊂ I I)
[-----
(Γ-⊂ Γ empty)]
[(:lookup-Γ Γ_1 v τ_1)
(⊂ τ_1 τ_2)
(Γ-⊂ Γ_1 Γ_2)
----
(Γ-⊂ Γ_1 (v : τ_2 Γ_2))])
(define-judgment-form list-machine-typing
#:contract (check-blocks Π p)
#:mode (check-blocks I I)
[(:lookup-Π Π l Γ)
(check-block Π Γ ι)
(check-blocks Π p)
-----
(check-blocks Π (l : ι p)) ]
[-----
(check-blocks Π end)])
(define-judgment-form list-machine-typing
#:contract (check-block Π Γ ι)
#:mode (check-block I I I)
[-----
(check-block Π Γ halt)]
[(check-instr Π Γ ι_1 Γ_2)
(check-block Π Γ_2 ι_2)
-----
(check-block Π Γ (begin ι_1 ι_2))]
[(:lookup-Π Π l Γ_2)
(Γ-⊂ Γ Γ_2)
-----
(check-block Π Γ (jump l))])
(define-judgment-form list-machine-typing
#:contract (check-instr Π Γ ι Γ)
#:mode (check-instr I I I O)
[(check-instr Π Γ ι_1 Γ_1)
(check-instr Π Γ_1 ι_2 Γ_2)
-----
(check-instr Π Γ (begin ι_1 ι_2) Γ_2)]
[(:lookup-Γ Γ v (list τ))
(:lookup-Π Π l Γ_1)
(:set Γ v nil Γ_3)
(Γ-⊂ Γ_3 Γ_1)
(:set Γ_3 v (listcons τ) Γ_2)
-----
(check-instr Π Γ (branch-if-nil v l) Γ_2)]
[(:lookup-Γ Γ v (listcons τ))
(:lookup-Π Π l Γ_1)
(:set Γ v nil Γ_2)
(Γ-⊂ Γ_2 Γ_1)
-----
(check-instr Π Γ (branch-if-nil v l) Γ)]
[(:lookup-Γ Γ v nil)
(:lookup-Π Π l Γ_1)
(Γ-⊂ Γ Γ_1)
-----
(check-instr Π Γ (branch-if-nil v l) Γ)]
[(:lookup-Γ Γ v (listcons τ)) (:set Γ v_2 τ Γ_2)
-----
(check-instr Π Γ (fetch-field v 0 v_2) Γ_2)]
[(:lookup-Γ Γ v (listcons τ)) (:set Γ v_2 (list τ) Γ_2)
-----
(check-instr Π Γ (fetch-field v 1 v_2) Γ_2)]
[(:lookup-Γ Γ v_0 τ_0) (:lookup-Γ Γ v_1 τ_1)
(⊔ (list τ_0) τ_1 (list τ)) (:set Γ v (listcons τ) Γ_2)
-----
(check-instr Π Γ (cons v_0 v_1 v) Γ_2)])
(define-judgment-form list-machine-typing
#:contract (⊂ τ τ)
#:mode (⊂ O I)
[-----
(⊂ τ τ)]
[-----
(⊂ nil (list τ))]
[(⊂ τ τ_2)
-----
(⊂ (list τ) (list τ_2))]
[(⊂ τ τ_2)
-----
(⊂ (listcons τ) (list τ_2))]
[(⊂ τ τ_2)
-----
(⊂ (listcons τ) (listcons τ_2))])
(define-judgment-form list-machine-typing
#:contract (⊔ τ τ τ)
#:mode (⊔ I I O)
[-----
(⊔ τ τ τ)]
[-----
(⊔ (list τ) nil (list τ))]
[-----
(⊔ nil (list τ) (list τ))]
[(⊔ (list τ_1) (list τ_2) τ_3)
-----
(⊔ (list τ_1) (listcons τ_2) τ_3)]
[(⊔ (list τ_1) (list τ_2) τ_3)
-----
(⊔ (listcons τ_1) (list τ_2) τ_3)]
[(⊔ τ_1 τ_2 τ_3)
-----
(⊔ (list τ_1) (list τ_2) (list τ_3))]
[-----
(⊔ (listcons τ) nil (list τ))]
[-----
(⊔ nil (listcons τ) (list τ))]
[(⊔ τ_1 τ_2 τ_3)
-----
(⊔ (listcons τ_1) (listcons τ_2) (listcons τ_3))])
(define-judgment-form list-machine-typing
#:contract (:lookup any v any)
#:mode (:lookup I I O)
[-----
(:lookup (v : any_τ any_Γ) v any_τ)]
[(where #t (different v_1 v_2))
(:lookup any_Γ v_2 any_τ2)
-----
(:lookup (v_1 : any_τ1 any_Γ) v_2 any_τ2)])
(define-judgment-form list-machine-typing
#:contract (:lookup-Γ Γ v τ)
#:mode (:lookup-Γ I I O)
[-----
(:lookup-Γ (v : τ Γ) v τ)]
[(where #t (different v_1 v_2))
(:lookup-Γ Γ v_2 τ_2)
-----
(:lookup-Γ (v_1 : τ_1 Γ) v_2 τ_2)])
(define-judgment-form list-machine-typing
#:contract (:lookup-Π Π l Γ)
#:mode (:lookup-Π I I O)
[-----
(:lookup-Π (l : Γ Π) l Γ)]
[(where #t (different l_1 l_2))
(:lookup-Π Π l_2 Γ_2)
-----
(:lookup-Π (l_1 : Γ_1 Π) l_2 Γ_2)])
(define-judgment-form list-machine-typing
#:contract (:set Γ v τ Γ)
#:mode (:set I I I O)
[-----
(:set (v : any_τ any_Γ) v any_τ2 (v : any_τ2 any_Γ))]
[(where #t (different v v_2))
(:set any_Γ v_2 any_τ2 any_Γ2)
-----
(:set (v : any_τ any_Γ) v_2 any_τ2 (v : any_τ any_Γ2))]
[-----
(:set empty v any_τ (v : any_τ empty))])
(define-metafunction list-machine-typing
[(dom (l_1 : any_1 any_2))
(l_1 (dom any_2))]
[(dom empty) empty])
(define-metafunction list-machine-typing
[(dom-P (l_1 : ι_1 p))
(l_1 (dom p))]
[(dom-P end) empty])
(define-metafunction list-machine-typing
[(dom-Π (l_1 : Γ_1 Π))
(l_1 (dom Π))]
[(dom-Π empty) empty])
#;
(define-metafunction list-machine-typing
l-⊂ : (l ...) (l ...) -> any
[(l-⊂ (l_1 ...) (l_2 ...))
,(let ([ht (make-hash)])
(for ([l (in-list (term (l_2 ...)))])
(hash-set! ht l #t))
(for/and ([l (in-list (term (l_1 ...)))])
(hash-ref ht l #f)))])
(define-relation list-machine-typing
[(l-⊂ (l_1 empty) l_2)
(where #t (lmem l_1 l_2))]
[(l-⊂ (l_1 l_2) l_3)
(l-⊂ l_2 l_3)
(where #t (lmem l_1 l_3))]
[(l-⊂ empty any)])
(define-relation list-machine-typing
[(d= (l_1 : Γ_1 Π) (l_1 : ι p))
(d= Π p)]
[(d= empty end)])
(define-relation list-machine-typing
[(d-⊂ (l_1 : Γ_1 Π) p)
(has-label p l_1)
(d-⊂ Π p)]
[(d-⊂ (l_1 : Γ_1 Π) (l_2 : ι p))
(d-⊂ (l_1 : Γ_1 Π) p)]
[(d-⊂ empty p)])
(define-relation list-machine-typing
[(has-label (l_1 : ι p) l_1)]
[(has-label (l_1 : ι p) l_2)
(has-label p l_2)])
(define-relation list-machine-typing
[(labels-distinct (l_1 : ι p))
(label-not-in l_1 p)
(labels-distinct p)]
[(labels-distinct end)])
(define-relation list-machine-typing
[(label-not-in l_1 (l_2 : ι p))
(different l_1 l_2)
(label-not-in l_1 p)]
[(label-not-in l_1 end)])
(define-metafunction list-machine-typing
[(lmem l_1 (l_1 l_2))
#t]
[(lmem l_1 (l_2 l_3))
(lmem l_1 l_3)]
[(lmem l_1 empty)
#f])
(define-relation list-machine-typing
[(labels-distinct-Π (l_1 : Γ Π))
(label-not-in-Π l_1 Π)
(labels-distinct-Π Π)]
[(labels-distinct-Π empty)])
(define-relation list-machine-typing
[(label-not-in-Π l_1 (l_2 : Γ Π))
(different l_1 l_2)
(label-not-in-Π l_1 Π)]
[(label-not-in-Π l_1 empty)])
(define (generate-M-term)
(generate-term list-machine-typing (l0 : ι p) 7))
(define (type-check p)
need to provide a program typing , so generate 10 randomly and
;; see if any succeed...
(let loop ([i 0])
(cond
[(i . > . 10) #f]
[else
(define guess-Π (generate-term list-machine-typing (l0 : (v0 : nil empty) Π) 7))
(or (judgment-holds (check-program ,p ,guess-Π))
(loop (add1 i)))])))
(define (typed-generator)
(let ([g (redex-generator list-machine-typing
(check-program p Π)
7)])
(λ ()
(match (g)
[`(check-program ,p ,Π)
p]
[#f #f]))))
(define (generate-typed-term)
(match (generate-term list-machine-typing
#:satisfying
(check-program p Π)
7)
[`(check-program ,p ,Π)
p]
[#f #f])) | null | https://raw.githubusercontent.com/racket/redex/4c2dc96d90cedeb08ec1850575079b952c5ad396/redex-benchmark/redex/benchmark/models/list-machine/ls-typed-gen.rkt | racket | (if the #:stop-when condition is true, we get back an empty list,
and the same thing for a reduction cycle)
note : changed from l-⊂
see if any succeed... | #lang racket
(define the-error "no error")
(require redex/reduction-semantics
racket/list
racket/match)
(provide (all-defined-out))
(define-language list-machine
(a nil
(cons a a))
(v variable-not-otherwise-mentioned)
(r empty
(r v ↦ a))
(l variable-not-otherwise-mentioned)
(ι (jump l)
(branch-if-nil v l)
(fetch-field v 0 v)
(fetch-field v 1 v)
(cons v v v)
halt
(begin ι ι))
(p (l : ι p)
end))
(define-judgment-form list-machine
#:contract (var-lookup r v a)
#:mode (var-lookup I I O)
[-----
(var-lookup (r v ↦ a) v a)]
[(where #t (different v_1 v_2))
(var-lookup r v_2 a_2)
-----
(var-lookup (r v_1 ↦ a_1) v_2 a_2)])
(define-judgment-form list-machine
#:contract (var-set r v a r)
#:mode (var-set I I I O)
[-----
(var-set (r v ↦ a) v a_2 (r v ↦ a_2))]
[(where #t (different v v_2))
(var-set r v_2 a_2 r_2)
-----
(var-set (r v ↦ a) v_2 a_2 (r_2 v ↦ a))]
[-----
(var-set empty v a (empty v ↦ a))])
(define-judgment-form list-machine
#:contract (program-lookup p l ι)
#:mode (program-lookup I I O)
[-----
(program-lookup (l : ι p) l ι)]
[(where #t (different l l_2))
(program-lookup p l_2 ι_2)
-----
(program-lookup (l : ι p) l_2 ι_2)])
(define red
(reduction-relation
list-machine
#:domain (p r ι)
(--> (p r (begin (begin ι_1 ι_2) ι_3))
(p r (begin ι_1 (begin ι_2 ι_3)))
"step-seq")
(--> (p r (begin (fetch-field v 0 v_2) ι))
(p r_2 ι)
"step-fetch-field-0"
(judgment-holds (var-lookup r v (cons a_0 a_1)))
(judgment-holds (var-set r v_2 a_0 r_2)))
(--> (p r (begin (fetch-field v 1 v_2) ι))
(p r_2 ι)
"step-fetch-field-1"
(judgment-holds (var-lookup r v (cons a_0 a_1)))
(judgment-holds (var-set r v_2 a_1 r_2)))
(--> (p r (begin (cons v_0 v_1 v_2) ι))
(p r_2 ι)
"step-cons"
(judgment-holds (var-lookup r v_0 a_0))
(judgment-holds (var-lookup r v_1 a_1))
(judgment-holds (var-set r v_2 (cons a_0 a_1) r_2)))
(--> (p r (begin (branch-if-nil v l) ι))
(p r ι)
"step-branch-not-taken"
(judgment-holds (var-lookup r v (cons a_0 a_1))))
(--> (p r (begin (branch-if-nil v l) ι))
(p r ι_2)
"step-branch-taken"
(judgment-holds (var-lookup r v nil))
(judgment-holds (program-lookup p l ι_2)))
(--> (p r (jump l))
(p r ι_2)
"step-jump"
(judgment-holds (program-lookup p l ι_2)))))
(define (run-prog p)
(define r_0 (term (empty v0 ↦ nil)))
(define ι_0 (car (judgment-holds (program-lookup ,p l0 ι) ι)))
(apply-reduction-relation* red `(,p ,r_0 ,ι_0)))
(define (check-progress p)
(define r_0 (term (empty v0 ↦ nil)))
(define ι_0 (car (judgment-holds (program-lookup ,p l0 ι) ι)))
(or (equal? ι_0 'halt)
(and
(= 1
(length (apply-reduction-relation
red
`(,p ,r_0 ,ι_0))))
(let ([closure (apply-reduction-relation*
red
`(,p ,r_0 ,ι_0)
#:stop-when
(let ([count 0])
(λ (_)
(begin0
(count . > . 1000)
(set! count (add1 count))))))])
if reduction terminates in less than 1000 steps , check it ends with halt
(or (empty? closure)
(and (= 1 (length closure))
(match (car closure)
[`(,p ,r ,ι)
(equal? ι 'halt)])))))))
(define (check p)
(or (not p)
(check-progress p)))
(define-metafunction list-machine
different : any any -> any
[(different any_1 any_1)
#f]
[(different any_1 any_2)
#t])
(define-extended-language list-machine-typing list-machine
(τ nil (list τ) (listcons τ))
(Γ empty (v : τ Γ))
(Π empty (l : Γ Π)))
(define-judgment-form list-machine-typing
#:contract (check-program p Π)
#:mode (check-program I I)
[(:lookup-Π Π l0 (v0 : nil empty))
(labels-distinct (l0 : ι p))
(d= Π (l0 : ι p))
(labels-distinct-Π Π)
(check-blocks Π (l0 : ι p))
-----
(check-program (l0 : ι p) Π)])
(define-judgment-form list-machine-typing
#:contract (Γ-⊂ Γ Γ)
#:mode (Γ-⊂ I I)
[-----
(Γ-⊂ Γ empty)]
[(:lookup-Γ Γ_1 v τ_1)
(⊂ τ_1 τ_2)
(Γ-⊂ Γ_1 Γ_2)
----
(Γ-⊂ Γ_1 (v : τ_2 Γ_2))])
(define-judgment-form list-machine-typing
#:contract (check-blocks Π p)
#:mode (check-blocks I I)
[(:lookup-Π Π l Γ)
(check-block Π Γ ι)
(check-blocks Π p)
-----
(check-blocks Π (l : ι p)) ]
[-----
(check-blocks Π end)])
(define-judgment-form list-machine-typing
#:contract (check-block Π Γ ι)
#:mode (check-block I I I)
[-----
(check-block Π Γ halt)]
[(check-instr Π Γ ι_1 Γ_2)
(check-block Π Γ_2 ι_2)
-----
(check-block Π Γ (begin ι_1 ι_2))]
[(:lookup-Π Π l Γ_2)
(Γ-⊂ Γ Γ_2)
-----
(check-block Π Γ (jump l))])
(define-judgment-form list-machine-typing
#:contract (check-instr Π Γ ι Γ)
#:mode (check-instr I I I O)
[(check-instr Π Γ ι_1 Γ_1)
(check-instr Π Γ_1 ι_2 Γ_2)
-----
(check-instr Π Γ (begin ι_1 ι_2) Γ_2)]
[(:lookup-Γ Γ v (list τ))
(:lookup-Π Π l Γ_1)
(:set Γ v nil Γ_3)
(Γ-⊂ Γ_3 Γ_1)
(:set Γ_3 v (listcons τ) Γ_2)
-----
(check-instr Π Γ (branch-if-nil v l) Γ_2)]
[(:lookup-Γ Γ v (listcons τ))
(:lookup-Π Π l Γ_1)
(:set Γ v nil Γ_2)
(Γ-⊂ Γ_2 Γ_1)
-----
(check-instr Π Γ (branch-if-nil v l) Γ)]
[(:lookup-Γ Γ v nil)
(:lookup-Π Π l Γ_1)
(Γ-⊂ Γ Γ_1)
-----
(check-instr Π Γ (branch-if-nil v l) Γ)]
[(:lookup-Γ Γ v (listcons τ)) (:set Γ v_2 τ Γ_2)
-----
(check-instr Π Γ (fetch-field v 0 v_2) Γ_2)]
[(:lookup-Γ Γ v (listcons τ)) (:set Γ v_2 (list τ) Γ_2)
-----
(check-instr Π Γ (fetch-field v 1 v_2) Γ_2)]
[(:lookup-Γ Γ v_0 τ_0) (:lookup-Γ Γ v_1 τ_1)
(⊔ (list τ_0) τ_1 (list τ)) (:set Γ v (listcons τ) Γ_2)
-----
(check-instr Π Γ (cons v_0 v_1 v) Γ_2)])
(define-judgment-form list-machine-typing
#:contract (⊂ τ τ)
#:mode (⊂ O I)
[-----
(⊂ τ τ)]
[-----
(⊂ nil (list τ))]
[(⊂ τ τ_2)
-----
(⊂ (list τ) (list τ_2))]
[(⊂ τ τ_2)
-----
(⊂ (listcons τ) (list τ_2))]
[(⊂ τ τ_2)
-----
(⊂ (listcons τ) (listcons τ_2))])
(define-judgment-form list-machine-typing
#:contract (⊔ τ τ τ)
#:mode (⊔ I I O)
[-----
(⊔ τ τ τ)]
[-----
(⊔ (list τ) nil (list τ))]
[-----
(⊔ nil (list τ) (list τ))]
[(⊔ (list τ_1) (list τ_2) τ_3)
-----
(⊔ (list τ_1) (listcons τ_2) τ_3)]
[(⊔ (list τ_1) (list τ_2) τ_3)
-----
(⊔ (listcons τ_1) (list τ_2) τ_3)]
[(⊔ τ_1 τ_2 τ_3)
-----
(⊔ (list τ_1) (list τ_2) (list τ_3))]
[-----
(⊔ (listcons τ) nil (list τ))]
[-----
(⊔ nil (listcons τ) (list τ))]
[(⊔ τ_1 τ_2 τ_3)
-----
(⊔ (listcons τ_1) (listcons τ_2) (listcons τ_3))])
(define-judgment-form list-machine-typing
#:contract (:lookup any v any)
#:mode (:lookup I I O)
[-----
(:lookup (v : any_τ any_Γ) v any_τ)]
[(where #t (different v_1 v_2))
(:lookup any_Γ v_2 any_τ2)
-----
(:lookup (v_1 : any_τ1 any_Γ) v_2 any_τ2)])
(define-judgment-form list-machine-typing
#:contract (:lookup-Γ Γ v τ)
#:mode (:lookup-Γ I I O)
[-----
(:lookup-Γ (v : τ Γ) v τ)]
[(where #t (different v_1 v_2))
(:lookup-Γ Γ v_2 τ_2)
-----
(:lookup-Γ (v_1 : τ_1 Γ) v_2 τ_2)])
(define-judgment-form list-machine-typing
#:contract (:lookup-Π Π l Γ)
#:mode (:lookup-Π I I O)
[-----
(:lookup-Π (l : Γ Π) l Γ)]
[(where #t (different l_1 l_2))
(:lookup-Π Π l_2 Γ_2)
-----
(:lookup-Π (l_1 : Γ_1 Π) l_2 Γ_2)])
(define-judgment-form list-machine-typing
#:contract (:set Γ v τ Γ)
#:mode (:set I I I O)
[-----
(:set (v : any_τ any_Γ) v any_τ2 (v : any_τ2 any_Γ))]
[(where #t (different v v_2))
(:set any_Γ v_2 any_τ2 any_Γ2)
-----
(:set (v : any_τ any_Γ) v_2 any_τ2 (v : any_τ any_Γ2))]
[-----
(:set empty v any_τ (v : any_τ empty))])
(define-metafunction list-machine-typing
[(dom (l_1 : any_1 any_2))
(l_1 (dom any_2))]
[(dom empty) empty])
(define-metafunction list-machine-typing
[(dom-P (l_1 : ι_1 p))
(l_1 (dom p))]
[(dom-P end) empty])
(define-metafunction list-machine-typing
[(dom-Π (l_1 : Γ_1 Π))
(l_1 (dom Π))]
[(dom-Π empty) empty])
(define-metafunction list-machine-typing
l-⊂ : (l ...) (l ...) -> any
[(l-⊂ (l_1 ...) (l_2 ...))
,(let ([ht (make-hash)])
(for ([l (in-list (term (l_2 ...)))])
(hash-set! ht l #t))
(for/and ([l (in-list (term (l_1 ...)))])
(hash-ref ht l #f)))])
(define-relation list-machine-typing
[(l-⊂ (l_1 empty) l_2)
(where #t (lmem l_1 l_2))]
[(l-⊂ (l_1 l_2) l_3)
(l-⊂ l_2 l_3)
(where #t (lmem l_1 l_3))]
[(l-⊂ empty any)])
(define-relation list-machine-typing
[(d= (l_1 : Γ_1 Π) (l_1 : ι p))
(d= Π p)]
[(d= empty end)])
(define-relation list-machine-typing
[(d-⊂ (l_1 : Γ_1 Π) p)
(has-label p l_1)
(d-⊂ Π p)]
[(d-⊂ (l_1 : Γ_1 Π) (l_2 : ι p))
(d-⊂ (l_1 : Γ_1 Π) p)]
[(d-⊂ empty p)])
(define-relation list-machine-typing
[(has-label (l_1 : ι p) l_1)]
[(has-label (l_1 : ι p) l_2)
(has-label p l_2)])
(define-relation list-machine-typing
[(labels-distinct (l_1 : ι p))
(label-not-in l_1 p)
(labels-distinct p)]
[(labels-distinct end)])
(define-relation list-machine-typing
[(label-not-in l_1 (l_2 : ι p))
(different l_1 l_2)
(label-not-in l_1 p)]
[(label-not-in l_1 end)])
(define-metafunction list-machine-typing
[(lmem l_1 (l_1 l_2))
#t]
[(lmem l_1 (l_2 l_3))
(lmem l_1 l_3)]
[(lmem l_1 empty)
#f])
(define-relation list-machine-typing
[(labels-distinct-Π (l_1 : Γ Π))
(label-not-in-Π l_1 Π)
(labels-distinct-Π Π)]
[(labels-distinct-Π empty)])
(define-relation list-machine-typing
[(label-not-in-Π l_1 (l_2 : Γ Π))
(different l_1 l_2)
(label-not-in-Π l_1 Π)]
[(label-not-in-Π l_1 empty)])
(define (generate-M-term)
(generate-term list-machine-typing (l0 : ι p) 7))
(define (type-check p)
need to provide a program typing , so generate 10 randomly and
(let loop ([i 0])
(cond
[(i . > . 10) #f]
[else
(define guess-Π (generate-term list-machine-typing (l0 : (v0 : nil empty) Π) 7))
(or (judgment-holds (check-program ,p ,guess-Π))
(loop (add1 i)))])))
(define (typed-generator)
(let ([g (redex-generator list-machine-typing
(check-program p Π)
7)])
(λ ()
(match (g)
[`(check-program ,p ,Π)
p]
[#f #f]))))
(define (generate-typed-term)
(match (generate-term list-machine-typing
#:satisfying
(check-program p Π)
7)
[`(check-program ,p ,Π)
p]
[#f #f])) |
25a975102032e70601af06e0e273a865097990a3cc51746b6a042028081ff6b0 | GrammaTech/sel | limit-stream.lisp | (defpackage :software-evolution-library/utility/limit-stream
(:use :gt/full)
(:local-nicknames (:gray :trivial-gray-streams))
(:export :make-limit-stream))
(in-package :software-evolution-library/utility/limit-stream)
(defclass limit-stream (gray:fundamental-character-output-stream)
((newlines :initform 0)
(chars :initform 0)
(newline-limit :initarg :newline-limit)
(char-limit :initarg :char-limit)
(callback :initarg :callback :type function))
(:default-initargs
:newline-limit 0
:char-limit 0)
(:documentation "A stream that invokes a callback after seeing a
certain number of newlines."))
(defun make-limit-stream (stream callback &key
(newline-limit 0)
(char-limit 0))
"Return a stream that wraps STREAM and invokes CALLBACK after
writing LIMIT newlines.
Note that LIMIT is a lower-bound; the actual number of newlines may be
greater."
(make-broadcast-stream
stream
(make 'limit-stream
:callback callback
:newline-limit newline-limit
:char-limit char-limit)))
(defun increment-newlines (stream n)
(with-slots (newlines newline-limit callback) stream
(unless (zerop newline-limit)
(when (>= (incf newlines n) newline-limit)
(funcall callback)))))
(defun increment-chars (stream n)
(with-slots (chars char-limit callback) stream
(unless (zerop char-limit)
(when (>= (incf chars n) char-limit)
(funcall callback)))))
(defmethod gray:stream-write-char ((stream limit-stream)
(char (eql #\Newline)))
(increment-newlines stream 1)
(increment-chars stream 1))
(defmethod gray:stream-terpri ((stream limit-stream))
(increment-newlines stream 1)
(increment-chars stream 1))
(defmethod gray:stream-write-string ((stream limit-stream)
(string string)
&optional start end)
Workaround for CCL , to ensure end is not nil .
(let ((start (or start 0))
(end (or end (length string))))
(increment-chars stream (- end start))
(increment-newlines stream (count #\Newline string :start start :end end))))
| null | https://raw.githubusercontent.com/GrammaTech/sel/52e037780e877467b0bff60bd0625f4be49557a1/utility/limit-stream.lisp | lisp | the actual number of newlines may be | (defpackage :software-evolution-library/utility/limit-stream
(:use :gt/full)
(:local-nicknames (:gray :trivial-gray-streams))
(:export :make-limit-stream))
(in-package :software-evolution-library/utility/limit-stream)
(defclass limit-stream (gray:fundamental-character-output-stream)
((newlines :initform 0)
(chars :initform 0)
(newline-limit :initarg :newline-limit)
(char-limit :initarg :char-limit)
(callback :initarg :callback :type function))
(:default-initargs
:newline-limit 0
:char-limit 0)
(:documentation "A stream that invokes a callback after seeing a
certain number of newlines."))
(defun make-limit-stream (stream callback &key
(newline-limit 0)
(char-limit 0))
"Return a stream that wraps STREAM and invokes CALLBACK after
writing LIMIT newlines.
greater."
(make-broadcast-stream
stream
(make 'limit-stream
:callback callback
:newline-limit newline-limit
:char-limit char-limit)))
(defun increment-newlines (stream n)
(with-slots (newlines newline-limit callback) stream
(unless (zerop newline-limit)
(when (>= (incf newlines n) newline-limit)
(funcall callback)))))
(defun increment-chars (stream n)
(with-slots (chars char-limit callback) stream
(unless (zerop char-limit)
(when (>= (incf chars n) char-limit)
(funcall callback)))))
(defmethod gray:stream-write-char ((stream limit-stream)
(char (eql #\Newline)))
(increment-newlines stream 1)
(increment-chars stream 1))
(defmethod gray:stream-terpri ((stream limit-stream))
(increment-newlines stream 1)
(increment-chars stream 1))
(defmethod gray:stream-write-string ((stream limit-stream)
(string string)
&optional start end)
Workaround for CCL , to ensure end is not nil .
(let ((start (or start 0))
(end (or end (length string))))
(increment-chars stream (- end start))
(increment-newlines stream (count #\Newline string :start start :end end))))
|
e9c5940728729f9c2a25ecaf87d3321084ca0f7a19b556f70f727aaf9cc20aae | huangz1990/real-world-haskell-cn | Prettify2.hs | file : ch11 / Prettify2.hs
module Prettify2 where
import Data.Monoid hiding ((<>))
data Doc = Empty
| Char Char
| Text String
| Line
| Concat Doc Doc
| Union Doc Doc
deriving (Show,Eq)
instance Monoid Doc where
mempty = empty
mappend = (<>)
(<>) :: Doc -> Doc -> Doc
Empty <> y = y
x <> Empty = x
x <> y = x `Concat` y
empty :: Doc
empty = Empty
char :: Char -> Doc
char c = Char c
text :: String -> Doc
text "" = Empty
text s = Text s
double :: Double -> Doc
double d = text (show d)
line :: Doc
line = Line
punctuate :: Doc -> [Doc] -> [Doc]
punctuate p [] = []
punctuate p [d] = [d]
punctuate p (d:ds) = (d <> p) : punctuate p ds
hcat :: [Doc] -> Doc
hcat = fold (<>)
fold :: (Doc -> Doc -> Doc) -> [Doc] -> Doc
fold f = foldr f empty | null | https://raw.githubusercontent.com/huangz1990/real-world-haskell-cn/f67b07dd846b1950d17ff941d650089fcbbe9586/code/ch11/Prettify2.hs | haskell | file : ch11 / Prettify2.hs
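-- Illustrative sketch (editorial addition, not part of the original file): combining
-- the primitives above; punctuate interleaves a separator and hcat folds with (<>).
--
-- > exampleDoc :: Doc
-- > exampleDoc = hcat (punctuate (char ',') [text "a", text "b", text "c"])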
module Prettify2 where
import Data.Monoid hiding ((<>))
data Doc = Empty
| Char Char
| Text String
| Line
| Concat Doc Doc
| Union Doc Doc
deriving (Show,Eq)
instance Monoid Doc where
mempty = empty
mappend = (<>)
(<>) :: Doc -> Doc -> Doc
Empty <> y = y
x <> Empty = x
x <> y = x `Concat` y
empty :: Doc
empty = Empty
char :: Char -> Doc
char c = Char c
text :: String -> Doc
text "" = Empty
text s = Text s
double :: Double -> Doc
double d = text (show d)
line :: Doc
line = Line
punctuate :: Doc -> [Doc] -> [Doc]
punctuate p [] = []
punctuate p [d] = [d]
punctuate p (d:ds) = (d <> p) : punctuate p ds
hcat :: [Doc] -> Doc
hcat = fold (<>)
fold :: (Doc -> Doc -> Doc) -> [Doc] -> Doc
fold f = foldr f empty |
|
837101d32ee94552f90dbe8bfa13b68a1e4d6a6577c9aa6cb301fb7074d8db4a | hargettp/raft | Log.hs | # LANGUAGE DeriveGeneric #
# LANGUAGE ExistentialQuantification #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE StandaloneDeriving #
-----------------------------------------------------------------------------
-- |
-- Module : Control.Consensus.Raft.State
-- Copyright : (c) 2014
-- License : MIT (see LICENSE file)
--
-- Maintainer :
-- Stability : experimental
-- Portability : non-portable (requires STM)
--
-- This module defines the base extensions to the fundamental 'Data.Log.Log' and 'Data.Log.State'
-- types in order to support the Raft algorithm. For example, in ordinary 'Data.Log.Log's,
-- there are no constraints on the entries that change the 'Data.Log.State' of the underlying
-- state machine. For Raft, however, such entries must be capable of declaring the 'Term'
-- in which the entry was created. Thus, a 'RaftLog' uses a 'RaftLogEntry' as the type for
-- its entries.
--
-----------------------------------------------------------------------------
module Control.Consensus.Raft.Log (
-- * Raft state
Raft(..),
mkRaft,
raftCurrentTerm,
raftName,
RaftContext(..),
RaftLog(..),
RaftLogEntry(..),
RaftState(..),
mkRaftState,
setRaftTerm,
setRaftLeader,
isRaftLeader,
setRaftLastCandidate,
setRaftConfiguration,
raftConfiguration,
raftMembers,
raftSafeAppendedTerm,
setRaftMembers,
setRaftLog,
setRaftState,
raftData,
setRaftData,
ListLog(..),
mkListLog,
module Control.Consensus.Raft.Actions,
module Control.Consensus.Raft.Types,
module Data.Log
) where
-- local imports
import Data.Log
import Control.Consensus.Raft.Actions
import Control.Consensus.Raft.Members
import Control.Consensus.Raft.Types
-- external imports
import Control.Concurrent.STM
-- import qualified Data.Map as M
import Data.Serialize
import Network.Endpoints
import Prelude hiding (log)
import System.Log.Logger
import Text.Printf
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
_log :: String
_log = "raft.consensus"
|
Encapsulates the state necessary for the Raft algorithm, depending
on a 'RaftServer' for customizing the use of the algorithm to a
specific application.
-}
data RaftContext l e v = (RaftLog l e v) => RaftContext {
raftEndpoint :: Endpoint,
raftLog :: l,
raftState :: RaftState v
}
|
Encapsulates the complete state necessary for participating in the Raft algorithm,
in a mutable form by storing it in a 'TVar'.
-}
data Raft l e v = (RaftLog l e v,Serialize v) => Raft {raftContext :: TVar (RaftContext l e v)}
{-|
Create a new 'Raft' instance.
-}
mkRaft :: (RaftLog l e v) => Endpoint -> l -> RaftState v -> STM (Raft l e v)
mkRaft endpoint initialLog initialState = do
ctx <- newTVar $ RaftContext {
raftEndpoint = endpoint,
raftLog = initialLog,
raftState = initialState
}
return $ Raft ctx
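-- Illustrative sketch (editorial addition, not part of the original module): 'mkRaft'
-- runs in STM, so callers normally wrap it in 'atomically':
--
-- > exampleRaft :: (RaftLog l e v, Serialize v) => Endpoint -> l -> RaftState v -> IO (Raft l e v)
-- > exampleRaft endpoint lg st = atomically (mkRaft endpoint lg st)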
{-|
A minimal 'Log' sufficient for a member to participate in the Raft algorithm.
-}
class (Serialize e,Serialize v,Log l IO (RaftLogEntry e) (RaftState v)) => RaftLog l e v where
lastAppendedTime :: l -> RaftTime
lastCommittedTime :: l -> RaftTime
|
The ' State ' that ' RaftLog 's expect for participating in the Raft algorithm .
The 'State' that 'RaftLog's expect for participating in the Raft algorithm.
-}
data RaftState v = (Serialize v) => RaftState {
raftStateCurrentTerm :: Term,
raftStateLastCandidate :: Maybe Name,
raftStateName :: Name,
raftStateConfigurationIndex :: Maybe Index,
raftStateConfiguration :: RaftConfiguration,
raftStateMembers :: Members,
raftStateData :: v
}
deriving instance (Eq v) => Eq (RaftState v)
deriving instance (Show v) => Show (RaftState v)
|
Create a fresh ' RaftState ' instance .
Create a fresh 'RaftState' instance.
-}
mkRaftState :: (Serialize v) => v -> RaftConfiguration -> Name -> RaftState v
mkRaftState initialData cfg name = RaftState {
raftStateCurrentTerm = 0,
raftStateLastCandidate = Nothing,
raftStateName = name,
raftStateConfigurationIndex = Nothing,
raftStateConfiguration = cfg,
raftStateMembers = mkMembers cfg initialRaftTime,
raftStateData = initialData
}
{-|
The type of entry that a 'RaftLog' manages.
-}
data RaftLogEntry e = (Serialize e) => RaftLogEntry {
entryTerm :: Term,
entryAction :: RaftAction e
}
deriving instance (Eq e) => Eq (RaftLogEntry e)
deriving instance (Show e) => Show (RaftLogEntry e)
instance (Serialize e) => Serialize (RaftLogEntry e) where
get = do
term <- get
action <- get
return $ RaftLogEntry term action
put (RaftLogEntry term action) = do
put term
put action
instance (Serialize e,State v IO e) => State (RaftState v) IO (RaftLogEntry e) where
canApplyEntry oldRaftState entry = do
let members = raftStateMembers oldRaftState
cfg = raftStateConfiguration oldRaftState
term = membersSafeAppendedTerm members $ clusterConfiguration cfg
currentTerm = raftStateCurrentTerm oldRaftState
leader = (Just $ raftStateName oldRaftState) == (clusterLeader $ clusterConfiguration cfg)
infoM _log $ printf "%v: Safe term %v for members %v" currentTerm term (show members)
if leader
then if term /= raftStateCurrentTerm oldRaftState
then return False
else canApply $ entryAction entry
else canApply $ entryAction entry
where
canApply (Cmd cmd) = do
let oldData = raftStateData oldRaftState
canApplyEntry oldData cmd
TODO check configuration cases
canApply _ = return True
applyEntry oldRaftState entry = applyAction $ entryAction entry
where
applyAction (Cmd cmd) = do
let oldData = raftStateData oldRaftState
newData <- applyEntry oldData cmd
return $ oldRaftState {raftStateData = newData}
applyAction action = do
let cfg = applyConfigurationAction (clusterConfiguration $ raftStateConfiguration oldRaftState) action
members = raftStateMembers oldRaftState
infoM _log $ printf "New configuration is %v" (show cfg)
return $ oldRaftState {
raftStateMembers = reconfigureMembers members cfg initialRaftTime,
raftStateConfiguration = (raftStateConfiguration oldRaftState) {
clusterConfiguration = cfg
}
}
{-|
The current 'Term' for this instance.
-}
raftCurrentTerm :: (RaftLog l e v) => RaftContext l e v -> Term
raftCurrentTerm raft = raftStateCurrentTerm $ raftState raft
{-|
The 'Name' this instance uses for communicating in the network.
-}
raftName :: (RaftLog l e v) => RaftContext l e v -> Name
raftName raft = raftStateName $ raftState raft
|
Update the current term in a new ' RaftContext '
Update the current term in a new 'RaftContext'
-}
setRaftTerm :: Term -> RaftContext l e v -> RaftContext l e v
setRaftTerm term raft = raft {
raftState = (raftState raft) {
raftStateCurrentTerm = term
}
}
|
Update the current term in a new ' RaftContext '
Update the current term in a new 'RaftContext'
-}
setRaftMembers :: Members -> RaftContext l e v -> RaftContext l e v
setRaftMembers members raft = raft {
raftState = (raftState raft) {
raftStateMembers = members
}
}
{-|
The current state of 'Members' in the cluster; only leaders track 'Member' state,
so in followers the valueof 'Members' is less useful.
-}
raftMembers :: (RaftLog l e v) => RaftContext l e v -> Members
raftMembers raft = raftStateMembers $ raftState raft
{-|
Computes 'membersSafeAppendedTerm' on this instance's 'Members'.
-}
raftSafeAppendedTerm :: (RaftLog l e v) => RaftContext l e v -> Term
raftSafeAppendedTerm raft =
let members = raftMembers raft
cfg = raftConfiguration raft
in membersSafeAppendedTerm members cfg
|
Update the last candidate in a new ' RaftContext '
Update the last candidate in a new 'RaftContext'
-}
setRaftLastCandidate :: Maybe Name -> RaftContext l e v -> RaftContext l e v
setRaftLastCandidate candidate raft = raft {
raftState = (raftState raft) {
raftStateLastCandidate = candidate
}
}
|
Update the ' RaftState ' in a new ' RaftContext ' to specify a new leader
Update the 'RaftState' in a new 'RaftContext' to specify a new leader
-}
setRaftLeader :: Maybe Name -> RaftContext l e v -> RaftContext l e v
setRaftLeader leader raft =
let cfg = clusterConfiguration $ raftStateConfiguration $ raftState raft
in case cfg of
Configuration _ _ _ -> raft {
raftState = (raftState raft) {
raftStateConfiguration = (raftStateConfiguration $ raftState raft) {
clusterConfiguration = cfg {
configurationLeader = leader
}}}
}
JointConfiguration _ jointNew -> raft {
raftState = (raftState raft) {
raftStateConfiguration = (raftStateConfiguration $ raftState raft) {
clusterConfiguration = jointNew {
configurationLeader = leader
}}}
}
{-|
Returns 'True' if this instance is operating as the leader.
-}
isRaftLeader :: (RaftLog l e v) => RaftContext l e v -> Bool
isRaftLeader raft = (Just $ raftName raft) == (clusterLeader $ raftConfiguration raft)
{-|
Update the 'RaftLog' in this instance.
-}
setRaftLog :: (RaftLog l e v) => l -> RaftContext l e v -> RaftContext l e v
setRaftLog rlog raft = raft {
raftLog = rlog
}
{-|
Change the 'Configuration' in this instance.
-}
setRaftConfiguration :: (RaftLog l e v) => Configuration -> RaftContext l e v -> RaftContext l e v
setRaftConfiguration cfg raft =
let newState = (raftState raft) {
raftStateConfiguration = (raftStateConfiguration $ raftState raft) {
clusterConfiguration = cfg
}}
in setRaftState newState raft
{-|
Return the 'Configuration' for this instance.
-}
raftConfiguration :: (RaftLog l e v) => RaftContext l e v -> Configuration
raftConfiguration raft = clusterConfiguration $ raftStateConfiguration $ raftState raft
|
Update the ' RaftState ' for this instance .
Update the 'RaftState' for this instance.
-}
setRaftState :: (RaftLog l e v) => RaftState v -> RaftContext l e v -> RaftContext l e v
setRaftState state raft = raft {
raftState = state
}
raftData :: (RaftLog l e v) => RaftContext l e v -> v
raftData raft = raftStateData $ raftState raft
{-|
Update the 'raftStateData' for this instance.
-}
setRaftData :: (RaftLog l e v) => v -> RaftContext l e v -> RaftContext l e v
setRaftData newData raft = raft {
raftState = (raftState raft) {
raftStateData = newData
}
}
--------------------------------------------------------------------------------
-- List log
--------------------------------------------------------------------------------
{-|
A simple implementation of a 'Log' and 'RaftLog' useful in many scenarios. Since
there should typically not be many uncommitted entries (i.e., appended but not
yet committed) in a log, this list should stay small relative to the number of
operations performed through it. As a 'ListLog' implements 'Serialize',
applications may choose to persist the log in its entirety to stable storage
as needed.
-}
data ListLog e v = (Serialize e,Serialize v) => ListLog {
listLogLastCommitted :: RaftTime,
listLogLastAppended :: RaftTime,
listLogEntries :: [RaftLogEntry e]
}
deriving instance (Eq e) => Eq (ListLog e v)
deriving instance (Show e) => Show (ListLog e v)
instance (Serialize e,State v IO e) => Log (ListLog e v) IO (RaftLogEntry e) (RaftState v) where
lastCommitted log = logIndex $ listLogLastCommitted log
lastAppended log = logIndex $ listLogLastAppended log
appendEntries log index newEntries = do
if null newEntries || (lastCommitted log) >= index
then return log
else do
let term = maximum $ map entryTerm newEntries
logEntries = (take index (listLogEntries log)) ++ newEntries
appendedTime = RaftTime term ( (length logEntries) - 1)
return $ log {
listLogEntries = logEntries,
listLogLastAppended = appendedTime
}
fetchEntries log index count = do
let entries = listLogEntries log
return $ take count $ drop index entries
commitEntry oldLog commitIndex entry = do
let newLog = oldLog {
listLogLastCommitted = RaftTime (entryTerm entry) (minimum [commitIndex,lastAppended oldLog])
}
return newLog
checkpoint oldLog oldState = return (oldLog,oldState)
instance (Serialize e,Serialize v,State v IO e) => RaftLog (ListLog e v) e v where
lastAppendedTime = listLogLastAppended
lastCommittedTime = listLogLastCommitted
{-|
Create a new 'ListLog'.
-}
mkListLog :: (Serialize e,Serialize v) => IO (ListLog e v)
mkListLog = let initial = RaftTime (-1) (-1)
in return $ ListLog initial initial []
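{-|
Usage sketch (illustrative only, not in the original module): build an empty
'ListLog' and append a single entry at index 0. 'someEntry' stands in for a
'RaftLogEntry' supplied by the caller.
-}
exampleAppend :: (Serialize e, Serialize v, State v IO e) => RaftLogEntry e -> IO (ListLog e v)
exampleAppend someEntry = do
    emptyLog <- mkListLog
    appendEntries emptyLog 0 [someEntry]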
| null | https://raw.githubusercontent.com/hargettp/raft/1ed007986a908e43d9867935af759bccc3b9a57f/src/Control/Consensus/Raft/Log.hs | haskell | ---------------------------------------------------------------------------
|
Module : Control.Consensus.Raft.State
Maintainer :
Stability : experimental
there are no constraints on the entries that change the 'Data.Log.State' of the underlying
state machine. For Raft, however, such entries must be capable of declaring the 'Term'
in which the entry was created. Thus, a 'RaftLog' uses a 'RaftLogEntry' as the type for
its entries.
---------------------------------------------------------------------------
* Raft state
local imports
external imports
import qualified Data.Map as M
------------------------------------------------------------------------------
------------------------------------------------------------------------------
|
Create a new 'Raft' instance.
|
A minimal 'Log' sufficient for a member to particpate in the Raft algorithm'.
|
The type of entry that a 'RaftLog' manages.
|
The current 'Term' for this instance.
|
The 'Name' this instance uses for communicating in the network.
|
The current state of 'Members' in the cluster; only leaders track 'Member' state,
so in followers the valueof 'Members' is less useful.
|
Computes 'membersSafeAppendedTerm' on this instance's 'Members'.
|
Returns 'True' if this instance is operating as the leader.
|
Update the 'RaftLog' in this instance.
|
Change the 'Configuration' in this instance.
|
Return the 'Configuration' for this instance.
|
Update the 'raftStateData' for this instance.
------------------------------------------------------------------------------
List log
------------------------------------------------------------------------------ | # LANGUAGE DeriveGeneric #
# LANGUAGE ExistentialQuantification #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE StandaloneDeriving #
Copyright : ( c ) 2014
License : MIT ( see LICENSE file )
Portability : non - portable ( requires STM )
This module defines the base extensions to the fundamental ' Data . Log . Log ' and ' Data . Log . State '
types in order to support the Raft algorithm . For example , in ordinary ' Data . Log . Log 's ,
module Control.Consensus.Raft.Log (
Raft(..),
mkRaft,
raftCurrentTerm,
raftName,
RaftContext(..),
RaftLog(..),
RaftLogEntry(..),
RaftState(..),
mkRaftState,
setRaftTerm,
setRaftLeader,
isRaftLeader,
setRaftLastCandidate,
setRaftConfiguration,
raftConfiguration,
raftMembers,
raftSafeAppendedTerm,
setRaftMembers,
setRaftLog,
setRaftState,
raftData,
setRaftData,
ListLog(..),
mkListLog,
module Control.Consensus.Raft.Actions,
module Control.Consensus.Raft.Types,
module Data.Log
) where
import Data.Log
import Control.Consensus.Raft.Actions
import Control.Consensus.Raft.Members
import Control.Consensus.Raft.Types
import Control.Concurrent.STM
import Data.Serialize
import Network.Endpoints
import Prelude hiding (log)
import System.Log.Logger
import Text.Printf
_log :: String
_log = "raft.consensus"
|
Encapsulates the state necessary for the Raft algorithm , depending
on a ' RaftServer ' for customizing the use of the algorithm to a
specific application .
Encapsulates the state necessary for the Raft algorithm, depending
on a 'RaftServer' for customizing the use of the algorithm to a
specific application.
-}
data RaftContext l e v = (RaftLog l e v) => RaftContext {
raftEndpoint :: Endpoint,
raftLog :: l,
raftState :: RaftState v
}
|
Encapsulates the complete state necessary for participating in the Raft algorithm ,
in a mutable form by storing it in a ' TVar ' .
Encapsulates the complete state necessary for participating in the Raft algorithm,
in a mutable form by storing it in a 'TVar'.
-}
data Raft l e v = (RaftLog l e v,Serialize v) => Raft {raftContext :: TVar (RaftContext l e v)}
mkRaft :: (RaftLog l e v) => Endpoint -> l -> RaftState v -> STM (Raft l e v)
mkRaft endpoint initialLog initialState = do
ctx <- newTVar $ RaftContext {
raftEndpoint = endpoint,
raftLog = initialLog,
raftState = initialState
}
return $ Raft ctx
class (Serialize e,Serialize v,Log l IO (RaftLogEntry e) (RaftState v)) => RaftLog l e v where
lastAppendedTime :: l -> RaftTime
lastCommittedTime :: l -> RaftTime
|
The ' State ' that ' RaftLog 's expect for participating in the Raft algorithm .
The 'State' that 'RaftLog's expect for participating in the Raft algorithm.
-}
data RaftState v = (Serialize v) => RaftState {
raftStateCurrentTerm :: Term,
raftStateLastCandidate :: Maybe Name,
raftStateName :: Name,
raftStateConfigurationIndex :: Maybe Index,
raftStateConfiguration :: RaftConfiguration,
raftStateMembers :: Members,
raftStateData :: v
}
deriving instance (Eq v) => Eq (RaftState v)
deriving instance (Show v) => Show (RaftState v)
|
Create a fresh ' RaftState ' instance .
Create a fresh 'RaftState' instance.
-}
mkRaftState :: (Serialize v) => v -> RaftConfiguration -> Name -> RaftState v
mkRaftState initialData cfg name = RaftState {
raftStateCurrentTerm = 0,
raftStateLastCandidate = Nothing,
raftStateName = name,
raftStateConfigurationIndex = Nothing,
raftStateConfiguration = cfg,
raftStateMembers = mkMembers cfg initialRaftTime,
raftStateData = initialData
}
data RaftLogEntry e = (Serialize e) => RaftLogEntry {
entryTerm :: Term,
entryAction :: RaftAction e
}
deriving instance (Eq e) => Eq (RaftLogEntry e)
deriving instance (Show e) => Show (RaftLogEntry e)
instance (Serialize e) => Serialize (RaftLogEntry e) where
get = do
term <- get
action <- get
return $ RaftLogEntry term action
put (RaftLogEntry term action) = do
put term
put action
instance (Serialize e,State v IO e) => State (RaftState v) IO (RaftLogEntry e) where
canApplyEntry oldRaftState entry = do
let members = raftStateMembers oldRaftState
cfg = raftStateConfiguration oldRaftState
term = membersSafeAppendedTerm members $ clusterConfiguration cfg
currentTerm = raftStateCurrentTerm oldRaftState
leader = (Just $ raftStateName oldRaftState) == (clusterLeader $ clusterConfiguration cfg)
infoM _log $ printf "%v: Safe term %v for members %v" currentTerm term (show members)
if leader
then if term /= raftStateCurrentTerm oldRaftState
then return False
else canApply $ entryAction entry
else canApply $ entryAction entry
where
canApply (Cmd cmd) = do
let oldData = raftStateData oldRaftState
canApplyEntry oldData cmd
TODO check configuration cases
canApply _ = return True
applyEntry oldRaftState entry = applyAction $ entryAction entry
where
applyAction (Cmd cmd) = do
let oldData = raftStateData oldRaftState
newData <- applyEntry oldData cmd
return $ oldRaftState {raftStateData = newData}
applyAction action = do
let cfg = applyConfigurationAction (clusterConfiguration $ raftStateConfiguration oldRaftState) action
members = raftStateMembers oldRaftState
infoM _log $ printf "New configuration is %v" (show cfg)
return $ oldRaftState {
raftStateMembers = reconfigureMembers members cfg initialRaftTime,
raftStateConfiguration = (raftStateConfiguration oldRaftState) {
clusterConfiguration = cfg
}
}
raftCurrentTerm :: (RaftLog l e v) => RaftContext l e v -> Term
raftCurrentTerm raft = raftStateCurrentTerm $ raftState raft
raftName :: (RaftLog l e v) => RaftContext l e v -> Name
raftName raft = raftStateName $ raftState raft
|
Update the current term in a new ' RaftContext '
Update the current term in a new 'RaftContext'
-}
setRaftTerm :: Term -> RaftContext l e v -> RaftContext l e v
setRaftTerm term raft = raft {
raftState = (raftState raft) {
raftStateCurrentTerm = term
}
}
|
Update the current term in a new ' RaftContext '
Update the current term in a new 'RaftContext'
-}
setRaftMembers :: Members -> RaftContext l e v -> RaftContext l e v
setRaftMembers members raft = raft {
raftState = (raftState raft) {
raftStateMembers = members
}
}
raftMembers :: (RaftLog l e v) => RaftContext l e v -> Members
raftMembers raft = raftStateMembers $ raftState raft
raftSafeAppendedTerm :: (RaftLog l e v) => RaftContext l e v -> Term
raftSafeAppendedTerm raft =
let members = raftMembers raft
cfg = raftConfiguration raft
in membersSafeAppendedTerm members cfg
|
Update the last candidate in a new ' RaftContext '
Update the last candidate in a new 'RaftContext'
-}
setRaftLastCandidate :: Maybe Name -> RaftContext l e v -> RaftContext l e v
setRaftLastCandidate candidate raft = raft {
raftState = (raftState raft) {
raftStateLastCandidate = candidate
}
}
|
Update the ' RaftState ' in a new ' RaftContext ' to specify a new leader
Update the 'RaftState' in a new 'RaftContext' to specify a new leader
-}
setRaftLeader :: Maybe Name -> RaftContext l e v -> RaftContext l e v
setRaftLeader leader raft =
let cfg = clusterConfiguration $ raftStateConfiguration $ raftState raft
in case cfg of
Configuration _ _ _ -> raft {
raftState = (raftState raft) {
raftStateConfiguration = (raftStateConfiguration $ raftState raft) {
clusterConfiguration = cfg {
configurationLeader = leader
}}}
}
JointConfiguration _ jointNew -> raft {
raftState = (raftState raft) {
raftStateConfiguration = (raftStateConfiguration $ raftState raft) {
clusterConfiguration = jointNew {
configurationLeader = leader
}}}
}
isRaftLeader :: (RaftLog l e v) => RaftContext l e v -> Bool
isRaftLeader raft = (Just $ raftName raft) == (clusterLeader $ raftConfiguration raft)
setRaftLog :: (RaftLog l e v) => l -> RaftContext l e v -> RaftContext l e v
setRaftLog rlog raft = raft {
raftLog = rlog
}
setRaftConfiguration :: (RaftLog l e v) => Configuration -> RaftContext l e v -> RaftContext l e v
setRaftConfiguration cfg raft =
let newState = (raftState raft) {
raftStateConfiguration = (raftStateConfiguration $ raftState raft) {
clusterConfiguration = cfg
}}
in setRaftState newState raft
raftConfiguration :: (RaftLog l e v) => RaftContext l e v -> Configuration
raftConfiguration raft = clusterConfiguration $ raftStateConfiguration $ raftState raft
|
Update the ' RaftState ' for this instance .
Update the 'RaftState' for this instance.
-}
setRaftState :: (RaftLog l e v) => RaftState v -> RaftContext l e v -> RaftContext l e v
setRaftState state raft = raft {
raftState = state
}
raftData :: (RaftLog l e v) => RaftContext l e v -> v
raftData raft = raftStateData $ raftState raft
setRaftData :: (RaftLog l e v) => v -> RaftContext l e v -> RaftContext l e v
setRaftData newData raft = raft {
raftState = (raftState raft) {
raftStateData = newData
}
}
|
A simple implementation of a ' Log ' and ' RaftLog ' useful in many scenarios . Since
typically there should not be that many uncommitted entries ( e.g. , appended
but not committed ) in a log , then the size of this list should be small , relative
to the number of operations performed through it . As a ' ' implements ' Serialize ' ,
applications may choose to persist the log in its entirety to stable storage
as needed .
A simple implementation of a 'Log' and 'RaftLog' useful in many scenarios. Since
typically there should not be that many uncommitted entries (e.g., appended
but not committed) in a log, then the size of this list should be small, relative
to the number of operations performed through it. As a 'ListLog' implements 'Serialize',
applications may choose to persist the log in its entirety to stable storage
as needed.
-}
data ListLog e v = (Serialize e,Serialize v) => ListLog {
listLogLastCommitted :: RaftTime,
listLogLastAppended :: RaftTime,
listLogEntries :: [RaftLogEntry e]
}
deriving instance (Eq e) => Eq (ListLog e v)
deriving instance (Show e) => Show (ListLog e v)
instance (Serialize e,State v IO e) => Log (ListLog e v) IO (RaftLogEntry e) (RaftState v) where
lastCommitted log = logIndex $ listLogLastCommitted log
lastAppended log = logIndex $ listLogLastAppended log
appendEntries log index newEntries = do
if null newEntries || (lastCommitted log) >= index
then return log
else do
let term = maximum $ map entryTerm newEntries
logEntries = (take index (listLogEntries log)) ++ newEntries
appendedTime = RaftTime term ( (length logEntries) - 1)
return $ log {
listLogEntries = logEntries,
listLogLastAppended = appendedTime
}
fetchEntries log index count = do
let entries = listLogEntries log
return $ take count $ drop index entries
commitEntry oldLog commitIndex entry = do
let newLog = oldLog {
listLogLastCommitted = RaftTime (entryTerm entry) (minimum [commitIndex,lastAppended oldLog])
}
return newLog
checkpoint oldLog oldState = return (oldLog,oldState)
instance (Serialize e,Serialize v,State v IO e) => RaftLog (ListLog e v) e v where
lastAppendedTime = listLogLastAppended
lastCommittedTime = listLogLastCommitted
|
Create a new ' ' .
Create a new 'ListLog'.
-}
mkListLog :: (Serialize e,Serialize v) => IO (ListLog e v)
mkListLog = let initial = RaftTime (-1) (-1)
in return $ ListLog initial initial []
|
85db3f82e3240802a850f2728c9be51c596a4a43be49e7654e460485755f1d2d | generateme/cljplot | math.clj | (ns cljplot.impl.math
(:require [cljplot.common :refer :all]
[fastmath.core :as m]
[fastmath.vector :as v]
[clojure2d.color :as c]
[fastmath.complex :as cx]
[fastmath.random :as r]
[fastmath.fields :as f]
[clojure2d.core :refer :all]
[fastmath.grid :as grid]
[fastmath.stats :as stats]
[cljplot.scale :as s])
(:import [marchingsquares Algorithm]))
(set! *warn-on-reflection* true)
(set! *unchecked-math* :warn-on-boxed)
(m/use-primitive-operators)
(def ^:private ^:const pi-r [m/-PI m/PI])
(defmethod data-extent :complex [_ _ {:keys [x y]}]
{:x [:numerical (or x pi-r)]
:y [:numerical (or y pi-r)]})
(defmacro ^:private permutation->color
[p a b c]
`(case ~p
0 (c/color ~a ~b ~c)
1 (c/color ~a ~c ~b)
2 (c/color ~b ~a ~c)
3 (c/color ~b ~c ~a)
4 (c/color ~c ~a ~b)
5 (c/color ~c ~b ~a)
(c/color ~a ~b ~c)))
(defn- wrap
^double [method ^double v]
(case method
:log2 (m/frac (m/log2 (inc v)))
:log10 (m/frac (m/log10 (inc v)))
:sin (m/norm (m/sin v) -1.0 1.0 0.0 1.0)
:exp (- 1.0 (m/exp (- v)))
:sigmoid (m/sigmoid v)
(m/frac v)))
(defmethod render-graph :complex [_ f {:keys [colorspace permutation wrap-method] :as conf} {:keys [w h x y] :as chart-data}]
(let [permutation (int permutation)
iscale-x (:inverse (:scale x))
iscale-y (:inverse (:scale y))
dw (double w)
dh (double h)
from-cs (or (second (c/colorspaces* colorspace)) c/from-HSB*)]
(do-graph chart-data false
(dotimes [x w]
(dotimes [y h]
(let [xx (/ x dw)
yy (/ y dh)
sx (iscale-x xx)
sy (iscale-y yy)
fv (f (v/vec2 sx sy))
angle (m/norm (cx/arg fv) m/-PI m/PI 0.0 255.0)
^double mag (cx/abs fv)
mag (* 255.0 (wrap wrap-method mag))
col (from-cs (permutation->color permutation angle (- 255.0 (/ (- 255.0 mag) 4.0)) mag))]
(set-color c col)
(rect c x y 1 1)))))))
;; scalar
(defmethod data-extent :scalar [_ d c] (data-extent :complex d c))
(defmethod render-graph :scalar [_ f {:keys [gradient wrap-method] :as conf} {:keys [w h x y] :as chart-data}]
(let [iscale-x (:inverse (:scale x))
iscale-y (:inverse (:scale y))
dw (double w)
dh (double h)]
(do-graph chart-data false
(dotimes [x w]
(dotimes [y h]
(let [xx (/ x dw)
yy (/ y dh)
sx (iscale-x xx)
sy (iscale-y yy)
v (wrap wrap-method (f (v/vec2 sx sy)))]
(set-color c (gradient v))
(rect c x y 1 1)))))))
;;
(defmethod data-extent :function-2d [_ d c] (data-extent :complex d c))
(defmethod render-graph :function-2d [_ f {:keys [gradient wrap-method] :as conf} {:keys [^int w ^int h x y] :as chart-data}]
(let [iscale-x (:inverse (:scale x))
iscale-y (:inverse (:scale y))
dw (double w)
dh (double h)
buffer (double-array (* w h))]
(dotimes [y h]
(let [off (* y w)]
(dotimes [x w]
(let [xx (/ x dw)
yy (/ y dh)
^double v (f (iscale-x xx) (iscale-y yy))]
(aset buffer (+ off x) v)))))
(let [[mnz mxz] (stats/extent buffer)]
(do-graph chart-data false
(dotimes [y h]
(let [off (* y w)]
(dotimes [x w]
(let [v (aget buffer (+ off x))]
(set-color c (gradient (m/norm v mnz mxz)))
(rect c x y 1 1)))))))))
;; contour
(defmethod data-extent :contour-2d [_ d c] (data-extent :complex d c))
(defmethod render-graph :contour-2d [_ f {:keys [palette ^int contours fill?]} {:keys [^int w ^int h x y] :as chart-data}]
(let [palette (c/resample palette (inc contours))
iscale-x (:inverse (:scale x))
iscale-y (:inverse (:scale y))
dw (double w)
dh (double h)
values (for [^long y (range h)
^long x (range w)
:let [xx (/ x dw)
yy (/ y dh)]]
(f (iscale-x xx) (iscale-y yy)))]
(let [^Algorithm algo (Algorithm. (m/seq->double-double-array (partition (int w) values)))
steps (s/splice-range (inc contours) (.-min algo) (.-max algo))]
(do-graph chart-data true
(doseq [[id p] (map-indexed vector (.buildContours algo (double-array steps)))
:let [col (nth palette id)]]
(if fill?
(do
(set-color c col)
(.fill ^java.awt.Graphics2D (.graphics ^clojure2d.core.Canvas c) p)
(set-color c (c/darken col))
(.draw ^java.awt.Graphics2D (.graphics ^clojure2d.core.Canvas c) p))
(do
(set-color c :black 200)
(.draw ^java.awt.Graphics2D (.graphics ^clojure2d.core.Canvas c) p))))))))
;; field
(defmethod prepare-data :field [_ f {:keys [x y points generator jitter wrap?]}]
(let [[x1 x2] (or x pi-r)
[y1 y2] (or y pi-r)
f (if wrap? (comp (f/field :sinusoidal ) f) f)]
(mapv (fn [[xx yy]] (f (v/vec2 (m/norm xx 0.0 1.0 x1 x2)
(m/norm yy 0.0 1.0 y1 y2)))) (take points (r/jittered-sequence-generator generator 2 jitter)))))
(defmethod render-graph :field [_ data {:keys [color] :as conf} {:keys [^int w ^int h x y] :as chart-data}]
(let [scale-x (:scale x)
scale-y (:scale y)]
(do-graph chart-data false
(set-color c color)
(doseq [[xx yy] data]
(rect c (* w ^double (scale-x xx)) (* h ^double (scale-y yy)) 1 1)))))
;; vectors
(defmethod data-extent :vector [_ f c] (data-extent :complex f c))
(defmethod render-graph :vector [_ f {:keys [^double size grid color ^double scale]} {:keys [^int w ^int h x y] :as chart-data}]
(let [scale-x (:scale x)
scale-y (:scale y)
iscale-x (:inverse scale-x)
iscale-y (:inverse scale-y)
grid (grid/grid grid size)
hsize (/ size 2.0)
coords (distinct (for [x (range 0 w hsize)
y (range 0 h hsize)
:let [[^double mx ^double my] (grid/coords->mid grid [x y])]]
[(iscale-x (/ mx w)) (iscale-y (/ my h))]))]
(do-graph (assoc chart-data :oversize 0) true
(set-color c color)
(doseq [[x y] coords
:let [xx (* w ^double (scale-x x))
yy (* h ^double (scale-y y))
v (f (v/vec2 x y))
len (* scale size (wrap :exp (v/mag v)))]]
(-> c
(push-matrix)
(translate xx yy)
(ellipse 0 0 3 3 false)
(rotate (v/heading v))
(line 0 0 len 0))
(when (> len 2.0)
(line c len 0 (- len 2.0) -2.0)
(line c len 0 (- len 2.0) 2.0))
(pop-matrix c)))))
;;
(defmethod prepare-data :trace [_ f {:keys [x y points generator jitter]}]
(let [[x1 x2] (or x pi-r)
[y1 y2] (or y pi-r)]
[f (mapv (fn [[xx yy]] (v/vec2 (m/norm xx 0.0 1.0 x1 x2)
(m/norm yy 0.0 1.0 y1 y2))) (take points (r/jittered-sequence-generator generator 2 jitter)))]))
(defmethod data-extent :trace [_ data c] (data-extent :complex data c))
(defmethod render-graph :trace [_ [f coords] {:keys [^double step color ^double length]} {:keys [^int w ^int h x y] :as chart-data}]
(let [scale-x (:scale x)
scale-y (:scale y)]
(do-graph (assoc chart-data :oversize 0) true
(set-color c color)
(doseq [v coords
:let [p (take length
(iterate (fn [v]
(let [nv (f v)]
(v/add v (v/mult nv (* (wrap :exp (v/mag nv)) step))))) v))]]
(doseq [[x y] p]
(point c (* w ^double (scale-x x)) (* h ^double (scale-y y))))))))
| null | https://raw.githubusercontent.com/generateme/cljplot/1eb865439653c95940be18421298c574b7ce8db6/src/cljplot/impl/math.clj | clojure | scalar
contour
field
vectors
| (ns cljplot.impl.math
(:require [cljplot.common :refer :all]
[fastmath.core :as m]
[fastmath.vector :as v]
[clojure2d.color :as c]
[fastmath.complex :as cx]
[fastmath.random :as r]
[fastmath.fields :as f]
[clojure2d.core :refer :all]
[fastmath.grid :as grid]
[fastmath.stats :as stats]
[cljplot.scale :as s])
(:import [marchingsquares Algorithm]))
(set! *warn-on-reflection* true)
(set! *unchecked-math* :warn-on-boxed)
(m/use-primitive-operators)
(def ^:private ^:const pi-r [m/-PI m/PI])
(defmethod data-extent :complex [_ _ {:keys [x y]}]
{:x [:numerical (or x pi-r)]
:y [:numerical (or y pi-r)]})
(defmacro ^:private permutation->color
[p a b c]
`(case ~p
0 (c/color ~a ~b ~c)
1 (c/color ~a ~c ~b)
2 (c/color ~b ~a ~c)
3 (c/color ~b ~c ~a)
4 (c/color ~c ~a ~b)
5 (c/color ~c ~b ~a)
(c/color ~a ~b ~c)))
(defn- wrap
^double [method ^double v]
(case method
:log2 (m/frac (m/log2 (inc v)))
:log10 (m/frac (m/log10 (inc v)))
:sin (m/norm (m/sin v) -1.0 1.0 0.0 1.0)
:exp (- 1.0 (m/exp (- v)))
:sigmoid (m/sigmoid v)
(m/frac v)))
(defmethod render-graph :complex [_ f {:keys [colorspace permutation wrap-method] :as conf} {:keys [w h x y] :as chart-data}]
(let [permutation (int permutation)
iscale-x (:inverse (:scale x))
iscale-y (:inverse (:scale y))
dw (double w)
dh (double h)
from-cs (or (second (c/colorspaces* colorspace)) c/from-HSB*)]
(do-graph chart-data false
(dotimes [x w]
(dotimes [y h]
(let [xx (/ x dw)
yy (/ y dh)
sx (iscale-x xx)
sy (iscale-y yy)
fv (f (v/vec2 sx sy))
angle (m/norm (cx/arg fv) m/-PI m/PI 0.0 255.0)
^double mag (cx/abs fv)
mag (* 255.0 (wrap wrap-method mag))
col (from-cs (permutation->color permutation angle (- 255.0 (/ (- 255.0 mag) 4.0)) mag))]
(set-color c col)
(rect c x y 1 1)))))))
(defmethod data-extent :scalar [_ d c] (data-extent :complex d c))
(defmethod render-graph :scalar [_ f {:keys [gradient wrap-method] :as conf} {:keys [w h x y] :as chart-data}]
(let [iscale-x (:inverse (:scale x))
iscale-y (:inverse (:scale y))
dw (double w)
dh (double h)]
(do-graph chart-data false
(dotimes [x w]
(dotimes [y h]
(let [xx (/ x dw)
yy (/ y dh)
sx (iscale-x xx)
sy (iscale-y yy)
v (wrap wrap-method (f (v/vec2 sx sy)))]
(set-color c (gradient v))
(rect c x y 1 1)))))))
(defmethod data-extent :function-2d [_ d c] (data-extent :complex d c))
(defmethod render-graph :function-2d [_ f {:keys [gradient wrap-method] :as conf} {:keys [^int w ^int h x y] :as chart-data}]
(let [iscale-x (:inverse (:scale x))
iscale-y (:inverse (:scale y))
dw (double w)
dh (double h)
buffer (double-array (* w h))]
(dotimes [y h]
(let [off (* y w)]
(dotimes [x w]
(let [xx (/ x dw)
yy (/ y dh)
^double v (f (iscale-x xx) (iscale-y yy))]
(aset buffer (+ off x) v)))))
(let [[mnz mxz] (stats/extent buffer)]
(do-graph chart-data false
(dotimes [y h]
(let [off (* y w)]
(dotimes [x w]
(let [v (aget buffer (+ off x))]
(set-color c (gradient (m/norm v mnz mxz)))
(rect c x y 1 1)))))))))
(defmethod data-extent :contour-2d [_ d c] (data-extent :complex d c))
(defmethod render-graph :contour-2d [_ f {:keys [palette ^int contours fill?]} {:keys [^int w ^int h x y] :as chart-data}]
(let [palette (c/resample palette (inc contours))
iscale-x (:inverse (:scale x))
iscale-y (:inverse (:scale y))
dw (double w)
dh (double h)
values (for [^long y (range h)
^long x (range w)
:let [xx (/ x dw)
yy (/ y dh)]]
(f (iscale-x xx) (iscale-y yy)))]
(let [^Algorithm algo (Algorithm. (m/seq->double-double-array (partition (int w) values)))
steps (s/splice-range (inc contours) (.-min algo) (.-max algo))]
(do-graph chart-data true
(doseq [[id p] (map-indexed vector (.buildContours algo (double-array steps)))
:let [col (nth palette id)]]
(if fill?
(do
(set-color c col)
(.fill ^java.awt.Graphics2D (.graphics ^clojure2d.core.Canvas c) p)
(set-color c (c/darken col))
(.draw ^java.awt.Graphics2D (.graphics ^clojure2d.core.Canvas c) p))
(do
(set-color c :black 200)
(.draw ^java.awt.Graphics2D (.graphics ^clojure2d.core.Canvas c) p))))))))
(defmethod prepare-data :field [_ f {:keys [x y points generator jitter wrap?]}]
(let [[x1 x2] (or x pi-r)
[y1 y2] (or y pi-r)
f (if wrap? (comp (f/field :sinusoidal ) f) f)]
(mapv (fn [[xx yy]] (f (v/vec2 (m/norm xx 0.0 1.0 x1 x2)
(m/norm yy 0.0 1.0 y1 y2)))) (take points (r/jittered-sequence-generator generator 2 jitter)))))
(defmethod render-graph :field [_ data {:keys [color] :as conf} {:keys [^int w ^int h x y] :as chart-data}]
(let [scale-x (:scale x)
scale-y (:scale y)]
(do-graph chart-data false
(set-color c color)
(doseq [[xx yy] data]
(rect c (* w ^double (scale-x xx)) (* h ^double (scale-y yy)) 1 1)))))
(defmethod data-extent :vector [_ f c] (data-extent :complex f c))
(defmethod render-graph :vector [_ f {:keys [^double size grid color ^double scale]} {:keys [^int w ^int h x y] :as chart-data}]
(let [scale-x (:scale x)
scale-y (:scale y)
iscale-x (:inverse scale-x)
iscale-y (:inverse scale-y)
grid (grid/grid grid size)
hsize (/ size 2.0)
coords (distinct (for [x (range 0 w hsize)
y (range 0 h hsize)
:let [[^double mx ^double my] (grid/coords->mid grid [x y])]]
[(iscale-x (/ mx w)) (iscale-y (/ my h))]))]
(do-graph (assoc chart-data :oversize 0) true
(set-color c color)
(doseq [[x y] coords
:let [xx (* w ^double (scale-x x))
yy (* h ^double (scale-y y))
v (f (v/vec2 x y))
len (* scale size (wrap :exp (v/mag v)))]]
(-> c
(push-matrix)
(translate xx yy)
(ellipse 0 0 3 3 false)
(rotate (v/heading v))
(line 0 0 len 0))
(when (> len 2.0)
(line c len 0 (- len 2.0) -2.0)
(line c len 0 (- len 2.0) 2.0))
(pop-matrix c)))))
(defmethod prepare-data :trace [_ f {:keys [x y points generator jitter]}]
(let [[x1 x2] (or x pi-r)
[y1 y2] (or y pi-r)]
[f (mapv (fn [[xx yy]] (v/vec2 (m/norm xx 0.0 1.0 x1 x2)
(m/norm yy 0.0 1.0 y1 y2))) (take points (r/jittered-sequence-generator generator 2 jitter)))]))
(defmethod data-extent :trace [_ data c] (data-extent :complex data c))
(defmethod render-graph :trace [_ [f coords] {:keys [^double step color ^double length]} {:keys [^int w ^int h x y] :as chart-data}]
(let [scale-x (:scale x)
scale-y (:scale y)]
(do-graph (assoc chart-data :oversize 0) true
(set-color c color)
(doseq [v coords
:let [p (take length
(iterate (fn [v]
(let [nv (f v)]
(v/add v (v/mult nv (* (wrap :exp (v/mag nv)) step))))) v))]]
(doseq [[x y] p]
(point c (* w ^double (scale-x x)) (* h ^double (scale-y y))))))))
|
28f2133b7608dea1d1387e7427546105b2df2d9f522ed87cf4170756edc7b1f6 | facebookarchive/pfff | database_juju_php.mli |
type database
val juju_db_of_files:
?show_progress:bool ->
Common.filename list -> database
val code_database_of_juju_db:
database -> Env_interpreter_php.code_database
| null | https://raw.githubusercontent.com/facebookarchive/pfff/ec21095ab7d445559576513a63314e794378c367/lang_php/analyze/foundation/database_juju_php.mli | ocaml |
type database
val juju_db_of_files:
?show_progress:bool ->
Common.filename list -> database
val code_database_of_juju_db:
database -> Env_interpreter_php.code_database
|
|
7c048154d37504f49cdd34042c132afef4ace569e192b2614ffceaa3d6101e9d | softmechanics/warp | Timeout.hs | module Timeout
( Manager
, Handle
, initialize
, register
, registerKillThread
, tickle
, pause
, resume
, cancel
) where
import qualified Data.IORef as I
import Control.Concurrent (forkIO, threadDelay, myThreadId, killThread)
import Control.Monad (forever)
import qualified Control.Exception as E
FIXME implement stopManager
newtype Manager = Manager (I.IORef [Handle])
data Handle = Handle (IO ()) (I.IORef State)
data State = Active | Inactive | Paused | Canceled
initialize :: Int -> IO Manager
initialize timeout = do
ref <- I.newIORef []
_ <- forkIO $ forever $ do
threadDelay timeout
ms <- I.atomicModifyIORef ref (\x -> ([], x))
ms' <- go ms id
I.atomicModifyIORef ref (\x -> (ms' x, ()))
return $ Manager ref
where
go [] front = return front
go (m@(Handle onTimeout iactive):rest) front = do
state <- I.atomicModifyIORef iactive (\x -> (go' x, x))
case state of
Inactive -> do
onTimeout `E.catch` ignoreAll
go rest front
Canceled -> go rest front
_ -> go rest (front . (:) m)
go' Active = Inactive
go' x = x
ignoreAll :: E.SomeException -> IO ()
ignoreAll _ = return ()
register :: Manager -> IO () -> IO Handle
register (Manager ref) onTimeout = do
iactive <- I.newIORef Active
let h = Handle onTimeout iactive
I.atomicModifyIORef ref (\x -> (h : x, ()))
return h
registerKillThread :: Manager -> IO Handle
registerKillThread m = do
tid <- myThreadId
register m $ killThread tid
tickle, pause, resume, cancel :: Handle -> IO ()
tickle (Handle _ iactive) = I.writeIORef iactive Active
pause (Handle _ iactive) = I.writeIORef iactive Paused
resume = tickle
cancel (Handle _ iactive) = I.writeIORef iactive Canceled
| null | https://raw.githubusercontent.com/softmechanics/warp/6cfc88aefc87a0ce33a44b1f95ad5ebf379583d4/Timeout.hs | haskell | module Timeout
( Manager
, Handle
, initialize
, register
, registerKillThread
, tickle
, pause
, resume
, cancel
) where
import qualified Data.IORef as I
import Control.Concurrent (forkIO, threadDelay, myThreadId, killThread)
import Control.Monad (forever)
import qualified Control.Exception as E
FIXME implement stopManager
newtype Manager = Manager (I.IORef [Handle])
data Handle = Handle (IO ()) (I.IORef State)
data State = Active | Inactive | Paused | Canceled
initialize :: Int -> IO Manager
initialize timeout = do
ref <- I.newIORef []
_ <- forkIO $ forever $ do
threadDelay timeout
ms <- I.atomicModifyIORef ref (\x -> ([], x))
ms' <- go ms id
I.atomicModifyIORef ref (\x -> (ms' x, ()))
return $ Manager ref
where
go [] front = return front
go (m@(Handle onTimeout iactive):rest) front = do
state <- I.atomicModifyIORef iactive (\x -> (go' x, x))
case state of
Inactive -> do
onTimeout `E.catch` ignoreAll
go rest front
Canceled -> go rest front
_ -> go rest (front . (:) m)
go' Active = Inactive
go' x = x
ignoreAll :: E.SomeException -> IO ()
ignoreAll _ = return ()
register :: Manager -> IO () -> IO Handle
register (Manager ref) onTimeout = do
iactive <- I.newIORef Active
let h = Handle onTimeout iactive
I.atomicModifyIORef ref (\x -> (h : x, ()))
return h
registerKillThread :: Manager -> IO Handle
registerKillThread m = do
tid <- myThreadId
register m $ killThread tid
tickle, pause, resume, cancel :: Handle -> IO ()
tickle (Handle _ iactive) = I.writeIORef iactive Active
pause (Handle _ iactive) = I.writeIORef iactive Paused
resume = tickle
cancel (Handle _ iactive) = I.writeIORef iactive Canceled
|
|
db6e6af5258a1befdbd3aa02df888920e41eb544f9a8767b2c8fbe3aae7af432 | dgiot/dgiot | dgiot_rule_funcs.erl | %%--------------------------------------------------------------------
Copyright ( c ) 2020 - 2021 EMQ Technologies Co. , Ltd. All Rights Reserved .
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(dgiot_rule_funcs).
-include("rule_engine.hrl").
%% IoT Funcs
-export([ msgid/0
, qos/0
, flags/0
, flag/1
, topic/0
, topic/1
, clientid/0
, clientip/0
, peerhost/0
, username/0
, payload/0
, payload/1
, contains_topic/2
, contains_topic/3
, contains_topic_match/2
, contains_topic_match/3
, null/0
]).
Arithmetic Funcs
-export([ '+'/2
, '-'/2
, '*'/2
, '/'/2
, 'div'/2
, mod/2
, eq/2
]).
%% Math Funcs
-export([ abs/1
, acos/1
, acosh/1
, asin/1
, asinh/1
, atan/1
, atanh/1
, ceil/1
, cos/1
, cosh/1
, exp/1
, floor/1
, fmod/2
, log/1
, log10/1
, log2/1
, power/2
, round/1
, sin/1
, sinh/1
, sqrt/1
, tan/1
, tanh/1
]).
%% Bits Funcs
-export([ bitnot/1
, bitand/2
, bitor/2
, bitxor/2
, bitsl/2
, bitsr/2
, bitsize/1
, subbits/2
, subbits/3
, subbits/6
]).
Data Type Convertion
-export([ str/1
, str_utf8/1
, bool/1
, int/1
, float/1
, map/1
, bin2hexstr/1
, hexstr2bin/1
]).
%% Data Type Validation Funcs
-export([ is_null/1
, is_not_null/1
, is_str/1
, is_bool/1
, is_int/1
, is_float/1
, is_num/1
, is_map/1
, is_array/1
]).
%% String Funcs
-export([ lower/1
, ltrim/1
, reverse/1
, rtrim/1
, strlen/1
, substr/2
, substr/3
, trim/1
, upper/1
, split/2
, split/3
, concat/2
, tokens/2
, tokens/3
, sprintf_s/2
, pad/2
, pad/3
, pad/4
, replace/3
, replace/4
, regex_match/2
, regex_replace/3
, ascii/1
, find/2
, find/3
]).
%% Map Funcs
-export([ map_new/0
]).
-export([ map_get/2
, map_get/3
, map_put/3
]).
For backword compatibility
-export([ mget/2
, mget/3
, mput/3
]).
%% Array Funcs
-export([ nth/2
, length/1
, sublist/2
, sublist/3
, first/1
, last/1
, contains/2
]).
%% Hash Funcs
-export([ md5/1
, sha/1
, sha256/1
]).
%% Data encode and decode
-export([ base64_encode/1
, base64_decode/1
, json_decode/1
, json_encode/1
, term_decode/1
, term_encode/1
]).
%% Date functions
-export([ now_rfc3339/0
, now_rfc3339/1
, unix_ts_to_rfc3339/1
, unix_ts_to_rfc3339/2
, rfc3339_to_unix_ts/1
, rfc3339_to_unix_ts/2
, now_timestamp/0
, now_timestamp/1
]).
%% Proc Dict Func
-export([ proc_dict_get/1
, proc_dict_put/2
, proc_dict_del/1
, kv_store_get/1
, kv_store_get/2
, kv_store_put/2
, kv_store_del/1
]).
-export(['$handle_undefined_function'/2]).
-compile({no_auto_import,
[ abs/1
, ceil/1
, floor/1
, round/1
, map_get/2
]}).
-define(is_var(X), is_binary(X)).
@doc " msgid ( ) " Func
msgid() ->
fun(#{id := MsgId}) -> MsgId; (_) -> undefined end.
%% @doc "qos()" Func
qos() ->
fun(#{qos := QoS}) -> QoS; (_) -> undefined end.
%% @doc "topic()" Func
topic() ->
fun(#{topic := Topic}) -> Topic; (_) -> undefined end.
%% @doc "topic(N)" Func
topic(I) when is_integer(I) ->
fun(#{topic := Topic}) ->
lists:nth(I, dgiot_topic:tokens(Topic));
(_) -> undefined
end.
%% @doc "flags()" Func
flags() ->
fun(#{flags := Flags}) -> Flags; (_) -> #{} end.
@doc " flags(Name ) " Func
flag(Name) ->
fun(#{flags := Flags}) -> dgiot_rule_maps:nested_get({var,Name}, Flags); (_) -> undefined end.
@doc " ( ) " Func
clientid() ->
fun(#{from := ClientId}) -> ClientId; (_) -> undefined end.
%% @doc "username()" Func
username() ->
fun(#{username := Username}) -> Username; (_) -> undefined end.
%% @doc "clientip()" Func
clientip() ->
peerhost().
peerhost() ->
fun(#{peerhost := Addr}) -> Addr; (_) -> undefined end.
payload() ->
fun(#{payload := Payload}) -> Payload; (_) -> undefined end.
payload(Path) ->
fun(#{payload := Payload}) when erlang:is_map(Payload) ->
dgiot_rule_maps:nested_get(map_path(Path), Payload);
(_) -> undefined
end.
%% @doc Check if a topic_filter contains a specific topic
TopicFilters = [ { < < " t / a " > > , # { qos = > 0 } ] .
-spec(contains_topic(emqx_mqtt_types:topic_filters(), emqx_types:topic())
-> true | false).
contains_topic(TopicFilters, Topic) ->
case find_topic_filter(Topic, TopicFilters, fun eq/2) of
not_found -> false;
_ -> true
end.
contains_topic(TopicFilters, Topic, QoS) ->
case find_topic_filter(Topic, TopicFilters, fun eq/2) of
{_, #{qos := QoS}} -> true;
_ -> false
end.
-spec(contains_topic_match(emqx_mqtt_types:topic_filters(), emqx_types:topic())
-> true | false).
contains_topic_match(TopicFilters, Topic) ->
case find_topic_filter(Topic, TopicFilters, fun dgiot_topic:match/2) of
not_found -> false;
_ -> true
end.
contains_topic_match(TopicFilters, Topic, QoS) ->
case find_topic_filter(Topic, TopicFilters, fun dgiot_topic:match/2) of
{_, #{qos := QoS}} -> true;
_ -> false
end.
find_topic_filter(Filter, TopicFilters, Func) ->
try
[case Func(Topic, Filter) of
true -> throw(Result);
false -> ok
end || Result = #{topic := Topic} <- TopicFilters],
not_found
catch
throw:Result -> Result
end.
null() ->
undefined.
%%------------------------------------------------------------------------------
Arithmetic Funcs
%%------------------------------------------------------------------------------
plus 2 numbers
'+'(X, Y) when is_number(X), is_number(Y) ->
X + Y;
concat 2 strings
'+'(X, Y) when is_binary(X), is_binary(Y) ->
concat(X, Y).
'-'(X, Y) when is_number(X), is_number(Y) ->
X - Y.
'*'(X, Y) when is_number(X), is_number(Y) ->
X * Y.
'/'(X, Y) when is_number(X), is_number(Y) ->
X / Y.
'div'(X, Y) when is_integer(X), is_integer(Y) ->
X div Y.
mod(X, Y) when is_integer(X), is_integer(Y) ->
X rem Y.
eq(X, Y) ->
X == Y.
%%------------------------------------------------------------------------------
%% Math Funcs
%%------------------------------------------------------------------------------
abs(N) when is_integer(N) ->
erlang:abs(N).
acos(N) when is_number(N) ->
math:acos(N).
acosh(N) when is_number(N) ->
math:acosh(N).
asin(N) when is_number(N)->
math:asin(N).
asinh(N) when is_number(N) ->
math:asinh(N).
atan(N) when is_number(N) ->
math:atan(N).
atanh(N) when is_number(N)->
math:atanh(N).
ceil(N) when is_number(N) ->
erlang:ceil(N).
cos(N) when is_number(N)->
math:cos(N).
cosh(N) when is_number(N) ->
math:cosh(N).
exp(N) when is_number(N)->
math:exp(N).
floor(N) when is_number(N) ->
erlang:floor(N).
fmod(X, Y) when is_number(X), is_number(Y) ->
math:fmod(X, Y).
log(N) when is_number(N) ->
math:log(N).
log10(N) when is_number(N) ->
math:log10(N).
log2(N) when is_number(N)->
math:log2(N).
power(X, Y) when is_number(X), is_number(Y) ->
math:pow(X, Y).
round(N) when is_number(N) ->
erlang:round(N).
sin(N) when is_number(N) ->
math:sin(N).
sinh(N) when is_number(N) ->
math:sinh(N).
sqrt(N) when is_number(N) ->
math:sqrt(N).
tan(N) when is_number(N) ->
math:tan(N).
tanh(N) when is_number(N) ->
math:tanh(N).
%%------------------------------------------------------------------------------
%% Bits Funcs
%%------------------------------------------------------------------------------
bitnot(I) when is_integer(I) ->
bnot I.
bitand(X, Y) when is_integer(X), is_integer(Y) ->
X band Y.
bitor(X, Y) when is_integer(X), is_integer(Y) ->
X bor Y.
bitxor(X, Y) when is_integer(X), is_integer(Y) ->
X bxor Y.
bitsl(X, I) when is_integer(X), is_integer(I) ->
X bsl I.
bitsr(X, I) when is_integer(X), is_integer(I) ->
X bsr I.
bitsize(Bits) when is_bitstring(Bits) ->
bit_size(Bits).
subbits(Bits, Len) when is_integer(Len), is_bitstring(Bits) ->
subbits(Bits, 1, Len).
subbits(Bits, Start, Len) when is_integer(Start), is_integer(Len), is_bitstring(Bits) ->
get_subbits(Bits, Start, Len, <<"integer">>, <<"unsigned">>, <<"big">>).
subbits(Bits, Start, Len, Type, Signedness, Endianness) when is_integer(Start), is_integer(Len), is_bitstring(Bits) ->
get_subbits(Bits, Start, Len, Type, Signedness, Endianness).
get_subbits(Bits, Start, Len, Type, Signedness, Endianness) ->
Begin = Start - 1,
case Bits of
<<_:Begin, Rem/bits>> when Rem =/= <<>> ->
Sz = bit_size(Rem),
do_get_subbits(Rem, Sz, Len, Type, Signedness, Endianness);
_ -> undefined
end.
-define(match_bits(Bits0, Pattern, ElesePattern),
case Bits0 of
Pattern ->
SubBits;
ElesePattern ->
SubBits
end).
do_get_subbits(Bits, Sz, Len, <<"integer">>, <<"unsigned">>, <<"big">>) ->
?match_bits(Bits, <<SubBits:Len/integer-unsigned-big-unit:1, _/bits>>,
<<SubBits:Sz/integer-unsigned-big-unit:1>>);
do_get_subbits(Bits, Sz, Len, <<"float">>, <<"unsigned">>, <<"big">>) ->
?match_bits(Bits, <<SubBits:Len/float-unsigned-big-unit:1, _/bits>>,
<<SubBits:Sz/float-unsigned-big-unit:1>>);
do_get_subbits(Bits, Sz, Len, <<"bits">>, <<"unsigned">>, <<"big">>) ->
?match_bits(Bits, <<SubBits:Len/bits-unsigned-big-unit:1, _/bits>>,
<<SubBits:Sz/bits-unsigned-big-unit:1>>);
do_get_subbits(Bits, Sz, Len, <<"integer">>, <<"signed">>, <<"big">>) ->
?match_bits(Bits, <<SubBits:Len/integer-signed-big-unit:1, _/bits>>,
<<SubBits:Sz/integer-signed-big-unit:1>>);
do_get_subbits(Bits, Sz, Len, <<"float">>, <<"signed">>, <<"big">>) ->
?match_bits(Bits, <<SubBits:Len/float-signed-big-unit:1, _/bits>>,
<<SubBits:Sz/float-signed-big-unit:1>>);
do_get_subbits(Bits, Sz, Len, <<"bits">>, <<"signed">>, <<"big">>) ->
?match_bits(Bits, <<SubBits:Len/bits-signed-big-unit:1, _/bits>>,
<<SubBits:Sz/bits-signed-big-unit:1>>);
do_get_subbits(Bits, Sz, Len, <<"integer">>, <<"unsigned">>, <<"little">>) ->
?match_bits(Bits, <<SubBits:Len/integer-unsigned-little-unit:1, _/bits>>,
<<SubBits:Sz/integer-unsigned-little-unit:1>>);
do_get_subbits(Bits, Sz, Len, <<"float">>, <<"unsigned">>, <<"little">>) ->
?match_bits(Bits, <<SubBits:Len/float-unsigned-little-unit:1, _/bits>>,
<<SubBits:Sz/float-unsigned-little-unit:1>>);
do_get_subbits(Bits, Sz, Len, <<"bits">>, <<"unsigned">>, <<"little">>) ->
?match_bits(Bits, <<SubBits:Len/bits-unsigned-little-unit:1, _/bits>>,
<<SubBits:Sz/bits-unsigned-little-unit:1>>);
do_get_subbits(Bits, Sz, Len, <<"integer">>, <<"signed">>, <<"little">>) ->
?match_bits(Bits, <<SubBits:Len/integer-signed-little-unit:1, _/bits>>,
<<SubBits:Sz/integer-signed-little-unit:1>>);
do_get_subbits(Bits, Sz, Len, <<"float">>, <<"signed">>, <<"little">>) ->
?match_bits(Bits, <<SubBits:Len/float-signed-little-unit:1, _/bits>>,
<<SubBits:Sz/float-signed-little-unit:1>>);
do_get_subbits(Bits, Sz, Len, <<"bits">>, <<"signed">>, <<"little">>) ->
?match_bits(Bits, <<SubBits:Len/bits-signed-little-unit:1, _/bits>>,
<<SubBits:Sz/bits-signed-little-unit:1>>).
%%------------------------------------------------------------------------------
%% Data Type Convertion Funcs
%%------------------------------------------------------------------------------
str(Data) ->
dgiot_rule_utils:bin(Data).
str_utf8(Data) ->
dgiot_rule_utils:utf8_bin(Data).
bool(Data) ->
dgiot_rule_utils:bool(Data).
int(Data) ->
dgiot_rule_utils:int(Data).
float(Data) ->
dgiot_rule_utils:float(Data).
map(Data) ->
dgiot_rule_utils:map(Data).
bin2hexstr(Bin) when is_binary(Bin) ->
dgiot_utils:bin2hexstr_A_F(Bin).
hexstr2bin(Str) when is_binary(Str) ->
dgiot_utils:hexstr2bin(Str).
%%------------------------------------------------------------------------------
NULL Funcs
%%------------------------------------------------------------------------------
is_null(undefined) -> true;
is_null(_Data) -> false.
is_not_null(Data) ->
not is_null(Data).
is_str(T) when is_binary(T) -> true;
is_str(_) -> false.
is_bool(T) when is_boolean(T) -> true;
is_bool(_) -> false.
is_int(T) when is_integer(T) -> true;
is_int(_) -> false.
is_float(T) when erlang:is_float(T) -> true;
is_float(_) -> false.
is_num(T) when is_number(T) -> true;
is_num(_) -> false.
is_map(T) when erlang:is_map(T) -> true;
is_map(_) -> false.
is_array(T) when is_list(T) -> true;
is_array(_) -> false.
%%------------------------------------------------------------------------------
%% String Funcs
%%------------------------------------------------------------------------------
lower(S) when is_binary(S) ->
string:lowercase(S).
ltrim(S) when is_binary(S) ->
string:trim(S, leading).
reverse(S) when is_binary(S) ->
iolist_to_binary(string:reverse(S)).
rtrim(S) when is_binary(S) ->
string:trim(S, trailing).
strlen(S) when is_binary(S) ->
string:length(S).
substr(S, Start) when is_binary(S), is_integer(Start) ->
string:slice(S, Start).
substr(S, Start, Length) when is_binary(S),
is_integer(Start),
is_integer(Length) ->
string:slice(S, Start, Length).
trim(S) when is_binary(S) ->
string:trim(S).
upper(S) when is_binary(S) ->
string:uppercase(S).
split(S, P) when is_binary(S),is_binary(P) ->
[R || R <- string:split(S, P, all), R =/= <<>> andalso R =/= ""].
split(S, P, <<"notrim">>) ->
string:split(S, P, all);
split(S, P, <<"leading_notrim">>) ->
string:split(S, P, leading);
split(S, P, <<"leading">>) when is_binary(S),is_binary(P) ->
[R || R <- string:split(S, P, leading), R =/= <<>> andalso R =/= ""];
split(S, P, <<"trailing_notrim">>) ->
string:split(S, P, trailing);
split(S, P, <<"trailing">>) when is_binary(S),is_binary(P) ->
[R || R <- string:split(S, P, trailing), R =/= <<>> andalso R =/= ""].
tokens(S, Separators) ->
[list_to_binary(R) || R <- string:lexemes(binary_to_list(S), binary_to_list(Separators))].
tokens(S, Separators, <<"nocrlf">>) ->
[list_to_binary(R) || R <- string:lexemes(binary_to_list(S), binary_to_list(Separators) ++ [$\r,$\n,[$\r,$\n]])].
concat(S1, S2) when is_binary(S1), is_binary(S2) ->
unicode:characters_to_binary([S1, S2], unicode).
sprintf_s(Format, Args) when is_list(Args) ->
erlang:iolist_to_binary(io_lib:format(binary_to_list(Format), Args)).
pad(S, Len) when is_binary(S), is_integer(Len) ->
iolist_to_binary(string:pad(S, Len, trailing)).
pad(S, Len, <<"trailing">>) when is_binary(S), is_integer(Len) ->
iolist_to_binary(string:pad(S, Len, trailing));
pad(S, Len, <<"both">>) when is_binary(S), is_integer(Len) ->
iolist_to_binary(string:pad(S, Len, both));
pad(S, Len, <<"leading">>) when is_binary(S), is_integer(Len) ->
iolist_to_binary(string:pad(S, Len, leading)).
pad(S, Len, <<"trailing">>, Char) when is_binary(S), is_integer(Len), is_binary(Char) ->
Chars = unicode:characters_to_list(Char, utf8),
iolist_to_binary(string:pad(S, Len, trailing, Chars));
pad(S, Len, <<"both">>, Char) when is_binary(S), is_integer(Len), is_binary(Char) ->
Chars = unicode:characters_to_list(Char, utf8),
iolist_to_binary(string:pad(S, Len, both, Chars));
pad(S, Len, <<"leading">>, Char) when is_binary(S), is_integer(Len), is_binary(Char) ->
Chars = unicode:characters_to_list(Char, utf8),
iolist_to_binary(string:pad(S, Len, leading, Chars)).
replace(SrcStr, P, RepStr) when is_binary(SrcStr), is_binary(P), is_binary(RepStr) ->
iolist_to_binary(string:replace(SrcStr, P, RepStr, all)).
replace(SrcStr, P, RepStr, <<"all">>) when is_binary(SrcStr), is_binary(P), is_binary(RepStr) ->
iolist_to_binary(string:replace(SrcStr, P, RepStr, all));
replace(SrcStr, P, RepStr, <<"trailing">>) when is_binary(SrcStr), is_binary(P), is_binary(RepStr) ->
iolist_to_binary(string:replace(SrcStr, P, RepStr, trailing));
replace(SrcStr, P, RepStr, <<"leading">>) when is_binary(SrcStr), is_binary(P), is_binary(RepStr) ->
iolist_to_binary(string:replace(SrcStr, P, RepStr, leading)).
regex_match(Str, RE) ->
case re:run(Str, RE, [global,{capture,none}]) of
match -> true;
nomatch -> false
end.
regex_replace(SrcStr, RE, RepStr) ->
re:replace(SrcStr, RE, RepStr, [global, {return,binary}]).
ascii(Char) when is_binary(Char) ->
[FirstC| _] = binary_to_list(Char),
FirstC.
find(S, P) when is_binary(S), is_binary(P) ->
find_s(S, P, leading).
find(S, P, <<"trailing">>) when is_binary(S), is_binary(P) ->
find_s(S, P, trailing);
find(S, P, <<"leading">>) when is_binary(S), is_binary(P) ->
find_s(S, P, leading).
find_s(S, P, Dir) ->
case string:find(S, P, Dir) of
nomatch -> <<"">>;
SubStr -> SubStr
end.
%%------------------------------------------------------------------------------
%% Array Funcs
%%------------------------------------------------------------------------------
nth(N, L) when is_integer(N), is_list(L) ->
lists:nth(N, L).
length(List) when is_list(List) ->
erlang:length(List).
sublist(Len, List) when is_integer(Len), is_list(List) ->
lists:sublist(List, Len).
sublist(Start, Len, List) when is_integer(Start), is_integer(Len), is_list(List) ->
lists:sublist(List, Start, Len).
first(List) when is_list(List) ->
hd(List).
last(List) when is_list(List) ->
lists:last(List).
contains(Elm, List) when is_list(List) ->
lists:member(Elm, List).
map_new() ->
#{}.
map_get(Key, Map) ->
map_get(Key, Map, undefined).
map_get(Key, Map, Default) ->
dgiot_rule_maps:nested_get(map_path(Key), Map, Default).
map_put(Key, Val, Map) ->
dgiot_rule_maps:nested_put(map_path(Key), Val, Map).
mget(Key, Map) ->
mget(Key, Map, undefined).
mget(Key, Map, Default) ->
case maps:find(Key, Map) of
{ok, Val} -> Val;
error when is_atom(Key) ->
%% the map may have an equivalent binary-form key
BinKey = dgiot_rule_utils:bin(Key),
case maps:find(BinKey, Map) of
{ok, Val} -> Val;
error -> Default
end;
error when is_binary(Key) ->
try %% the map may have an equivalent atom-form key
AtomKey = list_to_existing_atom(binary_to_list(Key)),
case maps:find(AtomKey, Map) of
{ok, Val} -> Val;
error -> Default
end
catch error:badarg ->
Default
end;
error ->
Default
end.
mput(Key, Val, Map) ->
case maps:find(Key, Map) of
{ok, _} -> maps:put(Key, Val, Map);
error when is_atom(Key) ->
%% the map may have an equivalent binary-form key
BinKey = dgiot_rule_utils:bin(Key),
case maps:find(BinKey, Map) of
{ok, _} -> maps:put(BinKey, Val, Map);
error -> maps:put(Key, Val, Map)
end;
error when is_binary(Key) ->
try %% the map may have an equivalent atom-form key
AtomKey = list_to_existing_atom(binary_to_list(Key)),
case maps:find(AtomKey, Map) of
{ok, _} -> maps:put(AtomKey, Val, Map);
error -> maps:put(Key, Val, Map)
end
catch error:badarg ->
maps:put(Key, Val, Map)
end;
error ->
maps:put(Key, Val, Map)
end.
%%------------------------------------------------------------------------------
%% Hash Funcs
%%------------------------------------------------------------------------------
md5(S) when is_binary(S) ->
hash(md5, S).
sha(S) when is_binary(S) ->
hash(sha, S).
sha256(S) when is_binary(S) ->
hash(sha256, S).
hash(Type, Data) ->
dgiot_utils:bin2hexstr_a_f(crypto:hash(Type, Data)).
%%------------------------------------------------------------------------------
Data encode and decode Funcs
%%------------------------------------------------------------------------------
base64_encode(Data) when is_binary(Data) ->
base64:encode(Data).
base64_decode(Data) when is_binary(Data) ->
base64:decode(Data).
json_encode(Data) ->
dgiot_json:encode(Data).
json_decode(Data) ->
dgiot_json:decode(Data, [return_maps]).
term_encode(Term) ->
erlang:term_to_binary(Term).
term_decode(Data) when is_binary(Data) ->
erlang:binary_to_term(Data).
%%------------------------------------------------------------------------------
%% Dict Funcs
%%------------------------------------------------------------------------------
-define(DICT_KEY(KEY), {'@rule_engine', KEY}).
proc_dict_get(Key) ->
erlang:get(?DICT_KEY(Key)).
proc_dict_put(Key, Val) ->
erlang:put(?DICT_KEY(Key), Val).
proc_dict_del(Key) ->
erlang:erase(?DICT_KEY(Key)).
kv_store_put(Key, Val) ->
ets:insert(?KV_TAB, {Key, Val}).
kv_store_get(Key) ->
kv_store_get(Key, undefined).
kv_store_get(Key, Default) ->
case ets:lookup(?KV_TAB, Key) of
[{_, Val}] -> Val;
_ -> Default
end.
kv_store_del(Key) ->
ets:delete(?KV_TAB, Key).
%%--------------------------------------------------------------------
%% Date functions
%%--------------------------------------------------------------------
now_rfc3339() ->
now_rfc3339(<<"second">>).
now_rfc3339(Unit) ->
unix_ts_to_rfc3339(now_timestamp(Unit), Unit).
unix_ts_to_rfc3339(Epoch) ->
unix_ts_to_rfc3339(Epoch, <<"second">>).
unix_ts_to_rfc3339(Epoch, Unit) when is_integer(Epoch) ->
dgiot_rule_utils:bin(
calendar:system_time_to_rfc3339(
Epoch, [{unit, time_unit(Unit)}])).
rfc3339_to_unix_ts(DateTime) ->
rfc3339_to_unix_ts(DateTime, <<"second">>).
rfc3339_to_unix_ts(DateTime, Unit) when is_binary(DateTime) ->
calendar:rfc3339_to_system_time(binary_to_list(DateTime),
[{unit, time_unit(Unit)}]).
now_timestamp() ->
erlang:system_time(second).
now_timestamp(Unit) ->
erlang:system_time(time_unit(Unit)).
time_unit(<<"second">>) -> second;
time_unit(<<"millisecond">>) -> millisecond;
time_unit(<<"microsecond">>) -> microsecond;
time_unit(<<"nanosecond">>) -> nanosecond.
%% @doc This is for sql funcs that should be handled in the specific modules.
%% Here the emqx_rule_funcs module acts as a proxy, forwarding
%% the function handling to the worker module.
%% @end
-ifdef(EMQX_ENTERPRISE).
'$handle_undefined_function'(schema_decode, [SchemaId, Data|MoreArgs]) ->
emqx_schema_parser:decode(SchemaId, Data, MoreArgs);
'$handle_undefined_function'(schema_decode, Args) ->
error({args_count_error, {schema_decode, Args}});
'$handle_undefined_function'(schema_encode, [SchemaId, Term|MoreArgs]) ->
emqx_schema_parser:encode(SchemaId, Term, MoreArgs);
'$handle_undefined_function'(schema_encode, Args) ->
error({args_count_error, {schema_encode, Args}});
'$handle_undefined_function'(sprintf, [Format|Args]) ->
erlang:apply(fun sprintf_s/2, [Format, Args]);
'$handle_undefined_function'(Fun, Args) ->
error({sql_function_not_supported, function_literal(Fun, Args)}).
-else.
'$handle_undefined_function'(sprintf, [Format|Args]) ->
erlang:apply(fun sprintf_s/2, [Format, Args]);
'$handle_undefined_function'(Fun, Args) ->
error({sql_function_not_supported, function_literal(Fun, Args)}).
-endif. % EMQX_ENTERPRISE
map_path(Key) ->
{path, [{key, P} || P <- string:split(Key, ".", all)]}.
function_literal(Fun, []) when is_atom(Fun) ->
atom_to_list(Fun) ++ "()";
function_literal(Fun, [FArg | Args]) when is_atom(Fun), is_list(Args) ->
WithFirstArg = io_lib:format("~s(~0p", [atom_to_list(Fun), FArg]),
lists:foldl(fun(Arg, Literal) ->
io_lib:format("~s, ~0p", [Literal, Arg])
end, WithFirstArg, Args) ++ ")";
function_literal(Fun, Args) ->
{invalid_func, {Fun, Args}}.
| null | https://raw.githubusercontent.com/dgiot/dgiot/777c878acd0c89e445c3b8992febbc925b8ee060/apps/dgiot/src/rules/dgiot_rule_funcs.erl | erlang | --------------------------------------------------------------------
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
--------------------------------------------------------------------
IoT Funcs
Math Funcs
Bits Funcs
Data Type Validation Funcs
String Funcs
Map Funcs
Array Funcs
Hash Funcs
Data encode and decode
Date functions
Proc Dict Func
@doc "qos()" Func
@doc "topic()" Func
@doc "topic(N)" Func
@doc "flags()" Func
@doc "username()" Func
@doc "clientip()" Func
@doc Check if a topic_filter contains a specific topic
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
Math Funcs
------------------------------------------------------------------------------
------------------------------------------------------------------------------
Bits Funcs
------------------------------------------------------------------------------
------------------------------------------------------------------------------
Data Type Convertion Funcs
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
String Funcs
------------------------------------------------------------------------------
------------------------------------------------------------------------------
Array Funcs
------------------------------------------------------------------------------
the map may have an equivalent binary-form key
the map may have an equivalent atom-form key
the map may have an equivalent binary-form key
the map may have an equivalent atom-form key
------------------------------------------------------------------------------
Hash Funcs
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
Dict Funcs
------------------------------------------------------------------------------
--------------------------------------------------------------------
Date functions
--------------------------------------------------------------------
@doc This is for sql funcs that should be handled in the specific modules.
Here the emqx_rule_funcs module acts as a proxy, forwarding
the function handling to the worker module.
@end
EMQX_ENTERPRISE | Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
distributed under the License is distributed on an "AS IS" BASIS,
-module(dgiot_rule_funcs).
-include("rule_engine.hrl").
-export([ msgid/0
, qos/0
, flags/0
, flag/1
, topic/0
, topic/1
, clientid/0
, clientip/0
, peerhost/0
, username/0
, payload/0
, payload/1
, contains_topic/2
, contains_topic/3
, contains_topic_match/2
, contains_topic_match/3
, null/0
]).
Arithmetic Funcs
-export([ '+'/2
, '-'/2
, '*'/2
, '/'/2
, 'div'/2
, mod/2
, eq/2
]).
-export([ abs/1
, acos/1
, acosh/1
, asin/1
, asinh/1
, atan/1
, atanh/1
, ceil/1
, cos/1
, cosh/1
, exp/1
, floor/1
, fmod/2
, log/1
, log10/1
, log2/1
, power/2
, round/1
, sin/1
, sinh/1
, sqrt/1
, tan/1
, tanh/1
]).
-export([ bitnot/1
, bitand/2
, bitor/2
, bitxor/2
, bitsl/2
, bitsr/2
, bitsize/1
, subbits/2
, subbits/3
, subbits/6
]).
Data Type Conversion
-export([ str/1
, str_utf8/1
, bool/1
, int/1
, float/1
, map/1
, bin2hexstr/1
, hexstr2bin/1
]).
-export([ is_null/1
, is_not_null/1
, is_str/1
, is_bool/1
, is_int/1
, is_float/1
, is_num/1
, is_map/1
, is_array/1
]).
-export([ lower/1
, ltrim/1
, reverse/1
, rtrim/1
, strlen/1
, substr/2
, substr/3
, trim/1
, upper/1
, split/2
, split/3
, concat/2
, tokens/2
, tokens/3
, sprintf_s/2
, pad/2
, pad/3
, pad/4
, replace/3
, replace/4
, regex_match/2
, regex_replace/3
, ascii/1
, find/2
, find/3
]).
-export([ map_new/0
]).
-export([ map_get/2
, map_get/3
, map_put/3
]).
For backward compatibility
-export([ mget/2
, mget/3
, mput/3
]).
-export([ nth/2
, length/1
, sublist/2
, sublist/3
, first/1
, last/1
, contains/2
]).
-export([ md5/1
, sha/1
, sha256/1
]).
-export([ base64_encode/1
, base64_decode/1
, json_decode/1
, json_encode/1
, term_decode/1
, term_encode/1
]).
-export([ now_rfc3339/0
, now_rfc3339/1
, unix_ts_to_rfc3339/1
, unix_ts_to_rfc3339/2
, rfc3339_to_unix_ts/1
, rfc3339_to_unix_ts/2
, now_timestamp/0
, now_timestamp/1
]).
-export([ proc_dict_get/1
, proc_dict_put/2
, proc_dict_del/1
, kv_store_get/1
, kv_store_get/2
, kv_store_put/2
, kv_store_del/1
]).
-export(['$handle_undefined_function'/2]).
-compile({no_auto_import,
[ abs/1
, ceil/1
, floor/1
, round/1
, map_get/2
]}).
-define(is_var(X), is_binary(X)).
@doc " msgid ( ) " Func
msgid() ->
fun(#{id := MsgId}) -> MsgId; (_) -> undefined end.
qos() ->
fun(#{qos := QoS}) -> QoS; (_) -> undefined end.
topic() ->
fun(#{topic := Topic}) -> Topic; (_) -> undefined end.
topic(I) when is_integer(I) ->
fun(#{topic := Topic}) ->
lists:nth(I, dgiot_topic:tokens(Topic));
(_) -> undefined
end.
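%% Illustrative example (not from the original source), assuming dgiot_topic:tokens/1
%% splits a topic on "/" the way emqx_topic:tokens/1 does:
%%   F = topic(2),
%%   F(#{topic => <<"t/dev1/up">>})  %% => <<"dev1">>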
flags() ->
fun(#{flags := Flags}) -> Flags; (_) -> #{} end.
@doc " flags(Name ) " Func
flag(Name) ->
fun(#{flags := Flags}) -> dgiot_rule_maps:nested_get({var,Name}, Flags); (_) -> undefined end.
@doc " ( ) " Func
clientid() ->
fun(#{from := ClientId}) -> ClientId; (_) -> undefined end.
username() ->
fun(#{username := Username}) -> Username; (_) -> undefined end.
clientip() ->
peerhost().
peerhost() ->
fun(#{peerhost := Addr}) -> Addr; (_) -> undefined end.
payload() ->
fun(#{payload := Payload}) -> Payload; (_) -> undefined end.
payload(Path) ->
fun(#{payload := Payload}) when erlang:is_map(Payload) ->
dgiot_rule_maps:nested_get(map_path(Path), Payload);
(_) -> undefined
end.
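%% Illustrative example (not from the original source), assuming
%% dgiot_rule_maps:nested_get/2 walks the {path, ...} produced by map_path/1:
%%   F = payload(<<"sensor.temp">>),
%%   F(#{payload => #{<<"sensor">> => #{<<"temp">> => 25}}})  %% => 25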
TopicFilters = [{<<"t/a">>, #{qos => 0}}].
-spec(contains_topic(emqx_mqtt_types:topic_filters(), emqx_types:topic())
-> true | false).
contains_topic(TopicFilters, Topic) ->
case find_topic_filter(Topic, TopicFilters, fun eq/2) of
not_found -> false;
_ -> true
end.
contains_topic(TopicFilters, Topic, QoS) ->
case find_topic_filter(Topic, TopicFilters, fun eq/2) of
{_, #{qos := QoS}} -> true;
_ -> false
end.
-spec(contains_topic_match(emqx_mqtt_types:topic_filters(), emqx_types:topic())
-> true | false).
contains_topic_match(TopicFilters, Topic) ->
case find_topic_filter(Topic, TopicFilters, fun dgiot_topic:match/2) of
not_found -> false;
_ -> true
end.
contains_topic_match(TopicFilters, Topic, QoS) ->
case find_topic_filter(Topic, TopicFilters, fun dgiot_topic:match/2) of
{_, #{qos := QoS}} -> true;
_ -> false
end.
find_topic_filter(Filter, TopicFilters, Func) ->
try
[case Func(Topic, Filter) of
true -> throw(Result);
false -> ok
end || Result = #{topic := Topic} <- TopicFilters],
not_found
catch
throw:Result -> Result
end.
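%% Illustrative example (not from the original source): find_topic_filter/3 throws
%% the matching filter map to exit the comprehension early, so
%%   contains_topic([#{topic => <<"t/a">>, qos => 0}], <<"t/a">>)  %% => true
%%   contains_topic([#{topic => <<"t/a">>, qos => 0}], <<"t/b">>)  %% => false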
null() ->
undefined.
Arithmetic Funcs
plus 2 numbers
'+'(X, Y) when is_number(X), is_number(Y) ->
X + Y;
concat 2 strings
'+'(X, Y) when is_binary(X), is_binary(Y) ->
concat(X, Y).
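%% Illustrative example (not from the original source): '+' is overloaded to add
%% numbers and to concatenate binaries:
%%   '+'(1, 2)                  %% => 3
%%   '+'(<<"abc">>, <<"def">>)  %% => <<"abcdef">>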
'-'(X, Y) when is_number(X), is_number(Y) ->
X - Y.
'*'(X, Y) when is_number(X), is_number(Y) ->
X * Y.
'/'(X, Y) when is_number(X), is_number(Y) ->
X / Y.
'div'(X, Y) when is_integer(X), is_integer(Y) ->
X div Y.
mod(X, Y) when is_integer(X), is_integer(Y) ->
X rem Y.
eq(X, Y) ->
X == Y.
abs(N) when is_integer(N) ->
erlang:abs(N).
acos(N) when is_number(N) ->
math:acos(N).
acosh(N) when is_number(N) ->
math:acosh(N).
asin(N) when is_number(N)->
math:asin(N).
asinh(N) when is_number(N) ->
math:asinh(N).
atan(N) when is_number(N) ->
math:atan(N).
atanh(N) when is_number(N)->
math:atanh(N).
ceil(N) when is_number(N) ->
erlang:ceil(N).
cos(N) when is_number(N)->
math:cos(N).
cosh(N) when is_number(N) ->
math:cosh(N).
exp(N) when is_number(N)->
math:exp(N).
floor(N) when is_number(N) ->
erlang:floor(N).
fmod(X, Y) when is_number(X), is_number(Y) ->
math:fmod(X, Y).
log(N) when is_number(N) ->
math:log(N).
log10(N) when is_number(N) ->
math:log10(N).
log2(N) when is_number(N)->
math:log2(N).
power(X, Y) when is_number(X), is_number(Y) ->
math:pow(X, Y).
round(N) when is_number(N) ->
erlang:round(N).
sin(N) when is_number(N) ->
math:sin(N).
sinh(N) when is_number(N) ->
math:sinh(N).
sqrt(N) when is_number(N) ->
math:sqrt(N).
tan(N) when is_number(N) ->
math:tan(N).
tanh(N) when is_number(N) ->
math:tanh(N).
bitnot(I) when is_integer(I) ->
bnot I.
bitand(X, Y) when is_integer(X), is_integer(Y) ->
X band Y.
bitor(X, Y) when is_integer(X), is_integer(Y) ->
X bor Y.
bitxor(X, Y) when is_integer(X), is_integer(Y) ->
X bxor Y.
bitsl(X, I) when is_integer(X), is_integer(I) ->
X bsl I.
bitsr(X, I) when is_integer(X), is_integer(I) ->
X bsr I.
bitsize(Bits) when is_bitstring(Bits) ->
bit_size(Bits).
subbits(Bits, Len) when is_integer(Len), is_bitstring(Bits) ->
subbits(Bits, 1, Len).
subbits(Bits, Start, Len) when is_integer(Start), is_integer(Len), is_bitstring(Bits) ->
get_subbits(Bits, Start, Len, <<"integer">>, <<"unsigned">>, <<"big">>).
subbits(Bits, Start, Len, Type, Signedness, Endianness) when is_integer(Start), is_integer(Len), is_bitstring(Bits) ->
get_subbits(Bits, Start, Len, Type, Signedness, Endianness).
get_subbits(Bits, Start, Len, Type, Signedness, Endianness) ->
Begin = Start - 1,
case Bits of
<<_:Begin, Rem/bits>> when Rem =/= <<>> ->
Sz = bit_size(Rem),
do_get_subbits(Rem, Sz, Len, Type, Signedness, Endianness);
_ -> undefined
end.
-define(match_bits(Bits0, Pattern, ElesePattern),
case Bits0 of
Pattern ->
SubBits;
ElesePattern ->
SubBits
end).
do_get_subbits(Bits, Sz, Len, <<"integer">>, <<"unsigned">>, <<"big">>) ->
?match_bits(Bits, <<SubBits:Len/integer-unsigned-big-unit:1, _/bits>>,
<<SubBits:Sz/integer-unsigned-big-unit:1>>);
do_get_subbits(Bits, Sz, Len, <<"float">>, <<"unsigned">>, <<"big">>) ->
?match_bits(Bits, <<SubBits:Len/float-unsigned-big-unit:1, _/bits>>,
<<SubBits:Sz/float-unsigned-big-unit:1>>);
do_get_subbits(Bits, Sz, Len, <<"bits">>, <<"unsigned">>, <<"big">>) ->
?match_bits(Bits, <<SubBits:Len/bits-unsigned-big-unit:1, _/bits>>,
<<SubBits:Sz/bits-unsigned-big-unit:1>>);
do_get_subbits(Bits, Sz, Len, <<"integer">>, <<"signed">>, <<"big">>) ->
?match_bits(Bits, <<SubBits:Len/integer-signed-big-unit:1, _/bits>>,
<<SubBits:Sz/integer-signed-big-unit:1>>);
do_get_subbits(Bits, Sz, Len, <<"float">>, <<"signed">>, <<"big">>) ->
?match_bits(Bits, <<SubBits:Len/float-signed-big-unit:1, _/bits>>,
<<SubBits:Sz/float-signed-big-unit:1>>);
do_get_subbits(Bits, Sz, Len, <<"bits">>, <<"signed">>, <<"big">>) ->
?match_bits(Bits, <<SubBits:Len/bits-signed-big-unit:1, _/bits>>,
<<SubBits:Sz/bits-signed-big-unit:1>>);
do_get_subbits(Bits, Sz, Len, <<"integer">>, <<"unsigned">>, <<"little">>) ->
?match_bits(Bits, <<SubBits:Len/integer-unsigned-little-unit:1, _/bits>>,
<<SubBits:Sz/integer-unsigned-little-unit:1>>);
do_get_subbits(Bits, Sz, Len, <<"float">>, <<"unsigned">>, <<"little">>) ->
?match_bits(Bits, <<SubBits:Len/float-unsigned-little-unit:1, _/bits>>,
<<SubBits:Sz/float-unsigned-little-unit:1>>);
do_get_subbits(Bits, Sz, Len, <<"bits">>, <<"unsigned">>, <<"little">>) ->
?match_bits(Bits, <<SubBits:Len/bits-unsigned-little-unit:1, _/bits>>,
<<SubBits:Sz/bits-unsigned-little-unit:1>>);
do_get_subbits(Bits, Sz, Len, <<"integer">>, <<"signed">>, <<"little">>) ->
?match_bits(Bits, <<SubBits:Len/integer-signed-little-unit:1, _/bits>>,
<<SubBits:Sz/integer-signed-little-unit:1>>);
do_get_subbits(Bits, Sz, Len, <<"float">>, <<"signed">>, <<"little">>) ->
?match_bits(Bits, <<SubBits:Len/float-signed-little-unit:1, _/bits>>,
<<SubBits:Sz/float-signed-little-unit:1>>);
do_get_subbits(Bits, Sz, Len, <<"bits">>, <<"signed">>, <<"little">>) ->
?match_bits(Bits, <<SubBits:Len/bits-signed-little-unit:1, _/bits>>,
<<SubBits:Sz/bits-signed-little-unit:1>>).
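%% Illustrative example (not from the original source): subbits reads a bit slice
%% starting at a 1-based bit offset, defaulting to an unsigned big-endian integer:
%%   subbits(<<16#12, 16#34>>, 8)                                               %% => 18
%%   subbits(<<16#12, 16#34>>, 9, 8, <<"integer">>, <<"unsigned">>, <<"big">>)  %% => 52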
str(Data) ->
dgiot_rule_utils:bin(Data).
str_utf8(Data) ->
dgiot_rule_utils:utf8_bin(Data).
bool(Data) ->
dgiot_rule_utils:bool(Data).
int(Data) ->
dgiot_rule_utils:int(Data).
float(Data) ->
dgiot_rule_utils:float(Data).
map(Data) ->
dgiot_rule_utils:map(Data).
bin2hexstr(Bin) when is_binary(Bin) ->
dgiot_utils:bin2hexstr_A_F(Bin).
hexstr2bin(Str) when is_binary(Str) ->
dgiot_utils:hexstr2bin(Str).
NULL Funcs
is_null(undefined) -> true;
is_null(_Data) -> false.
is_not_null(Data) ->
not is_null(Data).
is_str(T) when is_binary(T) -> true;
is_str(_) -> false.
is_bool(T) when is_boolean(T) -> true;
is_bool(_) -> false.
is_int(T) when is_integer(T) -> true;
is_int(_) -> false.
is_float(T) when erlang:is_float(T) -> true;
is_float(_) -> false.
is_num(T) when is_number(T) -> true;
is_num(_) -> false.
is_map(T) when erlang:is_map(T) -> true;
is_map(_) -> false.
is_array(T) when is_list(T) -> true;
is_array(_) -> false.
lower(S) when is_binary(S) ->
string:lowercase(S).
ltrim(S) when is_binary(S) ->
string:trim(S, leading).
reverse(S) when is_binary(S) ->
iolist_to_binary(string:reverse(S)).
rtrim(S) when is_binary(S) ->
string:trim(S, trailing).
strlen(S) when is_binary(S) ->
string:length(S).
substr(S, Start) when is_binary(S), is_integer(Start) ->
string:slice(S, Start).
substr(S, Start, Length) when is_binary(S),
is_integer(Start),
is_integer(Length) ->
string:slice(S, Start, Length).
trim(S) when is_binary(S) ->
string:trim(S).
upper(S) when is_binary(S) ->
string:uppercase(S).
split(S, P) when is_binary(S),is_binary(P) ->
[R || R <- string:split(S, P, all), R =/= <<>> andalso R =/= ""].
split(S, P, <<"notrim">>) ->
string:split(S, P, all);
split(S, P, <<"leading_notrim">>) ->
string:split(S, P, leading);
split(S, P, <<"leading">>) when is_binary(S),is_binary(P) ->
[R || R <- string:split(S, P, leading), R =/= <<>> andalso R =/= ""];
split(S, P, <<"trailing_notrim">>) ->
string:split(S, P, trailing);
split(S, P, <<"trailing">>) when is_binary(S),is_binary(P) ->
[R || R <- string:split(S, P, trailing), R =/= <<>> andalso R =/= ""].
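%% Illustrative example (not from the original source): split/2 drops empty
%% fragments, while the "*_notrim" variants keep them:
%%   split(<<"a//b/">>, <<"/">>)                %% => [<<"a">>, <<"b">>]
%%   split(<<"a//b/">>, <<"/">>, <<"notrim">>)  %% => [<<"a">>, <<>>, <<"b">>, <<>>]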
tokens(S, Separators) ->
[list_to_binary(R) || R <- string:lexemes(binary_to_list(S), binary_to_list(Separators))].
tokens(S, Separators, <<"nocrlf">>) ->
[list_to_binary(R) || R <- string:lexemes(binary_to_list(S), binary_to_list(Separators) ++ [$\r,$\n,[$\r,$\n]])].
concat(S1, S2) when is_binary(S1), is_binary(S2) ->
unicode:characters_to_binary([S1, S2], unicode).
sprintf_s(Format, Args) when is_list(Args) ->
erlang:iolist_to_binary(io_lib:format(binary_to_list(Format), Args)).
pad(S, Len) when is_binary(S), is_integer(Len) ->
iolist_to_binary(string:pad(S, Len, trailing)).
pad(S, Len, <<"trailing">>) when is_binary(S), is_integer(Len) ->
iolist_to_binary(string:pad(S, Len, trailing));
pad(S, Len, <<"both">>) when is_binary(S), is_integer(Len) ->
iolist_to_binary(string:pad(S, Len, both));
pad(S, Len, <<"leading">>) when is_binary(S), is_integer(Len) ->
iolist_to_binary(string:pad(S, Len, leading)).
pad(S, Len, <<"trailing">>, Char) when is_binary(S), is_integer(Len), is_binary(Char) ->
Chars = unicode:characters_to_list(Char, utf8),
iolist_to_binary(string:pad(S, Len, trailing, Chars));
pad(S, Len, <<"both">>, Char) when is_binary(S), is_integer(Len), is_binary(Char) ->
Chars = unicode:characters_to_list(Char, utf8),
iolist_to_binary(string:pad(S, Len, both, Chars));
pad(S, Len, <<"leading">>, Char) when is_binary(S), is_integer(Len), is_binary(Char) ->
Chars = unicode:characters_to_list(Char, utf8),
iolist_to_binary(string:pad(S, Len, leading, Chars)).
replace(SrcStr, P, RepStr) when is_binary(SrcStr), is_binary(P), is_binary(RepStr) ->
iolist_to_binary(string:replace(SrcStr, P, RepStr, all)).
replace(SrcStr, P, RepStr, <<"all">>) when is_binary(SrcStr), is_binary(P), is_binary(RepStr) ->
iolist_to_binary(string:replace(SrcStr, P, RepStr, all));
replace(SrcStr, P, RepStr, <<"trailing">>) when is_binary(SrcStr), is_binary(P), is_binary(RepStr) ->
iolist_to_binary(string:replace(SrcStr, P, RepStr, trailing));
replace(SrcStr, P, RepStr, <<"leading">>) when is_binary(SrcStr), is_binary(P), is_binary(RepStr) ->
iolist_to_binary(string:replace(SrcStr, P, RepStr, leading)).
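%% Illustrative example (not from the original source):
%%   replace(<<"a-b-c">>, <<"-">>, <<"+">>)                 %% => <<"a+b+c">>
%%   replace(<<"a-b-c">>, <<"-">>, <<"+">>, <<"leading">>)  %% => <<"a+b-c">>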
regex_match(Str, RE) ->
case re:run(Str, RE, [global,{capture,none}]) of
match -> true;
nomatch -> false
end.
regex_replace(SrcStr, RE, RepStr) ->
re:replace(SrcStr, RE, RepStr, [global, {return,binary}]).
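%% Illustrative example (not from the original source):
%%   regex_match(<<"temp_01">>, <<"^[a-z]+_[0-9]+$">>)  %% => true
%%   regex_replace(<<"a1b2">>, <<"[0-9]">>, <<"">>)     %% => <<"ab">>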
ascii(Char) when is_binary(Char) ->
[FirstC| _] = binary_to_list(Char),
FirstC.
find(S, P) when is_binary(S), is_binary(P) ->
find_s(S, P, leading).
find(S, P, <<"trailing">>) when is_binary(S), is_binary(P) ->
find_s(S, P, trailing);
find(S, P, <<"leading">>) when is_binary(S), is_binary(P) ->
find_s(S, P, leading).
find_s(S, P, Dir) ->
case string:find(S, P, Dir) of
nomatch -> <<"">>;
SubStr -> SubStr
end.
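%% Illustrative example (not from the original source): find/2,3 return the rest of
%% the string from the first (or last) occurrence of the pattern, or <<"">>:
%%   find(<<"a.b.c">>, <<".">>)                  %% => <<".b.c">>
%%   find(<<"a.b.c">>, <<".">>, <<"trailing">>)  %% => <<".c">>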
nth(N, L) when is_integer(N), is_list(L) ->
lists:nth(N, L).
length(List) when is_list(List) ->
erlang:length(List).
sublist(Len, List) when is_integer(Len), is_list(List) ->
lists:sublist(List, Len).
sublist(Start, Len, List) when is_integer(Start), is_integer(Len), is_list(List) ->
lists:sublist(List, Start, Len).
first(List) when is_list(List) ->
hd(List).
last(List) when is_list(List) ->
lists:last(List).
contains(Elm, List) when is_list(List) ->
lists:member(Elm, List).
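%% Illustrative example (not from the original source):
%%   nth(2, [1, 2, 3])       %% => 2
%%   sublist(2, [1, 2, 3])   %% => [1, 2]
%%   contains(3, [1, 2, 3])  %% => true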
map_new() ->
#{}.
map_get(Key, Map) ->
map_get(Key, Map, undefined).
map_get(Key, Map, Default) ->
dgiot_rule_maps:nested_get(map_path(Key), Map, Default).
map_put(Key, Val, Map) ->
dgiot_rule_maps:nested_put(map_path(Key), Val, Map).
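%% Illustrative example (not from the original source), assuming
%% dgiot_rule_maps:nested_get/3 and nested_put/3 follow the dotted path built by map_path/1:
%%   map_get(<<"a.b">>, #{<<"a">> => #{<<"b">> => 1}})  %% => 1
%%   map_put(<<"a.c">>, 2, #{<<"a">> => #{}})           %% => #{<<"a">> => #{<<"c">> => 2}}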
mget(Key, Map) ->
mget(Key, Map, undefined).
mget(Key, Map, Default) ->
case maps:find(Key, Map) of
{ok, Val} -> Val;
error when is_atom(Key) ->
BinKey = dgiot_rule_utils:bin(Key),
case maps:find(BinKey, Map) of
{ok, Val} -> Val;
error -> Default
end;
error when is_binary(Key) ->
try
AtomKey = list_to_existing_atom(binary_to_list(Key)),
case maps:find(AtomKey, Map) of
{ok, Val} -> Val;
error -> Default
end
catch error:badarg ->
Default
end;
error ->
Default
end.
mput(Key, Val, Map) ->
case maps:find(Key, Map) of
{ok, _} -> maps:put(Key, Val, Map);
error when is_atom(Key) ->
BinKey = dgiot_rule_utils:bin(Key),
case maps:find(BinKey, Map) of
{ok, _} -> maps:put(BinKey, Val, Map);
error -> maps:put(Key, Val, Map)
end;
error when is_binary(Key) ->
try
AtomKey = list_to_existing_atom(binary_to_list(Key)),
case maps:find(AtomKey, Map) of
{ok, _} -> maps:put(AtomKey, Val, Map);
error -> maps:put(Key, Val, Map)
end
catch error:badarg ->
maps:put(Key, Val, Map)
end;
error ->
maps:put(Key, Val, Map)
end.
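%% Illustrative example (not from the original source): mget/mput fall back between
%% the atom and binary forms of a key; the binary->atom direction only succeeds when
%% the atom already exists, and dgiot_rule_utils:bin/1 is assumed to render atoms as binaries:
%%   mget(<<"qos">>, #{qos => 1})                %% => 1
%%   mget(payload, #{<<"payload">> => <<"x">>})  %% => <<"x">>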
md5(S) when is_binary(S) ->
hash(md5, S).
sha(S) when is_binary(S) ->
hash(sha, S).
sha256(S) when is_binary(S) ->
hash(sha256, S).
hash(Type, Data) ->
dgiot_utils:bin2hexstr_a_f(crypto:hash(Type, Data)).
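%% Illustrative example (not from the original source), assuming
%% dgiot_utils:bin2hexstr_a_f/1 renders lower-case hex:
%%   md5(<<>>)  %% => <<"d41d8cd98f00b204e9800998ecf8427e">>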
Data encode and decode Funcs
base64_encode(Data) when is_binary(Data) ->
base64:encode(Data).
base64_decode(Data) when is_binary(Data) ->
base64:decode(Data).
json_encode(Data) ->
dgiot_json:encode(Data).
json_decode(Data) ->
dgiot_json:decode(Data, [return_maps]).
term_encode(Term) ->
erlang:term_to_binary(Term).
term_decode(Data) when is_binary(Data) ->
erlang:binary_to_term(Data).
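%% Illustrative example (not from the original source), assuming dgiot_json
%% decodes objects to maps with binary keys:
%%   base64_encode(<<"hello">>)        %% => <<"aGVsbG8=">>
%%   json_decode(<<"{\"a\": 1}">>)     %% => #{<<"a">> => 1}
%%   term_decode(term_encode({a, 1}))  %% => {a, 1}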
-define(DICT_KEY(KEY), {'@rule_engine', KEY}).
proc_dict_get(Key) ->
erlang:get(?DICT_KEY(Key)).
proc_dict_put(Key, Val) ->
erlang:put(?DICT_KEY(Key), Val).
proc_dict_del(Key) ->
erlang:erase(?DICT_KEY(Key)).
kv_store_put(Key, Val) ->
ets:insert(?KV_TAB, {Key, Val}).
kv_store_get(Key) ->
kv_store_get(Key, undefined).
kv_store_get(Key, Default) ->
case ets:lookup(?KV_TAB, Key) of
[{_, Val}] -> Val;
_ -> Default
end.
kv_store_del(Key) ->
ets:delete(?KV_TAB, Key).
now_rfc3339() ->
now_rfc3339(<<"second">>).
now_rfc3339(Unit) ->
unix_ts_to_rfc3339(now_timestamp(Unit), Unit).
unix_ts_to_rfc3339(Epoch) ->
unix_ts_to_rfc3339(Epoch, <<"second">>).
unix_ts_to_rfc3339(Epoch, Unit) when is_integer(Epoch) ->
dgiot_rule_utils:bin(
calendar:system_time_to_rfc3339(
Epoch, [{unit, time_unit(Unit)}])).
rfc3339_to_unix_ts(DateTime) ->
rfc3339_to_unix_ts(DateTime, <<"second">>).
rfc3339_to_unix_ts(DateTime, Unit) when is_binary(DateTime) ->
calendar:rfc3339_to_system_time(binary_to_list(DateTime),
[{unit, time_unit(Unit)}]).
now_timestamp() ->
erlang:system_time(second).
now_timestamp(Unit) ->
erlang:system_time(time_unit(Unit)).
time_unit(<<"second">>) -> second;
time_unit(<<"millisecond">>) -> millisecond;
time_unit(<<"microsecond">>) -> microsecond;
time_unit(<<"nanosecond">>) -> nanosecond.
-ifdef(EMQX_ENTERPRISE).
'$handle_undefined_function'(schema_decode, [SchemaId, Data|MoreArgs]) ->
emqx_schema_parser:decode(SchemaId, Data, MoreArgs);
'$handle_undefined_function'(schema_decode, Args) ->
error({args_count_error, {schema_decode, Args}});
'$handle_undefined_function'(schema_encode, [SchemaId, Term|MoreArgs]) ->
emqx_schema_parser:encode(SchemaId, Term, MoreArgs);
'$handle_undefined_function'(schema_encode, Args) ->
error({args_count_error, {schema_encode, Args}});
'$handle_undefined_function'(sprintf, [Format|Args]) ->
erlang:apply(fun sprintf_s/2, [Format, Args]);
'$handle_undefined_function'(Fun, Args) ->
error({sql_function_not_supported, function_literal(Fun, Args)}).
-else.
'$handle_undefined_function'(sprintf, [Format|Args]) ->
erlang:apply(fun sprintf_s/2, [Format, Args]);
'$handle_undefined_function'(Fun, Args) ->
error({sql_function_not_supported, function_literal(Fun, Args)}).
-endif.
map_path(Key) ->
{path, [{key, P} || P <- string:split(Key, ".", all)]}.
function_literal(Fun, []) when is_atom(Fun) ->
atom_to_list(Fun) ++ "()";
function_literal(Fun, [FArg | Args]) when is_atom(Fun), is_list(Args) ->
WithFirstArg = io_lib:format("~s(~0p", [atom_to_list(Fun), FArg]),
lists:foldl(fun(Arg, Literal) ->
io_lib:format("~s, ~0p", [Literal, Arg])
end, WithFirstArg, Args) ++ ")";
function_literal(Fun, Args) ->
{invalid_func, {Fun, Args}}.
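%% Illustrative example (not from the original source): function_literal/2 builds the
%% readable call text used in the sql_function_not_supported error:
%%   lists:flatten(function_literal(my_fun, [1, <<"a">>]))  %% => "my_fun(1, <<\"a\">>)"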
|