_id (string, len 64) | repository (string, len 6-84) | name (string, len 4-110) | content (string, len 0-248k) | license (null) | download_url (string, len 89-454) | language (7 classes) | comments (string, len 0-74.6k) | code (string, len 0-248k)
---|---|---|---|---|---|---|---|---|
3a2024604a57f1ab55bd30e64c29cc1bd899af34f67da29b3a588072c68803ae | Opetushallitus/ataru | field_visibility.cljs |
(ns ataru.hakija.application.field-visibility
(:require [clojure.set :as set]
[clojure.string :as string]
[ataru.application.option-visibility :as option-visibility]
[ataru.util :as u]))
(defn- ylioppilastutkinto? [db]
(boolean (some #(or (= "pohjakoulutus_yo" %)
(= "pohjakoulutus_yo_ammatillinen" %)
(= "pohjakoulutus_yo_kansainvalinen_suomessa" %)
(= "pohjakoulutus_yo_ulkomainen" %))
(get-in db [:application :answers :higher-completed-base-education :value]))))
(defn- selected-hakukohteet [db]
(get-in db [:application :answers :hakukohteet :value]))
(defn selected-hakukohteet-and-ryhmat [db]
(let [selected-hakukohteet (set (selected-hakukohteet db))
selected-hakukohteet-tarjonta (when (not-empty selected-hakukohteet)
(filter #(contains? selected-hakukohteet (:oid %))
(get-in db [:form :tarjonta :hakukohteet])))
selected-hakukohderyhmat (set (mapcat :hakukohderyhmat selected-hakukohteet-tarjonta))
selected-ei-jyemp-hakukohteet-tarjonta (set (remove :jos-ylioppilastutkinto-ei-muita-pohjakoulutusliitepyyntoja?
selected-hakukohteet-tarjonta))
selected-ei-jyemp-hakukohderyhmat (set (mapcat :hakukohderyhmat selected-ei-jyemp-hakukohteet-tarjonta))
selected-ei-jyemp-hakukohteet (set (map :oid selected-ei-jyemp-hakukohteet-tarjonta))]
[(set/union selected-hakukohteet selected-hakukohderyhmat)
(set/union selected-ei-jyemp-hakukohteet selected-ei-jyemp-hakukohderyhmat)]))
(defn- belongs-to [field-descriptor]
(set (concat (:belongs-to-hakukohderyhma field-descriptor)
(:belongs-to-hakukohteet field-descriptor))))
(defn- field-belongs-to [field-descriptor [selected-hakukohteet-and-ryhmat selected-ei-jyemp-hakukohteet-and-ryhmat] jyemp?]
(let [belongs-to (belongs-to field-descriptor)]
(when (not (empty? belongs-to))
(not (empty? (set/intersection
belongs-to
(if jyemp?
selected-ei-jyemp-hakukohteet-and-ryhmat
selected-hakukohteet-and-ryhmat)))))))
(defn- jyemp? [ylioppilastutkinto? db field-descriptor]
(let [excluded-attachment-ids-when-yo-and-jyemp (get-in db [:application :excluded-attachment-ids-when-yo-and-jyemp])]
(and ylioppilastutkinto?
(contains? excluded-attachment-ids-when-yo-and-jyemp (:id field-descriptor)))))
(defn- nested-visilibity-inner [db {:keys [children options] :as field} visible? hakukohteet-and-ryhmat]
(let [id (-> field :id keyword)
belongs-to-fn (fn []
(->> (jyemp? (ylioppilastutkinto? db) db field)
(field-belongs-to field hakukohteet-and-ryhmat)))
visible? (and visible?
(case (belongs-to-fn)
nil visible?
true visible?
false false))
reduce-fn (fn [db child] (nested-visilibity-inner db child visible? hakukohteet-and-ryhmat))]
(as-> db db'
(assoc-in db' [:application :ui id :visible?] visible?)
(reduce reduce-fn db' (mapcat :followups options))
(reduce reduce-fn db' children))))
(defn set-nested-visibility ([db id visible?]
(set-nested-visibility db id visible? (selected-hakukohteet-and-ryhmat db)))
([db id visible? hakukohteet-and-ryhmat]
(nested-visilibity-inner
db
(u/find-field (get-in db [:form :content]) id)
visible?
hakukohteet-and-ryhmat)))
(declare set-field-visibility)
(defn- set-followup-visibility [db field-descriptor show-followups? show-conditional-followups-fn ylioppilastutkinto? hakukohteet-and-ryhmat]
(let [field-id (-> field-descriptor :id keyword)
value (get-in db [:application :answers field-id :value])
fields-by-id (u/form-sections-by-id-memo (:form db))
remove-fn (fn [condition]
(when show-followups?
(or
(string/blank? value)
(option-visibility/non-blank-answer-satisfies-condition? value condition))))
conditional-sections (->> (:section-visibility-conditions field-descriptor)
(remove remove-fn)
(map (comp keyword :section-name))
(keep (partial get fields-by-id)))]
(as-> db db'
(set-field-visibility db' field-descriptor show-followups? ylioppilastutkinto? hakukohteet-and-ryhmat)
(reduce #(set-nested-visibility %1 (:id %2) (show-conditional-followups-fn show-followups? %2) hakukohteet-and-ryhmat)
db'
conditional-sections))))
(defn- set-visibility-for-option-followups [db options show-followups-fn show-conditional-followups-fn ylioppilastutkinto? hakukohteet-and-ryhmat]
(reduce (fn [db option]
(let [show-followups? (show-followups-fn option)]
(reduce #(set-followup-visibility %1 %2 show-followups? show-conditional-followups-fn ylioppilastutkinto? hakukohteet-and-ryhmat)
db
(:followups option))))
db
options))
(defn- set-followups-visibility
[db field-descriptor visible? ylioppilastutkinto? hakukohteet-and-ryhmat]
(let [component-visibility (atom {})
answer-value (get-in db [:application :answers (keyword (:id field-descriptor)) :value])
visibility-checker (option-visibility/visibility-checker field-descriptor answer-value)
show-followups-fn #(and visible?
(visibility-checker %))
show-conditional-followups-fn (fn [show? field-descriptor]
(let [id (:id field-descriptor)
should-show? (or show? (get @component-visibility id false))]
(swap! component-visibility assoc id should-show?)
should-show?))]
(set-visibility-for-option-followups db
(:options field-descriptor)
show-followups-fn
show-conditional-followups-fn
ylioppilastutkinto?
hakukohteet-and-ryhmat)))
(defn- set-option-visibility [db [index option] visible? id selected-hakukohteet-and-ryhmat]
(let [belongs-to (set (concat (:belongs-to-hakukohderyhma option)
(:belongs-to-hakukohteet option)))]
(assoc-in db [:application :ui id index :visible?]
(boolean
(and visible?
(or (empty? belongs-to)
(not (empty? (set/intersection
belongs-to
selected-hakukohteet-and-ryhmat)))))))))
(defn set-field-visibility
([db field-descriptor]
(set-field-visibility
db
field-descriptor
true
(ylioppilastutkinto? db)
(selected-hakukohteet-and-ryhmat db)))
([db
field-descriptor
visible?
ylioppilastutkinto?
[selected-hakukohteet-and-ryhmat selected-ei-jyemp-hakukohteet-and-ryhmat]]
(let [hakukohteet-and-ryhmat [selected-hakukohteet-and-ryhmat selected-ei-jyemp-hakukohteet-and-ryhmat]
id (keyword (:id field-descriptor))
belongs-to (belongs-to field-descriptor)
jyemp? (jyemp? ylioppilastutkinto? db field-descriptor)
form (:form db)
answers (get-in db [:application :answers])
visible? (and (not (or (get-in field-descriptor [:params :hidden])
(get-in field-descriptor [:hidden])))
visible?
(or (not jyemp?) (not (empty? selected-ei-jyemp-hakukohteet-and-ryhmat)))
(or (empty? belongs-to)
(not (empty? (set/intersection
belongs-to
(if jyemp?
selected-ei-jyemp-hakukohteet-and-ryhmat
selected-hakukohteet-and-ryhmat)))))
(or (not (= :hakukohteet id)) (some? (get-in db [:form :tarjonta])))
(not (u/is-field-hidden-by-section-visibility-conditions form answers field-descriptor)))
child-visibility (fn [db]
(reduce #(set-field-visibility %1 %2 visible? ylioppilastutkinto? hakukohteet-and-ryhmat)
db
(:children field-descriptor)))
option-visibility (fn [db]
(reduce #(set-option-visibility %1 %2 visible? id selected-hakukohteet-and-ryhmat)
db
(map-indexed vector (:options field-descriptor))))
field-visibility (fn [db]
(assoc-in db
[:application :ui id :visible?]
(boolean
(and visible?
(or (empty? (:children field-descriptor))
(some #(get-in db [:application :ui (keyword (:id %)) :visible?])
(:children field-descriptor)))))))]
(cond-> (-> db
child-visibility
option-visibility
field-visibility)
(#{"dropdown" "multipleChoice" "singleChoice" "textField"} (:fieldType field-descriptor))
(set-followups-visibility field-descriptor visible? ylioppilastutkinto? hakukohteet-and-ryhmat)))))
| null | https://raw.githubusercontent.com/Opetushallitus/ataru/ff720c87ecb337f08daf12acedb8cbc2c9329bb6/src/cljs/ataru/hakija/application/field_visibility.cljs | clojure |

da28d8ad3abd4bacee95c0f255771ed6cc9ac06e46ba9bbbc7f857795cf14899 | reborg/clojure-essential-reference | 1.clj |
< 1 >
(ns-name (.ns #'a))
;; user
(ns ns1)
< 2 >
(ns-name (.ns #'b))
;; ns1
| null | https://raw.githubusercontent.com/reborg/clojure-essential-reference/c37fa19d45dd52b2995a191e3e96f0ebdc3f6d69/VarsandNamespaces/ns%2Cin-ns%2Ccreate-nsandremove-ns/1.clj | clojure |

b2ec4bd1f7e69c84ccabf7bdbbbd8bb2d8fc9111b372a741b366c8c5d953c6a4 | ml4tp/tcoq | ocaml.ml |
(************************************************************************)
(* v * The Coq Proof Assistant / The Coq Development Team *)
(* <O___,, * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2017 *)
(* \VV/ **************************************************************)
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
(*s Production of Ocaml syntax. *)
open Pp
open CErrors
open Util
open Names
open Nameops
open Globnames
open Table
open Miniml
open Mlutil
open Modutil
open Common
(*s Some utility functions. *)
let pp_tvar id = str ("'" ^ Id.to_string id)
let pp_abst = function
| [] -> mt ()
| l ->
str "fun " ++ prlist_with_sep (fun () -> str " ") pr_id l ++
str " ->" ++ spc ()
let pp_parameters l =
(pp_boxed_tuple pp_tvar l ++ space_if (not (List.is_empty l)))
let pp_string_parameters l =
(pp_boxed_tuple str l ++ space_if (not (List.is_empty l)))
let pp_letin pat def body =
let fstline = str "let " ++ pat ++ str " =" ++ spc () ++ def in
hv 0 (hv 0 (hov 2 fstline ++ spc () ++ str "in") ++ spc () ++ hov 0 body)
(*s Ocaml renaming issues. *)
let keywords =
List.fold_right (fun s -> Id.Set.add (Id.of_string s))
[ "and"; "as"; "assert"; "begin"; "class"; "constraint"; "do";
"done"; "downto"; "else"; "end"; "exception"; "external"; "false";
"for"; "fun"; "function"; "functor"; "if"; "in"; "include";
"inherit"; "initializer"; "lazy"; "let"; "match"; "method";
"module"; "mutable"; "new"; "object"; "of"; "open"; "or";
"parser"; "private"; "rec"; "sig"; "struct"; "then"; "to"; "true";
"try"; "type"; "val"; "virtual"; "when"; "while"; "with"; "mod";
"land"; "lor"; "lxor"; "lsl"; "lsr"; "asr" ; "unit" ; "_" ; "__" ]
Id.Set.empty
(* Note: do not shorten [str "foo" ++ fnl ()] into [str "foo\n"],
the '\n' character interacts badly with the Format boxing mechanism *)
let pp_open mp = str ("open "^ string_of_modfile mp) ++ fnl ()
let pp_comment s = str "(* " ++ hov 0 s ++ str " *)"
let pp_header_comment = function
| None -> mt ()
| Some com -> pp_comment com ++ fnl2 ()
let then_nl pp = if Pp.is_empty pp then mt () else pp ++ fnl ()
let pp_tdummy usf =
if usf.tdummy || usf.tunknown then str "type __ = Obj.t" ++ fnl () else mt ()
let pp_mldummy usf =
if usf.mldummy then
str "let __ = let rec f _ = Obj.repr f in Obj.repr f" ++ fnl ()
else mt ()
let preamble _ comment used_modules usf =
pp_header_comment comment ++
then_nl (prlist pp_open used_modules) ++
then_nl (pp_tdummy usf ++ pp_mldummy usf)
let sig_preamble _ comment used_modules usf =
pp_header_comment comment ++
then_nl (prlist pp_open used_modules) ++
then_nl (pp_tdummy usf)
(*s The pretty-printer for Ocaml syntax*)
(* Beware of the side-effects of [pp_global] and [pp_modname].
They are used to update table of content for modules. Many [let]
below should not be altered since they force evaluation order.
*)
let str_global k r =
if is_inline_custom r then find_custom r else Common.pp_global k r
let pp_global k r = str (str_global k r)
let pp_modname mp = str (Common.pp_module mp)
let is_infix r =
is_inline_custom r &&
(let s = find_custom r in
let l = String.length s in
l >= 2 && s.[0] == '(' && s.[l-1] == ')')
let get_infix r =
let s = find_custom r in
String.sub s 1 (String.length s - 2)
let get_ind = function
| IndRef _ as r -> r
| ConstructRef (ind,_) -> IndRef ind
| _ -> assert false
let pp_one_field r i = function
| Some r -> pp_global Term r
| None -> pp_global Type (get_ind r) ++ str "__" ++ int i
let pp_field r fields i = pp_one_field r i (List.nth fields i)
let pp_fields r fields = List.map_i (pp_one_field r) 0 fields
(*s Pretty-printing of types. [par] is a boolean indicating whether parentheses
are needed or not. *)
let pp_type par vl t =
let rec pp_rec par = function
| Tmeta _ | Tvar' _ | Taxiom -> assert false
| Tvar i -> (try pp_tvar (List.nth vl (pred i))
with Failure _ -> (str "'a" ++ int i))
| Tglob (r,[a1;a2]) when is_infix r ->
pp_par par (pp_rec true a1 ++ str (get_infix r) ++ pp_rec true a2)
| Tglob (r,[]) -> pp_global Type r
| Tglob (IndRef(kn,0),l)
when not (keep_singleton ()) && MutInd.equal kn (mk_ind "Coq.Init.Specif" "sig") ->
pp_tuple_light pp_rec l
| Tglob (r,l) ->
pp_tuple_light pp_rec l ++ spc () ++ pp_global Type r
| Tarr (t1,t2) ->
pp_par par
(pp_rec true t1 ++ spc () ++ str "->" ++ spc () ++ pp_rec false t2)
| Tdummy _ -> str "__"
| Tunknown -> str "__"
in
hov 0 (pp_rec par t)
(*s Pretty-printing of expressions. [par] indicates whether
parentheses are needed or not. [env] is the list of names for the
de Bruijn variables. [args] is the list of collected arguments
(already pretty-printed). *)
let is_bool_patt p s =
try
let r = match p with
| Pusual r -> r
| Pcons (r,[]) -> r
| _ -> raise Not_found
in
String.equal (find_custom r) s
with Not_found -> false
let is_ifthenelse = function
| [|([],p1,_);([],p2,_)|] -> is_bool_patt p1 "true" && is_bool_patt p2 "false"
| _ -> false
let expr_needs_par = function
| MLlam _ -> true
| MLcase (_,_,[|_|]) -> false
| MLcase (_,_,pv) -> not (is_ifthenelse pv)
| _ -> false
let rec pp_expr par env args =
let apply st = pp_apply st par args
and apply2 st = pp_apply2 st par args in
function
| MLrel n ->
let id = get_db_name n env in
(* Try to survive to the occurrence of a Dummy rel.
TODO: we should get rid of this hack (cf. #592) *)
let id = if Id.equal id dummy_name then Id.of_string "__" else id in
apply (pr_id id)
| MLapp (f,args') ->
let stl = List.map (pp_expr true env []) args' in
pp_expr par env (stl @ args) f
| MLlam _ as a ->
let fl,a' = collect_lams a in
let fl = List.map id_of_mlid fl in
let fl,env' = push_vars fl env in
let st = pp_abst (List.rev fl) ++ pp_expr false env' [] a' in
apply2 st
| MLletin (id,a1,a2) ->
let i,env' = push_vars [id_of_mlid id] env in
let pp_id = pr_id (List.hd i)
and pp_a1 = pp_expr false env [] a1
and pp_a2 = pp_expr (not par && expr_needs_par a2) env' [] a2 in
hv 0 (apply2 (pp_letin pp_id pp_a1 pp_a2))
| MLglob r ->
(try
let args = List.skipn (projection_arity r) args in
let record = List.hd args in
pp_apply (record ++ str "." ++ pp_global Term r) par (List.tl args)
with e when CErrors.noncritical e -> apply (pp_global Term r))
| MLfix (i,ids,defs) ->
let ids',env' = push_vars (List.rev (Array.to_list ids)) env in
pp_fix par env' i (Array.of_list (List.rev ids'),defs) args
| MLexn s ->
(* An [MLexn] may be applied, but I don't really care. *)
pp_par par (str "assert false" ++ spc () ++ str ("(* "^s^" *)"))
| MLdummy k ->
(* An [MLdummy] may be applied, but I don't really care. *)
(match msg_of_implicit k with
| "" -> str "__"
| s -> str "__" ++ spc () ++ str ("(* "^s^" *)"))
| MLmagic a ->
pp_apply (str "Obj.magic") par (pp_expr true env [] a :: args)
| MLaxiom ->
pp_par par (str "failwith \"AXIOM TO BE REALIZED\"")
| MLcons (_,r,a) as c ->
assert (List.is_empty args);
begin match a with
| _ when is_native_char c -> pp_native_char c
| [a1;a2] when is_infix r ->
let pp = pp_expr true env [] in
pp_par par (pp a1 ++ str (get_infix r) ++ pp a2)
| _ when is_coinductive r ->
let ne = not (List.is_empty a) in
let tuple = space_if ne ++ pp_tuple (pp_expr true env []) a in
pp_par par (str "lazy " ++ pp_par ne (pp_global Cons r ++ tuple))
| [] -> pp_global Cons r
| _ ->
let fds = get_record_fields r in
if not (List.is_empty fds) then
pp_record_pat (pp_fields r fds, List.map (pp_expr true env []) a)
else
let tuple = pp_tuple (pp_expr true env []) a in
if String.is_empty (str_global Cons r) (* hack Extract Inductive prod *)
then tuple
else pp_par par (pp_global Cons r ++ spc () ++ tuple)
end
| MLtuple l ->
assert (List.is_empty args);
pp_boxed_tuple (pp_expr true env []) l
| MLcase (_, t, pv) when is_custom_match pv ->
if not (is_regular_match pv) then
error "Cannot mix yet user-given match and general patterns.";
let mkfun (ids,_,e) =
if not (List.is_empty ids) then named_lams (List.rev ids) e
else dummy_lams (ast_lift 1 e) 1
in
let pp_branch tr = pp_expr true env [] (mkfun tr) ++ fnl () in
let inner =
str (find_custom_match pv) ++ fnl () ++
prvect pp_branch pv ++
pp_expr true env [] t
in
apply2 (hov 2 inner)
| MLcase (typ, t, pv) ->
let head =
if not (is_coinductive_type typ) then pp_expr false env [] t
else (str "Lazy.force" ++ spc () ++ pp_expr true env [] t)
in
(* First, can this match be printed as a mere record projection ? *)
(try pp_record_proj par env typ t pv args
with Impossible ->
(* Second, can this match be printed as a let-in ? *)
if Int.equal (Array.length pv) 1 then
let s1,s2 = pp_one_pat env pv.(0) in
hv 0 (apply2 (pp_letin s1 head s2))
else
(* Third, can this match be printed as [if ... then ... else] ? *)
(try apply2 (pp_ifthenelse env head pv)
with Not_found ->
(* Otherwise, standard match *)
apply2
(v 0 (str "match " ++ head ++ str " with" ++ fnl () ++
pp_pat env pv))))
and pp_record_proj par env typ t pv args =
(* Can a match be printed as a mere record projection ? *)
let fields = record_fields_of_type typ in
if List.is_empty fields then raise Impossible;
if not (Int.equal (Array.length pv) 1) then raise Impossible;
if has_deep_pattern pv then raise Impossible;
let (ids,pat,body) = pv.(0) in
let n = List.length ids in
let no_patvar a = not (List.exists (ast_occurs_itvl 1 n) a) in
let rel_i,a = match body with
| MLrel i when i <= n -> i,[]
| MLapp(MLrel i, a) when i<=n && no_patvar a -> i,a
| _ -> raise Impossible
in
let rec lookup_rel i idx = function
| Prel j :: l -> if Int.equal i j then idx else lookup_rel i (idx+1) l
| Pwild :: l -> lookup_rel i (idx+1) l
| _ -> raise Impossible
in
let r,idx = match pat with
| Pusual r -> r, n-rel_i
| Pcons (r,l) -> r, lookup_rel rel_i 0 l
| _ -> raise Impossible
in
if is_infix r then raise Impossible;
let env' = snd (push_vars (List.rev_map id_of_mlid ids) env) in
let pp_args = (List.map (pp_expr true env' []) a) @ args in
let pp_head = pp_expr true env [] t ++ str "." ++ pp_field r fields idx
in
pp_apply pp_head par pp_args
and pp_record_pat (fields, args) =
str "{ " ++
prlist_with_sep (fun () -> str ";" ++ spc ())
(fun (f,a) -> f ++ str " =" ++ spc () ++ a)
(List.combine fields args) ++
str " }"
and pp_cons_pat r ppl =
if is_infix r && Int.equal (List.length ppl) 2 then
List.hd ppl ++ str (get_infix r) ++ List.hd (List.tl ppl)
else
let fields = get_record_fields r in
if not (List.is_empty fields) then pp_record_pat (pp_fields r fields, ppl)
else if String.is_empty (str_global Cons r) then
pp_boxed_tuple identity ppl (* Hack Extract Inductive prod *)
else
pp_global Cons r ++ space_if (not (List.is_empty ppl)) ++ pp_boxed_tuple identity ppl
and pp_gen_pat ids env = function
| Pcons (r, l) -> pp_cons_pat r (List.map (pp_gen_pat ids env) l)
| Pusual r -> pp_cons_pat r (List.map pr_id ids)
| Ptuple l -> pp_boxed_tuple (pp_gen_pat ids env) l
| Pwild -> str "_"
| Prel n -> pr_id (get_db_name n env)
and pp_ifthenelse env expr pv = match pv with
| [|([],tru,the);([],fal,els)|] when
(is_bool_patt tru "true") && (is_bool_patt fal "false")
->
hv 0 (hov 2 (str "if " ++ expr) ++ spc () ++
hov 2 (str "then " ++
hov 2 (pp_expr (expr_needs_par the) env [] the)) ++ spc () ++
hov 2 (str "else " ++
hov 2 (pp_expr (expr_needs_par els) env [] els)))
| _ -> raise Not_found
and pp_one_pat env (ids,p,t) =
let ids',env' = push_vars (List.rev_map id_of_mlid ids) env in
pp_gen_pat (List.rev ids') env' p,
pp_expr (expr_needs_par t) env' [] t
and pp_pat env pv =
prvecti
(fun i x ->
let s1,s2 = pp_one_pat env x in
hv 2 (hov 4 (str "| " ++ s1 ++ str " ->") ++ spc () ++ hov 2 s2) ++
if Int.equal i (Array.length pv - 1) then mt () else fnl ())
pv
and pp_function env t =
let bl,t' = collect_lams t in
let bl,env' = push_vars (List.map id_of_mlid bl) env in
match t' with
| MLcase(Tglob(r,_),MLrel 1,pv) when
not (is_coinductive r) && List.is_empty (get_record_fields r) &&
not (is_custom_match pv) ->
if not (ast_occurs 1 (MLcase(Tunknown,MLaxiom,pv))) then
pr_binding (List.rev (List.tl bl)) ++
str " = function" ++ fnl () ++
v 0 (pp_pat env' pv)
else
pr_binding (List.rev bl) ++
str " = match " ++ pr_id (List.hd bl) ++ str " with" ++ fnl () ++
v 0 (pp_pat env' pv)
| _ ->
pr_binding (List.rev bl) ++
str " =" ++ fnl () ++ str " " ++
hov 2 (pp_expr false env' [] t')
(*s names of the functions ([ids]) are already pushed in [env],
and passed here just for convenience. *)
and pp_fix par env i (ids,bl) args =
pp_par par
(v 0 (str "let rec " ++
prvect_with_sep
(fun () -> fnl () ++ str "and ")
(fun (fi,ti) -> pr_id fi ++ pp_function env ti)
(Array.map2 (fun id b -> (id,b)) ids bl) ++
fnl () ++
hov 2 (str "in " ++ pp_apply (pr_id ids.(i)) false args)))
(* Ad-hoc double-newline in v boxes, with enough negative whitespace
to avoid indenting the intermediate blank line *)
let cut2 () = brk (0,-100000) ++ brk (0,0)
let pp_val e typ =
hov 4 (str "(** val " ++ e ++ str " :" ++ spc () ++ pp_type false [] typ ++
str " **)") ++ cut2 ()
(*s Pretty-printing of [Dfix] *)
let pp_Dfix (rv,c,t) =
let names = Array.map
(fun r -> if is_inline_custom r then mt () else pp_global Term r) rv
in
let rec pp init i =
if i >= Array.length rv then mt ()
else
let void = is_inline_custom rv.(i) ||
(not (is_custom rv.(i)) &&
match c.(i) with MLexn "UNUSED" -> true | _ -> false)
in
if void then pp init (i+1)
else
let def =
if is_custom rv.(i) then str " = " ++ str (find_custom rv.(i))
else pp_function (empty_env ()) c.(i)
in
(if init then mt () else cut2 ()) ++
pp_val names.(i) t.(i) ++
str (if init then "let rec " else "and ") ++ names.(i) ++ def ++
pp false (i+1)
in pp true 0
(*s Pretty-printing of inductive types declaration. *)
let pp_equiv param_list name = function
| NoEquiv, _ -> mt ()
| Equiv kn, i ->
str " = " ++ pp_parameters param_list ++ pp_global Type (IndRef (mind_of_kn kn,i))
| RenEquiv ren, _ ->
str " = " ++ pp_parameters param_list ++ str (ren^".") ++ name
let pp_one_ind prefix ip_equiv pl name cnames ctyps =
let pl = rename_tvars keywords pl in
let pp_constructor i typs =
(if Int.equal i 0 then mt () else fnl ()) ++
hov 3 (str "| " ++ cnames.(i) ++
(if List.is_empty typs then mt () else str " of ") ++
prlist_with_sep
(fun () -> spc () ++ str "* ") (pp_type true pl) typs)
in
pp_parameters pl ++ str prefix ++ name ++
pp_equiv pl name ip_equiv ++ str " =" ++
if Int.equal (Array.length ctyps) 0 then str " unit (* empty inductive *)"
else fnl () ++ v 0 (prvecti pp_constructor ctyps)
let pp_logical_ind packet =
pp_comment (pr_id packet.ip_typename ++ str " : logical inductive") ++
fnl () ++
pp_comment (str "with constructors : " ++
prvect_with_sep spc pr_id packet.ip_consnames) ++
fnl ()
let pp_singleton kn packet =
let name = pp_global Type (IndRef (kn,0)) in
let l = rename_tvars keywords packet.ip_vars in
hov 2 (str "type " ++ pp_parameters l ++ name ++ str " =" ++ spc () ++
pp_type false l (List.hd packet.ip_types.(0)) ++ fnl () ++
pp_comment (str "singleton inductive, whose constructor was " ++
pr_id packet.ip_consnames.(0)))
let pp_record kn fields ip_equiv packet =
let ind = IndRef (kn,0) in
let name = pp_global Type ind in
let fieldnames = pp_fields ind fields in
let l = List.combine fieldnames packet.ip_types.(0) in
let pl = rename_tvars keywords packet.ip_vars in
str "type " ++ pp_parameters pl ++ name ++
pp_equiv pl name ip_equiv ++ str " = { "++
hov 0 (prlist_with_sep (fun () -> str ";" ++ spc ())
(fun (p,t) -> p ++ str " : " ++ pp_type true pl t) l)
++ str " }"
let pp_coind pl name =
let pl = rename_tvars keywords pl in
pp_parameters pl ++ name ++ str " = " ++
pp_parameters pl ++ str "__" ++ name ++ str " Lazy.t" ++
fnl() ++ str "and "
let pp_ind co kn ind =
let prefix = if co then "__" else "" in
let initkwd = str "type " in
let nextkwd = fnl () ++ str "and " in
let names =
Array.mapi (fun i p -> if p.ip_logical then mt () else
pp_global Type (IndRef (kn,i)))
ind.ind_packets
in
let cnames =
Array.mapi
(fun i p -> if p.ip_logical then [||] else
Array.mapi (fun j _ -> pp_global Cons (ConstructRef ((kn,i),j+1)))
p.ip_types)
ind.ind_packets
in
let rec pp i kwd =
if i >= Array.length ind.ind_packets then mt ()
else
let ip = (kn,i) in
let ip_equiv = ind.ind_equiv, i in
let p = ind.ind_packets.(i) in
if is_custom (IndRef ip) then pp (i+1) kwd
else if p.ip_logical then pp_logical_ind p ++ pp (i+1) kwd
else
kwd ++ (if co then pp_coind p.ip_vars names.(i) else mt ()) ++
pp_one_ind prefix ip_equiv p.ip_vars names.(i) cnames.(i) p.ip_types ++
pp (i+1) nextkwd
in
pp 0 initkwd
(*s Pretty-printing of a declaration. *)
let pp_mind kn i =
match i.ind_kind with
| Singleton -> pp_singleton kn i.ind_packets.(0)
| Coinductive -> pp_ind true kn i
| Record fields -> pp_record kn fields (i.ind_equiv,0) i.ind_packets.(0)
| Standard -> pp_ind false kn i
let pp_decl = function
| Dtype (r,_,_) when is_inline_custom r -> mt ()
| Dterm (r,_,_) when is_inline_custom r -> mt ()
| Dind (kn,i) -> pp_mind kn i
| Dtype (r, l, t) ->
let name = pp_global Type r in
let l = rename_tvars keywords l in
let ids, def =
try
let ids,s = find_type_custom r in
pp_string_parameters ids, str " =" ++ spc () ++ str s
with Not_found ->
pp_parameters l,
if t == Taxiom then str " (* AXIOM TO BE REALIZED *)"
else str " =" ++ spc () ++ pp_type false l t
in
hov 2 (str "type " ++ ids ++ name ++ def)
| Dterm (r, a, t) ->
let def =
if is_custom r then str (" = " ^ find_custom r)
else if is_projection r then
(prvect str (Array.make (projection_arity r) " _")) ++
str " x = x."
else pp_function (empty_env ()) a
in
let name = pp_global Term r in
let postdef = if is_projection r then name else mt () in
pp_val name t ++ hov 0 (str "let " ++ name ++ def ++ postdef)
| Dfix (rv,defs,typs) ->
pp_Dfix (rv,defs,typs)
let pp_alias_decl ren = function
| Dind (kn,i) -> pp_mind kn { i with ind_equiv = RenEquiv ren }
| Dtype (r, l, _) ->
let name = pp_global Type r in
let l = rename_tvars keywords l in
let ids = pp_parameters l in
hov 2 (str "type " ++ ids ++ name ++ str " =" ++ spc () ++ ids ++
str (ren^".") ++ name)
| Dterm (r, a, t) ->
let name = pp_global Term r in
hov 2 (str "let " ++ name ++ str (" = "^ren^".") ++ name)
| Dfix (rv, _, _) ->
prvecti (fun i r -> if is_inline_custom r then mt () else
let name = pp_global Term r in
hov 2 (str "let " ++ name ++ str (" = "^ren^".") ++ name) ++
fnl ())
rv
let pp_spec = function
| Sval (r,_) when is_inline_custom r -> mt ()
| Stype (r,_,_) when is_inline_custom r -> mt ()
| Sind (kn,i) -> pp_mind kn i
| Sval (r,t) ->
let def = pp_type false [] t in
let name = pp_global Term r in
hov 2 (str "val " ++ name ++ str " :" ++ spc () ++ def)
| Stype (r,vl,ot) ->
let name = pp_global Type r in
let l = rename_tvars keywords vl in
let ids, def =
try
let ids, s = find_type_custom r in
pp_string_parameters ids, str " =" ++ spc () ++ str s
with Not_found ->
let ids = pp_parameters l in
match ot with
| None -> ids, mt ()
| Some Taxiom -> ids, str " (* AXIOM TO BE REALIZED *)"
| Some t -> ids, str " =" ++ spc () ++ pp_type false l t
in
hov 2 (str "type " ++ ids ++ name ++ def)
let pp_alias_spec ren = function
| Sind (kn,i) -> pp_mind kn { i with ind_equiv = RenEquiv ren }
| Stype (r,l,_) ->
let name = pp_global Type r in
let l = rename_tvars keywords l in
let ids = pp_parameters l in
hov 2 (str "type " ++ ids ++ name ++ str " =" ++ spc () ++ ids ++
str (ren^".") ++ name)
| Sval _ -> assert false
let rec pp_specif = function
| (_,Spec (Sval _ as s)) -> pp_spec s
| (l,Spec s) ->
(try
let ren = Common.check_duplicate (top_visible_mp ()) l in
hov 1 (str ("module "^ren^" : sig") ++ fnl () ++ pp_spec s) ++
fnl () ++ str "end" ++ fnl () ++
pp_alias_spec ren s
with Not_found -> pp_spec s)
| (l,Smodule mt) ->
let def = pp_module_type [] mt in
let name = pp_modname (MPdot (top_visible_mp (), l)) in
hov 1 (str "module " ++ name ++ str " :" ++ fnl () ++ def) ++
(try
let ren = Common.check_duplicate (top_visible_mp ()) l in
fnl () ++
hov 1 (str ("module "^ren^" :") ++ spc () ++
str "module type of struct include " ++ name ++ str " end")
with Not_found -> Pp.mt ())
| (l,Smodtype mt) ->
let def = pp_module_type [] mt in
let name = pp_modname (MPdot (top_visible_mp (), l)) in
hov 1 (str "module type " ++ name ++ str " =" ++ fnl () ++ def) ++
(try
let ren = Common.check_duplicate (top_visible_mp ()) l in
fnl () ++ str ("module type "^ren^" = ") ++ name
with Not_found -> Pp.mt ())
and pp_module_type params = function
| MTident kn ->
pp_modname kn
| MTfunsig (mbid, mt, mt') ->
let typ = pp_module_type [] mt in
let name = pp_modname (MPbound mbid) in
let def = pp_module_type (MPbound mbid :: params) mt' in
str "functor (" ++ name ++ str ":" ++ typ ++ str ") ->" ++ fnl () ++ def
| MTsig (mp, sign) ->
push_visible mp params;
let try_pp_specif l x =
let px = pp_specif x in
if Pp.is_empty px then l else px::l
in
(* We cannot use fold_right here due to side effects in pp_specif *)
let l = List.fold_left try_pp_specif [] sign in
let l = List.rev l in
pop_visible ();
str "sig" ++ fnl () ++
v 1 (str " " ++ prlist_with_sep cut2 identity l) ++
fnl () ++ str "end"
| MTwith(mt,ML_With_type(idl,vl,typ)) ->
let ids = pp_parameters (rename_tvars keywords vl) in
let mp_mt = msid_of_mt mt in
let l,idl' = List.sep_last idl in
let mp_w =
List.fold_left (fun mp l -> MPdot(mp,Label.of_id l)) mp_mt idl'
in
let r = ConstRef (Constant.make2 mp_w (Label.of_id l)) in
push_visible mp_mt [];
let pp_w = str " with type " ++ ids ++ pp_global Type r in
pop_visible();
pp_module_type [] mt ++ pp_w ++ str " = " ++ pp_type false vl typ
| MTwith(mt,ML_With_module(idl,mp)) ->
let mp_mt = msid_of_mt mt in
let mp_w =
List.fold_left (fun mp id -> MPdot(mp,Label.of_id id)) mp_mt idl
in
push_visible mp_mt [];
let pp_w = str " with module " ++ pp_modname mp_w in
pop_visible ();
pp_module_type [] mt ++ pp_w ++ str " = " ++ pp_modname mp
let is_short = function MEident _ | MEapply _ -> true | _ -> false
let rec pp_structure_elem = function
| (l,SEdecl d) ->
(try
let ren = Common.check_duplicate (top_visible_mp ()) l in
hov 1 (str ("module "^ren^" = struct") ++ fnl () ++ pp_decl d) ++
fnl () ++ str "end" ++ fnl () ++
pp_alias_decl ren d
with Not_found -> pp_decl d)
| (l,SEmodule m) ->
let typ =
(* virtual printing of the type, in order to have a correct mli later*)
if Common.get_phase () == Pre then
str ": " ++ pp_module_type [] m.ml_mod_type
else mt ()
in
let def = pp_module_expr [] m.ml_mod_expr in
let name = pp_modname (MPdot (top_visible_mp (), l)) in
hov 1
(str "module " ++ name ++ typ ++ str " =" ++
(if is_short m.ml_mod_expr then spc () else fnl ()) ++ def) ++
(try
let ren = Common.check_duplicate (top_visible_mp ()) l in
fnl () ++ str ("module "^ren^" = ") ++ name
with Not_found -> mt ())
| (l,SEmodtype m) ->
let def = pp_module_type [] m in
let name = pp_modname (MPdot (top_visible_mp (), l)) in
hov 1 (str "module type " ++ name ++ str " =" ++ fnl () ++ def) ++
(try
let ren = Common.check_duplicate (top_visible_mp ()) l in
fnl () ++ str ("module type "^ren^" = ") ++ name
with Not_found -> mt ())
and pp_module_expr params = function
| MEident mp -> pp_modname mp
| MEapply (me, me') ->
pp_module_expr [] me ++ str "(" ++ pp_module_expr [] me' ++ str ")"
| MEfunctor (mbid, mt, me) ->
let name = pp_modname (MPbound mbid) in
let typ = pp_module_type [] mt in
let def = pp_module_expr (MPbound mbid :: params) me in
str "functor (" ++ name ++ str ":" ++ typ ++ str ") ->" ++ fnl () ++ def
| MEstruct (mp, sel) ->
push_visible mp params;
let try_pp_structure_elem l x =
let px = pp_structure_elem x in
if Pp.is_empty px then l else px::l
in
(* We cannot use fold_right here due to side effects in pp_structure_elem *)
let l = List.fold_left try_pp_structure_elem [] sel in
let l = List.rev l in
pop_visible ();
str "struct" ++ fnl () ++
v 1 (str " " ++ prlist_with_sep cut2 identity l) ++
fnl () ++ str "end"
let rec prlist_sep_nonempty sep f = function
| [] -> mt ()
| [h] -> f h
| h::t ->
let e = f h in
let r = prlist_sep_nonempty sep f t in
if Pp.is_empty e then r
else e ++ sep () ++ r
let do_struct f s =
let ppl (mp,sel) =
push_visible mp [];
let p = prlist_sep_nonempty cut2 f sel in
(* for monolithic extraction, we try to simulate the unavailability
of [MPfile] in names by artificially nesting these [MPfile] *)
(if modular () then pop_visible ()); p
in
let p = prlist_sep_nonempty cut2 ppl s in
(if not (modular ()) then repeat (List.length s) pop_visible ());
v 0 p ++ fnl ()
let pp_struct s = do_struct pp_structure_elem s
let pp_signature s = do_struct pp_specif s
let ocaml_descr = {
keywords = keywords;
file_suffix = ".ml";
file_naming = file_of_modfile;
preamble = preamble;
pp_struct = pp_struct;
sig_suffix = Some ".mli";
sig_preamble = sig_preamble;
pp_sig = pp_signature;
pp_decl = pp_decl;
}
| null | https://raw.githubusercontent.com/ml4tp/tcoq/7a78c31df480fba721648f277ab0783229c8bece/plugins/extraction/ocaml.ml | ocaml |
hov 1 (str ("module "^ren^" : sig") ++ fnl () ++ pp_spec s) ++
fnl () ++ str "end" ++ fnl () ++
pp_alias_spec ren s
with Not_found -> pp_spec s)
| (l,Smodule mt) ->
let def = pp_module_type [] mt in
let name = pp_modname (MPdot (top_visible_mp (), l)) in
hov 1 (str "module " ++ name ++ str " :" ++ fnl () ++ def) ++
(try
let ren = Common.check_duplicate (top_visible_mp ()) l in
fnl () ++
hov 1 (str ("module "^ren^" :") ++ spc () ++
str "module type of struct include " ++ name ++ str " end")
with Not_found -> Pp.mt ())
| (l,Smodtype mt) ->
let def = pp_module_type [] mt in
let name = pp_modname (MPdot (top_visible_mp (), l)) in
hov 1 (str "module type " ++ name ++ str " =" ++ fnl () ++ def) ++
(try
let ren = Common.check_duplicate (top_visible_mp ()) l in
fnl () ++ str ("module type "^ren^" = ") ++ name
with Not_found -> Pp.mt ())
and pp_module_type params = function
| MTident kn ->
pp_modname kn
| MTfunsig (mbid, mt, mt') ->
let typ = pp_module_type [] mt in
let name = pp_modname (MPbound mbid) in
let def = pp_module_type (MPbound mbid :: params) mt' in
str "functor (" ++ name ++ str ":" ++ typ ++ str ") ->" ++ fnl () ++ def
| MTsig (mp, sign) ->
push_visible mp params;
let try_pp_specif l x =
let px = pp_specif x in
if Pp.is_empty px then l else px::l
in
let l = List.fold_left try_pp_specif [] sign in
let l = List.rev l in
pop_visible ();
str "sig" ++ fnl () ++
v 1 (str " " ++ prlist_with_sep cut2 identity l) ++
fnl () ++ str "end"
| MTwith(mt,ML_With_type(idl,vl,typ)) ->
let ids = pp_parameters (rename_tvars keywords vl) in
let mp_mt = msid_of_mt mt in
let l,idl' = List.sep_last idl in
let mp_w =
List.fold_left (fun mp l -> MPdot(mp,Label.of_id l)) mp_mt idl'
in
let r = ConstRef (Constant.make2 mp_w (Label.of_id l)) in
push_visible mp_mt [];
let pp_w = str " with type " ++ ids ++ pp_global Type r in
pop_visible();
pp_module_type [] mt ++ pp_w ++ str " = " ++ pp_type false vl typ
| MTwith(mt,ML_With_module(idl,mp)) ->
let mp_mt = msid_of_mt mt in
let mp_w =
List.fold_left (fun mp id -> MPdot(mp,Label.of_id id)) mp_mt idl
in
push_visible mp_mt [];
let pp_w = str " with module " ++ pp_modname mp_w in
pop_visible ();
pp_module_type [] mt ++ pp_w ++ str " = " ++ pp_modname mp
let is_short = function MEident _ | MEapply _ -> true | _ -> false
let rec pp_structure_elem = function
| (l,SEdecl d) ->
(try
let ren = Common.check_duplicate (top_visible_mp ()) l in
hov 1 (str ("module "^ren^" = struct") ++ fnl () ++ pp_decl d) ++
fnl () ++ str "end" ++ fnl () ++
pp_alias_decl ren d
with Not_found -> pp_decl d)
| (l,SEmodule m) ->
let typ =
if Common.get_phase () == Pre then
str ": " ++ pp_module_type [] m.ml_mod_type
else mt ()
in
let def = pp_module_expr [] m.ml_mod_expr in
let name = pp_modname (MPdot (top_visible_mp (), l)) in
hov 1
(str "module " ++ name ++ typ ++ str " =" ++
(if is_short m.ml_mod_expr then spc () else fnl ()) ++ def) ++
(try
let ren = Common.check_duplicate (top_visible_mp ()) l in
fnl () ++ str ("module "^ren^" = ") ++ name
with Not_found -> mt ())
| (l,SEmodtype m) ->
let def = pp_module_type [] m in
let name = pp_modname (MPdot (top_visible_mp (), l)) in
hov 1 (str "module type " ++ name ++ str " =" ++ fnl () ++ def) ++
(try
let ren = Common.check_duplicate (top_visible_mp ()) l in
fnl () ++ str ("module type "^ren^" = ") ++ name
with Not_found -> mt ())
and pp_module_expr params = function
| MEident mp -> pp_modname mp
| MEapply (me, me') ->
pp_module_expr [] me ++ str "(" ++ pp_module_expr [] me' ++ str ")"
| MEfunctor (mbid, mt, me) ->
let name = pp_modname (MPbound mbid) in
let typ = pp_module_type [] mt in
let def = pp_module_expr (MPbound mbid :: params) me in
str "functor (" ++ name ++ str ":" ++ typ ++ str ") ->" ++ fnl () ++ def
| MEstruct (mp, sel) ->
push_visible mp params;
let try_pp_structure_elem l x =
let px = pp_structure_elem x in
if Pp.is_empty px then l else px::l
in
let l = List.fold_left try_pp_structure_elem [] sel in
let l = List.rev l in
pop_visible ();
str "struct" ++ fnl () ++
v 1 (str " " ++ prlist_with_sep cut2 identity l) ++
fnl () ++ str "end"
let rec prlist_sep_nonempty sep f = function
| [] -> mt ()
| [h] -> f h
| h::t ->
let e = f h in
let r = prlist_sep_nonempty sep f t in
if Pp.is_empty e then r
else e ++ sep () ++ r
let do_struct f s =
let ppl (mp,sel) =
push_visible mp [];
let p = prlist_sep_nonempty cut2 f sel in
(if modular () then pop_visible ()); p
in
let p = prlist_sep_nonempty cut2 ppl s in
(if not (modular ()) then repeat (List.length s) pop_visible ());
v 0 p ++ fnl ()
let pp_struct s = do_struct pp_structure_elem s
let pp_signature s = do_struct pp_specif s
let ocaml_descr = {
keywords = keywords;
file_suffix = ".ml";
file_naming = file_of_modfile;
preamble = preamble;
pp_struct = pp_struct;
sig_suffix = Some ".mli";
sig_preamble = sig_preamble;
pp_sig = pp_signature;
pp_decl = pp_decl;
}
|
e881bdc4db6669c5f276766b9c339b4ec505ef87def3a2c05eb9994b61aa65e8 | michaelmelanson/erlyweb | customer.erl | -module(customer).
-compile(export_all).
relations() ->
[{many_to_many, [customer]}].
| null | https://raw.githubusercontent.com/michaelmelanson/erlyweb/997df18b70459bfaaf8c3ab70ab4f54907045d0f/test/erlydb/customer.erl | erlang | -module(customer).
-compile(export_all).
relations() ->
[{many_to_many, [customer]}].
|
|
d417617bda9a1bf6a043d499d358727a2306c3325b541cef27d18d1d761ffd3c | nwtgck/platy-lang-haskell | SemanticCheckSpec.hs | # LANGUAGE NamedFieldPuns #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE QuasiQuotes #
module Platy.SemanticCheckSpec where
import Test.Hspec
import Test.Hspec.QuickCheck
import Test.QuickCheck
import qualified Data.String.Here as Here
import qualified Data.ByteString.Char8 as BS
import qualified Data.Text.Lazy.IO as TIO
import qualified Data.ByteString as ByteString
import qualified Data.Map as Map
import Data.Map (Map)
import qualified Control.Monad.State as Monad.State
import qualified LLVM.Pretty
import Debug.Trace
import Platy.Datatypes
import Platy . Codegen
import Platy.SemanticCheck
import Platy.Utils
import qualified Platy.TestUtils as TestUtils
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "[positive] exprToTypedExpr" $ do
it "int literal" $ do
let expr1 = LitExpr {anno=(), lit=IntLit 3232}
let initEnv = SemanticCheckEnv {globalVarTable=Map.empty, localVarTables=[]}
let actual = Monad.State.evalStateT (runSemanticCheck (exprToTypedExpr expr1)) initEnv
let expect = Right LitExpr {anno=IntTy, lit=IntLit 3232}
actual `shouldBe` expect
it "char literal" $ do
let expr1 = LitExpr {anno=(), lit=CharLit 'm'}
let initEnv = SemanticCheckEnv {globalVarTable=Map.empty, localVarTables=[]}
let actual = Monad.State.evalStateT (runSemanticCheck (exprToTypedExpr expr1)) initEnv
let expect = Right LitExpr {anno=CharTy, lit=CharLit 'm'}
actual `shouldBe` expect
it "local identifier" $ do
let expr1 = IdentExpr {anno=(), ident=Ident "a"}
let initEnv = SemanticCheckEnv {globalVarTable=Map.empty, localVarTables=[Map.fromList [(Ident "a", LVarIdentInfo{ty=IntTy})] ]}
let actual = Monad.State.evalStateT (runSemanticCheck (exprToTypedExpr expr1)) initEnv
let expect = Right IdentExpr {anno=IntTy, ident=Ident "a"}
actual `shouldBe` expect
it "global identifier" $ do
let expr1 = IdentExpr {anno=(), ident=Ident "a"}
let initEnv = SemanticCheckEnv {globalVarTable=Map.fromList [(Ident "a", GVarIdentInfo{ty=IntTy})], localVarTables=[]}
let actual = Monad.State.evalStateT (runSemanticCheck (exprToTypedExpr expr1)) initEnv
let expect = Right IdentExpr {anno=IntTy, ident=Ident "a"}
actual `shouldBe` expect
it "if expression" $ do
let expr1 = IfExpr
{ anno = ()
, condExpr =
LitExpr
{ anno = ()
, lit = BoolLit True
}
, thenExpr =
LitExpr
{ anno = ()
, lit = IntLit 18181
}
, elseExpr =
LitExpr
{ anno = ()
, lit = IntLit 2332
}
}
let initEnv = SemanticCheckEnv {globalVarTable=Map.empty, localVarTables=[]}
let actual = Monad.State.evalStateT (runSemanticCheck (exprToTypedExpr expr1)) initEnv
let expect = Right IfExpr
{ anno = IntTy
, condExpr =
LitExpr
{ anno = BoolTy
, lit = BoolLit True
}
, thenExpr =
LitExpr
{ anno = IntTy
, lit = IntLit 18181
}
, elseExpr =
LitExpr
{ anno = IntTy
, lit = IntLit 2332
}
}
actual `shouldBe` expect
it "apply" $ do
let expr1 = ApplyExpr
{ anno = ()
, calleeIdent = Ident "myfunc"
, argExprs =
[ LitExpr
{ anno = ()
, lit = BoolLit False
}
, LitExpr
{ anno = ()
, lit = CharLit 's'
}
]
}
let initEnv = SemanticCheckEnv
{ globalVarTable =
Map.fromList
[ ( Ident "myfunc"
, FuncIdentInfo
{ retTy = IntTy
, paramTys = [BoolTy, CharTy]
})
]
, localVarTables = []
}
let actual = Monad.State.evalStateT (runSemanticCheck (exprToTypedExpr expr1)) initEnv
let expect = Right ApplyExpr
{ anno = IntTy
, calleeIdent = Ident "myfunc"
, argExprs =
[ LitExpr
{ anno = BoolTy
, lit = BoolLit False
}
, LitExpr
{ anno = CharTy
, lit = CharLit 's'
}
]
}
actual `shouldBe` expect
it "let-expression" $ do
let expr1 = LetExpr
{ anno = ()
, binds =
[ Bind
{ ident = Ident "a"
, ty = IntTy
, bodyExpr = LitExpr {anno=(), lit=IntLit 889922}
}
, Bind
{ ident = Ident "b"
, ty = CharTy
, bodyExpr = LitExpr {anno=(), lit=CharLit 'j'}
}
]
, inExpr = IdentExpr{anno=(), ident=Ident "a"}
}
let initEnv = SemanticCheckEnv {globalVarTable=Map.empty, localVarTables=[]}
let actual = Monad.State.evalStateT (runSemanticCheck (exprToTypedExpr expr1)) initEnv
let expect = Right LetExpr
{ anno = IntTy
, binds =
[ Bind
{ ident = Ident "a"
, ty = IntTy
, bodyExpr = LitExpr {anno=IntTy, lit=IntLit 889922}
}
, Bind
{ ident = Ident "b"
, ty = CharTy
, bodyExpr = LitExpr {anno=CharTy, lit=CharLit 'j'}
}
]
, inExpr = IdentExpr{anno=IntTy, ident=Ident "a"}
}
actual `shouldBe` expect
describe "[positive] programToTypedProgram" $ do
it "global-let" $ do
let prog1 = Program
{ gdefs =
[ LetGdef
{ bind =
Bind
{ ident = Ident "a"
, ty = IntTy
, bodyExpr = LitExpr {anno=(), lit=IntLit 337733}
}
}
, LetGdef
{ bind =
Bind
{ ident = Ident "b"
, ty = IntTy
, bodyExpr = LitExpr {anno=(), lit=IntLit 909}
}
}
]
}
let actual = programToTypedProgram prog1
let expect = Right Program
{ gdefs =
[ LetGdef
{ bind =
Bind
{ ident = Ident "a"
, ty = IntTy
, bodyExpr = LitExpr {anno=IntTy, lit=IntLit 337733}
}
}
, LetGdef
{ bind =
Bind
{ ident = Ident "b"
, ty = IntTy
, bodyExpr = LitExpr {anno=IntTy, lit=IntLit 909}
}
}
]
}
actual `shouldBe` expect
it "func" $ do
let prog1 = Program
{ gdefs =
[ FuncGdef
{ ident = Ident "myfunc"
, params = [Param {ident=Ident "p", ty=IntTy}, Param {ident=Ident "q", ty=CharTy}]
, retTy = IntTy
, bodyExpr = IdentExpr{anno=(), ident=Ident "p"}
}
]
}
let actual = programToTypedProgram prog1
let expect = Right Program
{ gdefs =
[ FuncGdef
{ ident = Ident "myfunc"
, params = [Param {ident=Ident "p", ty=IntTy}, Param {ident=Ident "q", ty=CharTy}]
, retTy = IntTy
, bodyExpr = IdentExpr{anno=IntTy, ident=Ident "p"}
}
]
}
actual `shouldBe` expect | null | https://raw.githubusercontent.com/nwtgck/platy-lang-haskell/a8f84ab65207161b0cebd8378eb62863202723f9/test/Platy/SemanticCheckSpec.hs | haskell | # LANGUAGE NamedFieldPuns #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE QuasiQuotes #
module Platy.SemanticCheckSpec where
import Test.Hspec
import Test.Hspec.QuickCheck
import Test.QuickCheck
import qualified Data.String.Here as Here
import qualified Data.ByteString.Char8 as BS
import qualified Data.Text.Lazy.IO as TIO
import qualified Data.ByteString as ByteString
import qualified Data.Map as Map
import Data.Map (Map)
import qualified Control.Monad.State as Monad.State
import qualified LLVM.Pretty
import Debug.Trace
import Platy.Datatypes
import Platy . Codegen
import Platy.SemanticCheck
import Platy.Utils
import qualified Platy.TestUtils as TestUtils
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "[positive] exprToTypedExpr" $ do
it "int literal" $ do
let expr1 = LitExpr {anno=(), lit=IntLit 3232}
let initEnv = SemanticCheckEnv {globalVarTable=Map.empty, localVarTables=[]}
let actual = Monad.State.evalStateT (runSemanticCheck (exprToTypedExpr expr1)) initEnv
let expect = Right LitExpr {anno=IntTy, lit=IntLit 3232}
actual `shouldBe` expect
it "char literal" $ do
let expr1 = LitExpr {anno=(), lit=CharLit 'm'}
let initEnv = SemanticCheckEnv {globalVarTable=Map.empty, localVarTables=[]}
let actual = Monad.State.evalStateT (runSemanticCheck (exprToTypedExpr expr1)) initEnv
let expect = Right LitExpr {anno=CharTy, lit=CharLit 'm'}
actual `shouldBe` expect
it "local identifier" $ do
let expr1 = IdentExpr {anno=(), ident=Ident "a"}
let initEnv = SemanticCheckEnv {globalVarTable=Map.empty, localVarTables=[Map.fromList [(Ident "a", LVarIdentInfo{ty=IntTy})] ]}
let actual = Monad.State.evalStateT (runSemanticCheck (exprToTypedExpr expr1)) initEnv
let expect = Right IdentExpr {anno=IntTy, ident=Ident "a"}
actual `shouldBe` expect
it "global identifier" $ do
let expr1 = IdentExpr {anno=(), ident=Ident "a"}
let initEnv = SemanticCheckEnv {globalVarTable=Map.fromList [(Ident "a", GVarIdentInfo{ty=IntTy})], localVarTables=[]}
let actual = Monad.State.evalStateT (runSemanticCheck (exprToTypedExpr expr1)) initEnv
let expect = Right IdentExpr {anno=IntTy, ident=Ident "a"}
actual `shouldBe` expect
it "if expression" $ do
let expr1 = IfExpr
{ anno = ()
, condExpr =
LitExpr
{ anno = ()
, lit = BoolLit True
}
, thenExpr =
LitExpr
{ anno = ()
, lit = IntLit 18181
}
, elseExpr =
LitExpr
{ anno = ()
, lit = IntLit 2332
}
}
let initEnv = SemanticCheckEnv {globalVarTable=Map.empty, localVarTables=[]}
let actual = Monad.State.evalStateT (runSemanticCheck (exprToTypedExpr expr1)) initEnv
let expect = Right IfExpr
{ anno = IntTy
, condExpr =
LitExpr
{ anno = BoolTy
, lit = BoolLit True
}
, thenExpr =
LitExpr
{ anno = IntTy
, lit = IntLit 18181
}
, elseExpr =
LitExpr
{ anno = IntTy
, lit = IntLit 2332
}
}
actual `shouldBe` expect
it "apply" $ do
let expr1 = ApplyExpr
{ anno = ()
, calleeIdent = Ident "myfunc"
, argExprs =
[ LitExpr
{ anno = ()
, lit = BoolLit False
}
, LitExpr
{ anno = ()
, lit = CharLit 's'
}
]
}
let initEnv = SemanticCheckEnv
{ globalVarTable =
Map.fromList
[ ( Ident "myfunc"
, FuncIdentInfo
{ retTy = IntTy
, paramTys = [BoolTy, CharTy]
})
]
, localVarTables = []
}
let actual = Monad.State.evalStateT (runSemanticCheck (exprToTypedExpr expr1)) initEnv
let expect = Right ApplyExpr
{ anno = IntTy
, calleeIdent = Ident "myfunc"
, argExprs =
[ LitExpr
{ anno = BoolTy
, lit = BoolLit False
}
, LitExpr
{ anno = CharTy
, lit = CharLit 's'
}
]
}
actual `shouldBe` expect
it "let-expression" $ do
let expr1 = LetExpr
{ anno = ()
, binds =
[ Bind
{ ident = Ident "a"
, ty = IntTy
, bodyExpr = LitExpr {anno=(), lit=IntLit 889922}
}
, Bind
{ ident = Ident "b"
, ty = CharTy
, bodyExpr = LitExpr {anno=(), lit=CharLit 'j'}
}
]
, inExpr = IdentExpr{anno=(), ident=Ident "a"}
}
let initEnv = SemanticCheckEnv {globalVarTable=Map.empty, localVarTables=[]}
let actual = Monad.State.evalStateT (runSemanticCheck (exprToTypedExpr expr1)) initEnv
let expect = Right LetExpr
{ anno = IntTy
, binds =
[ Bind
{ ident = Ident "a"
, ty = IntTy
, bodyExpr = LitExpr {anno=IntTy, lit=IntLit 889922}
}
, Bind
{ ident = Ident "b"
, ty = CharTy
, bodyExpr = LitExpr {anno=CharTy, lit=CharLit 'j'}
}
]
, inExpr = IdentExpr{anno=IntTy, ident=Ident "a"}
}
actual `shouldBe` expect
describe "[positive] programToTypedProgram" $ do
it "global-let" $ do
let prog1 = Program
{ gdefs =
[ LetGdef
{ bind =
Bind
{ ident = Ident "a"
, ty = IntTy
, bodyExpr = LitExpr {anno=(), lit=IntLit 337733}
}
}
, LetGdef
{ bind =
Bind
{ ident = Ident "b"
, ty = IntTy
, bodyExpr = LitExpr {anno=(), lit=IntLit 909}
}
}
]
}
let actual = programToTypedProgram prog1
let expect = Right Program
{ gdefs =
[ LetGdef
{ bind =
Bind
{ ident = Ident "a"
, ty = IntTy
, bodyExpr = LitExpr {anno=IntTy, lit=IntLit 337733}
}
}
, LetGdef
{ bind =
Bind
{ ident = Ident "b"
, ty = IntTy
, bodyExpr = LitExpr {anno=IntTy, lit=IntLit 909}
}
}
]
}
actual `shouldBe` expect
it "func" $ do
let prog1 = Program
{ gdefs =
[ FuncGdef
{ ident = Ident "myfunc"
, params = [Param {ident=Ident "p", ty=IntTy}, Param {ident=Ident "q", ty=CharTy}]
, retTy = IntTy
, bodyExpr = IdentExpr{anno=(), ident=Ident "p"}
}
]
}
let actual = programToTypedProgram prog1
let expect = Right Program
{ gdefs =
[ FuncGdef
{ ident = Ident "myfunc"
, params = [Param {ident=Ident "p", ty=IntTy}, Param {ident=Ident "q", ty=CharTy}]
, retTy = IntTy
, bodyExpr = IdentExpr{anno=IntTy, ident=Ident "p"}
}
]
}
actual `shouldBe` expect |
|
2369f22c30983402851737e4b2252140c1ba388761fa9f90e54f305f88fc3a3c | vlaaad/reveal | prefs.clj | (ns vlaaad.reveal.prefs
(:require [clojure.spec.alpha :as s]
[clojure.edn :as edn]
[clojure.main :as m])
(:import [java.net URL MalformedURLException]
[javafx.scene.text Font]))
(s/def ::font-size
(s/and number? pos?))
(defn- valid-url? [s]
(try
(URL. s) true
(catch MalformedURLException _ false)))
(def ^:private system-font-families
(delay (set (Font/getFamilies))))
(defn- system-font? [s]
(contains? @system-font-families s))
(s/def ::font-family
(s/or :url-string (s/and string? valid-url?)
:system-font system-font?))
(s/def ::theme
#{:dark :light})
(s/def ::prefs
(s/keys :opt-un [::font-family ::font-size ::theme]))
(def prefs
(delay
(try
(let [raw (edn/read-string (System/getProperty "vlaaad.reveal.prefs" "{}"))
prefs (s/conform ::prefs raw)]
(when (s/invalid? prefs)
(throw (ex-info "Invalid prefs" (s/explain-data ::prefs raw))))
prefs)
(catch Exception e
(println "Failed to read reveal prefs")
(println (-> e Throwable->map m/ex-triage m/ex-str)))))) | null | https://raw.githubusercontent.com/vlaaad/reveal/61c157b557c767aa34feb29e1e1aea197b1eed08/src/vlaaad/reveal/prefs.clj | clojure | (ns vlaaad.reveal.prefs
(:require [clojure.spec.alpha :as s]
[clojure.edn :as edn]
[clojure.main :as m])
(:import [java.net URL MalformedURLException]
[javafx.scene.text Font]))
(s/def ::font-size
(s/and number? pos?))
(defn- valid-url? [s]
(try
(URL. s) true
(catch MalformedURLException _ false)))
(def ^:private system-font-families
(delay (set (Font/getFamilies))))
(defn- system-font? [s]
(contains? @system-font-families s))
(s/def ::font-family
(s/or :url-string (s/and string? valid-url?)
:system-font system-font?))
(s/def ::theme
#{:dark :light})
(s/def ::prefs
(s/keys :opt-un [::font-family ::font-size ::theme]))
(def prefs
(delay
(try
(let [raw (edn/read-string (System/getProperty "vlaaad.reveal.prefs" "{}"))
prefs (s/conform ::prefs raw)]
(when (s/invalid? prefs)
(throw (ex-info "Invalid prefs" (s/explain-data ::prefs raw))))
prefs)
(catch Exception e
(println "Failed to read reveal prefs")
(println (-> e Throwable->map m/ex-triage m/ex-str)))))) |
|
57a9682a449020ea47f1b086b0f7b535d9638fd01294a30e8b086dcc56ad5be3 | andrewthad/haskell-ip | IPv6.hs | {-# LANGUAGE BangPatterns #-}
# LANGUAGE CPP #
# LANGUAGE DataKinds #
{-# LANGUAGE DeriveDataTypeable #-}
# LANGUAGE DeriveGeneric #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE InstanceSigs #
# LANGUAGE LambdaCase #
# LANGUAGE MagicHash #
# LANGUAGE ScopedTypeVariables #
{-# LANGUAGE TypeInType #-}
# LANGUAGE UnboxedTuples #
{-| This module provides the IPv6 data type and functions for working
with it.
-}
module Net.IPv6
( -- * Convert
ipv6
, fromOctets
, fromWord16s
, fromWord32s
, fromTupleWord16s
, fromTupleWord32s
, toWord16s
, toWord32s
-- * Special IP Addresses
, any
, loopback
, localhost
-- * Textual Conversion
-- ** Text
, encode
, encodeShort
, decode
, decodeShort
, parser
-- * UTF-8 Bytes
, parserUtf8Bytes
, decodeUtf8Bytes
, boundedBuilderUtf8
-- ** Printing
, print
-- * IPv6 Ranges
-- ** Range functions
, range
, fromBounds
, normalize
, contains
, isSubsetOf
, member
, lowerInclusive
, upperInclusive
-- ** Textual Conversion
-- *** Text
, encodeRange
, decodeRange
, parserRange
, printRange
-- ** UTF-8 Bytes
, parserRangeUtf8Bytes
, parserRangeUtf8BytesLenient
-- * Types
, IPv6(..)
, IPv6Range(..)
) where
import Prelude hiding (any, print)
import Net.IPv4 (IPv4(..))
import Control.Applicative
import Control.DeepSeq (NFData)
import Control.Monad (mzero)
import Control.Monad.ST (ST)
import Data.Bits
import Data.Char (chr)
import Data.Data (Data)
import Data.Ix (Ix)
import Data.Hashable (Hashable,hashWithSalt)
import Data.List (intercalate, group)
import Data.Primitive (MutablePrimArray)
import Data.Primitive.Types (Prim)
import Data.Text (Text)
import Data.Text.Short (ShortText)
import Data.WideWord.Word128 (Word128(..), zeroWord128)
import Data.Word
import Foreign.Storable (Storable)
import GHC.Exts (Int#,Word#,Int(I#))
import GHC.Generics (Generic)
import Numeric (showHex)
import Text.ParserCombinators.ReadPrec (prec,step)
import Text.Read (Read(..),Lexeme(Ident),lexP,parens)
import qualified Arithmetic.Lte as Lte
import qualified Arithmetic.Nat as Nat
import qualified Data.Aeson as Aeson
import qualified Data.Attoparsec.Text as AT
import qualified Data.Attoparsec.Text as Atto
import qualified Data.Bytes.Builder.Bounded as BB
import qualified Data.Bytes as Bytes
import qualified Data.Bytes.Parser as Parser
import qualified Data.Bytes.Parser.Latin as Latin
import qualified Data.ByteString.Short.Internal as BSS
import qualified Data.Primitive as PM
import qualified Data.Text as Text
import qualified Data.Text.IO as TIO
import qualified Data.Text.Short.Unsafe as TS
import qualified Data.Text.Short as TS
import qualified GHC.Word.Compat as Compat
import qualified Net.IPv4 as IPv4
-- $setup
--
-- These are here to get doctest work.
--
> > > import qualified Prelude as P
-- >>> import qualified Data.Text.IO as T
-- >>> import qualified Data.Text as Text
> > > import qualified Data . . Text as
> > > import qualified Data . Bytes . Text . Ascii as Ascii
-- >>> import Test.QuickCheck (Arbitrary(..))
-- >>> instance Arbitrary Word128 where { arbitrary = Word128 <$> arbitrary <*> arbitrary }
-- >>> instance Arbitrary IPv6 where { arbitrary = IPv6 <$> arbitrary }
-- >>> instance Arbitrary IPv6.IPv6Range where { arbitrary = IPv6.IPv6Range <$> arbitrary <*> arbitrary }
--
| A 128 - bit Internet Protocol version 6 address .
newtype IPv6 = IPv6 { getIPv6 :: Word128 }
deriving (Bounded,Enum,Eq,Ord,Storable,Bits,FiniteBits,NFData,Prim,Ix,Data,Generic)
instance Hashable IPv6 where
hashWithSalt s (IPv6 (Word128 a b)) = hashWithSalt (hashWithSalt s a) b
instance Show IPv6 where
showsPrec p addr = showParen (p > 10)
$ showString "ipv6 "
. showHexWord16 a
. showChar ' '
. showHexWord16 b
. showChar ' '
. showHexWord16 c
. showChar ' '
. showHexWord16 d
. showChar ' '
. showHexWord16 e
. showChar ' '
. showHexWord16 f
. showChar ' '
. showHexWord16 g
. showChar ' '
. showHexWord16 h
where
(a,b,c,d,e,f,g,h) = toWord16s addr
-- | Print an 'IPv6' using the textual encoding.
print :: IPv6 -> IO ()
print = TIO.putStrLn . encode
-- | Decode 'ShortText' as an 'IPv6' address.
--
-- >>> decodeShort "ffff::2:b"
-- Just (ipv6 0xffff 0x0000 0x0000 0x0000 0x0000 0x0000 0x0002 0x000b)
decodeShort :: ShortText -> Maybe IPv6
decodeShort t = decodeUtf8Bytes (Bytes.fromByteArray b)
where b = shortByteStringToByteArray (TS.toShortByteString t)
shortByteStringToByteArray :: BSS.ShortByteString -> PM.ByteArray
shortByteStringToByteArray (BSS.SBS x) = PM.ByteArray x
showHexWord16 :: Word16 -> ShowS
showHexWord16 w =
showString "0x"
. showChar (nibbleToHex (unsafeShiftR (fromIntegral w) 12))
. showChar (nibbleToHex ((unsafeShiftR (fromIntegral w) 8) .&. 0xF))
. showChar (nibbleToHex ((unsafeShiftR (fromIntegral w) 4) .&. 0xF))
. showChar (nibbleToHex ((fromIntegral w) .&. 0xF))
invariant : argument must be less than 16
nibbleToHex :: Word -> Char
nibbleToHex w
| w < 10 = chr (fromIntegral (w + 48))
| otherwise = chr (fromIntegral (w + 87))
instance Read IPv6 where
readPrec = parens $ prec 10 $ do
Ident "ipv6" <- lexP
a <- step readPrec
b <- step readPrec
c <- step readPrec
d <- step readPrec
e <- step readPrec
f <- step readPrec
g <- step readPrec
h <- step readPrec
return (fromWord16s a b c d e f g h)
instance Aeson.ToJSON IPv6 where
toJSON = Aeson.String . encode
instance Aeson.FromJSON IPv6 where
parseJSON = Aeson.withText "IPv6" $ \t -> case decode t of
Nothing -> fail "invalid IPv6 address"
Just i -> return i
rightToMaybe :: Either a b -> Maybe b
rightToMaybe = either (const Nothing) Just
-- | This could be useful for the rare occasion
-- in which one could construct an 'IPv6' from
-- octets.
--
Note that while @Net . IPv4.'Net . IPv4.fromOctets ' = Net . IPv4.'Net . IPv4.ipv4'@ ,
-- @Net.IPv6.fromOctets /= Net.IPv6.ipv6@. While this should be obvious
-- from their types, it is worth mentioning since the similarity in naming
-- might be confusing.
fromOctets ::
Word8 -> Word8 -> Word8 -> Word8
-> Word8 -> Word8 -> Word8 -> Word8
-> Word8 -> Word8 -> Word8 -> Word8
-> Word8 -> Word8 -> Word8 -> Word8
-> IPv6
fromOctets a b c d e f g h i j k l m n o p =
IPv6 $ fromOctetsWord128
(fromIntegral a) (fromIntegral b) (fromIntegral c) (fromIntegral d)
(fromIntegral e) (fromIntegral f) (fromIntegral g) (fromIntegral h)
(fromIntegral i) (fromIntegral j) (fromIntegral k) (fromIntegral l)
(fromIntegral m) (fromIntegral n) (fromIntegral o) (fromIntegral p)
fromOctetsWord128 ::
Word128 -> Word128 -> Word128 -> Word128
-> Word128 -> Word128 -> Word128 -> Word128
-> Word128 -> Word128 -> Word128 -> Word128
-> Word128 -> Word128 -> Word128 -> Word128
-> Word128
fromOctetsWord128 a b c d e f g h i j k l m n o p = fromIntegral
( shiftL a 120
.|. shiftL b 112
.|. shiftL c 104
.|. shiftL d 96
.|. shiftL e 88
.|. shiftL f 80
.|. shiftL g 72
.|. shiftL h 64
.|. shiftL i 56
.|. shiftL j 48
.|. shiftL k 40
.|. shiftL l 32
.|. shiftL m 24
.|. shiftL n 16
.|. shiftL o 8
.|. p
)
| Create an ' IPv6 ' address from the eight 16 - bit fragments that make
-- it up. This closely resembles the standard IPv6 notation, so
-- is used for the 'Show' instance. Note that this lacks the formatting
-- feature for suppress zeroes in an 'IPv6' address, but it should be
-- readable enough for hacking in GHCi.
--
> > > let addr = ipv6 0x3124 0x0 0x0 0xDEAD 0xCAFE 0xFF 0xFE00 0x1
-- >>> addr
ipv6 0x3124 0x0000 0x0000 0xdead 0xcafe 0x00ff 0xfe00 0x0001
-- >>> T.putStrLn (encode addr)
-- 3124::dead:cafe:ff:fe00:1
ipv6 ::
Word16 -> Word16 -> Word16 -> Word16
-> Word16 -> Word16 -> Word16 -> Word16
-> IPv6
ipv6 = fromWord16s
-- | An alias for the 'ipv6' smart constructor.
fromWord16s ::
Word16 -> Word16 -> Word16 -> Word16
-> Word16 -> Word16 -> Word16 -> Word16
-> IPv6
fromWord16s a b c d e f g h =
IPv6 $ fromWord16sWord128
(fromIntegral a) (fromIntegral b) (fromIntegral c) (fromIntegral d)
(fromIntegral e) (fromIntegral f) (fromIntegral g) (fromIntegral h)
fromWord16sWord128 ::
Word128 -> Word128 -> Word128 -> Word128
-> Word128 -> Word128 -> Word128 -> Word128
-> Word128
fromWord16sWord128 a b c d e f g h = fromIntegral
( shiftL a 112
.|. shiftL b 96
.|. shiftL c 80
.|. shiftL d 64
.|. shiftL e 48
.|. shiftL f 32
.|. shiftL g 16
.|. h
)
| Convert an ' IPv6 ' to eight 16 - bit words .
toWord16s :: IPv6 -> (Word16,Word16,Word16,Word16,Word16,Word16,Word16,Word16)
toWord16s (IPv6 (Word128 a b)) =
-- Note: implementing this as 2 Word64 shifts with 'unsafeShiftR'
is up to 40 % faster than using 128 - bit shifts on a Word128 value .
( fromIntegral (unsafeShiftR a 48)
, fromIntegral (unsafeShiftR a 32)
, fromIntegral (unsafeShiftR a 16)
, fromIntegral a
, fromIntegral (unsafeShiftR b 48)
, fromIntegral (unsafeShiftR b 32)
, fromIntegral (unsafeShiftR b 16)
, fromIntegral b
)
| Uncurried variant of ' fromWord16s ' .
fromTupleWord16s :: (Word16,Word16,Word16,Word16,Word16,Word16,Word16,Word16) -> IPv6
fromTupleWord16s (a,b,c,d,e,f,g,h) = fromWord16s a b c d e f g h
| Build an ' IPv6 ' from four 32 - bit words . The leftmost argument
-- is the high word and the rightword is the low word.
fromWord32s :: Word32 -> Word32 -> Word32 -> Word32 -> IPv6
fromWord32s a b c d =
IPv6 $ fromWord32sWord128
(fromIntegral a) (fromIntegral b) (fromIntegral c) (fromIntegral d)
fromWord32sWord128 ::
Word128 -> Word128 -> Word128 -> Word128
-> Word128
fromWord32sWord128 a b c d = fromIntegral
( shiftL a 96
.|. shiftL b 64
.|. shiftL c 32
.|. d
)
| Uncurried variant of ' fromWord32s ' .
fromTupleWord32s :: (Word32,Word32,Word32,Word32) -> IPv6
fromTupleWord32s (a,b,c,d) = fromWord32s a b c d
| Convert an ' IPv6 ' to four 32 - bit words .
toWord32s :: IPv6 -> (Word32,Word32,Word32,Word32)
toWord32s (IPv6 (Word128 a b)) =
-- Note: implementing this as 2 Word64 shifts with 'unsafeShiftR'
is about 10 % faster than using 128 - bit shifts on a Word128 value .
( fromIntegral (unsafeShiftR a 32)
, fromIntegral a
, fromIntegral (unsafeShiftR b 32)
, fromIntegral b
)
-- | The local loopback IP address.
--
-- >>> IPv6.loopback
-- ipv6 0x0000 0x0000 0x0000 0x0000 0x0000 0x0000 0x0000 0x0001
loopback :: IPv6
loopback = IPv6 (Word128 0 1)
-- | A useful alias for 'loopback'.
--
-- >>> IPv6.localhost
-- ipv6 0x0000 0x0000 0x0000 0x0000 0x0000 0x0000 0x0000 0x0001
localhost :: IPv6
localhost = loopback
-- | The IP address representing any host.
--
-- >>> IPv6.any
-- ipv6 0x0000 0x0000 0x0000 0x0000 0x0000 0x0000 0x0000 0x0000
any :: IPv6
any = IPv6 zeroWord128
| Encodes the ' IPv6 ' address using zero - compression on the leftmost longest
string of zeroes in the address .
-- Per <#section-5 RFC 5952 Section 5>,
this uses mixed notation when encoding an IPv4 - mapped IPv6 address :
--
> > > T.putStrLn $ IPv6.encode $ IPv6.fromWord16s 0xDEAD 0xBEEF 0x0 0x0 0x0 0x0 0x0 0x1234
-- dead:beef::1234
> > > T.putStrLn $ IPv6.encode $ IPv6.fromWord16s 0x0 0x0 0x0 0x0 0x0 0xFFFF 0x6437 0xA5B4
-- ::ffff:100.55.165.180
> > > T.putStrLn $ IPv6.encode $ IPv6.fromWord16s 0x0 0x0 0x0 0x0 0x0 0x0 0x0 0x0
-- ::
--
-- Per <#section-4.2.2 Section 4.2.2> of the
same RFC , this does not use @::@ to shorten a single 16 - bit 0 field . Only
-- runs of multiple 0 fields are considered.
encode :: IPv6 -> Text
encode !ip =
-- TODO: This implementation, while correct, is not particularly efficient.
-- It uses string all over the place.
if isIPv4Mapped ip
-- This representation is RECOMMENDED by #section-5
then
Text.pack "::ffff:"
`mappend`
IPv4.encode (IPv4.IPv4 (fromIntegral w7 `unsafeShiftL` 16 .|. fromIntegral w8))
else toText [w1, w2, w3, w4, w5, w6, w7, w8]
where
(w1, w2, w3, w4, w5, w6, w7, w8) = toWord16s ip
toText ws = Text.pack $ intercalate ":"
$ expand 0 (if longestZ > 1 then longestZ else 0) grouped
where
expand !_ 8 !_ = ["::"]
expand !_ !_ [] = []
expand !i !longest ((x, len):wsNext)
zero - compressed group :
| x == 0 && len == longest =
first and last need an extra colon since there 's nothing
-- to concat against
(if i == 0 || (i+len) == 8 then ":" else "")
: expand (i+len) 0 wsNext
-- normal group:
| otherwise = replicate len (showHex x "") ++ expand (i+len) longest wsNext
longestZ = maximum . (0:) . map snd . filter ((==0) . fst) $ grouped
grouped = map (\x -> (head x, length x)) (group ws)
isIPv4Mapped :: IPv6 -> Bool
isIPv4Mapped (IPv6 (Word128 w1 w2)) =
w1 == 0 && (0xFFFFFFFF00000000 .&. w2 == 0x0000FFFF00000000)
-- | Decode UTF-8-encoded 'Bytes' into an 'IPv6' address.
--
-- >>> decodeUtf8Bytes (Ascii.fromString "::cab:1")
-- Just (ipv6 0x0000 0x0000 0x0000 0x0000 0x0000 0x0000 0x0cab 0x0001)
decodeUtf8Bytes :: Bytes.Bytes -> Maybe IPv6
decodeUtf8Bytes !b = case Parser.parseBytes (parserUtf8Bytes ()) b of
Parser.Success (Parser.Slice _ len addr) -> case len of
0 -> Just addr
_ -> Nothing
Parser.Failure _ -> Nothing
| Encodes the ' IPv6 ' address using zero - compression on the
leftmost longest string of zeroes in the address .
--
> > > BB.run Nat.constant $ IPv6.boundedBuilderUtf8 $ IPv6.fromWord16s 0xDEAD 0xBEEF 0x0 0x0 0x0 0x0 0x0 0x1234
[ 0x64 , 0x65 , 0x61 , 0x64 , 0x3a , 0x62 , 0x65 , 0x65 , 0x66 , 0x3a , 0x3a , 0x31 , 0x32 , 0x33 , 0x34 ]
boundedBuilderUtf8 :: IPv6 -> BB.Builder 39
boundedBuilderUtf8 !ip@(IPv6 (Word128 hi lo))
| hi == 0 && lo == 0 = BB.weaken Lte.constant
(BB.ascii ':' `BB.append` BB.ascii ':')
| isIPv4Mapped ip = BB.weaken Lte.constant $
BB.ascii ':'
`BB.append`
BB.ascii ':'
`BB.append`
BB.ascii 'f'
`BB.append`
BB.ascii 'f'
`BB.append`
BB.ascii 'f'
`BB.append`
BB.ascii 'f'
`BB.append`
BB.ascii ':'
`BB.append`
IPv4.boundedBuilderUtf8 (IPv4.IPv4 (fromIntegral lo))
| otherwise =
let (w0,w1,w2,w3,w4,w5,w6,w7) = toWord16s ip
IntTriple startLongest longest _ = longestRun w0 w1 w2 w3 w4 w5 w6 w7
start = startLongest
end = start + longest
-- start is inclusive. end is exclusive
in firstPiece w0 start
`BB.append`
piece 1 w1 start end
`BB.append`
piece 2 w2 start end
`BB.append`
piece 3 w3 start end
`BB.append`
piece 4 w4 start end
`BB.append`
piece 5 w5 start end
`BB.append`
piece 6 w6 start end
`BB.append`
lastPiece w7 end
firstPiece :: Word16 -> Int -> BB.Builder 4
firstPiece !w !start = case start of
0 -> BB.weaken Lte.constant (BB.ascii ':')
_ -> BB.word16LowerHex w
-- Note about the implementation of piece:
-- It is important to manually perform worker-wrapper so that
we can stop piece from inlining . If we do not do this , GHC
inlines piece , leading to enormous blowup in the generated
Core . The implementation of boundedBuilderUtf8 becomes
thousands of lines of Core . Even in the microbenchmark that
-- comes with this library, it can be observed that preventing
this inlining improves performance of encodeShort by 50 % .
piece :: Int -> Word16 -> Int -> Int -> BB.Builder 5
# inline piece #
piece (I# ix) (Compat.W16# w) (I# start) (I# end) =
piece# ix w start end
piece# :: Int# -> Word# -> Int# -> Int# -> BB.Builder 5
# noinline piece # #
piece# !ix# !w# !start# !end# = case compare ix start of
LT -> BB.ascii ':' `BB.append` BB.word16LowerHex w
EQ -> BB.weaken Lte.constant (BB.ascii ':')
GT -> if ix < end
then BB.weaken Lte.constant BB.empty
else BB.ascii ':' `BB.append` BB.word16LowerHex w
where
ix = I# ix#
start = I# start#
end = I# end#
w = Compat.W16# w#
lastPiece :: Word16 -> Int -> BB.Builder 5
lastPiece !w !end = case end of
8 -> BB.weaken Lte.constant (BB.ascii ':')
_ -> BB.ascii ':' `BB.append` BB.word16LowerHex w
data IntTriple = IntTriple !Int !Int !Int
-- Choose the longest run. Prefer the leftmost run in the
-- event of a tie.
stepZeroRunLength :: Int -> Word16 -> IntTriple -> IntTriple
stepZeroRunLength !ix !w (IntTriple startLongest longest current) = case w of
0 -> let !x = current + 1 in
if x > longest
then IntTriple (ix - current) x x
else IntTriple startLongest longest x
_ -> IntTriple startLongest longest 0
We start out by setting the longest run to size 1 . This
means that we will only detect runs of length two or greater .
longestRun ::
Word16
-> Word16
-> Word16
-> Word16
-> Word16
-> Word16
-> Word16
-> Word16
-> IntTriple
longestRun !w0 !w1 !w2 !w3 !w4 !w5 !w6 !w7 = id
$ stepZeroRunLength 7 w7
$ stepZeroRunLength 6 w6
$ stepZeroRunLength 5 w5
$ stepZeroRunLength 4 w4
$ stepZeroRunLength 3 w3
$ stepZeroRunLength 2 w2
$ stepZeroRunLength 1 w1
$ stepZeroRunLength 0 w0
$ IntTriple (-1) 1 0
| Encodes the ' IPv6 ' address as ' ShortText ' using zero - compression on
the leftmost longest string of zeroes in the address .
-- Per <#section-5 RFC 5952 Section 5>,
this uses mixed notation when encoding an IPv4 - mapped IPv6 address .
--
> > > IPv6.encodeShort $ IPv6.fromWord16s 0xDEAD 0xBEEF 0x0 0x0 0x0 0x0ABC 0x0 0x1234
" dead : "
encodeShort :: IPv6 -> ShortText
encodeShort w = id
$ TS.fromShortByteStringUnsafe
$ byteArrayToShortByteString
$ BB.run Nat.constant
$ boundedBuilderUtf8
$ w
byteArrayToShortByteString :: PM.ByteArray -> BSS.ShortByteString
byteArrayToShortByteString (PM.ByteArray x) = BSS.SBS x
-- | Decode an 'IPv6' address. This accepts both standard IPv6
notation ( with zero compression ) and mixed notation for
IPv4 - mapped IPv6 addresses . For a decoding function that
additionally accepts dot - decimal - encoded IPv4 addresses ,
-- see @Net.IP.decode@.
decode :: Text -> Maybe IPv6
decode t = rightToMaybe (AT.parseOnly (parser <* AT.endOfInput) t)
-- | Parse UTF-8-encoded 'Bytes' as an 'IPv6' address. This accepts
-- both uppercase and lowercase characters in the hexadecimal components.
--
-- >>> let str = "dead:beef:3240:a426:ba68:1cd0:4263:109b -> alive"
> > > Parser.parseBytes ( parserUtf8Bytes ( ) ) ( )
Success ( Slice { offset = 39 , length = 9 , value = ipv6 0xdead 0xbeef 0x3240 0xa426 0xba68 0x1cd0 0x4263 0x109b } )
--
This does not currently support parsing embedded IPv4 address
( e.g. @ff00:8000 : abc::224.1.2.3@ ) .
parserUtf8Bytes :: e -> Parser.Parser e s IPv6
parserUtf8Bytes e = do
marr <- Parser.effect (PM.newPrimArray 8)
We can not immidiately call preZeroes since it wants a
-- leading colon present.
Latin.trySatisfy (== ':') >>= \case
True -> do
Latin.char e ':'
postZeroesBegin e marr 0 0
False -> do
w <- pieceParser e
Parser.effect (PM.writePrimArray marr 0 w)
preZeroes e marr 1
-- This is called when we are positioned before a colon.
-- We may encounter another colon immidiately after
the one that we consume here . This indicates zero
-- compression. Or we may encounter another hex-encoded
-- number.
preZeroes ::
e
length must be 8
-> Int
-> Parser.Parser e s IPv6
preZeroes e !marr !ix = case ix of
8 -> Parser.effect (combinePieces marr)
_ -> do
Latin.char e ':'
Latin.trySatisfy (== ':') >>= \case
True -> postZeroesBegin e marr ix ix
False -> do
w <- pieceParser e
Parser.effect (PM.writePrimArray marr ix w)
preZeroes e marr (ix + 1)
The same as postZeroes except that there is no
-- leading that gets consumed. This is called right
-- after a double colon is consumed.
Precondition : the index is less than 8 . This parser
is only called by preZeroes , which ensures that
-- this holds.
postZeroesBegin ::
e
length must be 8
-> Int -- current index in array
-> Int -- index where compression happened
-> Parser.Parser e s IPv6
postZeroesBegin e !marr !ix !compress = do
optionalPieceParser e >>= \case
Nothing -> do -- the end has come
Parser.effect (conclude marr ix compress)
Just w -> do
Parser.effect (PM.writePrimArray marr ix w)
postZeroes e marr (ix + 1) compress
-- Should be run right before a colon.
postZeroes ::
e
length must be 8
-> Int -- current index in array
-> Int -- index where compression happened
-> Parser.Parser e s IPv6
postZeroes e !marr !ix !compress = case ix of
8 -> Parser.fail e
_ -> do
Latin.trySatisfy (== ':') >>= \case
False -> -- The end has come
Parser.effect (conclude marr ix compress)
True -> do
w <- pieceParser e
Parser.effect (PM.writePrimArray marr ix w)
postZeroes e marr (ix + 1) compress
conclude :: MutablePrimArray s Word16 -> Int -> Int -> ST s IPv6
conclude !marr !ix !compress = do
This will overlap , but GHC 's copy primop is fine with that .
let postCompressionLen = ix - compress
PM.copyMutablePrimArray marr (8 - postCompressionLen) marr compress postCompressionLen
let compressedArea = 8 - ix
PM.setPrimArray marr compress compressedArea (0 :: Word16)
combinePieces marr
-- Example memmove that may need to happen:
-- A B C H ==> A B C 0 0 0 0 H
-- *
ix = 4 , compress = 3 , postCompressionLen = 1 , compressedArea = 4
copyPrimArray marr 7 marr 3 1
setPrimArray marr 3 4 ( 0 : : )
combinePieces ::
MutablePrimArray s Word16
-> ST s IPv6
combinePieces !marr = fromWord16s
<$> PM.readPrimArray marr 0
<*> PM.readPrimArray marr 1
<*> PM.readPrimArray marr 2
<*> PM.readPrimArray marr 3
<*> PM.readPrimArray marr 4
<*> PM.readPrimArray marr 5
<*> PM.readPrimArray marr 6
<*> PM.readPrimArray marr 7
optionalPieceParser :: e -> Parser.Parser e s (Maybe Word16)
optionalPieceParser e = Latin.tryHexNibble >>= \case
Nothing -> pure Nothing
Just w0 -> do
r <- pieceParserStep e w0
pure (Just r)
This should probably be moved into bytesmith and renamed .
pieceParser :: e -> Parser.Parser e s Word16
pieceParser e = Latin.hexNibble e >>= pieceParserStep e
-- Parses the remainder of a lowercase hexadecimal number.
-- Leaves trailing colons alone. This fails if there are
more than four hex digits unless there are leading zeroes .
-- I cannot find a spec that is clear about what to do
-- if someone puts 00000 in a piece of an encoded IPv6
-- address, so I veer on the side of leniency.
pieceParserStep ::
e
-> Word
-> Parser.Parser e s Word16
pieceParserStep e !acc = if acc > 0xFFFF
then Parser.fail e
else Latin.tryHexNibble >>= \case
Nothing -> pure (fromIntegral acc)
Just w -> pieceParserStep e (16 * acc + w)
-- | Parse UTF-8-encoded 'Bytes' into an 'IPv4Range'.
-- This requires the mask to be present.
--
-- >>> maybe (putStrLn "nope") IPv6.printRange $ Parser.parseBytesMaybe (IPv6.parserRangeUtf8Bytes ()) (Ascii.fromString "1b02:f001:5:200b::/80")
-- 1b02:f001:5:200b::/80
-- >>> maybe (putStrLn "nope") IPv6.printRange $ Parser.parseBytesMaybe (IPv6.parserRangeUtf8Bytes ()) (Ascii.fromString "abcd::")
-- nope
--
-- See 'parserRangeUtf8BytesLenient' for a variant that treats
-- a missing mask as a @/32@ mask.
parserRangeUtf8Bytes :: e -> Parser.Parser e s IPv6Range
parserRangeUtf8Bytes e = do
base <- parserUtf8Bytes e
Latin.char e '/'
theMask <- Latin.decWord8 e
if theMask > 128
then Parser.fail e
else pure $! normalize (IPv6Range base theMask)
-- | Variant of 'parserRangeUtf8Bytes' that allows the mask
-- to be omitted. An omitted mask is treated as a @/128@ mask.
--
-- >>> maybe (putStrLn "nope") IPv6.printRange $ Parser.parseBytesMaybe (IPv6.parserRangeUtf8BytesLenient ()) (Ascii.fromString "1b02:f001:5:200b::/80")
-- 1b02:f001:5:200b::/80
-- >>> maybe (putStrLn "nope") IPv6.printRange $ Parser.parseBytesMaybe (IPv6.parserRangeUtf8BytesLenient ()) (Ascii.fromString "abcd::")
abcd::/128
parserRangeUtf8BytesLenient :: e -> Parser.Parser e s IPv6Range
parserRangeUtf8BytesLenient e = do
base <- parserUtf8Bytes e
Latin.trySatisfy (=='/') >>= \case
True -> do
theMask <- Latin.decWord8 e
if theMask > 128
then Parser.fail e
else pure $! normalize (IPv6Range base theMask)
False -> pure $! IPv6Range base 128
| Parse an ' IPv6 ' using ' Atto . ' .
--
-- >>> Atto.parseOnly IPv6.parser (Text.pack "dead:beef:3240:a426:ba68:1cd0:4263:109b")
Right ( ipv6 0xdead 0xbeef 0x3240 0xa426 0xba68 0x1cd0 0x4263 0x109b )
parser :: Atto.Parser IPv6
parser = makeIP <$> ip
where
makeIP [w1, w2, w3, w4, w5, w6, w7, w8] = fromWord16s w1 w2 w3 w4 w5 w6 w7 w8
makeIP _ = error "Net.IPv6.parser: Implementation error. Please open a bug report."
ip = (Atto.char ':' *> Atto.char ':' *> doubleColon 0) <|> part 0
part :: Int -> Atto.Parser [Word16]
part n =
case n of
max 8 parts in an IPv6 address
7 -> pure <$> Atto.hexadecimal
after 6 parts it could end in IPv4 dotted notation
6 -> ipv4 <|> hexPart
_ -> hexPart
where
hexPart = (:)
<$> Atto.hexadecimal
<*> (Atto.char ':' *>
(
(Atto.char ':' *> doubleColon (n+1))
<|>
part (n+1)
)
)
doubleColon :: Int -> Atto.Parser [Word16]
doubleColon count = do
rest <- afterDoubleColon <|> pure []
let fillerLength = (8 - count - length rest)
if fillerLength <= 0
then fail "too many parts in IPv6 address"
else pure (replicate fillerLength 0 ++ rest)
after double colon , IPv4 dotted notation could appear anywhere
afterDoubleColon :: Atto.Parser [Word16]
afterDoubleColon =
ipv4 <|>
(:) <$> Atto.hexadecimal <*> ((Atto.char ':' *> afterDoubleColon) <|> pure [])
ipv4 :: Atto.Parser [Word16]
ipv4 = ipv4ToWord16s <$> IPv4.parser
ipv4ToWord16s :: IPv4 -> [Word16]
ipv4ToWord16s (IPv4 word) = [fromIntegral (word `unsafeShiftR` 16), fromIntegral (word .&. 0xFFFF)]
| An ' IPv6Range ' . It is made up of the first ' IPv6 ' in the range
-- and its length.
data IPv6Range = IPv6Range
{ ipv6RangeBase :: {-# UNPACK #-} !IPv6
, ipv6RangeLength :: {-# UNPACK #-} !Word8
} deriving (Eq,Ord,Show,Read,Generic,Data)
instance NFData IPv6Range
instance Aeson.ToJSON IPv6Range where
toJSON = Aeson.String . encodeRange
instance Aeson.FromJSON IPv6Range where
parseJSON (Aeson.String t) = case decodeRange t of
Nothing -> fail "Could not decodeRange IPv6 range"
Just res -> return res
parseJSON _ = mzero
mask128 :: IPv6
mask128 = maxBound
mask :: Word8 -> IPv6
mask = complement . shiftR mask128 . fromIntegral
| Normalize an ' IPv6Range ' . The first result of this is that the
' IPv6 ' inside the ' IPv6Range ' is changed so that the insignificant
-- bits are zeroed out. For example:
--
> > > addr1 = IPv6.ipv6 0x0192 0x0168 0x0001 0x0019 0x0000 0x0000 0x0000 0x0000
> > > addr2 = IPv6.ipv6 0x0192 0x0168 0x0001 0x0163 0x0000 0x0000 0x0000 0x0000
-- >>> IPv6.printRange $ IPv6.normalize $ IPv6.IPv6Range addr1 24
-- 192:100::/24
> > > IPv6.printRange $ IPv6.normalize $ IPv6.IPv6Range addr2 28
-- 192:160::/28
--
The second effect of this is that the mask length is lowered to be 128
or smaller . Working with ' IPv6Range 's that have not been normalized does
-- not cause any issues for this library, although other applications may
reject such ranges ( especially those with a mask length above 128 ) .
--
-- Note that 'normalize is idempotent, that is:
--
-- prop> IPv6.normalize r == (IPv6.normalize . IPv6.normalize) r
normalize :: IPv6Range -> IPv6Range
normalize (IPv6Range ip len) =
let len' = min len 128
ip' = ip .&. mask len'
in IPv6Range ip' len'
| Encode an ' IPv6Range ' as ' Text ' .
--
> > > addr = IPv6.ipv6 0xDEAD 0xBEEF 0x3240 0xA426 0xBA68 0x1CD0 0x4263 0x109B
> > > T.putStrLn $ IPv6.encodeRange $ IPv6.IPv6Range addr 28
dead : beef:3240 : a426 : ba68:1cd0:4263:109b/28
encodeRange :: IPv6Range -> Text
encodeRange x = encode (ipv6RangeBase x) <> Text.pack "/" <> (Text.pack $ (show . fromEnum) $ ipv6RangeLength x)
| Decode an ' IPv6Range ' from ' Text ' .
--
> > > addr = IPv6.ipv6 0xDEAD 0xBEEF 0x3240 0xA426 0xBA68 0x1CD0 0x4263 0x109B
> > > fmap IPv6.encodeRange $ IPv6.decodeRange ( Text.pack " dead : beef:3240 : a426 : ba68:1cd0:4263:109b/28 " )
-- Just "dead:bee0::/28"
decodeRange :: Text -> Maybe IPv6Range
decodeRange = rightToMaybe . AT.parseOnly (parserRange <* AT.endOfInput)
| Parse an ' IPv6Range ' using a ' AT.Parser ' .
parserRange :: AT.Parser IPv6Range
parserRange = do
ip <- parser
_ <- AT.char '/'
theMask <- AT.decimal >>= limitSize
return (normalize (IPv6Range ip theMask))
where
limitSize i =
if i > 128
then fail "An IP range length must be between 0 and 128"
else return i
| Checks to see if an ' IPv6 ' address belongs in the ' IPv6Range ' .
--
> > > let ip = IPv6.ipv6 0x2001 0x0db8 0x0db8 0x1094 0x2051 0x0000 0x0000 0x0001
> > > let iprange mask = IPv6.IPv6Range ( IPv6.ipv6 0x2001 0x0db8 0x0000 0x0000 0x0000 0x0000 0x0000 0x0001 ) mask
-- >>> IPv6.contains (iprange 8) ip
-- True
> > > IPv6.contains ( iprange 48 ) ip
-- False
--
-- Typically, element-testing functions are written to take the element
as the first argument and the set as the second argument . This is intentionally
-- written the other way for better performance when iterating over a collection.
-- For example, you might test elements in a list for membership like this:
--
> > > let r = IPv6.IPv6Range ( IPv6.ipv6 0x2001 0x0db8 0x0000 0x0000 0x0000 0x0000 0x0000 0x0001 ) 64
> > > fmap ( IPv6.contains r ) ( take 5 $ iterate succ $ IPv6.ipv6 0x2001 0x0db8 0x0000 0x0000 0xffff 0xffff 0xffff 0xfffe )
-- [True,True,False,False,False]
--
The implementation of ' contains ' ensures that ( with GHC ) , the bitmask
-- creation and range normalization only occur once in the above example.
-- They are reused as the list is iterated.
contains :: IPv6Range -> IPv6 -> Bool
contains (IPv6Range subnet len) =
let theMask = mask len
subnetNormalized = subnet .&. theMask
in \ip -> (ip .&. theMask) == subnetNormalized
| Checks if the first range is a subset of the second range .
isSubsetOf :: IPv6Range -> IPv6Range -> Bool
isSubsetOf a b =
lowerInclusive a >= lowerInclusive b
&&
upperInclusive a <= upperInclusive b
-- | This is provided to mirror the interface provided by @Data.Set@. It
-- behaves just like 'contains' but with flipped arguments.
--
-- prop> IPv6.member ip r == IPv6.contains r ip
member :: IPv6 -> IPv6Range -> Bool
member = flip contains
| The inclusive lower bound of an ' IPv6Range ' . This is conventionally
-- understood to be the broadcast address of a subnet. For example:
--
> > > T.putStrLn $ IPv6.encode $ IPv6.lowerInclusive $ IPv6.IPv6Range ( IPv6.ipv6 0x2001 0x0db8 0x0000 0x0000 0x0000 0x0000 0x0000 0x0001 ) 25
2001 : d80 : :
--
Note that the lower bound of a normalized ' IPv6Range ' is simply the
-- ip address of the range:
--
prop > IPv6.lowerInclusive r = = IPv6.ipv6RangeBase ( IPv6.normalize r )
lowerInclusive :: IPv6Range -> IPv6
lowerInclusive = ipv6RangeBase . normalize
| The inclusive upper bound of an ' IPv6Range ' .
--
> > > let 0xDEAD 0xBEEF 0x3240 0xA426 0xBA68 0x1CD0 0x4263 0x109B
> > > T.putStrLn $ IPv6.encode $ IPv6.upperInclusive $ IPv6.IPv6Range addr 25
dead : : ffff : ffff : ffff : ffff : ffff :
--
upperInclusive :: IPv6Range -> IPv6
upperInclusive (IPv6Range ip len) =
let len' = min 128 len
theInvertedMask :: IPv6
theInvertedMask = shiftR mask128 (fromIntegral len')
in ip .|. theInvertedMask
| Print an ' IPv6Range ' using the textual encoding .
printRange :: IPv6Range -> IO ()
printRange = TIO.putStrLn . encodeRange
| Smart constructor for ' IPv6Range ' . Ensures the mask is appropriately
sized and sets masked bits in the ' IPv6 ' to zero .
--
> > > let 0xDEAD 0xBEEF 0x3240 0xA426 0xBA68 0x1CD0 0x4263 0x109B
> > > IPv6.printRange $ IPv6.range addr 25
-- dead:be80::/25
range :: IPv6 -> Word8 -> IPv6Range
range addr len = normalize (IPv6Range addr len)
| Given an inclusive lower and upper ip address , create the smallest ' IPv6Range '
that contains the two . This is helpful in situations where input is given as a
-- range, like @ @.
--
-- This makes the range broader if it cannot be represented in <-Domain_Routing CIDR> notation.
--
> > > addrLower = IPv6.ipv6 0xDEAD 0xBE80 0x0000 0x0000 0x0000 0x0000 0x0000 0x0000
> > > addrUpper = IPv6.ipv6 0xDEAD 0xBEFF 0xFFFF 0xFFFF 0xFFFF 0xFFFF 0xFFFF 0xFFFF
> > > IPv6.printRange $
-- dead:be80::/25
fromBounds :: IPv6 -> IPv6 -> IPv6Range
fromBounds lo hi =
normalize (IPv6Range lo (maskFromBounds lo hi))
maskFromBounds :: IPv6 -> IPv6 -> Word8
maskFromBounds lo hi = fromIntegral (countLeadingZeros $ xor lo hi)
| null | https://raw.githubusercontent.com/andrewthad/haskell-ip/3c6528a73660140c1c9b50914803ba54f12fbcd9/src/Net/IPv6.hs | haskell | # LANGUAGE BangPatterns #
# LANGUAGE DeriveDataTypeable #
# LANGUAGE TypeInType #
| This module provides the IPv6 data type and functions for working
with it.
* Convert
* Special IP Addresses
* Textual Conversion
** Text
* UTF-8 Bytes
** Printing
* IPv6 Ranges
** Range functions
** Textual Conversion
*** Text
** UTF-8 Bytes
* Types
$setup
These are here to get doctest work.
>>> import qualified Data.Text.IO as T
>>> import qualified Data.Text as Text
>>> import Test.QuickCheck (Arbitrary(..))
>>> instance Arbitrary Word128 where { arbitrary = Word128 <$> arbitrary <*> arbitrary }
>>> instance Arbitrary IPv6 where { arbitrary = IPv6 <$> arbitrary }
>>> instance Arbitrary IPv6.IPv6Range where { arbitrary = IPv6.IPv6Range <$> arbitrary <*> arbitrary }
| Print an 'IPv6' using the textual encoding.
| Decode 'ShortText' as an 'IPv6' address.
>>> decodeShort "ffff::2:b"
Just (ipv6 0xffff 0x0000 0x0000 0x0000 0x0000 0x0000 0x0002 0x000b)
| This could be useful for the rare occasion
in which one could construct an 'IPv6' from
octets.
@Net.IPv6.fromOctets /= Net.IPv6.ipv6@. While this should be obvious
from their types, it is worth mentioning since the similarity in naming
might be confusing.
it up. This closely resembles the standard IPv6 notation, so
is used for the 'Show' instance. Note that this lacks the formatting
feature for suppress zeroes in an 'IPv6' address, but it should be
readable enough for hacking in GHCi.
>>> addr
>>> T.putStrLn (encode addr)
3124::dead:cafe:ff:fe00:1
| An alias for the 'ipv6' smart constructor.
Note: implementing this as 2 Word64 shifts with 'unsafeShiftR'
is the high word and the rightword is the low word.
Note: implementing this as 2 Word64 shifts with 'unsafeShiftR'
| The local loopback IP address.
>>> IPv6.loopback
ipv6 0x0000 0x0000 0x0000 0x0000 0x0000 0x0000 0x0000 0x0001
| A useful alias for 'loopback'.
>>> IPv6.localhost
ipv6 0x0000 0x0000 0x0000 0x0000 0x0000 0x0000 0x0000 0x0001
| The IP address representing any host.
>>> IPv6.any
ipv6 0x0000 0x0000 0x0000 0x0000 0x0000 0x0000 0x0000 0x0000
Per <#section-5 RFC 5952 Section 5>,
dead:beef::1234
::ffff:100.55.165.180
::
Per <#section-4.2.2 Section 4.2.2> of the
runs of multiple 0 fields are considered.
TODO: This implementation, while correct, is not particularly efficient.
It uses string all over the place.
This representation is RECOMMENDED by #section-5
to concat against
normal group:
| Decode UTF-8-encoded 'Bytes' into an 'IPv6' address.
>>> decodeUtf8Bytes (Ascii.fromString "::cab:1")
Just (ipv6 0x0000 0x0000 0x0000 0x0000 0x0000 0x0000 0x0cab 0x0001)
start is inclusive. end is exclusive
Note about the implementation of piece:
It is important to manually perform worker-wrapper so that
comes with this library, it can be observed that preventing
Choose the longest run. Prefer the leftmost run in the
event of a tie.
Per <#section-5 RFC 5952 Section 5>,
| Decode an 'IPv6' address. This accepts both standard IPv6
see @Net.IP.decode@.
| Parse UTF-8-encoded 'Bytes' as an 'IPv6' address. This accepts
both uppercase and lowercase characters in the hexadecimal components.
>>> let str = "dead:beef:3240:a426:ba68:1cd0:4263:109b -> alive"
leading colon present.
This is called when we are positioned before a colon.
We may encounter another colon immidiately after
compression. Or we may encounter another hex-encoded
number.
leading that gets consumed. This is called right
after a double colon is consumed.
this holds.
current index in array
index where compression happened
the end has come
Should be run right before a colon.
current index in array
index where compression happened
The end has come
Example memmove that may need to happen:
A B C H ==> A B C 0 0 0 0 H
*
Parses the remainder of a lowercase hexadecimal number.
Leaves trailing colons alone. This fails if there are
I cannot find a spec that is clear about what to do
if someone puts 00000 in a piece of an encoded IPv6
address, so I err on the side of leniency.
| Parse UTF-8-encoded 'Bytes' into an 'IPv6Range'.
This requires the mask to be present.
>>> maybe (putStrLn "nope") IPv6.printRange $ Parser.parseBytesMaybe (IPv6.parserRangeUtf8Bytes ()) (Ascii.fromString "1b02:f001:5:200b::/80")
1b02:f001:5:200b::/80
>>> maybe (putStrLn "nope") IPv6.printRange $ Parser.parseBytesMaybe (IPv6.parserRangeUtf8Bytes ()) (Ascii.fromString "abcd::")
nope
See 'parserRangeUtf8BytesLenient' for a variant that treats
a missing mask as a @/128@ mask.
| Variant of 'parserRangeUtf8Bytes' that allows the mask
to be omitted. An omitted mask is treated as a @/128@ mask.
>>> maybe (putStrLn "nope") IPv6.printRange $ Parser.parseBytesMaybe (IPv6.parserRangeUtf8BytesLenient ()) (Ascii.fromString "1b02:f001:5:200b::/80")
1b02:f001:5:200b::/80
>>> maybe (putStrLn "nope") IPv6.printRange $ Parser.parseBytesMaybe (IPv6.parserRangeUtf8BytesLenient ()) (Ascii.fromString "abcd::")
>>> Atto.parseOnly IPv6.parser (Text.pack "dead:beef:3240:a426:ba68:1cd0:4263:109b")
and its length.
# UNPACK #
# UNPACK #
bits are zeroed out. For example:
>>> IPv6.printRange $ IPv6.normalize $ IPv6.IPv6Range addr1 24
192:100::/24
192:160::/28
not cause any issues for this library, although other applications may
Note that 'normalize' is idempotent, that is:
prop> IPv6.normalize r == (IPv6.normalize . IPv6.normalize) r
Just "dead:bee0::/28"
>>> IPv6.contains (iprange 8) ip
True
False
Typically, element-testing functions are written to take the element
written the other way for better performance when iterating over a collection.
For example, you might test elements in a list for membership like this:
[True,True,False,False,False]
creation and range normalization only occur once in the above example.
They are reused as the list is iterated.
| This is provided to mirror the interface provided by @Data.Set@. It
behaves just like 'contains' but with flipped arguments.
prop> IPv6.member ip r == IPv6.contains r ip
understood to be the broadcast address of a subnet. For example:
ip address of the range:
dead:be80::/25
range, like @ @.
This makes the range broader if it cannot be represented in <-Domain_Routing CIDR> notation.
dead:be80::/25 | # LANGUAGE CPP #
# LANGUAGE DataKinds #
# LANGUAGE DeriveGeneric #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE InstanceSigs #
# LANGUAGE LambdaCase #
# LANGUAGE MagicHash #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE UnboxedTuples #
module Net.IPv6
ipv6
, fromOctets
, fromWord16s
, fromWord32s
, fromTupleWord16s
, fromTupleWord32s
, toWord16s
, toWord32s
, any
, loopback
, localhost
, encode
, encodeShort
, decode
, decodeShort
, parser
, parserUtf8Bytes
, decodeUtf8Bytes
, boundedBuilderUtf8
, print
, range
, fromBounds
, normalize
, contains
, isSubsetOf
, member
, lowerInclusive
, upperInclusive
, encodeRange
, decodeRange
, parserRange
, printRange
, parserRangeUtf8Bytes
, parserRangeUtf8BytesLenient
, IPv6(..)
, IPv6Range(..)
) where
import Prelude hiding (any, print)
import Net.IPv4 (IPv4(..))
import Control.Applicative
import Control.DeepSeq (NFData)
import Control.Monad (mzero)
import Control.Monad.ST (ST)
import Data.Bits
import Data.Char (chr)
import Data.Data (Data)
import Data.Ix (Ix)
import Data.Hashable (Hashable,hashWithSalt)
import Data.List (intercalate, group)
import Data.Primitive (MutablePrimArray)
import Data.Primitive.Types (Prim)
import Data.Text (Text)
import Data.Text.Short (ShortText)
import Data.WideWord.Word128 (Word128(..), zeroWord128)
import Data.Word
import Foreign.Storable (Storable)
import GHC.Exts (Int#,Word#,Int(I#))
import GHC.Generics (Generic)
import Numeric (showHex)
import Text.ParserCombinators.ReadPrec (prec,step)
import Text.Read (Read(..),Lexeme(Ident),lexP,parens)
import qualified Arithmetic.Lte as Lte
import qualified Arithmetic.Nat as Nat
import qualified Data.Aeson as Aeson
import qualified Data.Attoparsec.Text as AT
import qualified Data.Attoparsec.Text as Atto
import qualified Data.Bytes.Builder.Bounded as BB
import qualified Data.Bytes as Bytes
import qualified Data.Bytes.Parser as Parser
import qualified Data.Bytes.Parser.Latin as Latin
import qualified Data.ByteString.Short.Internal as BSS
import qualified Data.Primitive as PM
import qualified Data.Text as Text
import qualified Data.Text.IO as TIO
import qualified Data.Text.Short.Unsafe as TS
import qualified Data.Text.Short as TS
import qualified GHC.Word.Compat as Compat
import qualified Net.IPv4 as IPv4
>>> import qualified Prelude as P
> > > import qualified Data . . Text as
>>> import qualified Data.Bytes.Text.Ascii as Ascii
| A 128-bit Internet Protocol version 6 address.
newtype IPv6 = IPv6 { getIPv6 :: Word128 }
deriving (Bounded,Enum,Eq,Ord,Storable,Bits,FiniteBits,NFData,Prim,Ix,Data,Generic)
instance Hashable IPv6 where
hashWithSalt s (IPv6 (Word128 a b)) = hashWithSalt (hashWithSalt s a) b
instance Show IPv6 where
showsPrec p addr = showParen (p > 10)
$ showString "ipv6 "
. showHexWord16 a
. showChar ' '
. showHexWord16 b
. showChar ' '
. showHexWord16 c
. showChar ' '
. showHexWord16 d
. showChar ' '
. showHexWord16 e
. showChar ' '
. showHexWord16 f
. showChar ' '
. showHexWord16 g
. showChar ' '
. showHexWord16 h
where
(a,b,c,d,e,f,g,h) = toWord16s addr
print :: IPv6 -> IO ()
print = TIO.putStrLn . encode
decodeShort :: ShortText -> Maybe IPv6
decodeShort t = decodeUtf8Bytes (Bytes.fromByteArray b)
where b = shortByteStringToByteArray (TS.toShortByteString t)
shortByteStringToByteArray :: BSS.ShortByteString -> PM.ByteArray
shortByteStringToByteArray (BSS.SBS x) = PM.ByteArray x
showHexWord16 :: Word16 -> ShowS
showHexWord16 w =
showString "0x"
. showChar (nibbleToHex (unsafeShiftR (fromIntegral w) 12))
. showChar (nibbleToHex ((unsafeShiftR (fromIntegral w) 8) .&. 0xF))
. showChar (nibbleToHex ((unsafeShiftR (fromIntegral w) 4) .&. 0xF))
. showChar (nibbleToHex ((fromIntegral w) .&. 0xF))
invariant : argument must be less than 16
nibbleToHex :: Word -> Char
nibbleToHex w
| w < 10 = chr (fromIntegral (w + 48))
| otherwise = chr (fromIntegral (w + 87))
instance Read IPv6 where
readPrec = parens $ prec 10 $ do
Ident "ipv6" <- lexP
a <- step readPrec
b <- step readPrec
c <- step readPrec
d <- step readPrec
e <- step readPrec
f <- step readPrec
g <- step readPrec
h <- step readPrec
return (fromWord16s a b c d e f g h)
instance Aeson.ToJSON IPv6 where
toJSON = Aeson.String . encode
instance Aeson.FromJSON IPv6 where
parseJSON = Aeson.withText "IPv6" $ \t -> case decode t of
Nothing -> fail "invalid IPv6 address"
Just i -> return i
rightToMaybe :: Either a b -> Maybe b
rightToMaybe = either (const Nothing) Just
Note that while @Net . IPv4.'Net . IPv4.fromOctets ' = Net . IPv4.'Net . IPv4.ipv4'@ ,
fromOctets ::
Word8 -> Word8 -> Word8 -> Word8
-> Word8 -> Word8 -> Word8 -> Word8
-> Word8 -> Word8 -> Word8 -> Word8
-> Word8 -> Word8 -> Word8 -> Word8
-> IPv6
fromOctets a b c d e f g h i j k l m n o p =
IPv6 $ fromOctetsWord128
(fromIntegral a) (fromIntegral b) (fromIntegral c) (fromIntegral d)
(fromIntegral e) (fromIntegral f) (fromIntegral g) (fromIntegral h)
(fromIntegral i) (fromIntegral j) (fromIntegral k) (fromIntegral l)
(fromIntegral m) (fromIntegral n) (fromIntegral o) (fromIntegral p)
fromOctetsWord128 ::
Word128 -> Word128 -> Word128 -> Word128
-> Word128 -> Word128 -> Word128 -> Word128
-> Word128 -> Word128 -> Word128 -> Word128
-> Word128 -> Word128 -> Word128 -> Word128
-> Word128
fromOctetsWord128 a b c d e f g h i j k l m n o p = fromIntegral
( shiftL a 120
.|. shiftL b 112
.|. shiftL c 104
.|. shiftL d 96
.|. shiftL e 88
.|. shiftL f 80
.|. shiftL g 72
.|. shiftL h 64
.|. shiftL i 56
.|. shiftL j 48
.|. shiftL k 40
.|. shiftL l 32
.|. shiftL m 24
.|. shiftL n 16
.|. shiftL o 8
.|. p
)
| Create an 'IPv6' address from the eight 16-bit fragments that make
>>> let addr = ipv6 0x3124 0x0 0x0 0xDEAD 0xCAFE 0xFF 0xFE00 0x1
ipv6 0x3124 0x0000 0x0000 0xdead 0xcafe 0x00ff 0xfe00 0x0001
ipv6 ::
Word16 -> Word16 -> Word16 -> Word16
-> Word16 -> Word16 -> Word16 -> Word16
-> IPv6
ipv6 = fromWord16s
fromWord16s ::
Word16 -> Word16 -> Word16 -> Word16
-> Word16 -> Word16 -> Word16 -> Word16
-> IPv6
fromWord16s a b c d e f g h =
IPv6 $ fromWord16sWord128
(fromIntegral a) (fromIntegral b) (fromIntegral c) (fromIntegral d)
(fromIntegral e) (fromIntegral f) (fromIntegral g) (fromIntegral h)
fromWord16sWord128 ::
Word128 -> Word128 -> Word128 -> Word128
-> Word128 -> Word128 -> Word128 -> Word128
-> Word128
fromWord16sWord128 a b c d e f g h = fromIntegral
( shiftL a 112
.|. shiftL b 96
.|. shiftL c 80
.|. shiftL d 64
.|. shiftL e 48
.|. shiftL f 32
.|. shiftL g 16
.|. h
)
| Convert an ' IPv6 ' to eight 16 - bit words .
toWord16s :: IPv6 -> (Word16,Word16,Word16,Word16,Word16,Word16,Word16,Word16)
toWord16s (IPv6 (Word128 a b)) =
is up to 40 % faster than using 128 - bit shifts on a Word128 value .
( fromIntegral (unsafeShiftR a 48)
, fromIntegral (unsafeShiftR a 32)
, fromIntegral (unsafeShiftR a 16)
, fromIntegral a
, fromIntegral (unsafeShiftR b 48)
, fromIntegral (unsafeShiftR b 32)
, fromIntegral (unsafeShiftR b 16)
, fromIntegral b
)
| Uncurried variant of ' fromWord16s ' .
fromTupleWord16s :: (Word16,Word16,Word16,Word16,Word16,Word16,Word16,Word16) -> IPv6
fromTupleWord16s (a,b,c,d,e,f,g,h) = fromWord16s a b c d e f g h
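-- Not part of the original module: a small usage sketch. 'toWord16s' and
-- 'fromTupleWord16s' are inverses, so this hypothetical helper is the
-- identity on any address.
reassemble :: IPv6 -> IPv6
reassemble = fromTupleWord16s . toWord16s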
| Build an ' IPv6 ' from four 32 - bit words . The leftmost argument
fromWord32s :: Word32 -> Word32 -> Word32 -> Word32 -> IPv6
fromWord32s a b c d =
IPv6 $ fromWord32sWord128
(fromIntegral a) (fromIntegral b) (fromIntegral c) (fromIntegral d)
fromWord32sWord128 ::
Word128 -> Word128 -> Word128 -> Word128
-> Word128
fromWord32sWord128 a b c d = fromIntegral
( shiftL a 96
.|. shiftL b 64
.|. shiftL c 32
.|. d
)
| Uncurried variant of ' fromWord32s ' .
fromTupleWord32s :: (Word32,Word32,Word32,Word32) -> IPv6
fromTupleWord32s (a,b,c,d) = fromWord32s a b c d
| Convert an ' IPv6 ' to four 32 - bit words .
toWord32s :: IPv6 -> (Word32,Word32,Word32,Word32)
toWord32s (IPv6 (Word128 a b)) =
is about 10 % faster than using 128 - bit shifts on a Word128 value .
( fromIntegral (unsafeShiftR a 32)
, fromIntegral a
, fromIntegral (unsafeShiftR b 32)
, fromIntegral b
)
loopback :: IPv6
loopback = IPv6 (Word128 0 1)
localhost :: IPv6
localhost = loopback
any :: IPv6
any = IPv6 zeroWord128
| Encodes the 'IPv6' address using zero-compression on the leftmost longest
string of zeroes in the address.
this uses mixed notation when encoding an IPv4-mapped IPv6 address:
>>> T.putStrLn $ IPv6.encode $ IPv6.fromWord16s 0xDEAD 0xBEEF 0x0 0x0 0x0 0x0 0x0 0x1234
>>> T.putStrLn $ IPv6.encode $ IPv6.fromWord16s 0x0 0x0 0x0 0x0 0x0 0xFFFF 0x6437 0xA5B4
>>> T.putStrLn $ IPv6.encode $ IPv6.fromWord16s 0x0 0x0 0x0 0x0 0x0 0x0 0x0 0x0
same RFC, this does not use @::@ to shorten a single 16-bit 0 field. Only
encode :: IPv6 -> Text
encode !ip =
if isIPv4Mapped ip
then
Text.pack "::ffff:"
`mappend`
IPv4.encode (IPv4.IPv4 (fromIntegral w7 `unsafeShiftL` 16 .|. fromIntegral w8))
else toText [w1, w2, w3, w4, w5, w6, w7, w8]
where
(w1, w2, w3, w4, w5, w6, w7, w8) = toWord16s ip
toText ws = Text.pack $ intercalate ":"
$ expand 0 (if longestZ > 1 then longestZ else 0) grouped
where
expand !_ 8 !_ = ["::"]
expand !_ !_ [] = []
expand !i !longest ((x, len):wsNext)
zero - compressed group :
| x == 0 && len == longest =
first and last need an extra colon since there 's nothing
(if i == 0 || (i+len) == 8 then ":" else "")
: expand (i+len) 0 wsNext
| otherwise = replicate len (showHex x "") ++ expand (i+len) longest wsNext
longestZ = maximum . (0:) . map snd . filter ((==0) . fst) $ grouped
grouped = map (\x -> (head x, length x)) (group ws)
isIPv4Mapped :: IPv6 -> Bool
isIPv4Mapped (IPv6 (Word128 w1 w2)) =
w1 == 0 && (0xFFFFFFFF00000000 .&. w2 == 0x0000FFFF00000000)
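-- Not part of the original module: a hedged round-trip sketch using only the
-- exported 'encode' and 'decode'. The name 'roundTripsViaText' is hypothetical.
roundTripsViaText :: IPv6 -> Bool
roundTripsViaText addr = decode (encode addr) == Just addr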
decodeUtf8Bytes :: Bytes.Bytes -> Maybe IPv6
decodeUtf8Bytes !b = case Parser.parseBytes (parserUtf8Bytes ()) b of
Parser.Success (Parser.Slice _ len addr) -> case len of
0 -> Just addr
_ -> Nothing
Parser.Failure _ -> Nothing
| Encodes the 'IPv6' address using zero-compression on the
leftmost longest string of zeroes in the address.
>>> BB.run Nat.constant $ IPv6.boundedBuilderUtf8 $ IPv6.fromWord16s 0xDEAD 0xBEEF 0x0 0x0 0x0 0x0 0x0 0x1234
[0x64, 0x65, 0x61, 0x64, 0x3a, 0x62, 0x65, 0x65, 0x66, 0x3a, 0x3a, 0x31, 0x32, 0x33, 0x34]
boundedBuilderUtf8 :: IPv6 -> BB.Builder 39
boundedBuilderUtf8 !ip@(IPv6 (Word128 hi lo))
| hi == 0 && lo == 0 = BB.weaken Lte.constant
(BB.ascii ':' `BB.append` BB.ascii ':')
| isIPv4Mapped ip = BB.weaken Lte.constant $
BB.ascii ':'
`BB.append`
BB.ascii ':'
`BB.append`
BB.ascii 'f'
`BB.append`
BB.ascii 'f'
`BB.append`
BB.ascii 'f'
`BB.append`
BB.ascii 'f'
`BB.append`
BB.ascii ':'
`BB.append`
IPv4.boundedBuilderUtf8 (IPv4.IPv4 (fromIntegral lo))
| otherwise =
let (w0,w1,w2,w3,w4,w5,w6,w7) = toWord16s ip
IntTriple startLongest longest _ = longestRun w0 w1 w2 w3 w4 w5 w6 w7
start = startLongest
end = start + longest
in firstPiece w0 start
`BB.append`
piece 1 w1 start end
`BB.append`
piece 2 w2 start end
`BB.append`
piece 3 w3 start end
`BB.append`
piece 4 w4 start end
`BB.append`
piece 5 w5 start end
`BB.append`
piece 6 w6 start end
`BB.append`
lastPiece w7 end
firstPiece :: Word16 -> Int -> BB.Builder 4
firstPiece !w !start = case start of
0 -> BB.weaken Lte.constant (BB.ascii ':')
_ -> BB.word16LowerHex w
we can stop piece from inlining . If we do not do this , GHC
inlines piece , leading to enormous blowup in the generated
Core . The implementation of boundedBuilderUtf8 becomes
thousands of lines of Core . Even in the microbenchmark that
this inlining improves performance of encodeShort by 50 % .
piece :: Int -> Word16 -> Int -> Int -> BB.Builder 5
# inline piece #
piece (I# ix) (Compat.W16# w) (I# start) (I# end) =
piece# ix w start end
piece# :: Int# -> Word# -> Int# -> Int# -> BB.Builder 5
# noinline piece # #
piece# !ix# !w# !start# !end# = case compare ix start of
LT -> BB.ascii ':' `BB.append` BB.word16LowerHex w
EQ -> BB.weaken Lte.constant (BB.ascii ':')
GT -> if ix < end
then BB.weaken Lte.constant BB.empty
else BB.ascii ':' `BB.append` BB.word16LowerHex w
where
ix = I# ix#
start = I# start#
end = I# end#
w = Compat.W16# w#
lastPiece :: Word16 -> Int -> BB.Builder 5
lastPiece !w !end = case end of
8 -> BB.weaken Lte.constant (BB.ascii ':')
_ -> BB.ascii ':' `BB.append` BB.word16LowerHex w
data IntTriple = IntTriple !Int !Int !Int
stepZeroRunLength :: Int -> Word16 -> IntTriple -> IntTriple
stepZeroRunLength !ix !w (IntTriple startLongest longest current) = case w of
0 -> let !x = current + 1 in
if x > longest
then IntTriple (ix - current) x x
else IntTriple startLongest longest x
_ -> IntTriple startLongest longest 0
We start out by setting the longest run to size 1 . This
means that we will only detect runs of length two or greater .
longestRun ::
Word16
-> Word16
-> Word16
-> Word16
-> Word16
-> Word16
-> Word16
-> Word16
-> IntTriple
longestRun !w0 !w1 !w2 !w3 !w4 !w5 !w6 !w7 = id
$ stepZeroRunLength 7 w7
$ stepZeroRunLength 6 w6
$ stepZeroRunLength 5 w5
$ stepZeroRunLength 4 w4
$ stepZeroRunLength 3 w3
$ stepZeroRunLength 2 w2
$ stepZeroRunLength 1 w1
$ stepZeroRunLength 0 w0
$ IntTriple (-1) 1 0
| Encodes the 'IPv6' address as 'ShortText' using zero-compression on
the leftmost longest string of zeroes in the address.
this uses mixed notation when encoding an IPv4-mapped IPv6 address.
>>> IPv6.encodeShort $ IPv6.fromWord16s 0xDEAD 0xBEEF 0x0 0x0 0x0 0x0ABC 0x0 0x1234
"dead:beef::abc:0:1234"
encodeShort :: IPv6 -> ShortText
encodeShort w = id
$ TS.fromShortByteStringUnsafe
$ byteArrayToShortByteString
$ BB.run Nat.constant
$ boundedBuilderUtf8
$ w
byteArrayToShortByteString :: PM.ByteArray -> BSS.ShortByteString
byteArrayToShortByteString (PM.ByteArray x) = BSS.SBS x
notation ( with zero compression ) and mixed notation for
IPv4 - mapped IPv6 addresses . For a decoding function that
additionally accepts dot - decimal - encoded IPv4 addresses ,
decode :: Text -> Maybe IPv6
decode t = rightToMaybe (AT.parseOnly (parser <* AT.endOfInput) t)
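-- Not part of the original module: a sketch of calling 'decode' on a
-- mixed-notation literal; the binding 'exampleMapped' is hypothetical.
exampleMapped :: Maybe IPv6
exampleMapped = decode (Text.pack "::ffff:192.0.2.1")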
> > > Parser.parseBytes ( parserUtf8Bytes ( ) ) ( )
Success ( Slice { offset = 39 , length = 9 , value = ipv6 0xdead 0xbeef 0x3240 0xa426 0xba68 0x1cd0 0x4263 0x109b } )
This does not currently support parsing embedded IPv4 address
( e.g. @ff00:8000 : abc::224.1.2.3@ ) .
parserUtf8Bytes :: e -> Parser.Parser e s IPv6
parserUtf8Bytes e = do
marr <- Parser.effect (PM.newPrimArray 8)
We cannot immediately call preZeroes since it wants a
Latin.trySatisfy (== ':') >>= \case
True -> do
Latin.char e ':'
postZeroesBegin e marr 0 0
False -> do
w <- pieceParser e
Parser.effect (PM.writePrimArray marr 0 w)
preZeroes e marr 1
the one that we consume here . This indicates zero
preZeroes ::
e
length must be 8
-> Int
-> Parser.Parser e s IPv6
preZeroes e !marr !ix = case ix of
8 -> Parser.effect (combinePieces marr)
_ -> do
Latin.char e ':'
Latin.trySatisfy (== ':') >>= \case
True -> postZeroesBegin e marr ix ix
False -> do
w <- pieceParser e
Parser.effect (PM.writePrimArray marr ix w)
preZeroes e marr (ix + 1)
The same as postZeroes except that there is no
Precondition : the index is less than 8 . This parser
is only called by preZeroes , which ensures that
postZeroesBegin ::
e
length must be 8
-> Parser.Parser e s IPv6
postZeroesBegin e !marr !ix !compress = do
optionalPieceParser e >>= \case
Parser.effect (conclude marr ix compress)
Just w -> do
Parser.effect (PM.writePrimArray marr ix w)
postZeroes e marr (ix + 1) compress
postZeroes ::
e
length must be 8
-> Parser.Parser e s IPv6
postZeroes e !marr !ix !compress = case ix of
8 -> Parser.fail e
_ -> do
Latin.trySatisfy (== ':') >>= \case
Parser.effect (conclude marr ix compress)
True -> do
w <- pieceParser e
Parser.effect (PM.writePrimArray marr ix w)
postZeroes e marr (ix + 1) compress
conclude :: MutablePrimArray s Word16 -> Int -> Int -> ST s IPv6
conclude !marr !ix !compress = do
This will overlap , but GHC 's copy primop is fine with that .
let postCompressionLen = ix - compress
PM.copyMutablePrimArray marr (8 - postCompressionLen) marr compress postCompressionLen
let compressedArea = 8 - ix
PM.setPrimArray marr compress compressedArea (0 :: Word16)
combinePieces marr
ix = 4, compress = 3, postCompressionLen = 1, compressedArea = 4
copyPrimArray marr 7 marr 3 1
setPrimArray marr 3 4 (0 :: Word16)
combinePieces ::
MutablePrimArray s Word16
-> ST s IPv6
combinePieces !marr = fromWord16s
<$> PM.readPrimArray marr 0
<*> PM.readPrimArray marr 1
<*> PM.readPrimArray marr 2
<*> PM.readPrimArray marr 3
<*> PM.readPrimArray marr 4
<*> PM.readPrimArray marr 5
<*> PM.readPrimArray marr 6
<*> PM.readPrimArray marr 7
optionalPieceParser :: e -> Parser.Parser e s (Maybe Word16)
optionalPieceParser e = Latin.tryHexNibble >>= \case
Nothing -> pure Nothing
Just w0 -> do
r <- pieceParserStep e w0
pure (Just r)
This should probably be moved into bytesmith and renamed .
pieceParser :: e -> Parser.Parser e s Word16
pieceParser e = Latin.hexNibble e >>= pieceParserStep e
more than four hex digits unless there are leading zeroes .
pieceParserStep ::
e
-> Word
-> Parser.Parser e s Word16
pieceParserStep e !acc = if acc > 0xFFFF
then Parser.fail e
else Latin.tryHexNibble >>= \case
Nothing -> pure (fromIntegral acc)
Just w -> pieceParserStep e (16 * acc + w)
parserRangeUtf8Bytes :: e -> Parser.Parser e s IPv6Range
parserRangeUtf8Bytes e = do
base <- parserUtf8Bytes e
Latin.char e '/'
theMask <- Latin.decWord8 e
if theMask > 128
then Parser.fail e
else pure $! normalize (IPv6Range base theMask)
abcd::/128
parserRangeUtf8BytesLenient :: e -> Parser.Parser e s IPv6Range
parserRangeUtf8BytesLenient e = do
base <- parserUtf8Bytes e
Latin.trySatisfy (=='/') >>= \case
True -> do
theMask <- Latin.decWord8 e
if theMask > 128
then Parser.fail e
else pure $! normalize (IPv6Range base theMask)
False -> pure $! IPv6Range base 128
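-- Not part of the original module: a sketch of running the bytes parser the
-- same way the doctests do; 'decodeRangeUtf8' is a hypothetical helper.
decodeRangeUtf8 :: Bytes.Bytes -> Maybe IPv6Range
decodeRangeUtf8 = Parser.parseBytesMaybe (parserRangeUtf8Bytes ())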
| Parse an 'IPv6' using 'Atto.Parser'.
Right (ipv6 0xdead 0xbeef 0x3240 0xa426 0xba68 0x1cd0 0x4263 0x109b)
parser :: Atto.Parser IPv6
parser = makeIP <$> ip
where
makeIP [w1, w2, w3, w4, w5, w6, w7, w8] = fromWord16s w1 w2 w3 w4 w5 w6 w7 w8
makeIP _ = error "Net.IPv6.parser: Implementation error. Please open a bug report."
ip = (Atto.char ':' *> Atto.char ':' *> doubleColon 0) <|> part 0
part :: Int -> Atto.Parser [Word16]
part n =
case n of
max 8 parts in an IPv6 address
7 -> pure <$> Atto.hexadecimal
after 6 parts it could end in IPv4 dotted notation
6 -> ipv4 <|> hexPart
_ -> hexPart
where
hexPart = (:)
<$> Atto.hexadecimal
<*> (Atto.char ':' *>
(
(Atto.char ':' *> doubleColon (n+1))
<|>
part (n+1)
)
)
doubleColon :: Int -> Atto.Parser [Word16]
doubleColon count = do
rest <- afterDoubleColon <|> pure []
let fillerLength = (8 - count - length rest)
if fillerLength <= 0
then fail "too many parts in IPv6 address"
else pure (replicate fillerLength 0 ++ rest)
after double colon , IPv4 dotted notation could appear anywhere
afterDoubleColon :: Atto.Parser [Word16]
afterDoubleColon =
ipv4 <|>
(:) <$> Atto.hexadecimal <*> ((Atto.char ':' *> afterDoubleColon) <|> pure [])
ipv4 :: Atto.Parser [Word16]
ipv4 = ipv4ToWord16s <$> IPv4.parser
ipv4ToWord16s :: IPv4 -> [Word16]
ipv4ToWord16s (IPv4 word) = [fromIntegral (word `unsafeShiftR` 16), fromIntegral (word .&. 0xFFFF)]
| An ' IPv6Range ' . It is made up of the first ' IPv6 ' in the range
data IPv6Range = IPv6Range
} deriving (Eq,Ord,Show,Read,Generic,Data)
instance NFData IPv6Range
instance Aeson.ToJSON IPv6Range where
toJSON = Aeson.String . encodeRange
instance Aeson.FromJSON IPv6Range where
parseJSON (Aeson.String t) = case decodeRange t of
Nothing -> fail "Could not decodeRange IPv6 range"
Just res -> return res
parseJSON _ = mzero
mask128 :: IPv6
mask128 = maxBound
mask :: Word8 -> IPv6
mask = complement . shiftR mask128 . fromIntegral
| Normalize an 'IPv6Range'. The first result of this is that the
'IPv6' inside the 'IPv6Range' is changed so that the insignificant
>>> addr1 = IPv6.ipv6 0x0192 0x0168 0x0001 0x0019 0x0000 0x0000 0x0000 0x0000
>>> addr2 = IPv6.ipv6 0x0192 0x0168 0x0001 0x0163 0x0000 0x0000 0x0000 0x0000
>>> IPv6.printRange $ IPv6.normalize $ IPv6.IPv6Range addr2 28
The second effect of this is that the mask length is lowered to be 128
or smaller. Working with 'IPv6Range's that have not been normalized does
reject such ranges (especially those with a mask length above 128).
normalize :: IPv6Range -> IPv6Range
normalize (IPv6Range ip len) =
let len' = min len 128
ip' = ip .&. mask len'
in IPv6Range ip' len'
| Encode an 'IPv6Range' as 'Text'.
>>> addr = IPv6.ipv6 0xDEAD 0xBEEF 0x3240 0xA426 0xBA68 0x1CD0 0x4263 0x109B
>>> T.putStrLn $ IPv6.encodeRange $ IPv6.IPv6Range addr 28
dead:beef:3240:a426:ba68:1cd0:4263:109b/28
encodeRange :: IPv6Range -> Text
encodeRange x = encode (ipv6RangeBase x) <> Text.pack "/" <> (Text.pack $ (show . fromEnum) $ ipv6RangeLength x)
| Decode an 'IPv6Range' from 'Text'.
>>> addr = IPv6.ipv6 0xDEAD 0xBEEF 0x3240 0xA426 0xBA68 0x1CD0 0x4263 0x109B
>>> fmap IPv6.encodeRange $ IPv6.decodeRange (Text.pack "dead:beef:3240:a426:ba68:1cd0:4263:109b/28")
decodeRange :: Text -> Maybe IPv6Range
decodeRange = rightToMaybe . AT.parseOnly (parserRange <* AT.endOfInput)
| Parse an ' IPv6Range ' using a ' AT.Parser ' .
parserRange :: AT.Parser IPv6Range
parserRange = do
ip <- parser
_ <- AT.char '/'
theMask <- AT.decimal >>= limitSize
return (normalize (IPv6Range ip theMask))
where
limitSize i =
if i > 128
then fail "An IP range length must be between 0 and 128"
else return i
| Checks to see if an 'IPv6' address belongs in the 'IPv6Range'.
>>> let ip = IPv6.ipv6 0x2001 0x0db8 0x0db8 0x1094 0x2051 0x0000 0x0000 0x0001
>>> let iprange mask = IPv6.IPv6Range (IPv6.ipv6 0x2001 0x0db8 0x0000 0x0000 0x0000 0x0000 0x0000 0x0001) mask
>>> IPv6.contains (iprange 48) ip
as the first argument and the set as the second argument. This is intentionally
>>> let r = IPv6.IPv6Range (IPv6.ipv6 0x2001 0x0db8 0x0000 0x0000 0x0000 0x0000 0x0000 0x0001) 64
>>> fmap (IPv6.contains r) (take 5 $ iterate succ $ IPv6.ipv6 0x2001 0x0db8 0x0000 0x0000 0xffff 0xffff 0xffff 0xfffe)
The implementation of 'contains' ensures that (with GHC), the bitmask
contains :: IPv6Range -> IPv6 -> Bool
contains (IPv6Range subnet len) =
let theMask = mask len
subnetNormalized = subnet .&. theMask
in \ip -> (ip .&. theMask) == subnetNormalized
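-- Not part of the original module: the partial-application pattern described
-- above, where the mask math in 'contains' is shared across every membership
-- test; 'inRange' is a hypothetical helper.
inRange :: IPv6Range -> [IPv6] -> [Bool]
inRange r = map (contains r)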
| Checks if the first range is a subset of the second range .
isSubsetOf :: IPv6Range -> IPv6Range -> Bool
isSubsetOf a b =
lowerInclusive a >= lowerInclusive b
&&
upperInclusive a <= upperInclusive b
member :: IPv6 -> IPv6Range -> Bool
member = flip contains
| The inclusive lower bound of an 'IPv6Range'. This is conventionally
>>> T.putStrLn $ IPv6.encode $ IPv6.lowerInclusive $ IPv6.IPv6Range (IPv6.ipv6 0x2001 0x0db8 0x0000 0x0000 0x0000 0x0000 0x0000 0x0001) 25
2001:d80::
Note that the lower bound of a normalized 'IPv6Range' is simply the
prop> IPv6.lowerInclusive r == IPv6.ipv6RangeBase (IPv6.normalize r)
lowerInclusive :: IPv6Range -> IPv6
lowerInclusive = ipv6RangeBase . normalize
| The inclusive upper bound of an 'IPv6Range'.
>>> let addr = IPv6.ipv6 0xDEAD 0xBEEF 0x3240 0xA426 0xBA68 0x1CD0 0x4263 0x109B
>>> T.putStrLn $ IPv6.encode $ IPv6.upperInclusive $ IPv6.IPv6Range addr 25
dead:beff:ffff:ffff:ffff:ffff:ffff:ffff
upperInclusive :: IPv6Range -> IPv6
upperInclusive (IPv6Range ip len) =
let len' = min 128 len
theInvertedMask :: IPv6
theInvertedMask = shiftR mask128 (fromIntegral len')
in ip .|. theInvertedMask
| Print an ' IPv6Range ' using the textual encoding .
printRange :: IPv6Range -> IO ()
printRange = TIO.putStrLn . encodeRange
| Smart constructor for 'IPv6Range'. Ensures the mask is appropriately
sized and sets masked bits in the 'IPv6' to zero.
>>> let addr = IPv6.ipv6 0xDEAD 0xBEEF 0x3240 0xA426 0xBA68 0x1CD0 0x4263 0x109B
>>> IPv6.printRange $ IPv6.range addr 25
range :: IPv6 -> Word8 -> IPv6Range
range addr len = normalize (IPv6Range addr len)
| Given an inclusive lower and upper ip address, create the smallest 'IPv6Range'
that contains the two. This is helpful in situations where input is given as a
>>> addrLower = IPv6.ipv6 0xDEAD 0xBE80 0x0000 0x0000 0x0000 0x0000 0x0000 0x0000
>>> addrUpper = IPv6.ipv6 0xDEAD 0xBEFF 0xFFFF 0xFFFF 0xFFFF 0xFFFF 0xFFFF 0xFFFF
>>> IPv6.printRange $
fromBounds :: IPv6 -> IPv6 -> IPv6Range
fromBounds lo hi =
normalize (IPv6Range lo (maskFromBounds lo hi))
maskFromBounds :: IPv6 -> IPv6 -> Word8
maskFromBounds lo hi = fromIntegral (countLeadingZeros $ xor lo hi)
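-- Not part of the original module: a sketch showing that 'range' and
-- 'fromBounds' agree on the dead:be80::/25 block used in the examples above;
-- 'exampleBlock' and 'exampleBlock'' are hypothetical bindings.
exampleBlock, exampleBlock' :: IPv6Range
exampleBlock  = range (ipv6 0xdead 0xbe80 0 0 0 0 0 0) 25
exampleBlock' = fromBounds (ipv6 0xdead 0xbe80 0 0 0 0 0 0)
                           (ipv6 0xdead 0xbeff 0xffff 0xffff 0xffff 0xffff 0xffff 0xffff)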
|
6ba982acce4f2b91e9ed146edbd5671c22d7f5329459368d48423a7244275975 | franzinc/clim2 | graphics.lisp | -*- Mode : Lisp ; Syntax : ANSI - Common - Lisp ; Package : SILICA ; Base : 10 ; Lowercase : Yes -*-
;; See the file LICENSE for the full license governing this code.
;;
(in-package :silica)
" Copyright ( c ) 1990 , 1991 , 1992 Symbolics , Inc. All rights reserved .
Portions copyright ( c ) 1991 , 1992 Franz , Inc. All rights reserved . "
(eval-when (compile load eval)
;; NOTE: if you change this list of keywords, you also have to change the keyword arguments
;; accepted by (CLOS:METHOD INVOKE-WITH-DRAWING-OPTIONS (DRAWING-STATE-MIXIN T))
(defparameter *all-drawing-options*
'(:ink :clipping-region :transformation
:line-style :line-unit :line-thickness :line-dashes
:line-joint-shape :line-cap-shape
:text-style :text-family :text-face :text-size))
(defparameter *always-meaningful-drawing-options* '(:ink :clipping-region :transformation))
(defparameter *drawing-option-subsets*
'((:point :line-style :line-thickness :line-unit)
(:line-cap :line-style :line-thickness :line-unit
:line-dashes :line-cap-shape)
(:line-joint :line-style :line-thickness :line-unit
:line-dashes :line-joint-shape)
(:line-joint-cap :line-style :line-thickness :line-unit
:line-dashes :line-joint-shape :line-cap-shape)
(:text :text-style :text-family :text-face :text-size)
(:pixmap )))
(defun non-drawing-option-keywords (arglist)
(do ((l (cdr (member '&key arglist)) (cdr l))
(non-drawing-option-keywords nil)
k)
((null l) non-drawing-option-keywords)
(setq k (cond ((atom (car l)) (intern (symbol-name (car l)) :keyword))
((atom (caar l)) (intern (symbol-name (caar l)) :keyword))
(t (caaar l))))
(unless (member k *all-drawing-options*)
(push k non-drawing-option-keywords))))
Caller must stick &key in front
;;; If drawing-options isn't nil, it's a list of the option keywords accepted.
(defun all-drawing-options-lambda-list (drawing-options)
(mapcar #'(lambda (keyword) (intern (symbol-name keyword)))
(cond ((null drawing-options) *all-drawing-options*)
((atom drawing-options)
(append (let ((x (assoc drawing-options *drawing-option-subsets*)))
(unless x
(warn "~S was specified in :drawing-options but is not ~
a known drawing-option subset."
drawing-options))
(cdr x))
*always-meaningful-drawing-options*))
(t
(dolist (option drawing-options)
(unless (member option *all-drawing-options*)
(warn "~S was specified in :drawing-options but ~
is not a known drawing option."
option)))
(append drawing-options *always-meaningful-drawing-options*)))))
) ;eval-when
(eval-when (compile load eval)
(defun write-graphics-function-transformer (name
medium-graphics-function-name
unspread-argument-names
spread-arguments
spread-name
spread-argument-names
drawing-options
unspread-other-keyword-arguments
other-keyword-arguments
arguments
keyword-arguments-to-spread)
(declare (ignore spread-arguments))
(list
`(define-compiler-macro ,spread-name
(&whole form medium-or-stream ,@spread-argument-names
&rest drawing-options-and-keyword-arguments)
(or (transform-graphics-function-call
medium-or-stream
',medium-graphics-function-name
',drawing-options
',other-keyword-arguments
(list ,@spread-argument-names)
drawing-options-and-keyword-arguments)
form))
`(define-compiler-macro ,name
(&whole form medium-or-stream ,@unspread-argument-names
&rest drawing-options-and-keyword-arguments)
(or (transform-graphics-function-call
medium-or-stream
',medium-graphics-function-name
',drawing-options
',unspread-other-keyword-arguments
(list ,@unspread-argument-names)
drawing-options-and-keyword-arguments
',arguments
',keyword-arguments-to-spread)
form))))
(defun generate-argument-spreading-code (x)
(if (consp x)
(destructuring-bind (argname type . names) x
(ecase type
(point-sequence
(destructuring-bind (new-name) names
(values argname
(list `(spread-point-sequence ,argname))
(list new-name))))
(point
(destructuring-bind (x y) names
(values argname
(list `(point-x ,argname)
`(point-y ,argname))
(list x y))))))
(values x (list x) (list x))))
(defun decode-graphics-function-arguments (arguments keyword-arguments-to-spread)
(let* ((keyn (position '&key arguments))
(no-keyword (subseq arguments 0 keyn))
(keyword (and keyn (subseq arguments (1+ keyn))))
unspread-argument-names
spread-arguments
spread-argument-names)
(dolist (x no-keyword)
(multiple-value-bind (argname spread-args spread-values)
(generate-argument-spreading-code x)
(push argname unspread-argument-names)
(dolist (x spread-args) (push x spread-arguments))
(dolist (x spread-values) (push x spread-argument-names))))
(let ((original-keywords keyword)
(new-keywords
(mapcan #'(lambda (x)
(let ((y (assoc (if (consp x) (car x) x)
keyword-arguments-to-spread)))
(if y (copy-list (cddr y)) (list x))))
keyword)))
(values (nreverse unspread-argument-names)
(nreverse spread-arguments)
(nreverse spread-argument-names)
(mapcar #'(lambda (x) (if (consp x) (car x) x)) new-keywords)
original-keywords
new-keywords
(mapcar #'(lambda (x)
(intern (symbol-name (if (consp x) (car x) x)) :keyword))
new-keywords)))))
(defun transform-graphics-function-call (medium-or-stream
medium-graphics-function-name
drawing-options
other-keyword-arguments
required-arguments
rest-argument
&optional arguments keyword-arguments-to-spread)
(let ((drawing-options
(mapcar #'(lambda (x)
(intern (symbol-name x) :keyword))
drawing-options)))
(flet ((kw-arg-keyword (x)
(intern (symbol-name (if (consp x) (car x) x)) :keyword))
(kw-arg-default-value (x)
(and (consp x) (second x))))
(when (do ((args rest-argument (cddr args)))
(nil)
(cond ((null args) (return t))
((null (cdr args)) (return nil))
((not (or (member (car args) drawing-options)
(dolist (arg other-keyword-arguments)
(when (eq (kw-arg-keyword arg) (car args))
(return t)))))
(return nil))))
(let ((bindings nil))
(when arguments
(setq required-arguments
(mapcan #'(lambda (arg req-arg)
(let ((g (gensym)))
(push (list g req-arg) bindings)
(if (consp arg)
(multiple-value-bind (name spread)
(generate-argument-spreading-code
(cons g (cdr arg)))
(declare (ignore name))
spread)
(list g))))
arguments
required-arguments))
(setq bindings (nreverse bindings)))
(let* ((stuff
(do ((args rest-argument (cddr args))
(result nil))
((null args)
(nreverse result))
(let ((kw (car args))
(value (cadr args)))
(push (list kw (gensymbol kw) value) result))))
(medium-or-stream-name (gensymbol 'medium))
(call
`(,medium-graphics-function-name
,medium-or-stream-name
,@required-arguments
,@(mapcan #'(lambda (kw-arg)
(let ((v (or (second (assoc (kw-arg-keyword kw-arg) stuff))
(kw-arg-default-value kw-arg)))
(ks (assoc kw-arg keyword-arguments-to-spread)))
(if ks
(ecase (second ks)
(point (list `(and ,v (point-x ,v))
`(and ,v (point-y ,v)))))
(list v))))
other-keyword-arguments)))
(supplied-drawing-options
(mapcan #'(lambda (do)
(let ((x (assoc do stuff)))
(and x (list do (second x)))))
drawing-options)))
`(let ((,medium-or-stream-name ,medium-or-stream))
(let ,bindings
(let ,(mapcar #'(lambda (x)
(list (second x) (third x)))
stuff)
,(if supplied-drawing-options
`(with-drawing-options
(,medium-or-stream-name ,@supplied-drawing-options)
,call)
call))))))))))
) ;eval-when
;; Modifies the positions
(defmacro transform-positions (transform &body positions)
(when positions
(assert (evenp (length positions)) ()
"Positions must be x/y pairs, but there are an odd number of elements in ~S"
positions)
(let ((xform '#:transform))
`(let ((,xform ,transform))
(unless (eq ,xform +identity-transformation+)
,@(do* ((positions positions (cddr positions))
(x (first positions) (first positions))
(y (second positions) (second positions))
(forms nil))
((null positions) (nreverse forms))
(push `(multiple-value-setq (,x ,y)
(transform-position ,xform ,x ,y))
forms)))))))
;; Modifies the distances
(defmacro transform-distances (transform &body distances)
(when distances
(assert (evenp (length distances)) ()
"Distances must be dx/dy pairs, but there are an odd number of elements in ~S"
distances)
(let ((xform '#:transform))
`(let ((,xform ,transform))
(unless (eq ,xform +identity-transformation+)
,@(do* ((distances distances (cddr distances))
(dx (first distances) (first distances))
(dy (second distances) (second distances))
(forms nil))
((null distances) (nreverse forms))
(push `(multiple-value-setq (,dx ,dy)
(transform-distance ,xform ,dx ,dy))
forms)))))))
(defun map-position-sequence (function positions)
(declare (dynamic-extent function))
(if (listp positions)
(loop
(when (null positions) (return))
(let* ((x (pop positions))
(y (pop positions)))
(funcall function x y)))
(let ((length (length positions))
#+Genera (positions positions))
(declare (type vector positions))
(do ((i 0 (+ i 2)))
((>= i length))
(funcall function (aref positions i) (aref positions (1+ i))))))
nil)
(defun map-endpoint-sequence (function positions)
(declare (dynamic-extent function))
(let ((lastx nil) (lasty nil))
(cond ((listp positions)
(setq lastx (pop positions))
(setq lasty (pop positions))
(loop
(when (null positions) (return))
(let* ((x (pop positions))
(y (pop positions)))
(funcall function lastx lasty x y)
(setq lastx x lasty y))))
(t
(let ((length (length positions))
(i 0))
(declare (type vector positions) (fixnum i))
(assert (evenp length))
(setq lastx (aref positions i))
(setq lasty (aref positions (1+ i)))
(incf i 2)
(loop
(when (>= i length) (return))
(let* ((x (aref positions i))
(y (aref positions (1+ i))))
(funcall function lastx lasty x y)
(setq lastx x lasty y)
(incf i 2))))))
nil))
;; Transforms all of the positions in the sequence. This returns the
;; original sequence if the transformation is the identity and COPY-P
;; is false, otherwise it returns a new vector containing the result.
(defun transform-position-sequence (transform positions &optional copy-p)
(if (eq transform +identity-transformation+)
(if copy-p
(make-array (length positions) :initial-contents positions)
positions)
(let* ((length (length positions))
(result (make-array length)))
(declare (simple-vector result)
(optimize (speed 3) (safety 0)))
(assert (evenp length) ()
"Positions sequences must be x/y pairs, but there are an odd number of elements in ~S"
positions)
;; Inline MAP-POSITION-SEQUENCE for speed...
(if (listp positions)
(let ((i -1))
(loop
(when (null positions) (return))
(let* ((x (pop positions))
(y (pop positions)))
(multiple-value-setq (x y)
(transform-position transform x y))
(setf (svref result (incf i)) x
(svref result (incf i)) y))))
(let (#+Genera (positions positions))
(declare (type vector positions))
(do ((i 0 (+ 2 i)))
((= i length))
(multiple-value-bind (x y)
(transform-position transform
(aref positions i) (aref positions (1+ i)))
(setf (svref result i) x
(svref result (1+ i)) y)))))
result)))
(defun spread-point-sequence (sequence)
(declare (optimize (speed 3) (safety 0)))
(let* ((length (length sequence))
(result (make-array (* 2 length)))
(i -1))
(doseq (point sequence)
(setf (svref result (incf i)) (point-x point))
(setf (svref result (incf i)) (point-y point)))
result))
(defmacro define-graphics-generic (name arguments
&rest args
&key keywords-to-spread
drawing-options
optional-positions-to-transform
positions-to-transform
distances-to-transform
position-sequences-to-transform
medium-method-body)
(let* ((spread-name (fintern "~A*" name))
(continuation-name (fintern "~A-~A*" 'call name))
(drawing-options
(all-drawing-options-lambda-list drawing-options))
(medium-graphics-function-name
(fintern "~A~A*" 'medium- name)))
(multiple-value-bind (unspread-argument-names spread-arguments
spread-argument-names keyword-argument-names
unspread-other-keyword-arguments
other-keyword-arguments keywords)
(decode-graphics-function-arguments arguments keywords-to-spread)
`(progn
(defun ,name (medium ,@unspread-argument-names &rest args
&key ,@drawing-options ,@unspread-other-keyword-arguments)
(declare (ignore ,@drawing-options ,@keyword-argument-names)
(dynamic-extent args))
,(if keywords-to-spread
`(with-keywords-removed
(args args ',(mapcar #'(lambda (x)
(intern (symbol-name (car x)) :keyword))
keywords-to-spread))
(apply #',spread-name
medium
,@spread-arguments
,@(mapcan
#'(lambda (x)
(destructuring-bind (name type . rest) x
(ecase type
(point
(list (intern (symbol-name (first rest)) :keyword)
`(and ,name (point-x ,name))
(intern (symbol-name (second rest)) :keyword)
`(and ,name (point-y ,name)))))))
keywords-to-spread)
args))
`(apply #',spread-name
medium
,@spread-arguments
args)))
(defun ,spread-name (medium ,@spread-argument-names &rest args
&key ,@drawing-options ,@other-keyword-arguments)
(declare (ignore ,@drawing-options)
(dynamic-extent args))
,(if keywords
`(with-keywords-removed (args args ',keywords)
(flet ((,continuation-name ()
(,medium-graphics-function-name
medium
,@spread-argument-names
,@keyword-argument-names)))
(declare (dynamic-extent #',continuation-name))
(apply #'invoke-with-drawing-options
medium #',continuation-name args)))
`(flet ((,continuation-name ()
(,medium-graphics-function-name
medium
,@spread-argument-names
,@keyword-argument-names)))
(declare (dynamic-extent #',continuation-name))
(apply #'invoke-with-drawing-options
medium #',continuation-name args))))
(setf (get ',name 'args)
'((,@spread-argument-names ,@keyword-argument-names)
,@args))
(defmethod ,medium-graphics-function-name
((sheet basic-sheet) ,@spread-argument-names ,@keyword-argument-names)
#+Genera (declare (sys:function-parent ,name define-graphics-generic))
(with-sheet-medium (medium sheet)
(,medium-graphics-function-name medium
,@spread-argument-names
,@keyword-argument-names)))
(defmethod ,medium-graphics-function-name
((sheet permanent-medium-sheet-output-mixin)
,@spread-argument-names ,@keyword-argument-names)
#+Genera (declare (sys:function-parent ,name define-graphics-generic))
(,medium-graphics-function-name (sheet-medium sheet)
,@spread-argument-names
,@keyword-argument-names))
(defmethod ,medium-graphics-function-name :around
((medium basic-medium) ,@spread-argument-names ,@keyword-argument-names)
#+Genera (declare (sys:function-parent ,name define-graphics-generic))
;; Want to transform stuff, set up clipping region etc etc
,(or medium-method-body
`(progn
,(and positions-to-transform
(do ((pts positions-to-transform (cddr pts))
(tf '#:transform)
(r nil))
((null pts)
`(let ((,tf (medium-transformation medium)))
,@(nreverse r)))
(let ((b `(transform-positions
,tf ,(first pts) ,(second pts))))
(if (member (car pts) optional-positions-to-transform)
(push `(when ,(car pts) ,b) r)
(push b r)))))
,@(and distances-to-transform
`((transform-distances
(medium-transformation medium)
,@distances-to-transform)))
,@(mapcar #'(lambda (seq)
`(setq ,seq (transform-position-sequence
(medium-transformation medium) ,seq)))
position-sequences-to-transform)
(call-next-method medium
,@spread-argument-names ,@keyword-argument-names))))
,@(write-graphics-function-transformer
name
medium-graphics-function-name
unspread-argument-names
spread-arguments
spread-name
spread-argument-names
drawing-options
unspread-other-keyword-arguments
other-keyword-arguments
arguments
keywords-to-spread)))))
(defun get-drawing-function-description (name)
(or (get name 'args)
(error "Cannot find description for: ~S" name)))
(define-graphics-generic draw-polygon ((points point-sequence position-seq)
&key (closed t) (filled t))
:drawing-options :line-joint-cap
:position-sequences-to-transform (position-seq))
(define-graphics-generic draw-point ((point point x y))
:drawing-options :point
:positions-to-transform (x y))
(define-graphics-generic draw-points ((points point-sequence position-seq))
:drawing-options :point
:position-sequences-to-transform (position-seq))
(define-graphics-generic draw-line ((point1 point x1 y1)
(point2 point x2 y2))
:drawing-options :line-cap
:positions-to-transform (x1 y1 x2 y2))
(define-graphics-generic draw-lines ((points point-sequence position-seq))
:drawing-options :line-cap
:position-sequences-to-transform (position-seq))
(defun draw-arrow* (medium x1 y1 x2 y2
&rest args
&key (from-head nil) (to-head t) (head-length 10) (head-width 5)
&allow-other-keys)
(declare (dynamic-extent args))
(declare (arglist medium x1 y1 x2 y2
&rest args
&key (from-head nil) (to-head t) (head-length 10) (head-width 5)
. #.(all-drawing-options-lambda-list :line-cap)))
(flet ((draw-arrow ()
(let* ((dx (- x2 x1))
(dy (- y2 y1))
(norm (if (zerop dx)
(if (zerop dy)
nil
(/ 1.0 (abs dy)))
(if (zerop dy)
(/ 1.0 (abs dx))
(/ (sqrt (+ (* dx dx) (* dy dy))))))))
(when norm
(let* ((length-norm (* head-length norm))
(ldx (* dx length-norm))
(ldy (* dy length-norm))
(base-norm (* head-width norm 0.5))
(bdx (* dy base-norm))
(bdy (* dx base-norm)))
(draw-line* medium x1 y1 x2 y2)
(when from-head
(let ((xa (+ x1 ldx)) (ya (+ y1 ldy)))
(with-stack-list (points x1 y1
(+ xa bdx) (- ya bdy)
(- xa bdx) (+ ya bdy))
(draw-polygon* medium points :filled t))
(setq x1 xa y1 ya)))
(when to-head
(let ((xa (- x2 ldx)) (ya (- y2 ldy)))
(with-stack-list (points x2 y2
(+ xa bdx) (- ya bdy)
(- xa bdx) (+ ya bdy))
(draw-polygon* medium points :filled t)
(setq x2 xa y2 ya)))))))))
(declare (dynamic-extent #'draw-arrow))
(with-keywords-removed (options args
'(:from-head :to-head :head-length :head-width))
(apply #'invoke-with-drawing-options medium #'draw-arrow options))))
(defun draw-arrow (medium point1 point2 &rest args)
(declare (dynamic-extent args))
(declare (arglist medium x1 y1 x2 y2
&rest args
&key (from-head nil) (to-head t) (head-length 10) (head-width 5)
. #.(all-drawing-options-lambda-list :line-cap)))
(apply #'draw-arrow*
medium (point-x point1) (point-y point1) (point-x point2) (point-y point2) args))
(define-graphics-generic draw-rectangle ((point1 point x1 y1)
(point2 point x2 y2)
&key (filled t))
:drawing-options :line-joint
:positions-to-transform (x1 y1 x2 y2)
:medium-method-body
(let ((transform (medium-transformation medium)))
(cond ((rectilinear-transformation-p transform)
(transform-positions transform x1 y1 x2 y2)
(call-next-method medium x1 y1 x2 y2 filled))
(t
;;--- Massively inefficient
(with-stack-list (list x1 y1 x2 y1 x2 y2 x1 y2)
(medium-draw-polygon* medium list t filled))))))
(define-graphics-generic draw-rectangles ((points point-sequence position-seq)
&key (filled t))
:drawing-options :line-joint
:position-sequences-to-transform (position-seq)
:medium-method-body
(let ((transform (medium-transformation medium)))
(cond ((rectilinear-transformation-p transform)
(setq position-seq (transform-position-sequence transform position-seq))
(call-next-method medium position-seq filled))
(t
(medium-draw-transformed-rectangles* medium position-seq filled)))))
(defun medium-draw-transformed-rectangles* (medium position-seq filled)
(let ((len (length position-seq)))
(assert (zerop (mod len 4)))
(macrolet ((draw-one (x1 y1 x2 y2)
`(let ((x1 ,x1)
(y1 ,y1)
(x2 ,x2)
(y2 ,y2))
(with-stack-list (list x1 y1 x2 y1 x2 y2 x1 y2)
(medium-draw-polygon* medium list t filled)))))
(if (listp position-seq)
(do ((position-seq position-seq))
((null position-seq))
(draw-one (pop position-seq) (pop position-seq)
(pop position-seq) (pop position-seq)))
(do ((i 0 (+ i 4)))
((= i len))
(draw-one (aref position-seq i)
(aref position-seq (+ 1 i))
(aref position-seq (+ 2 i))
(aref position-seq (+ 3 i))))))))
;; DRAW-PATTERN* is a special case of DRAW-RECTANGLE*, believe it or not...
(defun draw-pattern* (medium pattern x y &key clipping-region transformation)
(check-type pattern pattern)
(let ((width (pattern-width pattern))
(height (pattern-height pattern)))
(if (or clipping-region transformation)
(with-drawing-options (medium :clipping-region clipping-region
:transformation transformation
:ink pattern)
(draw-rectangle* medium x y (+ x width) (+ y height)
:filled t))
(with-drawing-options (medium :ink pattern)
(draw-rectangle* medium x y (+ x width) (+ y height)
:filled t)))))
(defun draw-regular-polygon* (medium x1 y1 x2 y2 nsides
&rest args &key (handedness :left) (closed t) &allow-other-keys)
(declare (dynamic-extent args))
(declare (arglist medium x1 y1 x2 y2 nsides
&rest args
&key (filled t) (handedness :left) (closed t)
. #.(all-drawing-options-lambda-list :line-joint-cap)))
(let* ((theta (* (float (* pi (/ 2.0 nsides)) 0.0)
(ecase handedness
(:left +1)
(:right -1))))
(transform (make-rotation-transformation theta))
(coordinates (list x1 y1 x2 y2))
(dx (- x2 x1))
(dy (- y2 y1))
(next-x x2)
(next-y y2))
(repeat (- nsides 2)
(multiple-value-setq (dx dy)
(transform-distance transform dx dy))
(incf next-x dx)
(incf next-y dy)
(setq coordinates (nconc coordinates (list next-x next-y))))
(when closed
(setq coordinates (nconc coordinates (list x1 y1))))
(with-keywords-removed (args args '(:handedness))
(apply #'draw-polygon* medium coordinates args))))
(defun draw-regular-polygon (medium point1 point2 nsides &rest args)
(declare (dynamic-extent args))
(declare (arglist medium point1 point2 nsides
&rest args
&key (handedness :left) (closed t) (filled t)
. #.(all-drawing-options-lambda-list :line-joint-cap)))
(apply #'draw-regular-polygon* medium
(point-x point1) (point-y point1)
(point-x point2) (point-y point2)
nsides args))
(defun draw-triangle (medium p1 p2 p3 &rest args)
(declare (dynamic-extent args))
(declare (arglist medium p1 p2 p3
&rest args
&key (filled t) . #.(all-drawing-options-lambda-list :line-joint)))
(with-stack-list (points p1 p2 p3)
(apply #'draw-polygon medium points :closed t args)))
(defun draw-triangle* (medium x1 y1 x2 y2 x3 y3 &rest args)
(declare (dynamic-extent args))
(declare (arglist medium x1 y1 x2 y2 x3 y3
&rest args
&key (filled t) . #.(all-drawing-options-lambda-list :line-joint)))
(with-stack-list (points x1 y1 x2 y2 x3 y3)
(apply #'draw-polygon* medium points :closed t args)))
(define-graphics-generic draw-ellipse ((center point center-x center-y)
radius-1-dx radius-1-dy
radius-2-dx radius-2-dy
&key (start-angle 0) (end-angle 2pi)
(filled t))
:drawing-options :line-cap
:positions-to-transform (center-x center-y)
:distances-to-transform (radius-1-dx radius-1-dy radius-2-dx radius-2-dy))
(defun draw-circle (medium center radius &rest args)
(declare (dynamic-extent args))
(declare (arglist medium center radius
&rest args
&key start-angle end-angle (filled t)
. #.(all-drawing-options-lambda-list :line-cap)))
(apply #'draw-ellipse medium center radius 0 0 radius args))
(define-compiler-macro draw-circle (medium center radius &rest args)
(let ((gm (gensymbol 'medium))
(gc (gensymbol 'center))
(gr (gensymbol 'radius)))
`(let ((,gm ,medium)
(,gc ,center)
(,gr ,radius))
(draw-ellipse ,gm ,gc ,gr 0 0 ,gr ,@args))))
(defun draw-circle* (medium center-x center-y radius &rest args)
(declare (dynamic-extent args))
(declare (arglist medium center-x center-y radius
&rest args
&key start-angle end-angle (filled t)
. #.(all-drawing-options-lambda-list :line-cap)))
(apply #'draw-ellipse* medium center-x center-y radius 0 0 radius args))
(define-compiler-macro draw-circle* (medium center-x center-y radius &rest args)
(let ((gm (gensymbol 'medium))
(gx (gensymbol 'x))
(gy (gensymbol 'y))
(gr (gensymbol 'radius)))
`(let ((,gm ,medium)
(,gx ,center-x)
(,gy ,center-y)
(,gr ,radius))
(draw-ellipse* ,gm ,gx ,gy ,gr 0 0 ,gr ,@args))))
(defun draw-oval* (medium center-x center-y x-radius y-radius
&rest args &key (filled t) &allow-other-keys)
(declare (dynamic-extent args))
(declare (arglist medium center-x center-y x-radius y-radius
&rest args
. #.(all-drawing-options-lambda-list :line-cap)))
(flet ((draw-oval ()
(let ((left (- center-x x-radius))
(right (+ center-x x-radius))
(top (- center-y y-radius))
(bottom (+ center-y y-radius)))
(cond ((or (= x-radius y-radius)
(zerop x-radius))
(draw-ellipse* medium center-x center-y y-radius 0 0 y-radius
:filled filled))
((zerop y-radius)
(draw-ellipse* medium center-x center-y x-radius 0 0 x-radius
:filled filled))
((> x-radius y-radius)
(let ((rect-left (+ left y-radius))
(rect-right (- right y-radius)))
(cond (filled
(draw-rectangle* medium rect-left top rect-right bottom))
(t
(draw-line* medium rect-left top rect-right top)
(draw-line* medium rect-left bottom rect-right bottom)))
(let ((north (float (* pi 1/2) 0.0))
(south (float (* pi 3/2) 0.0)))
(draw-ellipse* medium rect-left center-y y-radius 0 0 y-radius
:start-angle north :end-angle south
:filled filled)
(draw-ellipse* medium rect-right center-y y-radius 0 0 y-radius
:start-angle south :end-angle north
:filled filled))))
(t
(let ((rect-top (+ top x-radius))
(rect-bottom (- bottom x-radius)))
(cond (filled
(draw-rectangle* medium left rect-top right rect-bottom))
(t
(draw-line* medium left rect-top left rect-bottom)
(draw-line* medium right rect-top right rect-bottom)))
(let ((east 0.0)
(west (float pi 0.0)))
(draw-ellipse* medium center-x rect-top x-radius 0 0 x-radius
:start-angle east :end-angle west
:filled filled)
(draw-ellipse* medium center-x rect-bottom x-radius 0 0 x-radius
:start-angle west :end-angle east
:filled filled))))))))
(declare (dynamic-extent #'draw-oval))
(apply #'invoke-with-drawing-options medium #'draw-oval args)))
(defun draw-oval (medium center x-radius y-radius &rest args)
(declare (dynamic-extent args))
(declare (arglist medium point x-radius y-radius
&rest args
. #.(all-drawing-options-lambda-list :line-cap)))
(apply #'draw-oval*
medium (point-x center) (point-y center) x-radius y-radius args))
(define-graphics-generic draw-text (string-or-char (point point x y)
&key (start 0) (end nil)
(align-x :left) (align-y :baseline)
towards-point transform-glyphs)
:positions-to-transform (x y towards-x towards-y)
:optional-positions-to-transform (towards-x towards-y)
:keywords-to-spread ((towards-point point towards-x towards-y))
:drawing-options :text)
;; Some mediums can do better than this...
;; Note that the coordinates are unaffected by the medium transformation!
(defmethod medium-clear-area ((medium basic-medium) left top right bottom)
(letf-globally (((medium-ink medium) +background-ink+)
((medium-transformation medium) +identity-transformation+))
(medium-draw-rectangle* medium left top right bottom t)))
Cubic splines and curves
(define-graphics-generic draw-bezier-curve ((points point-sequence position-seq)
&key (filled nil))
:drawing-options :line-cap
:position-sequences-to-transform (position-seq))
(defmethod medium-draw-bezier-curve* ((medium basic-medium) position-seq filled)
(let* ((npoints (length position-seq))
(last (1- npoints))
(new-points (cons nil nil))
(head new-points)
(distance 1))
(assert (zerop (mod (- (/ npoints 2) 4) 3)))
(flet ((collect (x y)
(let ((more (list x y)))
(setf (cdr new-points) more
new-points (cdr more)))))
(declare (dynamic-extent #'collect))
(collect (elt position-seq 0) (elt position-seq 1))
(do ((i 0 (+ i 6)))
((= i (1- last)))
(render-bezier-curve #'collect
(elt position-seq i) (elt position-seq (+ 1 i))
(elt position-seq (+ 2 i)) (elt position-seq (+ 3 i))
(elt position-seq (+ 4 i)) (elt position-seq (+ 5 i))
(elt position-seq (+ 6 i)) (elt position-seq (+ 7 i))
distance)
(collect (elt position-seq (+ 6 i)) (elt position-seq (+ 7 i)))))
(with-identity-transformation (medium)
(medium-draw-polygon* medium (cdr head) nil filled))))
(defun render-bezier-curve (function x0 y0 x1 y1 x2 y2 x3 y3 distance)
(flet ((split-bezier-curve (x0 y0 x1 y1 x2 y2 x3 y3)
;; We should write a matrix multiplication macro
(values
The first 1/2
x0 y0
(+ (/ x0 2) (/ x1 2)) (+ (/ y0 2) (/ y1 2))
(+ (/ x0 4) (/ x1 2) (/ x2 4)) (+ (/ y0 4) (/ y1 2) (/ y2 4))
(+ (* x0 1/8) (* x1 3/8) (* x2 3/8) (* x3 1/8))
(+ (* y0 1/8) (* y1 3/8) (* y2 3/8) (* y3 1/8))
The second 1/2
(+ (* x0 1/8) (* x1 3/8) (* x2 3/8) (* x3 1/8))
(+ (* y0 1/8) (* y1 3/8) (* y2 3/8) (* y3 1/8))
(+ (/ x1 4) (/ x2 2) (/ x3 4)) (+ (/ y1 4) (/ y2 2) (/ y3 4))
(+ (/ x2 2) (/ x3 2)) (+ (/ y2 2) (/ y3 2))
x3 y3))
(distance-from-line (x0 y0 x1 y1 x y)
(let* ((dx (- x1 x0))
(dy (- y1 y0))
(r-p-x (- x x0))
(r-p-y (- y y0))
(dot-v (+ (* dx dx) (* dy dy)))
(dot-r-v (+ (* r-p-x dx) (* r-p-y dy)))
(closest-x (+ x0 (* (/ dot-r-v dot-v) dx)))
(closest-y (+ y0 (* (/ dot-r-v dot-v) dy))))
(let ((ax (- x closest-x))
(ay (- y closest-y)))
(values (+ (* ax ax) (* ay ay)) closest-x closest-y)))))
(declare (dynamic-extent #'split-bezier-curve #'distance-from-line))
(let ((d1 (distance-from-line x0 y0 x3 y3 x1 y1))
(d2 (distance-from-line x0 y0 x3 y3 x2 y2)))
(if (and (< d1 distance) (< d2 distance))
nil
(multiple-value-bind (x00 y00 x10 y10 x20 y20 x30 y30
x01 y01 x11 y11 x21 y21 x31 y31)
(split-bezier-curve x0 y0 x1 y1 x2 y2 x3 y3)
(render-bezier-curve function x00 y00 x10 y10 x20 y20 x30 y30 distance)
(funcall function x30 y30)
(render-bezier-curve function x01 y01 x11 y11 x21 y21 x31 y31 distance))))))
(define-graphics-generic draw-pixmap (pixmap (point point x y)
&key (function boole-1))
:positions-to-transform (x y)
:drawing-options :pixmap)
| null | https://raw.githubusercontent.com/franzinc/clim2/e8d03da80e1f000be40c37d088e283d95365bfdd/silica/graphics.lisp | lisp | Syntax : ANSI - Common - Lisp ; Package : SILICA ; Base : 10 ; Lowercase : Yes -*-
See the file LICENSE for the full license governing this code.
NOTE: if you change this list of keywords, you also have to change the keyword arguments
accepted by (CLOS:METHOD INVOKE-WITH-DRAWING-OPTIONS (DRAWING-STATE-MIXIN T))
If drawing-options isn't nil, it's a list of the option keywords accepted.
eval-when
eval-when
Modifies the positions
Modifies the distances
Transforms all of the positions in the sequence. This returns the
original sequence if the transformation is the identity and COPY-P
is false, otherwise it returns a new vector containing the result.
Inline MAP-POSITION-SEQUENCE for speed...
Want to transform stuff, set up clipping region etc etc
--- Massively inefficient
DRAW-PATTERN* is a special case of DRAW-RECTANGLE*, believe it or not...
Some mediums can do better than this...
Note that the coordinates are unaffected by the medium transformation!
We should write a matrix multiplication macro |
(in-package :silica)
" Copyright ( c ) 1990 , 1991 , 1992 Symbolics , Inc. All rights reserved .
Portions copyright ( c ) 1991 , 1992 Franz , Inc. All rights reserved . "
(eval-when (compile load eval)
(defparameter *all-drawing-options*
'(:ink :clipping-region :transformation
:line-style :line-unit :line-thickness :line-dashes
:line-joint-shape :line-cap-shape
:text-style :text-family :text-face :text-size))
(defparameter *always-meaningful-drawing-options* '(:ink :clipping-region :transformation))
(defparameter *drawing-option-subsets*
'((:point :line-style :line-thickness :line-unit)
(:line-cap :line-style :line-thickness :line-unit
:line-dashes :line-cap-shape)
(:line-joint :line-style :line-thickness :line-unit
:line-dashes :line-joint-shape)
(:line-joint-cap :line-style :line-thickness :line-unit
:line-dashes :line-joint-shape :line-cap-shape)
(:text :text-style :text-family :text-face :text-size)
(:pixmap )))
(defun non-drawing-option-keywords (arglist)
(do ((l (cdr (member '&key arglist)) (cdr l))
(non-drawing-option-keywords nil)
k)
((null l) non-drawing-option-keywords)
(setq k (cond ((atom (car l)) (intern (symbol-name (car l)) :keyword))
((atom (caar l)) (intern (symbol-name (caar l)) :keyword))
(t (caaar l))))
(unless (member k *all-drawing-options*)
(push k non-drawing-option-keywords))))
;; Caller must stick &key in front
(defun all-drawing-options-lambda-list (drawing-options)
(mapcar #'(lambda (keyword) (intern (symbol-name keyword)))
(cond ((null drawing-options) *all-drawing-options*)
((atom drawing-options)
(append (let ((x (assoc drawing-options *drawing-option-subsets*)))
(unless x
(warn "~S was specified in :drawing-options but is not ~
a known drawing-option subset."
drawing-options))
(cdr x))
*always-meaningful-drawing-options*))
(t
(dolist (option drawing-options)
(unless (member option *all-drawing-options*)
(warn "~S was specified in :drawing-options but ~
is not a known drawing option."
option)))
(append drawing-options *always-meaningful-drawing-options*)))))
(eval-when (compile load eval)
(defun write-graphics-function-transformer (name
medium-graphics-function-name
unspread-argument-names
spread-arguments
spread-name
spread-argument-names
drawing-options
unspread-other-keyword-arguments
other-keyword-arguments
arguments
keyword-arguments-to-spread)
(declare (ignore spread-arguments))
(list
`(define-compiler-macro ,spread-name
(&whole form medium-or-stream ,@spread-argument-names
&rest drawing-options-and-keyword-arguments)
(or (transform-graphics-function-call
medium-or-stream
',medium-graphics-function-name
',drawing-options
',other-keyword-arguments
(list ,@spread-argument-names)
drawing-options-and-keyword-arguments)
form))
`(define-compiler-macro ,name
(&whole form medium-or-stream ,@unspread-argument-names
&rest drawing-options-and-keyword-arguments)
(or (transform-graphics-function-call
medium-or-stream
',medium-graphics-function-name
',drawing-options
',unspread-other-keyword-arguments
(list ,@unspread-argument-names)
drawing-options-and-keyword-arguments
',arguments
',keyword-arguments-to-spread)
form))))
(defun generate-argument-spreading-code (x)
(if (consp x)
(destructuring-bind (argname type . names) x
(ecase type
(point-sequence
(destructuring-bind (new-name) names
(values argname
(list `(spread-point-sequence ,argname))
(list new-name))))
(point
(destructuring-bind (x y) names
(values argname
(list `(point-x ,argname)
`(point-y ,argname))
(list x y))))))
(values x (list x) (list x))))
(defun decode-graphics-function-arguments (arguments keyword-arguments-to-spread)
(let* ((keyn (position '&key arguments))
(no-keyword (subseq arguments 0 keyn))
(keyword (and keyn (subseq arguments (1+ keyn))))
unspread-argument-names
spread-arguments
spread-argument-names)
(dolist (x no-keyword)
(multiple-value-bind (argname spread-args spread-values)
(generate-argument-spreading-code x)
(push argname unspread-argument-names)
(dolist (x spread-args) (push x spread-arguments))
(dolist (x spread-values) (push x spread-argument-names))))
(let ((original-keywords keyword)
(new-keywords
(mapcan #'(lambda (x)
(let ((y (assoc (if (consp x) (car x) x)
keyword-arguments-to-spread)))
(if y (copy-list (cddr y)) (list x))))
keyword)))
(values (nreverse unspread-argument-names)
(nreverse spread-arguments)
(nreverse spread-argument-names)
(mapcar #'(lambda (x) (if (consp x) (car x) x)) new-keywords)
original-keywords
new-keywords
(mapcar #'(lambda (x)
(intern (symbol-name (if (consp x) (car x) x)) :keyword))
new-keywords)))))
(defun transform-graphics-function-call (medium-or-stream
medium-graphics-function-name
drawing-options
other-keyword-arguments
required-arguments
rest-argument
&optional arguments keyword-arguments-to-spread)
(let ((drawing-options
(mapcar #'(lambda (x)
(intern (symbol-name x) :keyword))
drawing-options)))
(flet ((kw-arg-keyword (x)
(intern (symbol-name (if (consp x) (car x) x)) :keyword))
(kw-arg-default-value (x)
(and (consp x) (second x))))
(when (do ((args rest-argument (cddr args)))
(nil)
(cond ((null args) (return t))
((null (cdr args)) (return nil))
((not (or (member (car args) drawing-options)
(dolist (arg other-keyword-arguments)
(when (eq (kw-arg-keyword arg) (car args))
(return t)))))
(return nil))))
(let ((bindings nil))
(when arguments
(setq required-arguments
(mapcan #'(lambda (arg req-arg)
(let ((g (gensym)))
(push (list g req-arg) bindings)
(if (consp arg)
(multiple-value-bind (name spread)
(generate-argument-spreading-code
(cons g (cdr arg)))
(declare (ignore name))
spread)
(list g))))
arguments
required-arguments))
(setq bindings (nreverse bindings)))
(let* ((stuff
(do ((args rest-argument (cddr args))
(result nil))
((null args)
(nreverse result))
(let ((kw (car args))
(value (cadr args)))
(push (list kw (gensymbol kw) value) result))))
(medium-or-stream-name (gensymbol 'medium))
(call
`(,medium-graphics-function-name
,medium-or-stream-name
,@required-arguments
,@(mapcan #'(lambda (kw-arg)
(let ((v (or (second (assoc (kw-arg-keyword kw-arg) stuff))
(kw-arg-default-value kw-arg)))
(ks (assoc kw-arg keyword-arguments-to-spread)))
(if ks
(ecase (second ks)
(point (list `(and ,v (point-x ,v))
`(and ,v (point-y ,v)))))
(list v))))
other-keyword-arguments)))
(supplied-drawing-options
(mapcan #'(lambda (do)
(let ((x (assoc do stuff)))
(and x (list do (second x)))))
drawing-options)))
`(let ((,medium-or-stream-name ,medium-or-stream))
(let ,bindings
(let ,(mapcar #'(lambda (x)
(list (second x) (third x)))
stuff)
,(if supplied-drawing-options
`(with-drawing-options
(,medium-or-stream-name ,@supplied-drawing-options)
,call)
call))))))))))
(defmacro transform-positions (transform &body positions)
(when positions
(assert (evenp (length positions)) ()
"Positions must be x/y pairs, but there are an odd number of elements in ~S"
positions)
(let ((xform '#:transform))
`(let ((,xform ,transform))
(unless (eq ,xform +identity-transformation+)
,@(do* ((positions positions (cddr positions))
(x (first positions) (first positions))
(y (second positions) (second positions))
(forms nil))
((null positions) (nreverse forms))
(push `(multiple-value-setq (,x ,y)
(transform-position ,xform ,x ,y))
forms)))))))
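;; TRANSFORM-POSITIONS rewrites each X/Y pair through the given transformation,
;; e.g. (transform-positions (medium-transformation medium) x1 y1 x2 y2) expands
;; into MULTIPLE-VALUE-SETQs of X1 Y1 and X2 Y2, and does nothing when the
;; transformation is +IDENTITY-TRANSFORMATION+.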
(defmacro transform-distances (transform &body distances)
(when distances
(assert (evenp (length distances)) ()
"Distances must be dx/dy pairs, but there are an odd number of elements in ~S"
distances)
(let ((xform '#:transform))
`(let ((,xform ,transform))
(unless (eq ,xform +identity-transformation+)
,@(do* ((distances distances (cddr distances))
(dx (first distances) (first distances))
(dy (second distances) (second distances))
(forms nil))
((null distances) (nreverse forms))
(push `(multiple-value-setq (,dx ,dy)
(transform-distance ,xform ,dx ,dy))
forms)))))))
(defun map-position-sequence (function positions)
(declare (dynamic-extent function))
(if (listp positions)
(loop
(when (null positions) (return))
(let* ((x (pop positions))
(y (pop positions)))
(funcall function x y)))
(let ((length (length positions))
#+Genera (positions positions))
(declare (type vector positions))
(do ((i 0 (+ i 2)))
((>= i length))
(funcall function (aref positions i) (aref positions (1+ i))))))
nil)
(defun map-endpoint-sequence (function positions)
(declare (dynamic-extent function))
(let ((lastx nil) (lasty nil))
(cond ((listp positions)
(setq lastx (pop positions))
(setq lasty (pop positions))
(loop
(when (null positions) (return))
(let* ((x (pop positions))
(y (pop positions)))
(funcall function lastx lasty x y)
(setq lastx x lasty y))))
(t
(let ((length (length positions))
(i 0))
(declare (type vector positions) (fixnum i))
(assert (evenp length))
(setq lastx (aref positions i))
(setq lasty (aref positions (1+ i)))
(incf i 2)
(loop
(when (>= i length) (return))
(let* ((x (aref positions i))
(y (aref positions (1+ i))))
(funcall function lastx lasty x y)
(setq lastx x lasty y)
(incf i 2))))))
nil))
(defun transform-position-sequence (transform positions &optional copy-p)
(if (eq transform +identity-transformation+)
(if copy-p
(make-array (length positions) :initial-contents positions)
positions)
(let* ((length (length positions))
(result (make-array length)))
(declare (simple-vector result)
(optimize (speed 3) (safety 0)))
(assert (evenp length) ()
"Positions sequences must be x/y pairs, but there are an odd number of elements in ~S"
positions)
(if (listp positions)
(let ((i -1))
(loop
(when (null positions) (return))
(let* ((x (pop positions))
(y (pop positions)))
(multiple-value-setq (x y)
(transform-position transform x y))
(setf (svref result (incf i)) x
(svref result (incf i)) y))))
(let (#+Genera (positions positions))
(declare (type vector positions))
(do ((i 0 (+ 2 i)))
((= i length))
(multiple-value-bind (x y)
(transform-position transform
(aref positions i) (aref positions (1+ i)))
(setf (svref result i) x
(svref result (1+ i)) y)))))
result)))
(defun spread-point-sequence (sequence)
(declare (optimize (speed 3) (safety 0)))
(let* ((length (length sequence))
(result (make-array (* 2 length)))
(i -1))
(doseq (point sequence)
(setf (svref result (incf i)) (point-x point))
(setf (svref result (incf i)) (point-y point)))
result))
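;; SPREAD-POINT-SEQUENCE flattens a sequence of POINT objects into a simple
;; vector of alternating coordinates, e.g. a two-point list becomes
;; #(x0 y0 x1 y1); the spread (*-suffixed) drawing functions consume this layout.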
(defmacro define-graphics-generic (name arguments
&rest args
&key keywords-to-spread
drawing-options
optional-positions-to-transform
positions-to-transform
distances-to-transform
position-sequences-to-transform
medium-method-body)
(let* ((spread-name (fintern "~A*" name))
(continuation-name (fintern "~A-~A*" 'call name))
(drawing-options
(all-drawing-options-lambda-list drawing-options))
(medium-graphics-function-name
(fintern "~A~A*" 'medium- name)))
(multiple-value-bind (unspread-argument-names spread-arguments
spread-argument-names keyword-argument-names
unspread-other-keyword-arguments
other-keyword-arguments keywords)
(decode-graphics-function-arguments arguments keywords-to-spread)
`(progn
(defun ,name (medium ,@unspread-argument-names &rest args
&key ,@drawing-options ,@unspread-other-keyword-arguments)
(declare (ignore ,@drawing-options ,@keyword-argument-names)
(dynamic-extent args))
,(if keywords-to-spread
`(with-keywords-removed
(args args ',(mapcar #'(lambda (x)
(intern (symbol-name (car x)) :keyword))
keywords-to-spread))
(apply #',spread-name
medium
,@spread-arguments
,@(mapcan
#'(lambda (x)
(destructuring-bind (name type . rest) x
(ecase type
(point
(list (intern (symbol-name (first rest)) :keyword)
`(and ,name (point-x ,name))
(intern (symbol-name (second rest)) :keyword)
`(and ,name (point-y ,name)))))))
keywords-to-spread)
args))
`(apply #',spread-name
medium
,@spread-arguments
args)))
(defun ,spread-name (medium ,@spread-argument-names &rest args
&key ,@drawing-options ,@other-keyword-arguments)
(declare (ignore ,@drawing-options)
(dynamic-extent args))
,(if keywords
`(with-keywords-removed (args args ',keywords)
(flet ((,continuation-name ()
(,medium-graphics-function-name
medium
,@spread-argument-names
,@keyword-argument-names)))
(declare (dynamic-extent #',continuation-name))
(apply #'invoke-with-drawing-options
medium #',continuation-name args)))
`(flet ((,continuation-name ()
(,medium-graphics-function-name
medium
,@spread-argument-names
,@keyword-argument-names)))
(declare (dynamic-extent #',continuation-name))
(apply #'invoke-with-drawing-options
medium #',continuation-name args))))
(setf (get ',name 'args)
'((,@spread-argument-names ,@keyword-argument-names)
,@args))
(defmethod ,medium-graphics-function-name
((sheet basic-sheet) ,@spread-argument-names ,@keyword-argument-names)
#+Genera (declare (sys:function-parent ,name define-graphics-generic))
(with-sheet-medium (medium sheet)
(,medium-graphics-function-name medium
,@spread-argument-names
,@keyword-argument-names)))
(defmethod ,medium-graphics-function-name
((sheet permanent-medium-sheet-output-mixin)
,@spread-argument-names ,@keyword-argument-names)
#+Genera (declare (sys:function-parent ,name define-graphics-generic))
(,medium-graphics-function-name (sheet-medium sheet)
,@spread-argument-names
,@keyword-argument-names))
(defmethod ,medium-graphics-function-name :around
((medium basic-medium) ,@spread-argument-names ,@keyword-argument-names)
#+Genera (declare (sys:function-parent ,name define-graphics-generic))
,(or medium-method-body
`(progn
,(and positions-to-transform
(do ((pts positions-to-transform (cddr pts))
(tf '#:transform)
(r nil))
((null pts)
`(let ((,tf (medium-transformation medium)))
,@(nreverse r)))
(let ((b `(transform-positions
,tf ,(first pts) ,(second pts))))
(if (member (car pts) optional-positions-to-transform)
(push `(when ,(car pts) ,b) r)
(push b r)))))
,@(and distances-to-transform
`((transform-distances
(medium-transformation medium)
,@distances-to-transform)))
,@(mapcar #'(lambda (seq)
`(setq ,seq (transform-position-sequence
(medium-transformation medium) ,seq)))
position-sequences-to-transform)
(call-next-method medium
,@spread-argument-names ,@keyword-argument-names))))
,@(write-graphics-function-transformer
name
medium-graphics-function-name
unspread-argument-names
spread-arguments
spread-name
spread-argument-names
drawing-options
unspread-other-keyword-arguments
other-keyword-arguments
arguments
keywords-to-spread)))))
(defun get-drawing-function-description (name)
(or (get name 'args)
(error "Cannot find description for: ~S" name)))
(define-graphics-generic draw-polygon ((points point-sequence position-seq)
&key (closed t) (filled t))
:drawing-options :line-joint-cap
:position-sequences-to-transform (position-seq))
(define-graphics-generic draw-point ((point point x y))
:drawing-options :point
:positions-to-transform (x y))
(define-graphics-generic draw-points ((points point-sequence position-seq))
:drawing-options :point
:position-sequences-to-transform (position-seq))
(define-graphics-generic draw-line ((point1 point x1 y1)
(point2 point x2 y2))
:drawing-options :line-cap
:positions-to-transform (x1 y1 x2 y2))
(define-graphics-generic draw-lines ((points point-sequence position-seq))
:drawing-options :line-cap
:position-sequences-to-transform (position-seq))
(defun draw-arrow* (medium x1 y1 x2 y2
&rest args
&key (from-head nil) (to-head t) (head-length 10) (head-width 5)
&allow-other-keys)
(declare (dynamic-extent args))
(declare (arglist medium x1 y1 x2 y2
&rest args
&key (from-head nil) (to-head t) (head-length 10) (head-width 5)
. #.(all-drawing-options-lambda-list :line-cap)))
(flet ((draw-arrow ()
(let* ((dx (- x2 x1))
(dy (- y2 y1))
(norm (if (zerop dx)
(if (zerop dy)
nil
(/ 1.0 (abs dy)))
(if (zerop dy)
(/ 1.0 (abs dx))
(/ (sqrt (+ (* dx dx) (* dy dy))))))))
(when norm
(let* ((length-norm (* head-length norm))
(ldx (* dx length-norm))
(ldy (* dy length-norm))
(base-norm (* head-width norm 0.5))
(bdx (* dy base-norm))
(bdy (* dx base-norm)))
(draw-line* medium x1 y1 x2 y2)
(when from-head
(let ((xa (+ x1 ldx)) (ya (+ y1 ldy)))
(with-stack-list (points x1 y1
(+ xa bdx) (- ya bdy)
(- xa bdx) (+ ya bdy))
(draw-polygon* medium points :filled t))
(setq x1 xa y1 ya)))
(when to-head
(let ((xa (- x2 ldx)) (ya (- y2 ldy)))
(with-stack-list (points x2 y2
(+ xa bdx) (- ya bdy)
(- xa bdx) (+ ya bdy))
(draw-polygon* medium points :filled t)
(setq x2 xa y2 ya)))))))))
(declare (dynamic-extent #'draw-arrow))
(with-keywords-removed (options args
'(:from-head :to-head :head-length :head-width))
(apply #'invoke-with-drawing-options medium #'draw-arrow options))))
(defun draw-arrow (medium point1 point2 &rest args)
(declare (dynamic-extent args))
(declare (arglist medium x1 y1 x2 y2
&rest args
&key (from-head nil) (to-head t) (head-length 10) (head-width 5)
. #.(all-drawing-options-lambda-list :line-cap)))
(apply #'draw-arrow*
medium (point-x point1) (point-y point1) (point-x point2) (point-y point2) args))
(define-graphics-generic draw-rectangle ((point1 point x1 y1)
(point2 point x2 y2)
&key (filled t))
:drawing-options :line-joint
:positions-to-transform (x1 y1 x2 y2)
:medium-method-body
(let ((transform (medium-transformation medium)))
(cond ((rectilinear-transformation-p transform)
(transform-positions transform x1 y1 x2 y2)
(call-next-method medium x1 y1 x2 y2 filled))
(t
(with-stack-list (list x1 y1 x2 y1 x2 y2 x1 y2)
(medium-draw-polygon* medium list t filled))))))
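;; The rectangle method above transforms the two corner points directly only
;; when the medium transformation is rectilinear, i.e. the result is still an
;; axis-aligned rectangle; otherwise it hands the four corners to
;; MEDIUM-DRAW-POLYGON* as a closed polygon.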
(define-graphics-generic draw-rectangles ((points point-sequence position-seq)
&key (filled t))
:drawing-options :line-joint
:position-sequences-to-transform (position-seq)
:medium-method-body
(let ((transform (medium-transformation medium)))
(cond ((rectilinear-transformation-p transform)
(setq position-seq (transform-position-sequence transform position-seq))
(call-next-method medium position-seq filled))
(t
(medium-draw-transformed-rectangles* medium position-seq filled)))))
(defun medium-draw-transformed-rectangles* (medium position-seq filled)
(let ((len (length position-seq)))
(assert (zerop (mod len 4)))
(macrolet ((draw-one (x1 y1 x2 y2)
`(let ((x1 ,x1)
(y1 ,y1)
(x2 ,x2)
(y2 ,y2))
(with-stack-list (list x1 y1 x2 y1 x2 y2 x1 y2)
(medium-draw-polygon* medium list t filled)))))
(if (listp position-seq)
(do ((position-seq position-seq))
((null position-seq))
(draw-one (pop position-seq) (pop position-seq)
(pop position-seq) (pop position-seq)))
(do ((i 0 (+ i 4)))
((= i len))
(draw-one (aref position-seq i)
(aref position-seq (+ 1 i))
(aref position-seq (+ 2 i))
(aref position-seq (+ 3 i))))))))
(defun draw-pattern* (medium pattern x y &key clipping-region transformation)
(check-type pattern pattern)
(let ((width (pattern-width pattern))
(height (pattern-height pattern)))
(if (or clipping-region transformation)
(with-drawing-options (medium :clipping-region clipping-region
:transformation transformation
:ink pattern)
(draw-rectangle* medium x y (+ x width) (+ y height)
:filled t))
(with-drawing-options (medium :ink pattern)
(draw-rectangle* medium x y (+ x width) (+ y height)
:filled t)))))
(defun draw-regular-polygon* (medium x1 y1 x2 y2 nsides
&rest args &key (handedness :left) (closed t) &allow-other-keys)
(declare (dynamic-extent args))
(declare (arglist medium x1 y1 x2 y2 nsides
&rest args
&key (filled t) (handedness :left) (closed t)
. #.(all-drawing-options-lambda-list :line-joint-cap)))
(let* ((theta (* (float (* pi (/ 2.0 nsides)) 0.0)
(ecase handedness
(:left +1)
(:right -1))))
(transform (make-rotation-transformation theta))
(coordinates (list x1 y1 x2 y2))
(dx (- x2 x1))
(dy (- y2 y1))
(next-x x2)
(next-y y2))
(repeat (- nsides 2)
(multiple-value-setq (dx dy)
(transform-distance transform dx dy))
(incf next-x dx)
(incf next-y dy)
(setq coordinates (nconc coordinates (list next-x next-y))))
(when closed
(setq coordinates (nconc coordinates (list x1 y1))))
(with-keywords-removed (args args '(:handedness))
(apply #'draw-polygon* medium coordinates args))))
(defun draw-regular-polygon (medium point1 point2 nsides &rest args)
(declare (dynamic-extent args))
(declare (arglist medium point1 point2 nsides
&rest args
&key (handedness :left) (closed t) (filled t)
. #.(all-drawing-options-lambda-list :line-joint-cap)))
(apply #'draw-regular-polygon* medium
(point-x point1) (point-y point1)
(point-x point2) (point-y point2)
nsides args))
(defun draw-triangle (medium p1 p2 p3 &rest args)
(declare (dynamic-extent args))
(declare (arglist medium p1 p2 p3
&rest args
&key (filled t) . #.(all-drawing-options-lambda-list :line-joint)))
(with-stack-list (points p1 p2 p3)
(apply #'draw-polygon medium points :closed t args)))
(defun draw-triangle* (medium x1 y1 x2 y2 x3 y3 &rest args)
(declare (dynamic-extent args))
(declare (arglist medium x1 y1 x2 y2 x3 y3
&rest args
&key (filled t) . #.(all-drawing-options-lambda-list :line-joint)))
(with-stack-list (points x1 y1 x2 y2 x3 y3)
(apply #'draw-polygon* medium points :closed t args)))
(define-graphics-generic draw-ellipse ((center point center-x center-y)
radius-1-dx radius-1-dy
radius-2-dx radius-2-dy
&key (start-angle 0) (end-angle 2pi)
(filled t))
:drawing-options :line-cap
:positions-to-transform (center-x center-y)
:distances-to-transform (radius-1-dx radius-1-dy radius-2-dx radius-2-dy))
(defun draw-circle (medium center radius &rest args)
(declare (dynamic-extent args))
(declare (arglist medium center radius
&rest args
&key start-angle end-angle (filled t)
. #.(all-drawing-options-lambda-list :line-cap)))
(apply #'draw-ellipse medium center radius 0 0 radius args))
(define-compiler-macro draw-circle (medium center radius &rest args)
(let ((gm (gensymbol 'medium))
(gc (gensymbol 'center))
(gr (gensymbol 'radius)))
`(let ((,gm ,medium)
(,gc ,center)
(,gr ,radius))
(draw-ellipse ,gm ,gc ,gr 0 0 ,gr ,@args))))
(defun draw-circle* (medium center-x center-y radius &rest args)
(declare (dynamic-extent args))
(declare (arglist medium center-x center-y radius
&rest args
&key start-angle end-angle (filled t)
. #.(all-drawing-options-lambda-list :line-cap)))
(apply #'draw-ellipse* medium center-x center-y radius 0 0 radius args))
(define-compiler-macro draw-circle* (medium center-x center-y radius &rest args)
(let ((gm (gensymbol 'medium))
(gx (gensymbol 'x))
(gy (gensymbol 'y))
(gr (gensymbol 'radius)))
`(let ((,gm ,medium)
(,gx ,center-x)
(,gy ,center-y)
(,gr ,radius))
(draw-ellipse* ,gm ,gx ,gy ,gr 0 0 ,gr ,@args))))
(defun draw-oval* (medium center-x center-y x-radius y-radius
&rest args &key (filled t) &allow-other-keys)
(declare (dynamic-extent args))
(declare (arglist medium center-x center-y x-radius y-radius
&rest args
. #.(all-drawing-options-lambda-list :line-cap)))
(flet ((draw-oval ()
(let ((left (- center-x x-radius))
(right (+ center-x x-radius))
(top (- center-y y-radius))
(bottom (+ center-y y-radius)))
(cond ((or (= x-radius y-radius)
(zerop x-radius))
(draw-ellipse* medium center-x center-y y-radius 0 0 y-radius
:filled filled))
((zerop y-radius)
(draw-ellipse* medium center-x center-y x-radius 0 0 x-radius
:filled filled))
((> x-radius y-radius)
(let ((rect-left (+ left y-radius))
(rect-right (- right y-radius)))
(cond (filled
(draw-rectangle* medium rect-left top rect-right bottom))
(t
(draw-line* medium rect-left top rect-right top)
(draw-line* medium rect-left bottom rect-right bottom)))
(let ((north (float (* pi 1/2) 0.0))
(south (float (* pi 3/2) 0.0)))
(draw-ellipse* medium rect-left center-y y-radius 0 0 y-radius
:start-angle north :end-angle south
:filled filled)
(draw-ellipse* medium rect-right center-y y-radius 0 0 y-radius
:start-angle south :end-angle north
:filled filled))))
(t
(let ((rect-top (+ top x-radius))
(rect-bottom (- bottom x-radius)))
(cond (filled
(draw-rectangle* medium left rect-top right rect-bottom))
(t
(draw-line* medium left rect-top left rect-bottom)
(draw-line* medium right rect-top right rect-bottom)))
(let ((east 0.0)
(west (float pi 0.0)))
(draw-ellipse* medium center-x rect-top x-radius 0 0 x-radius
:start-angle east :end-angle west
:filled filled)
(draw-ellipse* medium center-x rect-bottom x-radius 0 0 x-radius
:start-angle west :end-angle east
:filled filled))))))))
(declare (dynamic-extent #'draw-oval))
(apply #'invoke-with-drawing-options medium #'draw-oval args)))
(defun draw-oval (medium center x-radius y-radius &rest args)
(declare (dynamic-extent args))
(declare (arglist medium point x-radius y-radius
&rest args
. #.(all-drawing-options-lambda-list :line-cap)))
(apply #'draw-oval*
medium (point-x center) (point-y center) x-radius y-radius args))
(define-graphics-generic draw-text (string-or-char (point point x y)
&key (start 0) (end nil)
(align-x :left) (align-y :baseline)
towards-point transform-glyphs)
:positions-to-transform (x y towards-x towards-y)
:optional-positions-to-transform (towards-x towards-y)
:keywords-to-spread ((towards-point point towards-x towards-y))
:drawing-options :text)
(defmethod medium-clear-area ((medium basic-medium) left top right bottom)
(letf-globally (((medium-ink medium) +background-ink+)
((medium-transformation medium) +identity-transformation+))
(medium-draw-rectangle* medium left top right bottom t)))
;;; Cubic splines and curves
(define-graphics-generic draw-bezier-curve ((points point-sequence position-seq)
&key (filled nil))
:drawing-options :line-cap
:position-sequences-to-transform (position-seq))
(defmethod medium-draw-bezier-curve* ((medium basic-medium) position-seq filled)
(let* ((npoints (length position-seq))
(last (1- npoints))
(new-points (cons nil nil))
(head new-points)
(distance 1))
(assert (zerop (mod (- (/ npoints 2) 4) 3)))
(flet ((collect (x y)
(let ((more (list x y)))
(setf (cdr new-points) more
new-points (cdr more)))))
(declare (dynamic-extent #'collect))
(collect (elt position-seq 0) (elt position-seq 1))
(do ((i 0 (+ i 6)))
((= i (1- last)))
(render-bezier-curve #'collect
(elt position-seq i) (elt position-seq (+ 1 i))
(elt position-seq (+ 2 i)) (elt position-seq (+ 3 i))
(elt position-seq (+ 4 i)) (elt position-seq (+ 5 i))
(elt position-seq (+ 6 i)) (elt position-seq (+ 7 i))
distance)
(collect (elt position-seq (+ 6 i)) (elt position-seq (+ 7 i)))))
(with-identity-transformation (medium)
(medium-draw-polygon* medium (cdr head) nil filled))))
(defun render-bezier-curve (function x0 y0 x1 y1 x2 y2 x3 y3 distance)
(flet ((split-bezier-curve (x0 y0 x1 y1 x2 y2 x3 y3)
(values
;; The first 1/2
x0 y0
(+ (/ x0 2) (/ x1 2)) (+ (/ y0 2) (/ y1 2))
(+ (/ x0 4) (/ x1 2) (/ x2 4)) (+ (/ y0 4) (/ y1 2) (/ y2 4))
(+ (* x0 1/8) (* x1 3/8) (* x2 3/8) (* x3 1/8))
(+ (* y0 1/8) (* y1 3/8) (* y2 3/8) (* y3 1/8))
;; The second 1/2
(+ (* x0 1/8) (* x1 3/8) (* x2 3/8) (* x3 1/8))
(+ (* y0 1/8) (* y1 3/8) (* y2 3/8) (* y3 1/8))
(+ (/ x1 4) (/ x2 2) (/ x3 4)) (+ (/ y1 4) (/ y2 2) (/ y3 4))
(+ (/ x2 2) (/ x3 2)) (+ (/ y2 2) (/ y3 2))
x3 y3))
(distance-from-line (x0 y0 x1 y1 x y)
(let* ((dx (- x1 x0))
(dy (- y1 y0))
(r-p-x (- x x0))
(r-p-y (- y y0))
(dot-v (+ (* dx dx) (* dy dy)))
(dot-r-v (+ (* r-p-x dx) (* r-p-y dy)))
(closest-x (+ x0 (* (/ dot-r-v dot-v) dx)))
(closest-y (+ y0 (* (/ dot-r-v dot-v) dy))))
(let ((ax (- x closest-x))
(ay (- y closest-y)))
(values (+ (* ax ax) (* ay ay)) closest-x closest-y)))))
(declare (dynamic-extent #'split-bezier-curve #'distance-from-line))
(let ((d1 (distance-from-line x0 y0 x3 y3 x1 y1))
(d2 (distance-from-line x0 y0 x3 y3 x2 y2)))
(if (and (< d1 distance) (< d2 distance))
nil
(multiple-value-bind (x00 y00 x10 y10 x20 y20 x30 y30
x01 y01 x11 y11 x21 y21 x31 y31)
(split-bezier-curve x0 y0 x1 y1 x2 y2 x3 y3)
(render-bezier-curve function x00 y00 x10 y10 x20 y20 x30 y30 distance)
(funcall function x30 y30)
(render-bezier-curve function x01 y01 x11 y11 x21 y21 x31 y31 distance))))))
(define-graphics-generic draw-pixmap (pixmap (point point x y)
&key (function boole-1))
:positions-to-transform (x y)
:drawing-options :pixmap)
|
88d81caad5e2bc79291c9fcf6e86a00724e2ebca0e259e9c20087822d6708fb5 | melange-re/melange | hashtbl.ml | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
(* , projet Cristal, INRIA Rocquencourt *)
(* *)
(* Copyright 1996 Institut National de Recherche en Informatique et *)
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
(* the GNU Lesser General Public License version 2.1, with the *)
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
(* Hash tables *)
(* We do dynamic hashing, and resize the table and rehash the elements
when buckets become too long. *)
type ('a, 'b) t =
{ mutable size: int; (* number of entries *)
mutable data: ('a, 'b) bucketlist array; (* the buckets *)
seed: int; (* for randomization *)
mutable initial_size: int; (* initial array size *)
}
and ('a, 'b) bucketlist =
Empty
| Cons of { mutable key: 'a;
mutable data: 'b;
mutable next: ('a, 'b) bucketlist }
(* The sign of initial_size encodes the fact that a traversal is
ongoing or not.
This disables the efficient in place implementation of resizing.
*)
let ongoing_traversal h =
h.initial_size < 0
let flip_ongoing_traversal h =
h.initial_size <- - h.initial_size
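(* So a negative [initial_size] means a traversal is in progress: a table
   created with initial_size = 16 has it flipped to -16 while [iter], [fold]
   or [filter_map_inplace] run, and [abs initial_size] is used wherever the
   original capacity is needed (see [reset] below). *)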
(* To pick random seeds if requested *)
let randomized_default =
#if BS then false
#else
let params =
try Sys.getenv "OCAMLRUNPARAM" with Not_found ->
try Sys.getenv "CAMLRUNPARAM" with Not_found -> "" in
String.contains params 'R'
#end
let randomized = ref randomized_default
let randomize () = randomized := true
let is_randomized () = !randomized
let prng = lazy (Random.State.make_self_init())
(* Functions which appear before the functorial interface must either be
   independent of the hash function or take it as a parameter (see #2202 and
   code below the functor definitions. *)
(* Creating a fresh, empty table *)
let rec power_2_above x n =
if x >= n then x
#if BS then
else if x * 2 < x then x (* overflow *)
#else
else if x * 2 > Sys.max_array_length then x
#end
else power_2_above (x * 2) n
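(* For example, [power_2_above 16 100] evaluates to 128 (16 -> 32 -> 64 -> 128),
   so a fresh table always gets a power-of-two bucket count unless the doubling
   overflows. *)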
let create ?(random = !randomized) initial_size =
let s = power_2_above 16 initial_size in
let seed = if random then Random.State.bits (Lazy.force prng) else 0 in
{ initial_size = s; size = 0; seed = seed; data = Array.make s Empty }
let clear h =
if h.size > 0 then begin
h.size <- 0;
Array.fill h.data 0 (Array.length h.data) Empty
end
let reset h =
let len = Array.length h.data in
if len = abs h.initial_size then
clear h
else begin
h.size <- 0;
h.data <- Array.make (abs h.initial_size) Empty
end
let copy_bucketlist = function
| Empty -> Empty
| Cons {key; data; next} ->
let rec loop prec = function
| Empty -> ()
| Cons {key; data; next} ->
let r = Cons {key; data; next} in
begin match prec with
| Empty -> assert false
| Cons prec -> prec.next <- r
end;
loop r next
in
let r = Cons {key; data; next} in
loop r next;
r
let copy h = { h with data = Array.map copy_bucketlist h.data }
let length h = h.size
let insert_all_buckets indexfun inplace odata ndata =
let nsize = Array.length ndata in
let ndata_tail = Array.make nsize Empty in
let rec insert_bucket = function
| Empty -> ()
| Cons {key; data; next} as cell ->
let cell =
if inplace then cell
else Cons {key; data; next = Empty}
in
let nidx = indexfun key in
begin match ndata_tail.(nidx) with
| Empty -> ndata.(nidx) <- cell;
| Cons tail -> tail.next <- cell;
end;
ndata_tail.(nidx) <- cell;
insert_bucket next
in
for i = 0 to Array.length odata - 1 do
insert_bucket odata.(i)
done;
if inplace then
for i = 0 to nsize - 1 do
match ndata_tail.(i) with
| Empty -> ()
| Cons tail -> tail.next <- Empty
done
let resize indexfun h =
let odata = h.data in
let osize = Array.length odata in
let nsize = osize * 2 in
#if BS then
if nsize >= osize then begin
#else
if nsize < Sys.max_array_length then begin
#end
let ndata = Array.make nsize Empty in
let inplace = not (ongoing_traversal h) in
h.data <- ndata; (* so that indexfun sees the new bucket count *)
insert_all_buckets (indexfun h) inplace odata ndata
end
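(* [resize] doubles the bucket array and re-inserts every binding;
   [insert_all_buckets] appends at each new bucket's tail, so bindings that
   still collide keep their relative order across a resize. *)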
let iter f h =
let rec do_bucket = function
| Empty ->
()
| Cons{key; data; next} ->
f key data; do_bucket next in
let old_trav = ongoing_traversal h in
if not old_trav then flip_ongoing_traversal h;
try
let d = h.data in
for i = 0 to Array.length d - 1 do
do_bucket d.(i)
done;
if not old_trav then flip_ongoing_traversal h;
with exn when not old_trav ->
flip_ongoing_traversal h;
raise exn
let rec filter_map_inplace_bucket f h i prec = function
| Empty ->
begin match prec with
| Empty -> h.data.(i) <- Empty
| Cons c -> c.next <- Empty
end
| (Cons ({key; data; next} as c)) as slot ->
begin match f key data with
| None ->
h.size <- h.size - 1;
filter_map_inplace_bucket f h i prec next
| Some data ->
begin match prec with
| Empty -> h.data.(i) <- slot
| Cons c -> c.next <- slot
end;
c.data <- data;
filter_map_inplace_bucket f h i slot next
end
let filter_map_inplace f h =
let d = h.data in
let old_trav = ongoing_traversal h in
if not old_trav then flip_ongoing_traversal h;
try
for i = 0 to Array.length d - 1 do
filter_map_inplace_bucket f h i Empty h.data.(i)
done;
if not old_trav then flip_ongoing_traversal h
with exn when not old_trav ->
flip_ongoing_traversal h;
raise exn
let fold f h init =
let rec do_bucket b accu =
match b with
Empty ->
accu
| Cons{key; data; next} ->
do_bucket next (f key data accu) in
let old_trav = ongoing_traversal h in
if not old_trav then flip_ongoing_traversal h;
try
let d = h.data in
let accu = ref init in
for i = 0 to Array.length d - 1 do
accu := do_bucket d.(i) !accu
done;
if not old_trav then flip_ongoing_traversal h;
!accu
with exn when not old_trav ->
flip_ongoing_traversal h;
raise exn
type statistics = {
num_bindings: int;
num_buckets: int;
max_bucket_length: int;
bucket_histogram: int array
}
let rec bucket_length accu = function
| Empty -> accu
| Cons{next} -> bucket_length (accu + 1) next
let stats h =
let mbl =
Array.fold_left (fun m b -> Int.max m (bucket_length 0 b)) 0 h.data in
let histo = Array.make (mbl + 1) 0 in
Array.iter
(fun b ->
let l = bucket_length 0 b in
histo.(l) <- histo.(l) + 1)
h.data;
{ num_bindings = h.size;
num_buckets = Array.length h.data;
max_bucket_length = mbl;
bucket_histogram = histo }
(** {1 Iterators} *)
let to_seq tbl =
(* capture current array, so that even if the table is resized we
keep iterating on the same array *)
let tbl_data = tbl.data in
(* state: index * next bucket to traverse *)
let rec aux i buck () = match buck with
| Empty ->
if i = Array.length tbl_data
then Seq.Nil
else aux(i+1) tbl_data.(i) ()
| Cons {key; data; next} ->
Seq.Cons ((key, data), aux i next)
in
aux 0 Empty
let to_seq_keys m = Seq.map fst (to_seq m)
let to_seq_values m = Seq.map snd (to_seq m)
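(* Example: [to_seq tbl |> List.of_seq] materialises the current bindings as a
   (key * value) list.  The sequence walks the bucket array captured when
   [to_seq] was called, so bindings added after a later resize may not be
   visited. *)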
(* Functorial interface *)
module type HashedType =
sig
type t
val equal: t -> t -> bool
val hash: t -> int
end
module type SeededHashedType =
sig
type t
val equal: t -> t -> bool
val hash: int -> t -> int
end
module type S =
sig
type key
type !'a t
val create: int -> 'a t
val clear : 'a t -> unit
val reset : 'a t -> unit
val copy: 'a t -> 'a t
val add: 'a t -> key -> 'a -> unit
val remove: 'a t -> key -> unit
val find: 'a t -> key -> 'a
val find_opt: 'a t -> key -> 'a option
val find_all: 'a t -> key -> 'a list
val replace : 'a t -> key -> 'a -> unit
val mem : 'a t -> key -> bool
val iter: (key -> 'a -> unit) -> 'a t -> unit
val filter_map_inplace: (key -> 'a -> 'a option) -> 'a t -> unit
val fold: (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b
val length: 'a t -> int
val stats: 'a t -> statistics
val to_seq : 'a t -> (key * 'a) Seq.t
val to_seq_keys : _ t -> key Seq.t
val to_seq_values : 'a t -> 'a Seq.t
val add_seq : 'a t -> (key * 'a) Seq.t -> unit
val replace_seq : 'a t -> (key * 'a) Seq.t -> unit
val of_seq : (key * 'a) Seq.t -> 'a t
end
module type SeededS =
sig
type key
type !'a t
val create : ?random:bool -> int -> 'a t
val clear : 'a t -> unit
val reset : 'a t -> unit
val copy : 'a t -> 'a t
val add : 'a t -> key -> 'a -> unit
val remove : 'a t -> key -> unit
val find : 'a t -> key -> 'a
val find_opt: 'a t -> key -> 'a option
val find_all : 'a t -> key -> 'a list
val replace : 'a t -> key -> 'a -> unit
val mem : 'a t -> key -> bool
val iter : (key -> 'a -> unit) -> 'a t -> unit
val filter_map_inplace: (key -> 'a -> 'a option) -> 'a t -> unit
val fold : (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b
val length : 'a t -> int
val stats: 'a t -> statistics
val to_seq : 'a t -> (key * 'a) Seq.t
val to_seq_keys : _ t -> key Seq.t
val to_seq_values : 'a t -> 'a Seq.t
val add_seq : 'a t -> (key * 'a) Seq.t -> unit
val replace_seq : 'a t -> (key * 'a) Seq.t -> unit
val of_seq : (key * 'a) Seq.t -> 'a t
end
module MakeSeeded(H: SeededHashedType): (SeededS with type key = H.t) =
struct
type key = H.t
type 'a hashtbl = (key, 'a) t
type 'a t = 'a hashtbl
let create = create
let clear = clear
let reset = reset
let copy = copy
let key_index h key =
(H.hash h.seed key) land (Array.length h.data - 1)
let add h key data =
let i = key_index h key in
let bucket = Cons{key; data; next=h.data.(i)} in
h.data.(i) <- bucket;
h.size <- h.size + 1;
if h.size > Array.length h.data lsl 1 then resize key_index h
let rec remove_bucket h i key prec = function
| Empty ->
()
| (Cons {key=k; next}) as c ->
if H.equal k key
then begin
h.size <- h.size - 1;
match prec with
| Empty -> h.data.(i) <- next
| Cons c -> c.next <- next
end
else remove_bucket h i key c next
let remove h key =
let i = key_index h key in
remove_bucket h i key Empty h.data.(i)
let rec find_rec key = function
| Empty ->
raise Not_found
| Cons{key=k; data; next} ->
if H.equal key k then data else find_rec key next
let find h key =
match h.data.(key_index h key) with
| Empty -> raise Not_found
| Cons{key=k1; data=d1; next=next1} ->
if H.equal key k1 then d1 else
match next1 with
| Empty -> raise Not_found
| Cons{key=k2; data=d2; next=next2} ->
if H.equal key k2 then d2 else
match next2 with
| Empty -> raise Not_found
| Cons{key=k3; data=d3; next=next3} ->
if H.equal key k3 then d3 else find_rec key next3
let rec find_rec_opt key = function
| Empty ->
None
| Cons{key=k; data; next} ->
if H.equal key k then Some data else find_rec_opt key next
let find_opt h key =
match h.data.(key_index h key) with
| Empty -> None
| Cons{key=k1; data=d1; next=next1} ->
if H.equal key k1 then Some d1 else
match next1 with
| Empty -> None
| Cons{key=k2; data=d2; next=next2} ->
if H.equal key k2 then Some d2 else
match next2 with
| Empty -> None
| Cons{key=k3; data=d3; next=next3} ->
if H.equal key k3 then Some d3 else find_rec_opt key next3
let find_all h key =
let rec find_in_bucket = function
| Empty ->
[]
| Cons{key=k; data=d; next} ->
if H.equal k key
then d :: find_in_bucket next
else find_in_bucket next in
find_in_bucket h.data.(key_index h key)
let rec replace_bucket key data = function
| Empty ->
true
| Cons ({key=k; next} as slot) ->
if H.equal k key
then (slot.key <- key; slot.data <- data; false)
else replace_bucket key data next
let replace h key data =
let i = key_index h key in
let l = h.data.(i) in
if replace_bucket key data l then begin
h.data.(i) <- Cons{key; data; next=l};
h.size <- h.size + 1;
if h.size > Array.length h.data lsl 1 then resize key_index h
end
let mem h key =
let rec mem_in_bucket = function
| Empty ->
false
| Cons{key=k; next} ->
H.equal k key || mem_in_bucket next in
mem_in_bucket h.data.(key_index h key)
let add_seq tbl i =
Seq.iter (fun (k,v) -> add tbl k v) i
let replace_seq tbl i =
Seq.iter (fun (k,v) -> replace tbl k v) i
let of_seq i =
let tbl = create 16 in
replace_seq tbl i;
tbl
let iter = iter
let filter_map_inplace = filter_map_inplace
let fold = fold
let length = length
let stats = stats
let to_seq = to_seq
let to_seq_keys = to_seq_keys
let to_seq_values = to_seq_values
end
module Make(H: HashedType): (S with type key = H.t) =
struct
include MakeSeeded(struct
type t = H.t
let equal = H.equal
let hash (_seed: int) x = H.hash x
end)
let create sz = create ~random:false sz
let of_seq i =
let tbl = create 16 in
replace_seq tbl i;
tbl
end
(* Polymorphic hash function-based tables *)
(* Code included below the functorial interface to guard against accidental
   use - see #2202 *)
external seeded_hash_param :
int -> int -> int -> 'a -> int = "caml_hash" [@@noalloc]
let hash x = seeded_hash_param 10 100 0 x
let hash_param n1 n2 x = seeded_hash_param n1 n2 0 x
let seeded_hash seed x = seeded_hash_param 10 100 seed x
let key_index h key =
(seeded_hash_param 10 100 h.seed key) land (Array.length h.data - 1)
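(* The bucket index is the seeded hash masked with [Array.length h.data - 1];
   this only behaves like a modulo because capacities are powers of two, e.g.
   with 16 buckets the mask keeps the low four bits of the hash. *)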
let add h key data =
let i = key_index h key in
let bucket = Cons{key; data; next=h.data.(i)} in
h.data.(i) <- bucket;
h.size <- h.size + 1;
if h.size > Array.length h.data lsl 1 then resize key_index h
let rec remove_bucket h i key prec = function
| Empty ->
()
| (Cons {key=k; next}) as c ->
if compare k key = 0
then begin
h.size <- h.size - 1;
match prec with
| Empty -> h.data.(i) <- next
| Cons c -> c.next <- next
end
else remove_bucket h i key c next
let remove h key =
let i = key_index h key in
remove_bucket h i key Empty h.data.(i)
let rec find_rec key = function
| Empty ->
raise Not_found
| Cons{key=k; data; next} ->
if compare key k = 0 then data else find_rec key next
let find h key =
match h.data.(key_index h key) with
| Empty -> raise Not_found
| Cons{key=k1; data=d1; next=next1} ->
if compare key k1 = 0 then d1 else
match next1 with
| Empty -> raise Not_found
| Cons{key=k2; data=d2; next=next2} ->
if compare key k2 = 0 then d2 else
match next2 with
| Empty -> raise Not_found
| Cons{key=k3; data=d3; next=next3} ->
if compare key k3 = 0 then d3 else find_rec key next3
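(* [find] hand-unrolls the first three cons cells of a bucket before falling
   back to [find_rec]; buckets stay short on average, so most lookups never
   reach the recursive helper. *)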
let rec find_rec_opt key = function
| Empty ->
None
| Cons{key=k; data; next} ->
if compare key k = 0 then Some data else find_rec_opt key next
let find_opt h key =
match h.data.(key_index h key) with
| Empty -> None
| Cons{key=k1; data=d1; next=next1} ->
if compare key k1 = 0 then Some d1 else
match next1 with
| Empty -> None
| Cons{key=k2; data=d2; next=next2} ->
if compare key k2 = 0 then Some d2 else
match next2 with
| Empty -> None
| Cons{key=k3; data=d3; next=next3} ->
if compare key k3 = 0 then Some d3 else find_rec_opt key next3
let find_all h key =
let rec find_in_bucket = function
| Empty ->
[]
| Cons{key=k; data; next} ->
if compare k key = 0
then data :: find_in_bucket next
else find_in_bucket next in
find_in_bucket h.data.(key_index h key)
let rec replace_bucket key data = function
| Empty ->
true
| Cons ({key=k; next} as slot) ->
if compare k key = 0
then (slot.key <- key; slot.data <- data; false)
else replace_bucket key data next
let replace h key data =
let i = key_index h key in
let l = h.data.(i) in
if replace_bucket key data l then begin
h.data.(i) <- Cons{key; data; next=l};
h.size <- h.size + 1;
if h.size > Array.length h.data lsl 1 then resize key_index h
end
let mem h key =
let rec mem_in_bucket = function
| Empty ->
false
| Cons{key=k; next} ->
compare k key = 0 || mem_in_bucket next in
mem_in_bucket h.data.(key_index h key)
let add_seq tbl i =
Seq.iter (fun (k,v) -> add tbl k v) i
let replace_seq tbl i =
Seq.iter (fun (k,v) -> replace tbl k v) i
let of_seq i =
let tbl = create 16 in
replace_seq tbl i;
tbl
let rebuild ?(random = !randomized) h =
let s = power_2_above 16 (Array.length h.data) in
let seed =
if random then Random.State.bits (Lazy.force prng)
else h.seed
in
let h' = {
size = h.size;
data = Array.make s Empty;
seed = seed;
initial_size = h.initial_size
} in
insert_all_buckets (key_index h') false h.data h'.data;
h'
| null | https://raw.githubusercontent.com/melange-re/melange/246e6df78fe3b6cc124cb48e5a37fdffd99379ed/jscomp/stdlib-412/stdlib_modules/hashtbl.ml | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
Hash tables
We do dynamic hashing, and resize the table and rehash the elements
when buckets become too long.
number of entries
the buckets
for randomization
initial array size
The sign of initial_size encodes the fact that a traversal is
ongoing or not.
This disables the efficient in place implementation of resizing.
To pick random seeds if requested
Creating a fresh, empty table
overflow
so that indexfun sees the new bucket count
* {1 Iterators}
capture current array, so that even if the table is resized we
keep iterating on the same array
state: index * next bucket to traverse
Polymorphic hash function-based tables | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
type ('a, 'b) t =
}
and ('a, 'b) bucketlist =
Empty
| Cons of { mutable key: 'a;
mutable data: 'b;
mutable next: ('a, 'b) bucketlist }
let ongoing_traversal h =
h.initial_size < 0
let flip_ongoing_traversal h =
h.initial_size <- - h.initial_size
let randomized_default =
#if BS then false
#else
let params =
try Sys.getenv "OCAMLRUNPARAM" with Not_found ->
try Sys.getenv "CAMLRUNPARAM" with Not_found -> "" in
String.contains params 'R'
#end
let randomized = ref randomized_default
let randomize () = randomized := true
let is_randomized () = !randomized
let prng = lazy (Random.State.make_self_init())
(* Functions which appear before the functorial interface must either be
   independent of the hash function or take it as a parameter (see #2202 and
   code below the functor definitions. *)
let rec power_2_above x n =
if x >= n then x
#if BS then
#else
else if x * 2 > Sys.max_array_length then x
#end
else power_2_above (x * 2) n
let create ?(random = !randomized) initial_size =
let s = power_2_above 16 initial_size in
let seed = if random then Random.State.bits (Lazy.force prng) else 0 in
{ initial_size = s; size = 0; seed = seed; data = Array.make s Empty }
let clear h =
if h.size > 0 then begin
h.size <- 0;
Array.fill h.data 0 (Array.length h.data) Empty
end
let reset h =
let len = Array.length h.data in
if len = abs h.initial_size then
clear h
else begin
h.size <- 0;
h.data <- Array.make (abs h.initial_size) Empty
end
let copy_bucketlist = function
| Empty -> Empty
| Cons {key; data; next} ->
let rec loop prec = function
| Empty -> ()
| Cons {key; data; next} ->
let r = Cons {key; data; next} in
begin match prec with
| Empty -> assert false
| Cons prec -> prec.next <- r
end;
loop r next
in
let r = Cons {key; data; next} in
loop r next;
r
let copy h = { h with data = Array.map copy_bucketlist h.data }
let length h = h.size
let insert_all_buckets indexfun inplace odata ndata =
let nsize = Array.length ndata in
let ndata_tail = Array.make nsize Empty in
let rec insert_bucket = function
| Empty -> ()
| Cons {key; data; next} as cell ->
let cell =
if inplace then cell
else Cons {key; data; next = Empty}
in
let nidx = indexfun key in
begin match ndata_tail.(nidx) with
| Empty -> ndata.(nidx) <- cell;
| Cons tail -> tail.next <- cell;
end;
ndata_tail.(nidx) <- cell;
insert_bucket next
in
for i = 0 to Array.length odata - 1 do
insert_bucket odata.(i)
done;
if inplace then
for i = 0 to nsize - 1 do
match ndata_tail.(i) with
| Empty -> ()
| Cons tail -> tail.next <- Empty
done
let resize indexfun h =
let odata = h.data in
let osize = Array.length odata in
let nsize = osize * 2 in
#if BS then
if nsize >= osize then begin
#else
if nsize < Sys.max_array_length then begin
#end
let ndata = Array.make nsize Empty in
let inplace = not (ongoing_traversal h) in
insert_all_buckets (indexfun h) inplace odata ndata
end
let iter f h =
let rec do_bucket = function
| Empty ->
()
| Cons{key; data; next} ->
f key data; do_bucket next in
let old_trav = ongoing_traversal h in
if not old_trav then flip_ongoing_traversal h;
try
let d = h.data in
for i = 0 to Array.length d - 1 do
do_bucket d.(i)
done;
if not old_trav then flip_ongoing_traversal h;
with exn when not old_trav ->
flip_ongoing_traversal h;
raise exn
let rec filter_map_inplace_bucket f h i prec = function
| Empty ->
begin match prec with
| Empty -> h.data.(i) <- Empty
| Cons c -> c.next <- Empty
end
| (Cons ({key; data; next} as c)) as slot ->
begin match f key data with
| None ->
h.size <- h.size - 1;
filter_map_inplace_bucket f h i prec next
| Some data ->
begin match prec with
| Empty -> h.data.(i) <- slot
| Cons c -> c.next <- slot
end;
c.data <- data;
filter_map_inplace_bucket f h i slot next
end
let filter_map_inplace f h =
let d = h.data in
let old_trav = ongoing_traversal h in
if not old_trav then flip_ongoing_traversal h;
try
for i = 0 to Array.length d - 1 do
filter_map_inplace_bucket f h i Empty h.data.(i)
done;
if not old_trav then flip_ongoing_traversal h
with exn when not old_trav ->
flip_ongoing_traversal h;
raise exn
let fold f h init =
let rec do_bucket b accu =
match b with
Empty ->
accu
| Cons{key; data; next} ->
do_bucket next (f key data accu) in
let old_trav = ongoing_traversal h in
if not old_trav then flip_ongoing_traversal h;
try
let d = h.data in
let accu = ref init in
for i = 0 to Array.length d - 1 do
accu := do_bucket d.(i) !accu
done;
if not old_trav then flip_ongoing_traversal h;
!accu
with exn when not old_trav ->
flip_ongoing_traversal h;
raise exn
type statistics = {
num_bindings: int;
num_buckets: int;
max_bucket_length: int;
bucket_histogram: int array
}
let rec bucket_length accu = function
| Empty -> accu
| Cons{next} -> bucket_length (accu + 1) next
let stats h =
let mbl =
Array.fold_left (fun m b -> Int.max m (bucket_length 0 b)) 0 h.data in
let histo = Array.make (mbl + 1) 0 in
Array.iter
(fun b ->
let l = bucket_length 0 b in
histo.(l) <- histo.(l) + 1)
h.data;
{ num_bindings = h.size;
num_buckets = Array.length h.data;
max_bucket_length = mbl;
bucket_histogram = histo }
let to_seq tbl =
let tbl_data = tbl.data in
let rec aux i buck () = match buck with
| Empty ->
if i = Array.length tbl_data
then Seq.Nil
else aux(i+1) tbl_data.(i) ()
| Cons {key; data; next} ->
Seq.Cons ((key, data), aux i next)
in
aux 0 Empty
let to_seq_keys m = Seq.map fst (to_seq m)
let to_seq_values m = Seq.map snd (to_seq m)
(* Functorial interface *)
module type HashedType =
sig
type t
val equal: t -> t -> bool
val hash: t -> int
end
module type SeededHashedType =
sig
type t
val equal: t -> t -> bool
val hash: int -> t -> int
end
module type S =
sig
type key
type !'a t
val create: int -> 'a t
val clear : 'a t -> unit
val reset : 'a t -> unit
val copy: 'a t -> 'a t
val add: 'a t -> key -> 'a -> unit
val remove: 'a t -> key -> unit
val find: 'a t -> key -> 'a
val find_opt: 'a t -> key -> 'a option
val find_all: 'a t -> key -> 'a list
val replace : 'a t -> key -> 'a -> unit
val mem : 'a t -> key -> bool
val iter: (key -> 'a -> unit) -> 'a t -> unit
val filter_map_inplace: (key -> 'a -> 'a option) -> 'a t -> unit
val fold: (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b
val length: 'a t -> int
val stats: 'a t -> statistics
val to_seq : 'a t -> (key * 'a) Seq.t
val to_seq_keys : _ t -> key Seq.t
val to_seq_values : 'a t -> 'a Seq.t
val add_seq : 'a t -> (key * 'a) Seq.t -> unit
val replace_seq : 'a t -> (key * 'a) Seq.t -> unit
val of_seq : (key * 'a) Seq.t -> 'a t
end
module type SeededS =
sig
type key
type !'a t
val create : ?random:bool -> int -> 'a t
val clear : 'a t -> unit
val reset : 'a t -> unit
val copy : 'a t -> 'a t
val add : 'a t -> key -> 'a -> unit
val remove : 'a t -> key -> unit
val find : 'a t -> key -> 'a
val find_opt: 'a t -> key -> 'a option
val find_all : 'a t -> key -> 'a list
val replace : 'a t -> key -> 'a -> unit
val mem : 'a t -> key -> bool
val iter : (key -> 'a -> unit) -> 'a t -> unit
val filter_map_inplace: (key -> 'a -> 'a option) -> 'a t -> unit
val fold : (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b
val length : 'a t -> int
val stats: 'a t -> statistics
val to_seq : 'a t -> (key * 'a) Seq.t
val to_seq_keys : _ t -> key Seq.t
val to_seq_values : 'a t -> 'a Seq.t
val add_seq : 'a t -> (key * 'a) Seq.t -> unit
val replace_seq : 'a t -> (key * 'a) Seq.t -> unit
val of_seq : (key * 'a) Seq.t -> 'a t
end
module MakeSeeded(H: SeededHashedType): (SeededS with type key = H.t) =
struct
type key = H.t
type 'a hashtbl = (key, 'a) t
type 'a t = 'a hashtbl
let create = create
let clear = clear
let reset = reset
let copy = copy
let key_index h key =
(H.hash h.seed key) land (Array.length h.data - 1)
let add h key data =
let i = key_index h key in
let bucket = Cons{key; data; next=h.data.(i)} in
h.data.(i) <- bucket;
h.size <- h.size + 1;
if h.size > Array.length h.data lsl 1 then resize key_index h
let rec remove_bucket h i key prec = function
| Empty ->
()
| (Cons {key=k; next}) as c ->
if H.equal k key
then begin
h.size <- h.size - 1;
match prec with
| Empty -> h.data.(i) <- next
| Cons c -> c.next <- next
end
else remove_bucket h i key c next
let remove h key =
let i = key_index h key in
remove_bucket h i key Empty h.data.(i)
let rec find_rec key = function
| Empty ->
raise Not_found
| Cons{key=k; data; next} ->
if H.equal key k then data else find_rec key next
let find h key =
match h.data.(key_index h key) with
| Empty -> raise Not_found
| Cons{key=k1; data=d1; next=next1} ->
if H.equal key k1 then d1 else
match next1 with
| Empty -> raise Not_found
| Cons{key=k2; data=d2; next=next2} ->
if H.equal key k2 then d2 else
match next2 with
| Empty -> raise Not_found
| Cons{key=k3; data=d3; next=next3} ->
if H.equal key k3 then d3 else find_rec key next3
let rec find_rec_opt key = function
| Empty ->
None
| Cons{key=k; data; next} ->
if H.equal key k then Some data else find_rec_opt key next
let find_opt h key =
match h.data.(key_index h key) with
| Empty -> None
| Cons{key=k1; data=d1; next=next1} ->
if H.equal key k1 then Some d1 else
match next1 with
| Empty -> None
| Cons{key=k2; data=d2; next=next2} ->
if H.equal key k2 then Some d2 else
match next2 with
| Empty -> None
| Cons{key=k3; data=d3; next=next3} ->
if H.equal key k3 then Some d3 else find_rec_opt key next3
let find_all h key =
let rec find_in_bucket = function
| Empty ->
[]
| Cons{key=k; data=d; next} ->
if H.equal k key
then d :: find_in_bucket next
else find_in_bucket next in
find_in_bucket h.data.(key_index h key)
let rec replace_bucket key data = function
| Empty ->
true
| Cons ({key=k; next} as slot) ->
if H.equal k key
then (slot.key <- key; slot.data <- data; false)
else replace_bucket key data next
let replace h key data =
let i = key_index h key in
let l = h.data.(i) in
if replace_bucket key data l then begin
h.data.(i) <- Cons{key; data; next=l};
h.size <- h.size + 1;
if h.size > Array.length h.data lsl 1 then resize key_index h
end
let mem h key =
let rec mem_in_bucket = function
| Empty ->
false
| Cons{key=k; next} ->
H.equal k key || mem_in_bucket next in
mem_in_bucket h.data.(key_index h key)
let add_seq tbl i =
Seq.iter (fun (k,v) -> add tbl k v) i
let replace_seq tbl i =
Seq.iter (fun (k,v) -> replace tbl k v) i
let of_seq i =
let tbl = create 16 in
replace_seq tbl i;
tbl
let iter = iter
let filter_map_inplace = filter_map_inplace
let fold = fold
let length = length
let stats = stats
let to_seq = to_seq
let to_seq_keys = to_seq_keys
let to_seq_values = to_seq_values
end
module Make(H: HashedType): (S with type key = H.t) =
struct
include MakeSeeded(struct
type t = H.t
let equal = H.equal
let hash (_seed: int) x = H.hash x
end)
let create sz = create ~random:false sz
let of_seq i =
let tbl = create 16 in
replace_seq tbl i;
tbl
end
(* Code included below the functorial interface to guard against accidental
   use - see #2202 *)
external seeded_hash_param :
int -> int -> int -> 'a -> int = "caml_hash" [@@noalloc]
let hash x = seeded_hash_param 10 100 0 x
let hash_param n1 n2 x = seeded_hash_param n1 n2 0 x
let seeded_hash seed x = seeded_hash_param 10 100 seed x
let key_index h key =
(seeded_hash_param 10 100 h.seed key) land (Array.length h.data - 1)
let add h key data =
let i = key_index h key in
let bucket = Cons{key; data; next=h.data.(i)} in
h.data.(i) <- bucket;
h.size <- h.size + 1;
if h.size > Array.length h.data lsl 1 then resize key_index h
let rec remove_bucket h i key prec = function
| Empty ->
()
| (Cons {key=k; next}) as c ->
if compare k key = 0
then begin
h.size <- h.size - 1;
match prec with
| Empty -> h.data.(i) <- next
| Cons c -> c.next <- next
end
else remove_bucket h i key c next
let remove h key =
let i = key_index h key in
remove_bucket h i key Empty h.data.(i)
let rec find_rec key = function
| Empty ->
raise Not_found
| Cons{key=k; data; next} ->
if compare key k = 0 then data else find_rec key next
let find h key =
match h.data.(key_index h key) with
| Empty -> raise Not_found
| Cons{key=k1; data=d1; next=next1} ->
if compare key k1 = 0 then d1 else
match next1 with
| Empty -> raise Not_found
| Cons{key=k2; data=d2; next=next2} ->
if compare key k2 = 0 then d2 else
match next2 with
| Empty -> raise Not_found
| Cons{key=k3; data=d3; next=next3} ->
if compare key k3 = 0 then d3 else find_rec key next3
let rec find_rec_opt key = function
| Empty ->
None
| Cons{key=k; data; next} ->
if compare key k = 0 then Some data else find_rec_opt key next
let find_opt h key =
match h.data.(key_index h key) with
| Empty -> None
| Cons{key=k1; data=d1; next=next1} ->
if compare key k1 = 0 then Some d1 else
match next1 with
| Empty -> None
| Cons{key=k2; data=d2; next=next2} ->
if compare key k2 = 0 then Some d2 else
match next2 with
| Empty -> None
| Cons{key=k3; data=d3; next=next3} ->
if compare key k3 = 0 then Some d3 else find_rec_opt key next3
let find_all h key =
let rec find_in_bucket = function
| Empty ->
[]
| Cons{key=k; data; next} ->
if compare k key = 0
then data :: find_in_bucket next
else find_in_bucket next in
find_in_bucket h.data.(key_index h key)
let rec replace_bucket key data = function
| Empty ->
true
| Cons ({key=k; next} as slot) ->
if compare k key = 0
then (slot.key <- key; slot.data <- data; false)
else replace_bucket key data next
let replace h key data =
let i = key_index h key in
let l = h.data.(i) in
if replace_bucket key data l then begin
h.data.(i) <- Cons{key; data; next=l};
h.size <- h.size + 1;
if h.size > Array.length h.data lsl 1 then resize key_index h
end
let mem h key =
let rec mem_in_bucket = function
| Empty ->
false
| Cons{key=k; next} ->
compare k key = 0 || mem_in_bucket next in
mem_in_bucket h.data.(key_index h key)
let add_seq tbl i =
Seq.iter (fun (k,v) -> add tbl k v) i
let replace_seq tbl i =
Seq.iter (fun (k,v) -> replace tbl k v) i
let of_seq i =
let tbl = create 16 in
replace_seq tbl i;
tbl
let rebuild ?(random = !randomized) h =
let s = power_2_above 16 (Array.length h.data) in
let seed =
if random then Random.State.bits (Lazy.force prng)
else h.seed
in
let h' = {
size = h.size;
data = Array.make s Empty;
seed = seed;
initial_size = h.initial_size
} in
insert_all_buckets (key_index h') false h.data h'.data;
h'
|
0ad1e0d8004214d3dc8f4888d0824d4d7467dfeb01e89c657d9606320b7737cc | aharisu/Gauche-SDL | collide_type.scm | ;;;
;;; collide_type.scm
;;;
;;; MIT License
;;; Copyright 2011-2012 aharisu
;;; All rights reserved.
;;;
;;; Permission is hereby granted, free of charge, to any person obtaining a copy
;;; of this software and associated documentation files (the "Software"), to deal
;;; in the Software without restriction, including without limitation the rights
;;; to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
;;; copies of the Software, and to permit persons to whom the Software is
;;; furnished to do so, subject to the following conditions:
;;;
;;; The above copyright notice and this permission notice shall be included in all
;;; copies or substantial portions of the Software.
;;;
;;;
;;; THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
;;; IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
;;; FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
;;; AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
;;; LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
;;; OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
;;; SOFTWARE.
;;;
;;;
;;; aharisu
;;;
;;;
(load "cv_struct_generator")
(use file.util)
(define (main args)
(gen-type (simplify-path (path-sans-extension (car args)))
structs foreign-pointer
(lambda () ;;prologue
(cgen-extern "//sdl header")
(cgen-extern "#include<SDL/SDL.h>")
(cgen-extern "#include<SDL_collide.h>")
(cgen-extern "")
)
(lambda () ;;epilogue
))
0)
;;sym-name sym-scm-type pointer? finalize-name finalize-ref
(define structs
'(
))
;;sym-name sym-scm-type pointer? finalize finalize-ref
(define foreign-pointer
'(
(SDL_CollideMask <cld-mask> #f "SDL_CollideFreeMask" "")
))
| null | https://raw.githubusercontent.com/aharisu/Gauche-SDL/29e997dacdb7c6b89e99843f0f0c52266abfee66/src/collide/collide_type.scm | scheme |
All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
aharisu
prologue
epilogue | collide_type.scm
MIT License
Copyright 2011 - 2012 aharisu
in the Software without restriction , including without limitation the rights
copies of the Software , and to permit persons to whom the Software is
copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
(load "cv_struct_generator")
(use file.util)
(define (main args)
(gen-type (simplify-path (path-sans-extension (car args)))
structs foreign-pointer
(cgen-extern "//sdl header")
(cgen-extern "#include<SDL/SDL.h>")
(cgen-extern "#include<SDL_collide.h>")
(cgen-extern "")
)
))
0)
sym - name sym - scm - type pointer ? finalize - name finalize - ref
(define structs
'(
))
sym - name sym - scm - type pointer ? finalize finalize - ref
(define foreign-pointer
'(
(SDL_CollideMask <cld-mask> #f "SDL_CollideFreeMask" "")
))
|
7cc238d6b4f9fa80565b01544112f426cd47cc091d102168ea45042c6b82e4a4 | mfikes/fifth-postulate | ns90.cljs | (ns fifth-postulate.ns90)
(defn solve-for01 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for02 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for03 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for04 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for05 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for06 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for07 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for08 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for09 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for10 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for11 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for12 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for13 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for14 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for15 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for16 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for17 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for18 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for19 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
| null | https://raw.githubusercontent.com/mfikes/fifth-postulate/22cfd5f8c2b4a2dead1c15a96295bfeb4dba235e/src/fifth_postulate/ns90.cljs | clojure | (ns fifth-postulate.ns90)
(defn solve-for01 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for02 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for03 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for04 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for05 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for06 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for07 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for08 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for09 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for10 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for11 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for12 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for13 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for14 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for15 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for16 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for17 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for18 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for19 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
|
|
c0e70e18036b7bd695418950f1df656ef2d327b0950f4902d51cf1e6688c8661 | BinaryAnalysisPlatform/bap | bap_trace_event_types.ml | open Bap.Std
open Core_kernel[@@warning "-D"]
open Bap_knowledge
open Bap_core_theory
module KB = Knowledge
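(* The records below are the payloads carried by individual trace events:
   register/memory moves, executed code chunks, syscalls, machine exceptions,
   calls and returns, module loads, and the execution mode. *)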
module Move = struct
type 'a t = {
cell : 'a;
data : word;
} [@@deriving bin_io, compare, fields, sexp]
end
module Chunk = struct
type t = {
addr : addr;
data : string;
} [@@deriving bin_io, compare, fields, sexp]
end
module Syscall = struct
type t = {
number : int;
args : word array;
} [@@deriving bin_io, compare, fields, sexp]
end
module Exn = struct
type t = {
number : int;
src : addr option;
dst : addr option;
} [@@deriving bin_io, compare, fields, sexp]
end
module Location = struct
type t = {
name : string option;
addr : addr;
} [@@deriving bin_io, compare, fields, sexp]
end
type location = Location.t [@@deriving bin_io, compare, sexp]
module Call = struct
type t = {
caller : location;
callee : location;
args : word array;
} [@@deriving bin_io, compare, fields, sexp]
end
module Return = struct
type t = {
caller : string;
callee : string;
} [@@deriving bin_io, compare, fields, sexp]
end
module Modload = struct
type t = {
name : string;
low : addr;
high : addr;
} [@@deriving bin_io, compare, fields, sexp]
end
module Mode = struct
include KB.Enum.Make()
let slot = KB.Class.property ~package:"bap" Theory.Program.cls "mode" domain
end
type 'a move = 'a Move.t [@@deriving bin_io, compare, sexp]
type chunk = Chunk.t [@@deriving bin_io, compare, sexp]
type syscall = Syscall.t [@@deriving bin_io, compare, sexp]
type exn = Exn.t [@@deriving bin_io, compare, sexp]
type call = Call.t [@@deriving bin_io, compare, sexp]
type return = Return.t [@@deriving bin_io, compare, sexp]
type modload = Modload.t [@@deriving bin_io, compare, sexp]
type mode = Mode.t [@@deriving bin_io, compare, sexp]
| null | https://raw.githubusercontent.com/BinaryAnalysisPlatform/bap/253afc171bbfd0fe1b34f6442795dbf4b1798348/lib/bap_traces/bap_trace_event_types.ml | ocaml | open Bap.Std
open Core_kernel[@@warning "-D"]
open Bap_knowledge
open Bap_core_theory
module KB = Knowledge
module Move = struct
type 'a t = {
cell : 'a;
data : word;
} [@@deriving bin_io, compare, fields, sexp]
end
module Chunk = struct
type t = {
addr : addr;
data : string;
} [@@deriving bin_io, compare, fields, sexp]
end
module Syscall = struct
type t = {
number : int;
args : word array;
} [@@deriving bin_io, compare, fields, sexp]
end
module Exn = struct
type t = {
number : int;
src : addr option;
dst : addr option;
} [@@deriving bin_io, compare, fields, sexp]
end
module Location = struct
type t = {
name : string option;
addr : addr;
} [@@deriving bin_io, compare, fields, sexp]
end
type location = Location.t [@@deriving bin_io, compare, sexp]
module Call = struct
type t = {
caller : location;
callee : location;
args : word array;
} [@@deriving bin_io, compare, fields, sexp]
end
module Return = struct
type t = {
caller : string;
callee : string;
} [@@deriving bin_io, compare, fields, sexp]
end
module Modload = struct
type t = {
name : string;
low : addr;
high : addr;
} [@@deriving bin_io, compare, fields, sexp]
end
module Mode = struct
include KB.Enum.Make()
let slot = KB.Class.property ~package:"bap" Theory.Program.cls "mode" domain
end
type 'a move = 'a Move.t [@@deriving bin_io, compare, sexp]
type chunk = Chunk.t [@@deriving bin_io, compare, sexp]
type syscall = Syscall.t [@@deriving bin_io, compare, sexp]
type exn = Exn.t [@@deriving bin_io, compare, sexp]
type call = Call.t [@@deriving bin_io, compare, sexp]
type return = Return.t [@@deriving bin_io, compare, sexp]
type modload = Modload.t [@@deriving bin_io, compare, sexp]
type mode = Mode.t [@@deriving bin_io, compare, sexp]
|
|
c775a9b15804311d544dfcf1f1824f4116b4f07b72d01d7f298a3e7c30d3f646 | esl/MongooseIM | mod_http_upload_backend.erl | %% Just a proxy interface module between the main mod_http_upload module and
%% the backend modules (i.e. mod_http_upload_s3).
-module(mod_http_upload_backend).
-export([init/2, create_slot/7]).
-define(MAIN_MODULE, mod_http_upload).
%%--------------------------------------------------------------------
%% Callbacks
%%--------------------------------------------------------------------
-callback create_slot(UTCDateTime :: calendar:datetime(), UUID :: binary(),
Filename :: unicode:unicode_binary(), ContentType :: binary() | undefined,
Size :: pos_integer(), gen_mod:module_opts()) ->
{PUTURL :: binary(), GETURL :: binary(), Headers :: #{binary() => binary()}}.
-spec init(HostType :: mongooseim:host_type(), Opts :: gen_mod:module_opts()) -> ok.
init(HostType, Opts) ->
mongoose_backend:init(HostType, ?MAIN_MODULE, [create_slot], Opts).
-spec create_slot(HostType::mongooseim:host_type(),
UTCDateTime :: calendar:datetime(),
UUID :: binary(),
Filename :: unicode:unicode_binary(),
ContentType :: binary() | undefined,
Size :: pos_integer(),
Opts :: gen_mod:module_opts()) ->
{PUTURL :: binary(), GETURL :: binary(), Headers :: #{binary() => binary()}}.
create_slot(HostType, UTCDateTime, UUID, Filename, ContentType, Size, Opts) ->
Args = [UTCDateTime, UUID, Filename, ContentType, Size, Opts],
mongoose_backend:call_tracked(HostType, ?MAIN_MODULE, ?FUNCTION_NAME, Args).
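%% A backend module (such as mod_http_upload_s3 mentioned above) implements
%% the create_slot/6 callback. A minimal sketch of such a backend; the module
%% name and URL below are illustrative only:
%%
%%   -module(mod_http_upload_dummy).
%%   -behaviour(mod_http_upload_backend).
%%   -export([create_slot/6]).
%%
%%   create_slot(_UTCDateTime, UUID, Filename, _ContentType, _Size, _Opts) ->
%%       Base = <<"https://uploads.example.com/">>,
%%       URL = <<Base/binary, UUID/binary, "/", Filename/binary>>,
%%       {URL, URL, #{}}.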
| null | https://raw.githubusercontent.com/esl/MongooseIM/95c55e26b4544472cf726824a04f7124e35a745d/src/http_upload/mod_http_upload_backend.erl | erlang | Just a proxy interface module between the main mod_http_upload module and
the backend modules (i.e. mod_http_upload_s3).
--------------------------------------------------------------------
Callbacks
-------------------------------------------------------------------- | -module(mod_http_upload_backend).
-export([init/2, create_slot/7]).
-define(MAIN_MODULE, mod_http_upload).
-callback create_slot(UTCDateTime :: calendar:datetime(), UUID :: binary(),
Filename :: unicode:unicode_binary(), ContentType :: binary() | undefined,
Size :: pos_integer(), gen_mod:module_opts()) ->
{PUTURL :: binary(), GETURL :: binary(), Headers :: #{binary() => binary()}}.
-spec init(HostType :: mongooseim:host_type(), Opts :: gen_mod:module_opts()) -> ok.
init(HostType, Opts) ->
mongoose_backend:init(HostType, ?MAIN_MODULE, [create_slot], Opts).
-spec create_slot(HostType::mongooseim:host_type(),
UTCDateTime :: calendar:datetime(),
UUID :: binary(),
Filename :: unicode:unicode_binary(),
ContentType :: binary() | undefined,
Size :: pos_integer(),
Opts :: gen_mod:module_opts()) ->
{PUTURL :: binary(), GETURL :: binary(), Headers :: #{binary() => binary()}}.
create_slot(HostType, UTCDateTime, UUID, Filename, ContentType, Size, Opts) ->
Args = [UTCDateTime, UUID, Filename, ContentType, Size, Opts],
mongoose_backend:call_tracked(HostType, ?MAIN_MODULE, ?FUNCTION_NAME, Args).
|
b4e8e87b75d76606a725d5fbc4509fad6191867de12daab0fad3ae9c92e74839 | haskell-works/avro | ContainerSpec.hs | {-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Avro.Decode.ContainerSpec
where
import Data.Avro.Codec (Codec (..), deflateCodec, nullCodec)
import Data.ByteString.Char8 (unpack)
import Data.List (unfoldr)
import Avro.Data.Endpoint
import Avro.TestUtils
import HaskellWorks.Hspec.Hedgehog
import Hedgehog
import qualified Hedgehog.Gen as Gen
import Hedgehog.Range (Range)
import qualified Hedgehog.Range as Range
import Test.Hspec
{- HLINT ignore "Redundant do" -}
spec :: Spec
spec = do
containerSpec nullCodec
containerSpec deflateCodec
containerSpec :: Codec -> Spec
containerSpec codec = describe title $ do
it "should decode empty container" $ require $ withTests 1 $ property $ do
tripContainer []
it "should decode container with one block" $ require $ property $ do
msg <- forAll endpointGen
tripContainer [[msg]]
it "should decode container with empty blocks" $ require $ property $ do
msg <- forAll endpointGen
tripContainer [[msg], [], []]
it "should decode container with empty blocks in between" $ require $ property $ do
(msg1, msg2) <- forAll $ (,) <$> endpointGen <*> endpointGen
tripContainer [[msg1], [], [], [msg2]]
it "should decode container with multiple blocks" $ require $ property $ do
msgs <- forAll $ Gen.list (Range.linear 1 10) endpointGen
tripContainer (chunksOf 4 msgs)
where
tripContainer = roundtripContainer' codec schema'Endpoint
title =
"Avro.Decode.ContainerSpec (" ++ unpack (codecName codec) ++ ")"
chunksOf :: Int -> [a] -> [[a]]
chunksOf n = takeWhile (not.null) . unfoldr (Just . splitAt n)
| null | https://raw.githubusercontent.com/haskell-works/avro/aeea12b07a1c6fcc3708d1afe7209c5497665296/test/Avro/Decode/ContainerSpec.hs | haskell | # LANGUAGE OverloadedStrings # | # LANGUAGE ScopedTypeVariables #
module Avro.Decode.ContainerSpec
where
import Data.Avro.Codec (Codec (..), deflateCodec, nullCodec)
import Data.ByteString.Char8 (unpack)
import Data.List (unfoldr)
import Avro.Data.Endpoint
import Avro.TestUtils
import HaskellWorks.Hspec.Hedgehog
import Hedgehog
import qualified Hedgehog.Gen as Gen
import Hedgehog.Range (Range)
import qualified Hedgehog.Range as Range
import Test.Hspec
{- HLINT ignore "Redundant do" -}
spec :: Spec
spec = do
containerSpec nullCodec
containerSpec deflateCodec
containerSpec :: Codec -> Spec
containerSpec codec = describe title $ do
it "should decode empty container" $ require $ withTests 1 $ property $ do
tripContainer []
it "should decode container with one block" $ require $ property $ do
msg <- forAll endpointGen
tripContainer [[msg]]
it "should decode container with empty blocks" $ require $ property $ do
msg <- forAll endpointGen
tripContainer [[msg], [], []]
it "should decode container with empty blocks in between" $ require $ property $ do
(msg1, msg2) <- forAll $ (,) <$> endpointGen <*> endpointGen
tripContainer [[msg1], [], [], [msg2]]
it "should decode container with multiple blocks" $ require $ property $ do
msgs <- forAll $ Gen.list (Range.linear 1 10) endpointGen
tripContainer (chunksOf 4 msgs)
where
tripContainer = roundtripContainer' codec schema'Endpoint
title =
"Avro.Decode.ContainerSpec (" ++ unpack (codecName codec) ++ ")"
chunksOf :: Int -> [a] -> [[a]]
chunksOf n = takeWhile (not.null) . unfoldr (Just . splitAt n)
|
aca7ba7254d23495cb8304af0a0455b93313b1f3129a2eb544b9e389af916d86 | afronski/bferl | brainfuck_io_SUITE.erl | -module(brainfuck_io_SUITE).
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
-include("../include/interpreter_definitions.hrl").
-export([ all/0, init_per_testcase/2, end_per_testcase/2 ]).
-export([ testing_input_and_output/1, testing_hello_world/1,
testing_loop_with_input_and_output/1,
testing_loop_with_input_and_memory_modification/1,
testing_nontrivial_programs_adding_two_digits_and_displaying_result_if_it_is_a_digit/1 ]).
all() ->
[ testing_input_and_output,
testing_hello_world,
testing_loop_with_input_and_output,
testing_loop_with_input_and_memory_modification,
testing_nontrivial_programs_adding_two_digits_and_displaying_result_if_it_is_a_digit ].
init_per_testcase(_TestCase, Config) ->
{ok, Pid} = bferl_io:start_link(),
[ {bferl_io, Pid} | Config ].
end_per_testcase(_TestCase, Config) ->
Pid = proplists:get_value(bferl_io, Config),
exit(Pid, normal),
ok.
testing_input_and_output(_Context) ->
State = bferl_programming_language_logic:new([",", "+", "."]),
bferl_io:tape("A"),
StateWithIO = bferl_programming_language_logic:register_tape(State),
InputTape = bferl_io:get_input_tape(),
?assertEqual("A", InputTape),
Output = bferl_programming_language_logic:run(StateWithIO),
InputTapeAfter = bferl_io:get_input_tape(),
Tape = bferl_io:get_output_tape(),
?assertEqual([], InputTapeAfter),
?assertEqual("B", Tape),
?assertEqual(length(Output#interpreter.instructions), Output#interpreter.instructions_counter).
testing_hello_world(_Context) ->
Program = bferl_tokenizer:from_file("../../../../test/assets/hello_world.bf"),
State = bferl_programming_language_logic:new(Program),
bferl_io:tape(""),
StateWithIO = bferl_programming_language_logic:register_tape(State),
bferl_programming_language_logic:run(StateWithIO),
Tape = bferl_io:get_output_tape(),
?assertEqual("Hello World!\n", Tape).
testing_loop_with_input_and_output(_Context) ->
State = bferl_programming_language_logic:new([",", "[", ".", ",", "]"]),
bferl_io:tape("ABC"),
StateWithIO = bferl_programming_language_logic:register_tape(State),
Output = bferl_programming_language_logic:run(StateWithIO),
Tape = bferl_io:get_output_tape(),
?assertEqual("ABC", Tape),
%% Calculation: 1x ',', 3x '[.,]' and +1 for `end_of_program`.
?assertEqual(1 + 3 * 4 + 1, Output#interpreter.instructions_counter).
testing_loop_with_input_and_memory_modification(_Context) ->
State = bferl_programming_language_logic:new(["+", "+", "[", ">", ",", "+", ".", "<", "-", "]"]),
bferl_io:tape("AB"),
StateWithIO = bferl_programming_language_logic:register_tape(State),
Output = bferl_programming_language_logic:run(StateWithIO),
Tape = bferl_io:get_output_tape(),
?assertEqual("BC", Tape),
%% Calculation: 2x '+', 2x '[>,+.<-]' and +1 for `end_of_program`.
?assertEqual(2 + 2 * 8 + 1, Output#interpreter.instructions_counter).
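%% The program ",>++++++[<-------->-],[<+>-]<." reads one character, subtracts
%% 48 from it (six passes of -8), reads a second character, adds the two cells
%% and prints the sum, so the tape "45" yields 52 - 48 + 53 = 57, i.e. "9".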
testing_nontrivial_programs_adding_two_digits_and_displaying_result_if_it_is_a_digit(_Context) ->
State = bferl_programming_language_logic:new(bferl_tokenizer:from_string(",>++++++[<-------->-],[<+>-]<.")),
bferl_io:tape("45"),
StateWithIO = bferl_programming_language_logic:register_tape(State),
bferl_programming_language_logic:run(StateWithIO),
?assertEqual("9", bferl_io:get_output_tape()).
| null | https://raw.githubusercontent.com/afronski/bferl/18d3482c71cdb0e39bde090d436245a2a9531f49/test/brainfuck_io_SUITE.erl | erlang | -module(brainfuck_io_SUITE).
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
-include("../include/interpreter_definitions.hrl").
-export([ all/0, init_per_testcase/2, end_per_testcase/2 ]).
-export([ testing_input_and_output/1, testing_hello_world/1,
testing_loop_with_input_and_output/1,
testing_loop_with_input_and_memory_modification/1,
testing_nontrivial_programs_adding_two_digits_and_displaying_result_if_it_is_a_digit/1 ]).
all() ->
[ testing_input_and_output,
testing_hello_world,
testing_loop_with_input_and_output,
testing_loop_with_input_and_memory_modification,
testing_nontrivial_programs_adding_two_digits_and_displaying_result_if_it_is_a_digit ].
init_per_testcase(_TestCase, Config) ->
{ok, Pid} = bferl_io:start_link(),
[ {bferl_io, Pid} | Config ].
end_per_testcase(_TestCase, Config) ->
Pid = proplists:get_value(bferl_io, Config),
exit(Pid, normal),
ok.
testing_input_and_output(_Context) ->
State = bferl_programming_language_logic:new([",", "+", "."]),
bferl_io:tape("A"),
StateWithIO = bferl_programming_language_logic:register_tape(State),
InputTape = bferl_io:get_input_tape(),
?assertEqual("A", InputTape),
Output = bferl_programming_language_logic:run(StateWithIO),
InputTapeAfter = bferl_io:get_input_tape(),
Tape = bferl_io:get_output_tape(),
?assertEqual([], InputTapeAfter),
?assertEqual("B", Tape),
?assertEqual(length(Output#interpreter.instructions), Output#interpreter.instructions_counter).
testing_hello_world(_Context) ->
Program = bferl_tokenizer:from_file("../../../../test/assets/hello_world.bf"),
State = bferl_programming_language_logic:new(Program),
bferl_io:tape(""),
StateWithIO = bferl_programming_language_logic:register_tape(State),
bferl_programming_language_logic:run(StateWithIO),
Tape = bferl_io:get_output_tape(),
?assertEqual("Hello World!\n", Tape).
testing_loop_with_input_and_output(_Context) ->
State = bferl_programming_language_logic:new([",", "[", ".", ",", "]"]),
bferl_io:tape("ABC"),
StateWithIO = bferl_programming_language_logic:register_tape(State),
Output = bferl_programming_language_logic:run(StateWithIO),
Tape = bferl_io:get_output_tape(),
?assertEqual("ABC", Tape),
%% Calculation: 1x ',', 3x '[.,]' and +1 for `end_of_program`.
?assertEqual(1 + 3 * 4 + 1, Output#interpreter.instructions_counter).
testing_loop_with_input_and_memory_modification(_Context) ->
State = bferl_programming_language_logic:new(["+", "+", "[", ">", ",", "+", ".", "<", "-", "]"]),
bferl_io:tape("AB"),
StateWithIO = bferl_programming_language_logic:register_tape(State),
Output = bferl_programming_language_logic:run(StateWithIO),
Tape = bferl_io:get_output_tape(),
?assertEqual("BC", Tape),
%% Calculation: 2x '+', 2x '[>,+.<-]' and +1 for `end_of_program`.
?assertEqual(2 + 2 * 8 + 1, Output#interpreter.instructions_counter).
testing_nontrivial_programs_adding_two_digits_and_displaying_result_if_it_is_a_digit(_Context) ->
State = bferl_programming_language_logic:new(bferl_tokenizer:from_string(",>++++++[<-------->-],[<+>-]<.")),
bferl_io:tape("45"),
StateWithIO = bferl_programming_language_logic:register_tape(State),
bferl_programming_language_logic:run(StateWithIO),
?assertEqual("9", bferl_io:get_output_tape()).
|
|
60e541e956b9e3295fe4a4d1bfc0d9466fd3a10d0c2067775c16b7244a20aef9 | ucsd-progsys/liquidhaskell | BadPragma2.hs | {-@ LIQUID "--expect-error-containing=Illegal pragma" @-}
{-@ LIQUID "--ghc-option=-O0" @-}
module BadPragma2 where
i :: Int
i = 1
| null | https://raw.githubusercontent.com/ucsd-progsys/liquidhaskell/f46dbafd6ce1f61af5b56f31924c21639c982a8a/tests/errors/BadPragma2.hs | haskell | @ LIQUID "--expect-error-containing=Illegal pragma" @
@ LIQUID "--ghc-option=-O0" @ |
module BadPragma2 where
i :: Int
i = 1
|
f191db955447dc2043dda0e73f2c3b55e459fa7a8d30abae98f045978dc39d21 | shuieryin/wechat_mud | login.erl | %%%-------------------------------------------------------------------
%%% @author Shuieryin
%%% (C) 2015, Shuieryin
%%% @doc
%%%
%%% Login module
%%%
%%% @end
%%% Created : 26. Aug 2015 11:01 AM
%%%-------------------------------------------------------------------
-module(login).
-author("Shuieryin").
%% API
-export([
exec/3
]).
%%%===================================================================
%%% API
%%%===================================================================
%%--------------------------------------------------------------------
%% @doc
%% Log user in by creating player_statem and enters user's last logout scene.
%%
%% This function returns "ok" immediately and the scene info will
%% be sent to the user from player_statem by sending responses to the
%% DispatcherPid process.
%%
%% @end
%%--------------------------------------------------------------------
-spec exec(DispatcherPid, Uid, RawInput) -> ok when
Uid :: player_statem:uid(),
RawInput :: binary(),
DispatcherPid :: pid().
exec(DispatcherPid, Uid, _RawInput) ->
login_server:login(DispatcherPid, Uid).
%%%===================================================================
%%% Internal functions (N/A)
%%%=================================================================== | null | https://raw.githubusercontent.com/shuieryin/wechat_mud/b2a9251a9b208fee5cd8c4213759750b95c8b8aa/src/commands/login.erl | erlang | -------------------------------------------------------------------
@author Shuieryin
@doc
Login module
@end
-------------------------------------------------------------------
API
===================================================================
API
===================================================================
--------------------------------------------------------------------
@doc
This function returns "ok" immeidately and the scene info will
DispatcherPid process.
@end
--------------------------------------------------------------------
===================================================================
Internal functions (N/A)
=================================================================== | ( C ) 2015 , Shuieryin
%%% Created : 26. Aug 2015 11:01 AM
-module(login).
-author("Shuieryin").
-export([
exec/3
]).
%% Log user in by creating player_statem and enters user's last logout scene.
%% be respond to user from player_statem by sending responses to
-spec exec(DispatcherPid, Uid, RawInput) -> ok when
Uid :: player_statem:uid(),
RawInput :: binary(),
DispatcherPid :: pid().
exec(DispatcherPid, Uid, _RawInput) ->
login_server:login(DispatcherPid, Uid).
|
95cf295253b6c577cd2f50733ad419e1ccf87f38d956db69554e01c4074115a2 | Rober-t/apxr_run | population_mgr_sup.erl | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
Copyright ( C ) 2018 ApproximateReality
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%----------------------------------------------------------------------------
@doc PopulationMgr top supervisor .
%%% @end
%%%----------------------------------------------------------------------------
-module(population_mgr_sup).
-behaviour(supervisor).
%% Start/Stop
-export([
start_link/0
]).
%% API
-export([
start_population_mgr/0,
restart_population_mgr/0
]).
%% Supervisor callbacks
-export([
init/1
]).
%% Xref
-ignore_xref([
start_link/0
]).
%%%============================================================================
%%% Type
%%%============================================================================
-type sup_flags() :: #{
intensity => non_neg_integer(),
period => pos_integer(),
strategy => one_for_all | one_for_one | rest_for_one | simple_one_for_one
}.
-type child_spec() :: [#{
id := _,
start := {atom(), atom(), undefined | [any()]},
modules => dynamic | [atom()],
restart => permanent | temporary | transient,
shutdown => brutal_kill | infinity | non_neg_integer(),
type => supervisor | worker
}].
-export_type([
sup_flags/0,
child_spec/0
]).
%%%============================================================================
%%% API
%%%============================================================================
%%-----------------------------------------------------------------------------
%% @doc Starts the supervisor.
%% @end
%%-----------------------------------------------------------------------------
-spec start_link() -> {ok, pid()}.
start_link() ->
supervisor:start_link({local, ?MODULE}, ?MODULE, []).
%%-----------------------------------------------------------------------------
%% @doc Spawns populaton_monitor.
%% @end
%%-----------------------------------------------------------------------------
-spec start_population_mgr() -> {ok, pid()}.
start_population_mgr() ->
PopulationMgr = #{
id => population_mgr_worker,
start => {population_mgr_worker, start_link, []},
restart => transient,
shutdown => 30000,
type => worker,
modules => [population_mgr_worker]
},
{ok, _Pid} = supervisor:start_child(?MODULE, PopulationMgr).
%%-----------------------------------------------------------------------------
%% @doc Restarts populaton_monitor.
%% @end
%%-----------------------------------------------------------------------------
-spec restart_population_mgr() -> {ok, pid()}.
restart_population_mgr() ->
{ok, _Pid} = supervisor:restart_child(?MODULE, population_mgr_worker).
%%%============================================================================
%%% Supervisor callbacks
%%%============================================================================
%%-----------------------------------------------------------------------------
%% @private
%% @doc Whenever a supervisor is started using supervisor:start_link,
%% this function is called by the new process to find out about restart
%% strategy, maximum restart frequency and child specifications. We also
%% make the supervisor the owner of the DB to improve fault tolerance.
%% @end
%%-----------------------------------------------------------------------------
-spec init([]) -> {ok, {sup_flags(), child_spec() | []}}.
init([]) ->
ets:new(population_status, [set, public, named_table,
{write_concurrency, true}, {read_concurrency, true}]),
ets:new(evaluations, [set, public, named_table,
{write_concurrency, true}, {read_concurrency, true}]),
ets:new(active_agents, [set, public, named_table,
{write_concurrency, true}, {read_concurrency, true}]),
ets:new(inactive_agents, [set, public, named_table,
{write_concurrency, true}, {read_concurrency, true}]),
SupFlags = #{
strategy => rest_for_one,
intensity => 4,
period => 20
},
ChildSpecs = [],
{ok, {SupFlags, ChildSpecs}}.
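%% No children are declared statically: population_mgr_worker is attached at
%% runtime via start_population_mgr/0, while this supervisor keeps ownership
%% of the ETS tables created above.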
%%%============================================================================
%%% Internal functions
%%%============================================================================ | null | https://raw.githubusercontent.com/Rober-t/apxr_run/9c62ab028af7ff3768ffe3f27b8eef1799540f05/src/population_mgr/population_mgr_sup.erl | erlang |
----------------------------------------------------------------------------
@end
----------------------------------------------------------------------------
API
Supervisor callbacks
============================================================================
Type
============================================================================
============================================================================
API
============================================================================
-----------------------------------------------------------------------------
@doc Starts the supervisor.
@end
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
@doc Spawns populaton_monitor.
@end
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
@doc Restarts populaton_monitor.
@end
-----------------------------------------------------------------------------
============================================================================
Supervisor callbacks
============================================================================
-----------------------------------------------------------------------------
this function is called by the new process to find out about restart
strategy, maximum restart frequency and child specifications. We also
make the supervisor the owner of the DB to improve fault tolerance.
@end
-----------------------------------------------------------------------------
============================================================================
============================================================================ | Copyright ( C ) 2018 ApproximateReality
%%% @doc PopulationMgr top supervisor.
-module(population_mgr_sup).
-behaviour(supervisor).
%% Start/Stop
-export([
start_link/0
]).
-export([
start_population_mgr/0,
restart_population_mgr/0
]).
-export([
init/1
]).
%% Xref
-ignore_xref([
start_link/0
]).
-type sup_flags() :: #{
intensity => non_neg_integer(),
period => pos_integer(),
strategy => one_for_all | one_for_one | rest_for_one | simple_one_for_one
}.
-type child_spec() :: [#{
id := _,
start := {atom(), atom(), undefined | [any()]},
modules => dynamic | [atom()],
restart => permanent | temporary | transient,
shutdown => brutal_kill | infinity | non_neg_integer(),
type => supervisor | worker
}].
-export_type([
sup_flags/0,
child_spec/0
]).
-spec start_link() -> {ok, pid()}.
start_link() ->
supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-spec start_population_mgr() -> {ok, pid()}.
start_population_mgr() ->
PopulationMgr = #{
id => population_mgr_worker,
start => {population_mgr_worker, start_link, []},
restart => transient,
shutdown => 30000,
type => worker,
modules => [population_mgr_worker]
},
{ok, _Pid} = supervisor:start_child(?MODULE, PopulationMgr).
-spec restart_population_mgr() -> {ok, pid()}.
restart_population_mgr() ->
{ok, _Pid} = supervisor:restart_child(?MODULE, population_mgr_worker).
%% @private
%% @doc Whenever a supervisor is started using supervisor:start_link,
-spec init([]) -> {ok, {sup_flags(), child_spec() | []}}.
init([]) ->
ets:new(population_status, [set, public, named_table,
{write_concurrency, true}, {read_concurrency, true}]),
ets:new(evaluations, [set, public, named_table,
{write_concurrency, true}, {read_concurrency, true}]),
ets:new(active_agents, [set, public, named_table,
{write_concurrency, true}, {read_concurrency, true}]),
ets:new(inactive_agents, [set, public, named_table,
{write_concurrency, true}, {read_concurrency, true}]),
SupFlags = #{
strategy => rest_for_one,
intensity => 4,
period => 20
},
ChildSpecs = [],
{ok, {SupFlags, ChildSpecs}}.
%%% Internal functions
248ce864e85c1e1d6a9928c6ac0b6e08b162cba13c08fff08109f92efd709853 | facebook/flow | object_parser.ml |
* Copyright ( c ) Meta Platforms , Inc. and affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
module Ast = Flow_ast
open Token
open Parser_env
open Flow_ast
module SMap = Flow_map.Make (String)
open Parser_common
open Comment_attachment
(* A module for parsing various object related things, like object literals
* and classes *)
module type OBJECT = sig
val key : ?class_body:bool -> env -> Loc.t * (Loc.t, Loc.t) Ast.Expression.Object.Property.key
val _initializer : env -> Loc.t * (Loc.t, Loc.t) Ast.Expression.Object.t * pattern_errors
val class_declaration :
env -> (Loc.t, Loc.t) Ast.Class.Decorator.t list -> (Loc.t, Loc.t) Ast.Statement.t
val class_expression : env -> (Loc.t, Loc.t) Ast.Expression.t
val class_implements : env -> attach_leading:bool -> (Loc.t, Loc.t) Ast.Class.Implements.t
val decorator_list : env -> (Loc.t, Loc.t) Ast.Class.Decorator.t list
end
module Object
(Parse : Parser_common.PARSER)
(Type : Type_parser.TYPE)
(Declaration : Declaration_parser.DECLARATION)
(Expression : Expression_parser.EXPRESSION)
(Pattern_cover : Pattern_cover.COVER) : OBJECT = struct
let decorator_list =
let expression env =
let expression = Expression.left_hand_side env in
let { remove_trailing; _ } =
if Peek.is_line_terminator env then
trailing_and_remover_after_last_line env
else
trailing_and_remover_after_last_loc env
in
remove_trailing expression (fun remover expression -> remover#expression expression)
in
let decorator env =
let leading = Peek.comments env in
Eat.token env;
{
Ast.Class.Decorator.expression = expression env;
comments = Flow_ast_utils.mk_comments_opt ~leading ();
}
in
let rec decorator_list_helper env decorators =
match Peek.token env with
| T_AT -> decorator_list_helper env (with_loc decorator env :: decorators)
| _ -> decorators
in
fun env ->
if (parse_options env).esproposal_decorators then
List.rev (decorator_list_helper env [])
else
[]
let key ?(class_body = false) env =
let open Ast.Expression.Object.Property in
let leading = Peek.comments env in
let tkn = Peek.token env in
match tkn with
| T_STRING (loc, value, raw, octal) ->
if octal then strict_error env Parse_error.StrictOctalLiteral;
Expect.token env (T_STRING (loc, value, raw, octal));
let value = Literal.String value in
let trailing = Eat.trailing_comments env in
( loc,
Literal
( loc,
{ Literal.value; raw; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () }
)
)
| T_NUMBER { kind; raw } ->
let loc = Peek.loc env in
let value = Expression.number env kind raw in
let value = Literal.Number value in
let trailing = Eat.trailing_comments env in
( loc,
Literal
( loc,
{ Literal.value; raw; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () }
)
)
| T_BIGINT { kind; raw } ->
let loc = Peek.loc env in
let value = Expression.bigint env kind raw in
let value = Literal.BigInt value in
let trailing = Eat.trailing_comments env in
( loc,
Literal
( loc,
{ Literal.value; raw; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () }
)
)
| T_LBRACKET ->
let (loc, key) =
with_loc
(fun env ->
let leading = Peek.comments env in
Expect.token env T_LBRACKET;
let expr = Parse.assignment (env |> with_no_in false) in
Expect.token env T_RBRACKET;
let trailing = Eat.trailing_comments env in
{
ComputedKey.expression = expr;
comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing ();
})
env
in
(loc, Ast.Expression.Object.Property.Computed (loc, key))
| T_POUND when class_body ->
let ((loc, { PrivateName.name; _ }) as id) = private_identifier env in
add_declared_private env name;
(loc, PrivateName id)
| T_POUND ->
let (loc, id) =
with_loc
(fun env ->
Eat.token env;
Identifier (identifier_name env))
env
in
error_at env (loc, Parse_error.PrivateNotInClass);
(loc, id)
| _ ->
let ((loc, _) as id) = identifier_name env in
(loc, Identifier id)
let getter_or_setter env ~in_class_body is_getter =
(* this is a getter or setter, it cannot be async *)
let async = false in
let (generator, leading) = Declaration.generator env in
let (key_loc, key) = key ~class_body:in_class_body env in
let key = object_key_remove_trailing env key in
let value =
with_loc
(fun env ->
(* #sec-function-definitions-static-semantics-early-errors *)
let env = env |> with_allow_super Super_prop in
let (sig_loc, (tparams, params, return)) =
with_loc
(fun env ->
(* It's not clear how type params on getters & setters would make sense
* in Flow's type system. Since this is a Flow syntax extension, we might
* as well disallow it until we need it *)
let tparams = None in
let params =
let params = Declaration.function_params ~await:false ~yield:false env in
if Peek.token env = T_COLON then
params
else
function_params_remove_trailing env params
in
begin
match (is_getter, params) with
| (true, (_, { Ast.Function.Params.this_ = Some _; _ })) ->
error_at env (key_loc, Parse_error.GetterMayNotHaveThisParam)
| (false, (_, { Ast.Function.Params.this_ = Some _; _ })) ->
error_at env (key_loc, Parse_error.SetterMayNotHaveThisParam)
| ( true,
( _,
{ Ast.Function.Params.params = []; rest = None; this_ = None; comments = _ }
)
) ->
()
| (false, (_, { Ast.Function.Params.rest = Some _; _ })) ->
(* rest params don't make sense on a setter *)
error_at env (key_loc, Parse_error.SetterArity)
| ( false,
( _,
{
Ast.Function.Params.params = [_];
rest = None;
this_ = None;
comments = _;
}
)
) ->
()
| (true, _) -> error_at env (key_loc, Parse_error.GetterArity)
| (false, _) -> error_at env (key_loc, Parse_error.SetterArity)
end;
let return = type_annotation_hint_remove_trailing env (Type.annotation_opt env) in
(tparams, params, return))
env
in
let simple_params = is_simple_parameter_list params in
let (body, contains_use_strict) =
Declaration.function_body env ~async ~generator ~expression:false ~simple_params
in
Declaration.strict_post_check env ~contains_use_strict None params;
{
Function.id = None;
params;
body;
generator;
async;
predicate = None;
(* setters/getter are not predicates *)
return;
tparams;
sig_loc;
comments = Flow_ast_utils.mk_comments_opt ~leading ();
})
env
in
(key, value)
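(* Object literals are parsed with a cover grammar: the same tokens may end up
   being an expression ({x: 1}) or a destructuring pattern ({x = 1} = ...), so
   parsing accumulates would-be errors for both readings (if_expr / if_patt)
   and the caller later decides which set applies. *)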
let _initializer =
let parse_assignment_cover env =
match Expression.assignment_cover env with
| Cover_expr expr -> (expr, Pattern_cover.empty_errors)
| Cover_patt (expr, errs) -> (expr, errs)
in
let get env start_loc leading =
let (loc, (key, value)) =
with_loc ~start_loc (fun env -> getter_or_setter env ~in_class_body:false true) env
in
let open Ast.Expression.Object in
Property
(loc, Property.Get { key; value; comments = Flow_ast_utils.mk_comments_opt ~leading () })
in
let set env start_loc leading =
let (loc, (key, value)) =
with_loc ~start_loc (fun env -> getter_or_setter env ~in_class_body:false false) env
in
let open Ast.Expression.Object in
Property
(loc, Property.Set { key; value; comments = Flow_ast_utils.mk_comments_opt ~leading () })
in
(* #prod-PropertyDefinition *)
let init =
let open Ast.Expression.Object.Property in
(* #prod-IdentifierReference *)
let parse_shorthand env key =
match key with
| Literal (loc, lit) ->
error_at env (loc, Parse_error.LiteralShorthandProperty);
(loc, Ast.Expression.Literal lit)
| Identifier ((loc, { Identifier.name; comments = _ }) as id) ->
(* #sec-identifiers-static-semantics-early-errors *)
if is_reserved name then
(* it is a syntax error if `name` is a reserved word other than await or yield *)
error_at env (loc, Parse_error.UnexpectedReserved)
else if is_strict_reserved name then
(* it is a syntax error if `name` is a strict reserved word, in strict mode *)
strict_error_at env (loc, Parse_error.StrictReservedWord);
(loc, Ast.Expression.Identifier id)
| PrivateName _ -> failwith "Internal Error: private name found in object props"
| Computed (_, { ComputedKey.expression = expr; comments = _ }) ->
error_at env (fst expr, Parse_error.ComputedShorthandProperty);
expr
in
(* #prod-MethodDefinition *)
let parse_method ~async ~generator ~leading =
with_loc (fun env ->
(* #sec-function-definitions-static-semantics-early-errors *)
let env = env |> with_allow_super Super_prop in
let (sig_loc, (tparams, params, return)) =
with_loc
(fun env ->
let tparams = type_params_remove_trailing env (Type.type_params env) in
let params =
let params = Declaration.function_params ~await:async ~yield:generator env in
if Peek.token env = T_COLON then
params
else
function_params_remove_trailing env params
in
let return = type_annotation_hint_remove_trailing env (Type.annotation_opt env) in
(tparams, params, return))
env
in
let simple_params = is_simple_parameter_list params in
let (body, contains_use_strict) =
Declaration.function_body env ~async ~generator ~expression:false ~simple_params
in
Declaration.strict_post_check env ~contains_use_strict None params;
{
Function.id = None;
params;
body;
generator;
async;
(* TODO: add support for object method predicates *)
predicate = None;
return;
tparams;
sig_loc;
comments = Flow_ast_utils.mk_comments_opt ~leading ();
}
)
in
(* PropertyName `:` AssignmentExpression *)
let parse_value env =
Expect.token env T_COLON;
parse_assignment_cover env
in
(* #prod-CoverInitializedName *)
let parse_assignment_pattern ~key env =
let open Ast.Expression.Object in
match key with
| Property.Identifier id ->
let assignment_loc = Peek.loc env in
let ast =
with_loc
~start_loc:(fst id)
(fun env ->
let leading = Peek.comments env in
Expect.token env T_ASSIGN;
let trailing = Eat.trailing_comments env in
let left = Parse.pattern_from_expr env (fst id, Ast.Expression.Identifier id) in
let right = Parse.assignment env in
let comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () in
Ast.Expression.Assignment
{ Ast.Expression.Assignment.operator = None; left; right; comments })
env
in
let errs =
{
if_expr = [(assignment_loc, Parse_error.Unexpected (Token.quote_token_value "="))];
if_patt = [];
}
in
(ast, errs)
| Property.Literal _
| Property.PrivateName _
| Property.Computed _ ->
parse_value env
in
let parse_init ~key ~async ~generator ~leading env =
if async || generator then
let key = object_key_remove_trailing env key in
(* the `async` and `*` modifiers are only valid on methods *)
let value = parse_method env ~async ~generator ~leading in
let prop = Method { key; value } in
(prop, Pattern_cover.empty_errors)
else
match Peek.token env with
| T_RCURLY
| T_COMMA ->
let value = parse_shorthand env key in
let prop = Init { key; value; shorthand = true } in
(prop, Pattern_cover.empty_errors)
| T_LESS_THAN
| T_LPAREN ->
let key = object_key_remove_trailing env key in
let value = parse_method env ~async ~generator ~leading in
let prop = Method { key; value } in
(prop, Pattern_cover.empty_errors)
| T_ASSIGN ->
let (value, errs) = parse_assignment_pattern ~key env in
let prop = Init { key; value; shorthand = true } in
(prop, errs)
| T_COLON ->
let (value, errs) = parse_value env in
let prop = Init { key; value; shorthand = false } in
(prop, errs)
| _ ->
(* error. we recover by treating it as a shorthand property so as to not
consume any more tokens and make the error worse. we don't error here
because we'll expect a comma before the next token. *)
let value = parse_shorthand env key in
let prop = Init { key; value; shorthand = true } in
(prop, Pattern_cover.empty_errors)
in
fun env start_loc key async generator leading ->
let (loc, (prop, errs)) =
with_loc ~start_loc (parse_init ~key ~async ~generator ~leading) env
in
(Ast.Expression.Object.Property (loc, prop), errs)
in
let property env =
let open Ast.Expression.Object in
if Peek.token env = T_ELLIPSIS then
(* Spread property *)
let leading = Peek.comments env in
let (loc, (argument, errs)) =
with_loc
(fun env ->
Expect.token env T_ELLIPSIS;
parse_assignment_cover env)
env
in
( SpreadProperty
(loc, { SpreadProperty.argument; comments = Flow_ast_utils.mk_comments_opt ~leading () }),
errs
)
else
let start_loc = Peek.loc env in
let (async, leading_async) =
match Peek.ith_token ~i:1 env with
| T_ASSIGN
(* { async = true } (destructuring) *)
| T_COLON
(* { async: true } *)
| T_LESS_THAN
(* { async<T>() {} } *)
| T_LPAREN
(* { async() {} } *)
| T_COMMA
(* { async, other, shorthand } *)
| T_RCURLY (* { async } *) ->
(false, [])
| _ -> Declaration.async env
in
let (generator, leading_generator) = Declaration.generator env in
let leading = leading_async @ leading_generator in
match (async, generator, Peek.token env) with
| (false, false, T_IDENTIFIER { raw = "get"; _ }) ->
let leading = Peek.comments env in
let (_, key) = key env in
begin
match Peek.token env with
| T_ASSIGN
| T_COLON
| T_LESS_THAN
| T_LPAREN
| T_COMMA
| T_RCURLY ->
init env start_loc key false false []
| _ ->
ignore (Comment_attachment.object_key_remove_trailing env key);
(get env start_loc leading, Pattern_cover.empty_errors)
end
| (false, false, T_IDENTIFIER { raw = "set"; _ }) ->
let leading = Peek.comments env in
let (_, key) = key env in
begin
match Peek.token env with
| T_ASSIGN
| T_COLON
| T_LESS_THAN
| T_LPAREN
| T_COMMA
| T_RCURLY ->
init env start_loc key false false []
| _ ->
ignore (Comment_attachment.object_key_remove_trailing env key);
(set env start_loc leading, Pattern_cover.empty_errors)
end
| (async, generator, _) ->
let (_, key) = key env in
init env start_loc key async generator leading
in
let rec properties env ~rest_trailing_comma (props, errs) =
match Peek.token env with
| T_EOF
| T_RCURLY ->
let errs =
match rest_trailing_comma with
| Some loc ->
{ errs with if_patt = (loc, Parse_error.TrailingCommaAfterRestElement) :: errs.if_patt }
| None -> errs
in
(List.rev props, Pattern_cover.rev_errors errs)
| _ ->
let (prop, new_errs) = property env in
let rest_trailing_comma =
match prop with
| Ast.Expression.Object.SpreadProperty _ when Peek.token env = T_COMMA ->
Some (Peek.loc env)
| _ -> None
in
let errs = Pattern_cover.rev_append_errors new_errs errs in
let errs =
match Peek.token env with
| T_RCURLY
| T_EOF ->
errs
| T_COMMA ->
Eat.token env;
errs
| _ ->
(* we could use [Expect.error env T_COMMA], but we're in a weird
cover grammar situation where we're storing errors in
[Pattern_cover]. if we used [Expect.error], the errors would
end up out of order. *)
let err = Expect.get_error env T_COMMA in
(* if the unexpected token is a semicolon, consume it to aid
recovery. using a semicolon instead of a comma is a common
mistake. *)
let _ = Eat.maybe env T_SEMICOLON in
Pattern_cover.cons_error err errs
in
properties env ~rest_trailing_comma (prop :: props, errs)
in
fun env ->
let (loc, (expr, errs)) =
with_loc
(fun env ->
let leading = Peek.comments env in
Expect.token env T_LCURLY;
let (props, errs) =
properties env ~rest_trailing_comma:None ([], Pattern_cover.empty_errors)
in
let internal = Peek.comments env in
Expect.token env T_RCURLY;
let trailing = Eat.trailing_comments env in
( {
Ast.Expression.Object.properties = props;
comments =
Flow_ast_utils.mk_comments_with_internal_opt ~leading ~trailing ~internal ();
},
errs
))
env
in
(loc, expr, errs)
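  (* A class field may not be named "constructor", and a static field may not be named "prototype". *)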
let check_property_name env loc name static =
if String.equal name "constructor" || (String.equal name "prototype" && static) then
error_at
env
(loc, Parse_error.InvalidClassMemberName { name; static; method_ = false; private_ = false })
let check_private_names
env seen_names private_name (kind : [ `Method | `Field | `Getter | `Setter ]) =
let (loc, { PrivateName.name; comments = _ }) = private_name in
if String.equal name "constructor" then
let () =
error_at
env
( loc,
Parse_error.InvalidClassMemberName
{ name; static = false; method_ = kind = `Method; private_ = true }
)
in
seen_names
else
match SMap.find_opt name seen_names with
| Some seen ->
begin
match (kind, seen) with
| (`Getter, `Setter)
| (`Setter, `Getter) ->
              (* one getter and one setter are allowed as long as it's not used as a field *)
()
| _ -> error_at env (loc, Parse_error.DuplicatePrivateFields name)
end;
SMap.add name `Field seen_names
| None -> SMap.add name kind seen_names
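  (* Parse a class's "implements" clause: a comma-separated list of interface identifiers, each with optional type arguments. *)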
let class_implements env ~attach_leading =
let rec interfaces env acc =
let interface =
with_loc
(fun env ->
let id =
let id = Type.type_identifier env in
if Peek.token env <> T_LESS_THAN then
id
else
let { remove_trailing; _ } = trailing_and_remover env in
remove_trailing id (fun remover id -> remover#identifier id)
in
let targs = Type.type_args env in
{ Ast.Class.Implements.Interface.id; targs })
env
in
let acc = interface :: acc in
match Peek.token env with
| T_COMMA ->
Expect.token env T_COMMA;
interfaces env acc
| _ -> List.rev acc
in
with_loc
(fun env ->
let leading =
if attach_leading then
Peek.comments env
else
[]
in
Expect.token env T_IMPLEMENTS;
let interfaces = interfaces env [] in
{ Ast.Class.Implements.interfaces; comments = Flow_ast_utils.mk_comments_opt ~leading () })
env
let class_extends ~leading =
with_loc (fun env ->
let expr =
let expr = Expression.left_hand_side (env |> with_allow_yield false) in
if Peek.token env <> T_LESS_THAN then
expr
else
let { remove_trailing; _ } = trailing_and_remover env in
remove_trailing expr (fun remover expr -> remover#expression expr)
in
let targs = Type.type_args env in
{ Class.Extends.expr; targs; comments = Flow_ast_utils.mk_comments_opt ~leading () }
)
(* /#prod-ClassHeritage *)
let class_heritage env =
let extends =
let leading = Peek.comments env in
if Eat.maybe env T_EXTENDS then
let (loc, extends) = class_extends ~leading env in
let { remove_trailing; _ } = trailing_and_remover env in
Some
(loc, remove_trailing extends (fun remover extends -> remover#class_extends loc extends))
else
None
in
let implements =
if Peek.token env = T_IMPLEMENTS then (
if not (should_parse_types env) then error env Parse_error.UnexpectedTypeInterface;
Some (class_implements_remove_trailing env (class_implements env ~attach_leading:true))
) else
None
in
(extends, implements)
(* In the ES6 draft, all elements are methods. No properties (though there
* are getter and setters allowed *)
let class_element =
let get env start_loc decorators static leading =
let (loc, (key, value)) =
with_loc ~start_loc (fun env -> getter_or_setter env ~in_class_body:true true) env
in
let open Ast.Class in
Body.Method
( loc,
{
Method.key;
value;
kind = Method.Get;
static;
decorators;
comments = Flow_ast_utils.mk_comments_opt ~leading ();
}
)
in
let set env start_loc decorators static leading =
let (loc, (key, value)) =
with_loc ~start_loc (fun env -> getter_or_setter env ~in_class_body:true false) env
in
let open Ast.Class in
Body.Method
( loc,
{
Method.key;
value;
kind = Method.Set;
static;
decorators;
comments = Flow_ast_utils.mk_comments_opt ~leading ();
}
)
in
let error_unsupported_variance env = function
| Some (loc, _) -> error_at env (loc, Parse_error.UnexpectedVariance)
| None -> ()
(* Class property with annotation *)
in
let error_unsupported_declare env = function
| Some loc -> error_at env (loc, Parse_error.DeclareClassElement)
| None -> ()
in
let property_end_and_semicolon env key annot value =
match Peek.token env with
| T_LBRACKET
| T_LPAREN ->
error_unexpected env;
(key, annot, value, [])
| T_SEMICOLON ->
Eat.token env;
let trailing =
match Peek.token env with
| T_EOF
| T_RCURLY ->
Eat.trailing_comments env
| _ when Peek.is_line_terminator env -> Eat.comments_until_next_line env
| _ -> []
in
(key, annot, value, trailing)
| _ ->
let remover =
match Peek.token env with
| T_EOF
| T_RCURLY ->
{ trailing = []; remove_trailing = (fun x _ -> x) }
| _ when Peek.is_line_terminator env ->
Comment_attachment.trailing_and_remover_after_last_line env
| _ -> Comment_attachment.trailing_and_remover_after_last_loc env
in
(* Remove trailing comments from the last node in this property *)
let (key, annot, value) =
match (annot, value) with
(* prop = init *)
| (_, Class.Property.Initialized expr) ->
( key,
annot,
Class.Property.Initialized
(remover.remove_trailing expr (fun remover expr -> remover#expression expr))
)
(* prop: annot *)
| (Ast.Type.Available annot, _) ->
( key,
Ast.Type.Available
(remover.remove_trailing annot (fun remover annot -> remover#type_annotation annot)),
value
)
(* prop *)
| _ ->
(remover.remove_trailing key (fun remover key -> remover#object_key key), annot, value)
in
(key, annot, value, [])
in
let property env start_loc decorators key static declare variance leading =
let (loc, (key, annot, value, comments)) =
with_loc
~start_loc
(fun env ->
let annot = Type.annotation_opt env in
let value =
match (declare, Peek.token env) with
| (None, T_ASSIGN) ->
Eat.token env;
Ast.Class.Property.Initialized
(Parse.expression (env |> with_allow_super Super_prop))
| (Some _, T_ASSIGN) ->
error env Parse_error.DeclareClassFieldInitializer;
Eat.token env;
Ast.Class.Property.Declared
| (None, _) -> Ast.Class.Property.Uninitialized
| (Some _, _) -> Ast.Class.Property.Declared
in
let (key, annot, value, trailing) = property_end_and_semicolon env key annot value in
(key, annot, value, Flow_ast_utils.mk_comments_opt ~leading ~trailing ()))
env
in
let open Ast.Class in
match key with
| Ast.Expression.Object.Property.PrivateName key ->
Body.PrivateField
(loc, { PrivateField.key; value; annot; static; variance; decorators; comments })
| _ ->
Body.Property (loc, { Property.key; value; annot; static; variance; decorators; comments })
in
let is_asi env =
match Peek.token env with
| T_LESS_THAN -> false
| T_LPAREN -> false
| _ when Peek.is_implicit_semicolon env -> true
| _ -> false
in
let rec init env start_loc decorators key ~async ~generator ~static ~declare variance leading =
match Peek.token env with
| T_COLON
| T_ASSIGN
| T_SEMICOLON
| T_RCURLY
when (not async) && not generator ->
property env start_loc decorators key static declare variance leading
| T_PLING ->
(* TODO: add support for optional class properties *)
error_unexpected env;
Eat.token env;
init env start_loc decorators key ~async ~generator ~static ~declare variance leading
| _ when is_asi env ->
(* an uninitialized, unannotated property *)
property env start_loc decorators key static declare variance leading
| _ ->
error_unsupported_declare env declare;
error_unsupported_variance env variance;
let (kind, env) =
match (static, key) with
| ( false,
Ast.Expression.Object.Property.Identifier
(_, { Identifier.name = "constructor"; comments = _ })
)
| ( false,
Ast.Expression.Object.Property.Literal
(_, { Literal.value = Literal.String "constructor"; _ })
) ->
(Ast.Class.Method.Constructor, env |> with_allow_super Super_prop_or_call)
| _ -> (Ast.Class.Method.Method, env |> with_allow_super Super_prop)
in
let key = object_key_remove_trailing env key in
let value =
with_loc
(fun env ->
let (sig_loc, (tparams, params, return)) =
with_loc
(fun env ->
let tparams = type_params_remove_trailing env (Type.type_params env) in
let params =
let params = Declaration.function_params ~await:async ~yield:generator env in
let params =
if Peek.token env = T_COLON then
params
else
function_params_remove_trailing env params
in
Ast.Function.Params.(
match params with
| (loc, ({ this_ = Some (this_loc, _); _ } as params))
when kind = Ast.Class.Method.Constructor ->
(* Disallow this param annotations for constructors *)
error_at env (this_loc, Parse_error.ThisParamBannedInConstructor);
(loc, { params with this_ = None })
| params -> params
)
in
let return =
type_annotation_hint_remove_trailing env (Type.annotation_opt env)
in
(tparams, params, return))
env
in
let simple_params = is_simple_parameter_list params in
let (body, contains_use_strict) =
Declaration.function_body env ~async ~generator ~expression:false ~simple_params
in
Declaration.strict_post_check env ~contains_use_strict None params;
{
Function.id = None;
params;
body;
generator;
async;
(* TODO: add support for method predicates *)
predicate = None;
return;
tparams;
sig_loc;
comments = None;
})
env
in
let open Ast.Class in
Body.Method
( Loc.btwn start_loc (fst value),
{
Method.key;
value;
kind;
static;
decorators;
comments = Flow_ast_utils.mk_comments_opt ~leading ();
}
)
in
let ith_implies_identifier ~i env =
match Peek.ith_token ~i env with
| T_LESS_THAN
| T_COLON
| T_ASSIGN
| T_SEMICOLON
| T_LPAREN
| T_RCURLY ->
true
| _ -> false
in
let implies_identifier = ith_implies_identifier ~i:0 in
fun env ->
let start_loc = Peek.loc env in
let decorators = decorator_list env in
let (declare, leading_declare) =
match Peek.token env with
| T_DECLARE when not (ith_implies_identifier ~i:1 env) ->
let ret = Some (Peek.loc env) in
let leading = Peek.comments env in
Eat.token env;
(ret, leading)
| _ -> (None, [])
in
(* Error on TS class visibility modifiers. *)
(match Peek.token env with
| (T_PUBLIC as t)
| (T_PRIVATE as t)
| (T_PROTECTED as t)
when Peek.ith_is_identifier ~i:1 env ->
let kind =
match t with
| T_PUBLIC -> `Public
| T_PRIVATE -> `Private
| T_PROTECTED -> `Protected
| _ -> failwith "Must be one of the above"
in
error env (Parse_error.TSClassVisibility kind);
Eat.token env
| _ -> ());
let static =
Peek.token env = T_STATIC
&&
match Peek.ith_token ~i:1 env with
        | T_ASSIGN (* static = 123 *)
| T_COLON (* static: T *)
| T_EOF (* incomplete property *)
| T_LESS_THAN (* static<T>() {} *)
| T_LPAREN (* static() {} *)
| T_RCURLY (* end of class *)
| T_SEMICOLON (* explicit semicolon *) ->
false
| _ -> true
in
let leading_static =
if static then (
let leading = Peek.comments env in
Eat.token env;
leading
) else
[]
in
let async =
Peek.token env = T_ASYNC
&& (not (ith_implies_identifier ~i:1 env))
&& not (Peek.ith_is_line_terminator ~i:1 env)
in
(* consume `async` *)
let leading_async =
if async then (
let leading = Peek.comments env in
Eat.token env;
leading
) else
[]
in
let (generator, leading_generator) = Declaration.generator env in
let parse_readonly =
Peek.ith_is_identifier ~i:1 env || Peek.ith_token ~i:1 env = T_LBRACKET
in
let variance = Declaration.variance env ~parse_readonly async generator in
let (generator, leading_generator) =
match (generator, variance) with
| (false, Some _) -> Declaration.generator env
| _ -> (generator, leading_generator)
in
let leading =
List.concat [leading_declare; leading_static; leading_async; leading_generator]
in
match (async, generator, Peek.token env) with
| (false, false, T_IDENTIFIER { raw = "get"; _ }) ->
let leading_get = Peek.comments env in
let (_, key) = key ~class_body:true env in
if implies_identifier env then
init env start_loc decorators key ~async ~generator ~static ~declare variance leading
else (
error_unsupported_declare env declare;
error_unsupported_variance env variance;
ignore (object_key_remove_trailing env key);
get env start_loc decorators static (leading @ leading_get)
)
| (false, false, T_IDENTIFIER { raw = "set"; _ }) ->
let leading_set = Peek.comments env in
let (_, key) = key ~class_body:true env in
if implies_identifier env then
init env start_loc decorators key ~async ~generator ~static ~declare variance leading
else (
error_unsupported_declare env declare;
error_unsupported_variance env variance;
ignore (object_key_remove_trailing env key);
set env start_loc decorators static (leading @ leading_set)
)
| (_, _, _) ->
let (_, key) = key ~class_body:true env in
init env start_loc decorators key ~async ~generator ~static ~declare variance leading
let class_body =
let rec elements env seen_constructor private_names acc =
match Peek.token env with
| T_EOF
| T_RCURLY ->
List.rev acc
| T_SEMICOLON ->
(* Skip empty elements *)
Expect.token env T_SEMICOLON;
elements env seen_constructor private_names acc
| _ ->
let element = class_element env in
let (seen_constructor', private_names') =
match element with
| Ast.Class.Body.Method (loc, m) ->
let open Ast.Class.Method in
(match m.kind with
| Constructor ->
if m.static then
(seen_constructor, private_names)
else (
if seen_constructor then error_at env (loc, Parse_error.DuplicateConstructor);
(true, private_names)
)
| Method ->
let private_names =
match m.key with
| Ast.Expression.Object.Property.PrivateName name ->
check_private_names env private_names name `Method
| _ -> private_names
in
(seen_constructor, private_names)
| Get ->
let open Ast.Expression.Object.Property in
let private_names =
match m.key with
| PrivateName name -> check_private_names env private_names name `Getter
| _ -> private_names
in
(seen_constructor, private_names)
| Set ->
let open Ast.Expression.Object.Property in
let private_names =
match m.key with
| PrivateName name -> check_private_names env private_names name `Setter
| _ -> private_names
in
(seen_constructor, private_names))
| Ast.Class.Body.Property (_, { Ast.Class.Property.key; static; _ }) ->
let open Ast.Expression.Object.Property in
begin
match key with
| Identifier (loc, { Identifier.name; comments = _ })
| Literal (loc, { Literal.value = Literal.String name; _ }) ->
check_property_name env loc name static
| Literal _
| Computed _ ->
()
| PrivateName _ ->
failwith "unexpected PrivateName in Property, expected a PrivateField"
end;
(seen_constructor, private_names)
| Ast.Class.Body.PrivateField (_, { Ast.Class.PrivateField.key; _ }) ->
let private_names = check_private_names env private_names key `Field in
(seen_constructor, private_names)
in
elements env seen_constructor' private_names' (element :: acc)
in
fun ~expression env ->
with_loc
(fun env ->
let leading = Peek.comments env in
if Eat.maybe env T_LCURLY then (
enter_class env;
let body = elements env false SMap.empty [] in
exit_class env;
Expect.token env T_RCURLY;
let trailing =
match (expression, Peek.token env) with
| (true, _)
| (_, (T_RCURLY | T_EOF)) ->
Eat.trailing_comments env
| _ when Peek.is_line_terminator env -> Eat.comments_until_next_line env
| _ -> []
in
{ Ast.Class.Body.body; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () }
) else (
Expect.error env T_LCURLY;
{ Ast.Class.Body.body = []; comments = None }
))
env
let _class ?(decorators = []) env ~optional_id ~expression =
(* 10.2.1 says all parts of a class definition are strict *)
let env = env |> with_strict true in
let decorators = decorators @ decorator_list env in
let leading = Peek.comments env in
(match Peek.token env with
| T_IDENTIFIER { raw = "abstract"; _ } ->
error env Parse_error.TSAbstractClass;
Eat.token env
| _ -> ());
Expect.token env T_CLASS;
let id =
let tmp_env = env |> with_no_let true in
match (optional_id, Peek.token tmp_env) with
| (true, (T_EXTENDS | T_IMPLEMENTS | T_LESS_THAN | T_LCURLY)) -> None
| _ when Peek.is_identifier env ->
let id = Parse.identifier tmp_env in
let { remove_trailing; _ } = trailing_and_remover env in
let id = remove_trailing id (fun remover id -> remover#identifier id) in
Some id
| _ ->
(* error, but don't consume a token like Parse.identifier does. this helps
with recovery, and the parser won't get stuck because we consumed the
`class` token above. *)
error_nameless_declaration env "class";
Some (Peek.loc env, { Identifier.name = ""; comments = None })
in
let tparams =
match Type.type_params env with
| None -> None
| Some tparams ->
let { remove_trailing; _ } = trailing_and_remover env in
Some (remove_trailing tparams (fun remover tparams -> remover#type_params tparams))
in
let (extends, implements) = class_heritage env in
let body = class_body env ~expression in
let comments = Flow_ast_utils.mk_comments_opt ~leading () in
{ Class.id; body; tparams; extends; implements; class_decorators = decorators; comments }
let class_declaration env decorators =
with_loc
(fun env ->
let optional_id = in_export_default env in
Ast.Statement.ClassDeclaration (_class env ~decorators ~optional_id ~expression:false))
env
let class_expression =
with_loc (fun env -> Ast.Expression.Class (_class env ~optional_id:true ~expression:true))
end
| null | https://raw.githubusercontent.com/facebook/flow/2422f3f1a00f5a3ea50bb25a22ec9108aca8c70e/src/parser/object_parser.ml | ocaml | A module for parsing various object related things, like object literals
* and classes
this is a getter or setter, it cannot be async
It's not clear how type params on getters & setters would make sense
* in Flow's type system. Since this is a Flow syntax extension, we might
* as well disallow it until we need it
rest params don't make sense on a setter
setters/getter are not predicates
#prod-PropertyDefinition
it is a syntax error if `name` is a reserved word other than await or yield
it is a syntax error if `name` is a strict reserved word, in strict mode
#prod-MethodDefinition
TODO: add support for object method predicates
PropertyName `:` AssignmentExpression
#prod-CoverInitializedName
the `async` and `*` modifiers are only valid on methods
error. we recover by treating it as a shorthand property so as to not
consume any more tokens and make the error worse. we don't error here
because we'll expect a comma before the next token.
Spread property
{ async = true } (destructuring)
{ async: true }
{ async<T>() {} }
{ async() {} }
{ async, other, shorthand }
{ async }
we could use [Expect.error env T_COMMA], but we're in a weird
cover grammar situation where we're storing errors in
[Pattern_cover]. if we used [Expect.error], the errors would
end up out of order.
if the unexpected token is a semicolon, consume it to aid
recovery. using a semicolon instead of a comma is a common
mistake.
/#prod-ClassHeritage
In the ES6 draft, all elements are methods. No properties (though there
* are getter and setters allowed
Class property with annotation
Remove trailing comments from the last node in this property
prop = init
prop: annot
prop
TODO: add support for optional class properties
an uninitialized, unannotated property
Disallow this param annotations for constructors
TODO: add support for method predicates
Error on TS class visibility modifiers.
static: T
incomplete property
static<T>() {}
static() {}
end of class
explicit semicolon
consume `async`
Skip empty elements
10.2.1 says all parts of a class definition are strict
error, but don't consume a token like Parse.identifier does. this helps
with recovery, and the parser won't get stuck because we consumed the
`class` token above. |
* Copyright ( c ) Meta Platforms , Inc. and affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
(*
 * Copyright (c) Meta Platforms, Inc. and affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 *)
module Ast = Flow_ast
open Token
open Parser_env
open Flow_ast
module SMap = Flow_map.Make (String)
open Parser_common
open Comment_attachment
module type OBJECT = sig
val key : ?class_body:bool -> env -> Loc.t * (Loc.t, Loc.t) Ast.Expression.Object.Property.key
val _initializer : env -> Loc.t * (Loc.t, Loc.t) Ast.Expression.Object.t * pattern_errors
val class_declaration :
env -> (Loc.t, Loc.t) Ast.Class.Decorator.t list -> (Loc.t, Loc.t) Ast.Statement.t
val class_expression : env -> (Loc.t, Loc.t) Ast.Expression.t
val class_implements : env -> attach_leading:bool -> (Loc.t, Loc.t) Ast.Class.Implements.t
val decorator_list : env -> (Loc.t, Loc.t) Ast.Class.Decorator.t list
end
module Object
(Parse : Parser_common.PARSER)
(Type : Type_parser.TYPE)
(Declaration : Declaration_parser.DECLARATION)
(Expression : Expression_parser.EXPRESSION)
(Pattern_cover : Pattern_cover.COVER) : OBJECT = struct
let decorator_list =
let expression env =
let expression = Expression.left_hand_side env in
let { remove_trailing; _ } =
if Peek.is_line_terminator env then
trailing_and_remover_after_last_line env
else
trailing_and_remover_after_last_loc env
in
remove_trailing expression (fun remover expression -> remover#expression expression)
in
let decorator env =
let leading = Peek.comments env in
Eat.token env;
{
Ast.Class.Decorator.expression = expression env;
comments = Flow_ast_utils.mk_comments_opt ~leading ();
}
in
let rec decorator_list_helper env decorators =
match Peek.token env with
| T_AT -> decorator_list_helper env (with_loc decorator env :: decorators)
| _ -> decorators
in
fun env ->
if (parse_options env).esproposal_decorators then
List.rev (decorator_list_helper env [])
else
[]
let key ?(class_body = false) env =
let open Ast.Expression.Object.Property in
let leading = Peek.comments env in
let tkn = Peek.token env in
match tkn with
| T_STRING (loc, value, raw, octal) ->
if octal then strict_error env Parse_error.StrictOctalLiteral;
Expect.token env (T_STRING (loc, value, raw, octal));
let value = Literal.String value in
let trailing = Eat.trailing_comments env in
( loc,
Literal
( loc,
{ Literal.value; raw; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () }
)
)
| T_NUMBER { kind; raw } ->
let loc = Peek.loc env in
let value = Expression.number env kind raw in
let value = Literal.Number value in
let trailing = Eat.trailing_comments env in
( loc,
Literal
( loc,
{ Literal.value; raw; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () }
)
)
| T_BIGINT { kind; raw } ->
let loc = Peek.loc env in
let value = Expression.bigint env kind raw in
let value = Literal.BigInt value in
let trailing = Eat.trailing_comments env in
( loc,
Literal
( loc,
{ Literal.value; raw; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () }
)
)
| T_LBRACKET ->
let (loc, key) =
with_loc
(fun env ->
let leading = Peek.comments env in
Expect.token env T_LBRACKET;
let expr = Parse.assignment (env |> with_no_in false) in
Expect.token env T_RBRACKET;
let trailing = Eat.trailing_comments env in
{
ComputedKey.expression = expr;
comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing ();
})
env
in
(loc, Ast.Expression.Object.Property.Computed (loc, key))
| T_POUND when class_body ->
let ((loc, { PrivateName.name; _ }) as id) = private_identifier env in
add_declared_private env name;
(loc, PrivateName id)
| T_POUND ->
let (loc, id) =
with_loc
(fun env ->
Eat.token env;
Identifier (identifier_name env))
env
in
error_at env (loc, Parse_error.PrivateNotInClass);
(loc, id)
| _ ->
let ((loc, _) as id) = identifier_name env in
(loc, Identifier id)
let getter_or_setter env ~in_class_body is_getter =
let async = false in
let (generator, leading) = Declaration.generator env in
let (key_loc, key) = key ~class_body:in_class_body env in
let key = object_key_remove_trailing env key in
let value =
with_loc
(fun env ->
          (* #sec-function-definitions-static-semantics-early-errors *)
let env = env |> with_allow_super Super_prop in
let (sig_loc, (tparams, params, return)) =
with_loc
(fun env ->
let tparams = None in
let params =
let params = Declaration.function_params ~await:false ~yield:false env in
if Peek.token env = T_COLON then
params
else
function_params_remove_trailing env params
in
begin
match (is_getter, params) with
| (true, (_, { Ast.Function.Params.this_ = Some _; _ })) ->
error_at env (key_loc, Parse_error.GetterMayNotHaveThisParam)
| (false, (_, { Ast.Function.Params.this_ = Some _; _ })) ->
error_at env (key_loc, Parse_error.SetterMayNotHaveThisParam)
| ( true,
( _,
{ Ast.Function.Params.params = []; rest = None; this_ = None; comments = _ }
)
) ->
()
| (false, (_, { Ast.Function.Params.rest = Some _; _ })) ->
error_at env (key_loc, Parse_error.SetterArity)
| ( false,
( _,
{
Ast.Function.Params.params = [_];
rest = None;
this_ = None;
comments = _;
}
)
) ->
()
| (true, _) -> error_at env (key_loc, Parse_error.GetterArity)
| (false, _) -> error_at env (key_loc, Parse_error.SetterArity)
end;
let return = type_annotation_hint_remove_trailing env (Type.annotation_opt env) in
(tparams, params, return))
env
in
let simple_params = is_simple_parameter_list params in
let (body, contains_use_strict) =
Declaration.function_body env ~async ~generator ~expression:false ~simple_params
in
Declaration.strict_post_check env ~contains_use_strict None params;
{
Function.id = None;
params;
body;
generator;
async;
predicate = None;
return;
tparams;
sig_loc;
comments = Flow_ast_utils.mk_comments_opt ~leading ();
})
env
in
(key, value)
let _initializer =
let parse_assignment_cover env =
match Expression.assignment_cover env with
| Cover_expr expr -> (expr, Pattern_cover.empty_errors)
| Cover_patt (expr, errs) -> (expr, errs)
in
let get env start_loc leading =
let (loc, (key, value)) =
with_loc ~start_loc (fun env -> getter_or_setter env ~in_class_body:false true) env
in
let open Ast.Expression.Object in
Property
(loc, Property.Get { key; value; comments = Flow_ast_utils.mk_comments_opt ~leading () })
in
let set env start_loc leading =
let (loc, (key, value)) =
with_loc ~start_loc (fun env -> getter_or_setter env ~in_class_body:false false) env
in
let open Ast.Expression.Object in
Property
(loc, Property.Set { key; value; comments = Flow_ast_utils.mk_comments_opt ~leading () })
in
let init =
let open Ast.Expression.Object.Property in
      (* #prod-IdentifierReference *)
let parse_shorthand env key =
match key with
| Literal (loc, lit) ->
error_at env (loc, Parse_error.LiteralShorthandProperty);
(loc, Ast.Expression.Literal lit)
| Identifier ((loc, { Identifier.name; comments = _ }) as id) ->
          (* #sec-identifiers-static-semantics-early-errors *)
if is_reserved name then
error_at env (loc, Parse_error.UnexpectedReserved)
else if is_strict_reserved name then
strict_error_at env (loc, Parse_error.StrictReservedWord);
(loc, Ast.Expression.Identifier id)
| PrivateName _ -> failwith "Internal Error: private name found in object props"
| Computed (_, { ComputedKey.expression = expr; comments = _ }) ->
error_at env (fst expr, Parse_error.ComputedShorthandProperty);
expr
in
let parse_method ~async ~generator ~leading =
with_loc (fun env ->
            (* #sec-function-definitions-static-semantics-early-errors *)
let env = env |> with_allow_super Super_prop in
let (sig_loc, (tparams, params, return)) =
with_loc
(fun env ->
let tparams = type_params_remove_trailing env (Type.type_params env) in
let params =
let params = Declaration.function_params ~await:async ~yield:generator env in
if Peek.token env = T_COLON then
params
else
function_params_remove_trailing env params
in
let return = type_annotation_hint_remove_trailing env (Type.annotation_opt env) in
(tparams, params, return))
env
in
let simple_params = is_simple_parameter_list params in
let (body, contains_use_strict) =
Declaration.function_body env ~async ~generator ~expression:false ~simple_params
in
Declaration.strict_post_check env ~contains_use_strict None params;
{
Function.id = None;
params;
body;
generator;
async;
predicate = None;
return;
tparams;
sig_loc;
comments = Flow_ast_utils.mk_comments_opt ~leading ();
}
)
in
let parse_value env =
Expect.token env T_COLON;
parse_assignment_cover env
in
let parse_assignment_pattern ~key env =
let open Ast.Expression.Object in
match key with
| Property.Identifier id ->
let assignment_loc = Peek.loc env in
let ast =
with_loc
~start_loc:(fst id)
(fun env ->
let leading = Peek.comments env in
Expect.token env T_ASSIGN;
let trailing = Eat.trailing_comments env in
let left = Parse.pattern_from_expr env (fst id, Ast.Expression.Identifier id) in
let right = Parse.assignment env in
let comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () in
Ast.Expression.Assignment
{ Ast.Expression.Assignment.operator = None; left; right; comments })
env
in
let errs =
{
if_expr = [(assignment_loc, Parse_error.Unexpected (Token.quote_token_value "="))];
if_patt = [];
}
in
(ast, errs)
| Property.Literal _
| Property.PrivateName _
| Property.Computed _ ->
parse_value env
in
let parse_init ~key ~async ~generator ~leading env =
if async || generator then
let key = object_key_remove_trailing env key in
let value = parse_method env ~async ~generator ~leading in
let prop = Method { key; value } in
(prop, Pattern_cover.empty_errors)
else
match Peek.token env with
| T_RCURLY
| T_COMMA ->
let value = parse_shorthand env key in
let prop = Init { key; value; shorthand = true } in
(prop, Pattern_cover.empty_errors)
| T_LESS_THAN
| T_LPAREN ->
let key = object_key_remove_trailing env key in
let value = parse_method env ~async ~generator ~leading in
let prop = Method { key; value } in
(prop, Pattern_cover.empty_errors)
| T_ASSIGN ->
let (value, errs) = parse_assignment_pattern ~key env in
let prop = Init { key; value; shorthand = true } in
(prop, errs)
| T_COLON ->
let (value, errs) = parse_value env in
let prop = Init { key; value; shorthand = false } in
(prop, errs)
| _ ->
let value = parse_shorthand env key in
let prop = Init { key; value; shorthand = true } in
(prop, Pattern_cover.empty_errors)
in
fun env start_loc key async generator leading ->
let (loc, (prop, errs)) =
with_loc ~start_loc (parse_init ~key ~async ~generator ~leading) env
in
(Ast.Expression.Object.Property (loc, prop), errs)
in
let property env =
let open Ast.Expression.Object in
if Peek.token env = T_ELLIPSIS then
let leading = Peek.comments env in
let (loc, (argument, errs)) =
with_loc
(fun env ->
Expect.token env T_ELLIPSIS;
parse_assignment_cover env)
env
in
( SpreadProperty
(loc, { SpreadProperty.argument; comments = Flow_ast_utils.mk_comments_opt ~leading () }),
errs
)
else
let start_loc = Peek.loc env in
let (async, leading_async) =
match Peek.ith_token ~i:1 env with
| T_ASSIGN
| T_COLON
| T_LESS_THAN
| T_LPAREN
| T_COMMA
      | T_RCURLY ->
        (false, [])
| _ -> Declaration.async env
in
let (generator, leading_generator) = Declaration.generator env in
let leading = leading_async @ leading_generator in
match (async, generator, Peek.token env) with
| (false, false, T_IDENTIFIER { raw = "get"; _ }) ->
let leading = Peek.comments env in
let (_, key) = key env in
begin
match Peek.token env with
| T_ASSIGN
| T_COLON
| T_LESS_THAN
| T_LPAREN
| T_COMMA
| T_RCURLY ->
init env start_loc key false false []
| _ ->
ignore (Comment_attachment.object_key_remove_trailing env key);
(get env start_loc leading, Pattern_cover.empty_errors)
end
| (false, false, T_IDENTIFIER { raw = "set"; _ }) ->
let leading = Peek.comments env in
let (_, key) = key env in
begin
match Peek.token env with
| T_ASSIGN
| T_COLON
| T_LESS_THAN
| T_LPAREN
| T_COMMA
| T_RCURLY ->
init env start_loc key false false []
| _ ->
ignore (Comment_attachment.object_key_remove_trailing env key);
(set env start_loc leading, Pattern_cover.empty_errors)
end
| (async, generator, _) ->
let (_, key) = key env in
init env start_loc key async generator leading
in
let rec properties env ~rest_trailing_comma (props, errs) =
match Peek.token env with
| T_EOF
| T_RCURLY ->
let errs =
match rest_trailing_comma with
| Some loc ->
{ errs with if_patt = (loc, Parse_error.TrailingCommaAfterRestElement) :: errs.if_patt }
| None -> errs
in
(List.rev props, Pattern_cover.rev_errors errs)
| _ ->
let (prop, new_errs) = property env in
let rest_trailing_comma =
match prop with
| Ast.Expression.Object.SpreadProperty _ when Peek.token env = T_COMMA ->
Some (Peek.loc env)
| _ -> None
in
let errs = Pattern_cover.rev_append_errors new_errs errs in
let errs =
match Peek.token env with
| T_RCURLY
| T_EOF ->
errs
| T_COMMA ->
Eat.token env;
errs
| _ ->
let err = Expect.get_error env T_COMMA in
let _ = Eat.maybe env T_SEMICOLON in
Pattern_cover.cons_error err errs
in
properties env ~rest_trailing_comma (prop :: props, errs)
in
fun env ->
let (loc, (expr, errs)) =
with_loc
(fun env ->
let leading = Peek.comments env in
Expect.token env T_LCURLY;
let (props, errs) =
properties env ~rest_trailing_comma:None ([], Pattern_cover.empty_errors)
in
let internal = Peek.comments env in
Expect.token env T_RCURLY;
let trailing = Eat.trailing_comments env in
( {
Ast.Expression.Object.properties = props;
comments =
Flow_ast_utils.mk_comments_with_internal_opt ~leading ~trailing ~internal ();
},
errs
))
env
in
(loc, expr, errs)
let check_property_name env loc name static =
if String.equal name "constructor" || (String.equal name "prototype" && static) then
error_at
env
(loc, Parse_error.InvalidClassMemberName { name; static; method_ = false; private_ = false })
let check_private_names
env seen_names private_name (kind : [ `Method | `Field | `Getter | `Setter ]) =
let (loc, { PrivateName.name; comments = _ }) = private_name in
if String.equal name "constructor" then
let () =
error_at
env
( loc,
Parse_error.InvalidClassMemberName
{ name; static = false; method_ = kind = `Method; private_ = true }
)
in
seen_names
else
match SMap.find_opt name seen_names with
| Some seen ->
begin
match (kind, seen) with
| (`Getter, `Setter)
| (`Setter, `Getter) ->
          (* one getter and one setter are allowed as long as it's not used as a field *)
()
| _ -> error_at env (loc, Parse_error.DuplicatePrivateFields name)
end;
SMap.add name `Field seen_names
| None -> SMap.add name kind seen_names
let class_implements env ~attach_leading =
let rec interfaces env acc =
let interface =
with_loc
(fun env ->
let id =
let id = Type.type_identifier env in
if Peek.token env <> T_LESS_THAN then
id
else
let { remove_trailing; _ } = trailing_and_remover env in
remove_trailing id (fun remover id -> remover#identifier id)
in
let targs = Type.type_args env in
{ Ast.Class.Implements.Interface.id; targs })
env
in
let acc = interface :: acc in
match Peek.token env with
| T_COMMA ->
Expect.token env T_COMMA;
interfaces env acc
| _ -> List.rev acc
in
with_loc
(fun env ->
let leading =
if attach_leading then
Peek.comments env
else
[]
in
Expect.token env T_IMPLEMENTS;
let interfaces = interfaces env [] in
{ Ast.Class.Implements.interfaces; comments = Flow_ast_utils.mk_comments_opt ~leading () })
env
let class_extends ~leading =
with_loc (fun env ->
let expr =
let expr = Expression.left_hand_side (env |> with_allow_yield false) in
if Peek.token env <> T_LESS_THAN then
expr
else
let { remove_trailing; _ } = trailing_and_remover env in
remove_trailing expr (fun remover expr -> remover#expression expr)
in
let targs = Type.type_args env in
{ Class.Extends.expr; targs; comments = Flow_ast_utils.mk_comments_opt ~leading () }
)
let class_heritage env =
let extends =
let leading = Peek.comments env in
if Eat.maybe env T_EXTENDS then
let (loc, extends) = class_extends ~leading env in
let { remove_trailing; _ } = trailing_and_remover env in
Some
(loc, remove_trailing extends (fun remover extends -> remover#class_extends loc extends))
else
None
in
let implements =
if Peek.token env = T_IMPLEMENTS then (
if not (should_parse_types env) then error env Parse_error.UnexpectedTypeInterface;
Some (class_implements_remove_trailing env (class_implements env ~attach_leading:true))
) else
None
in
(extends, implements)
let class_element =
let get env start_loc decorators static leading =
let (loc, (key, value)) =
with_loc ~start_loc (fun env -> getter_or_setter env ~in_class_body:true true) env
in
let open Ast.Class in
Body.Method
( loc,
{
Method.key;
value;
kind = Method.Get;
static;
decorators;
comments = Flow_ast_utils.mk_comments_opt ~leading ();
}
)
in
let set env start_loc decorators static leading =
let (loc, (key, value)) =
with_loc ~start_loc (fun env -> getter_or_setter env ~in_class_body:true false) env
in
let open Ast.Class in
Body.Method
( loc,
{
Method.key;
value;
kind = Method.Set;
static;
decorators;
comments = Flow_ast_utils.mk_comments_opt ~leading ();
}
)
in
let error_unsupported_variance env = function
| Some (loc, _) -> error_at env (loc, Parse_error.UnexpectedVariance)
| None -> ()
in
let error_unsupported_declare env = function
| Some loc -> error_at env (loc, Parse_error.DeclareClassElement)
| None -> ()
in
let property_end_and_semicolon env key annot value =
match Peek.token env with
| T_LBRACKET
| T_LPAREN ->
error_unexpected env;
(key, annot, value, [])
| T_SEMICOLON ->
Eat.token env;
let trailing =
match Peek.token env with
| T_EOF
| T_RCURLY ->
Eat.trailing_comments env
| _ when Peek.is_line_terminator env -> Eat.comments_until_next_line env
| _ -> []
in
(key, annot, value, trailing)
| _ ->
let remover =
match Peek.token env with
| T_EOF
| T_RCURLY ->
{ trailing = []; remove_trailing = (fun x _ -> x) }
| _ when Peek.is_line_terminator env ->
Comment_attachment.trailing_and_remover_after_last_line env
| _ -> Comment_attachment.trailing_and_remover_after_last_loc env
in
let (key, annot, value) =
match (annot, value) with
| (_, Class.Property.Initialized expr) ->
( key,
annot,
Class.Property.Initialized
(remover.remove_trailing expr (fun remover expr -> remover#expression expr))
)
| (Ast.Type.Available annot, _) ->
( key,
Ast.Type.Available
(remover.remove_trailing annot (fun remover annot -> remover#type_annotation annot)),
value
)
| _ ->
(remover.remove_trailing key (fun remover key -> remover#object_key key), annot, value)
in
(key, annot, value, [])
in
let property env start_loc decorators key static declare variance leading =
let (loc, (key, annot, value, comments)) =
with_loc
~start_loc
(fun env ->
let annot = Type.annotation_opt env in
let value =
match (declare, Peek.token env) with
| (None, T_ASSIGN) ->
Eat.token env;
Ast.Class.Property.Initialized
(Parse.expression (env |> with_allow_super Super_prop))
| (Some _, T_ASSIGN) ->
error env Parse_error.DeclareClassFieldInitializer;
Eat.token env;
Ast.Class.Property.Declared
| (None, _) -> Ast.Class.Property.Uninitialized
| (Some _, _) -> Ast.Class.Property.Declared
in
let (key, annot, value, trailing) = property_end_and_semicolon env key annot value in
(key, annot, value, Flow_ast_utils.mk_comments_opt ~leading ~trailing ()))
env
in
let open Ast.Class in
match key with
| Ast.Expression.Object.Property.PrivateName key ->
Body.PrivateField
(loc, { PrivateField.key; value; annot; static; variance; decorators; comments })
| _ ->
Body.Property (loc, { Property.key; value; annot; static; variance; decorators; comments })
in
let is_asi env =
match Peek.token env with
| T_LESS_THAN -> false
| T_LPAREN -> false
| _ when Peek.is_implicit_semicolon env -> true
| _ -> false
in
let rec init env start_loc decorators key ~async ~generator ~static ~declare variance leading =
match Peek.token env with
| T_COLON
| T_ASSIGN
| T_SEMICOLON
| T_RCURLY
when (not async) && not generator ->
property env start_loc decorators key static declare variance leading
| T_PLING ->
error_unexpected env;
Eat.token env;
init env start_loc decorators key ~async ~generator ~static ~declare variance leading
| _ when is_asi env ->
property env start_loc decorators key static declare variance leading
| _ ->
error_unsupported_declare env declare;
error_unsupported_variance env variance;
let (kind, env) =
match (static, key) with
| ( false,
Ast.Expression.Object.Property.Identifier
(_, { Identifier.name = "constructor"; comments = _ })
)
| ( false,
Ast.Expression.Object.Property.Literal
(_, { Literal.value = Literal.String "constructor"; _ })
) ->
(Ast.Class.Method.Constructor, env |> with_allow_super Super_prop_or_call)
| _ -> (Ast.Class.Method.Method, env |> with_allow_super Super_prop)
in
let key = object_key_remove_trailing env key in
let value =
with_loc
(fun env ->
let (sig_loc, (tparams, params, return)) =
with_loc
(fun env ->
let tparams = type_params_remove_trailing env (Type.type_params env) in
let params =
let params = Declaration.function_params ~await:async ~yield:generator env in
let params =
if Peek.token env = T_COLON then
params
else
function_params_remove_trailing env params
in
Ast.Function.Params.(
match params with
| (loc, ({ this_ = Some (this_loc, _); _ } as params))
when kind = Ast.Class.Method.Constructor ->
error_at env (this_loc, Parse_error.ThisParamBannedInConstructor);
(loc, { params with this_ = None })
| params -> params
)
in
let return =
type_annotation_hint_remove_trailing env (Type.annotation_opt env)
in
(tparams, params, return))
env
in
let simple_params = is_simple_parameter_list params in
let (body, contains_use_strict) =
Declaration.function_body env ~async ~generator ~expression:false ~simple_params
in
Declaration.strict_post_check env ~contains_use_strict None params;
{
Function.id = None;
params;
body;
generator;
async;
predicate = None;
return;
tparams;
sig_loc;
comments = None;
})
env
in
let open Ast.Class in
Body.Method
( Loc.btwn start_loc (fst value),
{
Method.key;
value;
kind;
static;
decorators;
comments = Flow_ast_utils.mk_comments_opt ~leading ();
}
)
in
let ith_implies_identifier ~i env =
match Peek.ith_token ~i env with
| T_LESS_THAN
| T_COLON
| T_ASSIGN
| T_SEMICOLON
| T_LPAREN
| T_RCURLY ->
true
| _ -> false
in
let implies_identifier = ith_implies_identifier ~i:0 in
fun env ->
let start_loc = Peek.loc env in
let decorators = decorator_list env in
let (declare, leading_declare) =
match Peek.token env with
| T_DECLARE when not (ith_implies_identifier ~i:1 env) ->
let ret = Some (Peek.loc env) in
let leading = Peek.comments env in
Eat.token env;
(ret, leading)
| _ -> (None, [])
in
(match Peek.token env with
| (T_PUBLIC as t)
| (T_PRIVATE as t)
| (T_PROTECTED as t)
when Peek.ith_is_identifier ~i:1 env ->
let kind =
match t with
| T_PUBLIC -> `Public
| T_PRIVATE -> `Private
| T_PROTECTED -> `Protected
| _ -> failwith "Must be one of the above"
in
error env (Parse_error.TSClassVisibility kind);
Eat.token env
| _ -> ());
let static =
Peek.token env = T_STATIC
&&
match Peek.ith_token ~i:1 env with
      | T_ASSIGN
      | T_COLON
      | T_EOF
      | T_LESS_THAN
      | T_LPAREN
      | T_RCURLY
      | T_SEMICOLON ->
        false
| _ -> true
in
let leading_static =
if static then (
let leading = Peek.comments env in
Eat.token env;
leading
) else
[]
in
let async =
Peek.token env = T_ASYNC
&& (not (ith_implies_identifier ~i:1 env))
&& not (Peek.ith_is_line_terminator ~i:1 env)
in
let leading_async =
if async then (
let leading = Peek.comments env in
Eat.token env;
leading
) else
[]
in
let (generator, leading_generator) = Declaration.generator env in
let parse_readonly =
Peek.ith_is_identifier ~i:1 env || Peek.ith_token ~i:1 env = T_LBRACKET
in
let variance = Declaration.variance env ~parse_readonly async generator in
let (generator, leading_generator) =
match (generator, variance) with
| (false, Some _) -> Declaration.generator env
| _ -> (generator, leading_generator)
in
let leading =
List.concat [leading_declare; leading_static; leading_async; leading_generator]
in
match (async, generator, Peek.token env) with
| (false, false, T_IDENTIFIER { raw = "get"; _ }) ->
let leading_get = Peek.comments env in
let (_, key) = key ~class_body:true env in
if implies_identifier env then
init env start_loc decorators key ~async ~generator ~static ~declare variance leading
else (
error_unsupported_declare env declare;
error_unsupported_variance env variance;
ignore (object_key_remove_trailing env key);
get env start_loc decorators static (leading @ leading_get)
)
| (false, false, T_IDENTIFIER { raw = "set"; _ }) ->
let leading_set = Peek.comments env in
let (_, key) = key ~class_body:true env in
if implies_identifier env then
init env start_loc decorators key ~async ~generator ~static ~declare variance leading
else (
error_unsupported_declare env declare;
error_unsupported_variance env variance;
ignore (object_key_remove_trailing env key);
set env start_loc decorators static (leading @ leading_set)
)
| (_, _, _) ->
let (_, key) = key ~class_body:true env in
init env start_loc decorators key ~async ~generator ~static ~declare variance leading
let class_body =
let rec elements env seen_constructor private_names acc =
match Peek.token env with
| T_EOF
| T_RCURLY ->
List.rev acc
| T_SEMICOLON ->
Expect.token env T_SEMICOLON;
elements env seen_constructor private_names acc
| _ ->
let element = class_element env in
let (seen_constructor', private_names') =
match element with
| Ast.Class.Body.Method (loc, m) ->
let open Ast.Class.Method in
(match m.kind with
| Constructor ->
if m.static then
(seen_constructor, private_names)
else (
if seen_constructor then error_at env (loc, Parse_error.DuplicateConstructor);
(true, private_names)
)
| Method ->
let private_names =
match m.key with
| Ast.Expression.Object.Property.PrivateName name ->
check_private_names env private_names name `Method
| _ -> private_names
in
(seen_constructor, private_names)
| Get ->
let open Ast.Expression.Object.Property in
let private_names =
match m.key with
| PrivateName name -> check_private_names env private_names name `Getter
| _ -> private_names
in
(seen_constructor, private_names)
| Set ->
let open Ast.Expression.Object.Property in
let private_names =
match m.key with
| PrivateName name -> check_private_names env private_names name `Setter
| _ -> private_names
in
(seen_constructor, private_names))
| Ast.Class.Body.Property (_, { Ast.Class.Property.key; static; _ }) ->
let open Ast.Expression.Object.Property in
begin
match key with
| Identifier (loc, { Identifier.name; comments = _ })
| Literal (loc, { Literal.value = Literal.String name; _ }) ->
check_property_name env loc name static
| Literal _
| Computed _ ->
()
| PrivateName _ ->
failwith "unexpected PrivateName in Property, expected a PrivateField"
end;
(seen_constructor, private_names)
| Ast.Class.Body.PrivateField (_, { Ast.Class.PrivateField.key; _ }) ->
let private_names = check_private_names env private_names key `Field in
(seen_constructor, private_names)
in
elements env seen_constructor' private_names' (element :: acc)
in
fun ~expression env ->
with_loc
(fun env ->
let leading = Peek.comments env in
if Eat.maybe env T_LCURLY then (
enter_class env;
let body = elements env false SMap.empty [] in
exit_class env;
Expect.token env T_RCURLY;
let trailing =
match (expression, Peek.token env) with
| (true, _)
| (_, (T_RCURLY | T_EOF)) ->
Eat.trailing_comments env
| _ when Peek.is_line_terminator env -> Eat.comments_until_next_line env
| _ -> []
in
{ Ast.Class.Body.body; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () }
) else (
Expect.error env T_LCURLY;
{ Ast.Class.Body.body = []; comments = None }
))
env
let _class ?(decorators = []) env ~optional_id ~expression =
let env = env |> with_strict true in
let decorators = decorators @ decorator_list env in
let leading = Peek.comments env in
(match Peek.token env with
| T_IDENTIFIER { raw = "abstract"; _ } ->
error env Parse_error.TSAbstractClass;
Eat.token env
| _ -> ());
Expect.token env T_CLASS;
let id =
let tmp_env = env |> with_no_let true in
match (optional_id, Peek.token tmp_env) with
| (true, (T_EXTENDS | T_IMPLEMENTS | T_LESS_THAN | T_LCURLY)) -> None
| _ when Peek.is_identifier env ->
let id = Parse.identifier tmp_env in
let { remove_trailing; _ } = trailing_and_remover env in
let id = remove_trailing id (fun remover id -> remover#identifier id) in
Some id
| _ ->
error_nameless_declaration env "class";
Some (Peek.loc env, { Identifier.name = ""; comments = None })
in
let tparams =
match Type.type_params env with
| None -> None
| Some tparams ->
let { remove_trailing; _ } = trailing_and_remover env in
Some (remove_trailing tparams (fun remover tparams -> remover#type_params tparams))
in
let (extends, implements) = class_heritage env in
let body = class_body env ~expression in
let comments = Flow_ast_utils.mk_comments_opt ~leading () in
{ Class.id; body; tparams; extends; implements; class_decorators = decorators; comments }
let class_declaration env decorators =
with_loc
(fun env ->
let optional_id = in_export_default env in
Ast.Statement.ClassDeclaration (_class env ~decorators ~optional_id ~expression:false))
env
let class_expression =
with_loc (fun env -> Ast.Expression.Class (_class env ~optional_id:true ~expression:true))
end
|
eeebcb045500618911203c3f7aea10afb449259db9531850ca085f1e4768fbb7 | cl-axon/shop2 | decls.lisp | (in-package :shop2)
(defclass search-state ()
(
(mode
:initarg :mode
:accessor mode
:documentation "Holds the mode -- the name -- of the current
\"instruction\" in the explicit search virtual machine."
)
(current-task
:initarg :current-task
:accessor current-task
:type (or list null)
:documentation "The currently active task, if that's meaningful
in this search MODE."
)
(alternatives
:initarg :alternatives
:accessor alternatives
)
;; world state
(world-state
:initarg :state
:initarg :world-state
:accessor state
:accessor world-state
:documentation "SHOP2 world state object."
)
(protections
:initarg :protections
:accessor protections
:initform NIL
:documentation "Set of protections in the current
state."
)
;; plan-so-far
(tasks
:initarg :tasks
:accessor tasks
:documentation "Current task network."
)
(top-tasks
:initarg :top-tasks
:accessor top-tasks
:documentation "Current set of tasks with no predecessors;
eligible to be planned."
)
(partial-plan
:initarg :partial-plan
:accessor partial-plan
:initform nil
:documentation "List: current plan prefix."
)
(cost
:initarg :cost
:accessor cost
:type number
:initform 0
:documentation "Cost of partial-plan."
)
(unifier
:initarg :unifier
:accessor unifier
:initform nil
)
(depth
:initarg :depth
:accessor depth
:type integer
:initform 0
:documentation "Depth in search. Used by the tracing
functions."
)
(backtrack-stack
:initarg :backtrack-stack
:accessor backtrack-stack
:initform (list (make-instance 'bottom-of-stack))
)
(plans-found
:initarg :plans-found
:initform nil
:accessor plans-found
)
))
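;;; Convenience macro: FORMAT to standard output only when *VERBOSE* is true.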
(defmacro verbose-format (&rest args)
`(when *verbose* (format t ,@args)))
| null | https://raw.githubusercontent.com/cl-axon/shop2/9136e51f7845b46232cc17ca3618f515ddcf2787/explicit-stack-search/decls.lisp | lisp | world state
plan-so-far
| (in-package :shop2)
(defclass search-state ()
(
(mode
:initarg :mode
:accessor mode
:documentation "Holds the mode -- the name -- of the current
\"instruction\" in the explicit search virtual machine."
)
(current-task
:initarg :current-task
:accessor current-task
:type (or list null)
:documentation "The currently active task, if that's meaningful
in this search MODE."
)
(alternatives
:initarg :alternatives
:accessor alternatives
)
(world-state
:initarg :state
:initarg :world-state
:accessor state
:accessor world-state
:documentation "SHOP2 world state object."
)
(protections
:initarg :protections
:accessor protections
:initform NIL
:documentation "Set of protections in the current
state."
)
(tasks
:initarg :tasks
:accessor tasks
:documentation "Current task network."
)
(top-tasks
:initarg :top-tasks
:accessor top-tasks
   :documentation "Current set of tasks with no predecessors;
eligible to be planned."
)
(partial-plan
:initarg :partial-plan
:accessor partial-plan
:initform nil
:documentation "List: current plan prefix."
)
(cost
:initarg :cost
:accessor cost
:type number
:initform 0
:documentation "Cost of partial-plan."
)
(unifier
:initarg :unifier
:accessor unifier
:initform nil
)
(depth
:initarg :depth
:accessor depth
:type integer
:initform 0
:documentation "Depth in search. Used by the tracing
functions."
)
(backtrack-stack
:initarg :backtrack-stack
:accessor backtrack-stack
:initform (list (make-instance 'bottom-of-stack))
)
(plans-found
:initarg :plans-found
:initform nil
:accessor plans-found
)
))
(defmacro verbose-format (&rest args)
`(when *verbose* (format t ,@args)))
|
76cbdfdfbbbe4b7ed7f60e8b48bf298ca793e71b36dbceb5669d3c39d83ffea0 | MinaProtocol/mina | mina_transaction_logic.ml | open Core_kernel
open Mina_base
open Currency
open Signature_lib
open Mina_transaction
module Zkapp_command_logic = Zkapp_command_logic
module Global_slot = Mina_numbers.Global_slot
module Transaction_applied = struct
module UC = Signed_command
module Signed_command_applied = struct
module Common = struct
[%%versioned
module Stable = struct
module V2 = struct
type t =
{ user_command : Signed_command.Stable.V2.t With_status.Stable.V2.t
}
[@@deriving sexp, to_yojson]
let to_latest = Fn.id
end
end]
end
module Body = struct
[%%versioned
module Stable = struct
module V2 = struct
type t =
| Payment of { new_accounts : Account_id.Stable.V2.t list }
| Stake_delegation of
{ previous_delegate : Public_key.Compressed.Stable.V1.t option }
| Failed
[@@deriving sexp, to_yojson]
let to_latest = Fn.id
end
end]
end
[%%versioned
module Stable = struct
module V2 = struct
type t = { common : Common.Stable.V2.t; body : Body.Stable.V2.t }
[@@deriving sexp, to_yojson]
let to_latest = Fn.id
end
end]
let new_accounts (t : t) =
match t.body with
| Payment { new_accounts; _ } ->
new_accounts
| Stake_delegation _ | Failed ->
[]
end
module Zkapp_command_applied = struct
[%%versioned
module Stable = struct
module V1 = struct
type t =
{ accounts :
(Account_id.Stable.V2.t * Account.Stable.V2.t option) list
; command : Zkapp_command.Stable.V1.t With_status.Stable.V2.t
; new_accounts : Account_id.Stable.V2.t list
}
[@@deriving sexp, to_yojson]
let to_latest = Fn.id
end
end]
end
module Command_applied = struct
[%%versioned
module Stable = struct
module V2 = struct
type t =
| Signed_command of Signed_command_applied.Stable.V2.t
| Zkapp_command of Zkapp_command_applied.Stable.V1.t
[@@deriving sexp, to_yojson]
let to_latest = Fn.id
end
end]
end
module Fee_transfer_applied = struct
[%%versioned
module Stable = struct
module V2 = struct
type t =
{ fee_transfer : Fee_transfer.Stable.V2.t With_status.Stable.V2.t
; new_accounts : Account_id.Stable.V2.t list
; burned_tokens : Currency.Amount.Stable.V1.t
}
[@@deriving sexp, to_yojson]
let to_latest = Fn.id
end
end]
end
module Coinbase_applied = struct
[%%versioned
module Stable = struct
module V2 = struct
type t =
{ coinbase : Coinbase.Stable.V1.t With_status.Stable.V2.t
; new_accounts : Account_id.Stable.V2.t list
; burned_tokens : Currency.Amount.Stable.V1.t
}
[@@deriving sexp, to_yojson]
let to_latest = Fn.id
end
end]
end
module Varying = struct
[%%versioned
module Stable = struct
module V2 = struct
type t =
| Command of Command_applied.Stable.V2.t
| Fee_transfer of Fee_transfer_applied.Stable.V2.t
| Coinbase of Coinbase_applied.Stable.V2.t
[@@deriving sexp, to_yojson]
let to_latest = Fn.id
end
end]
end
[%%versioned
module Stable = struct
module V2 = struct
type t =
{ previous_hash : Ledger_hash.Stable.V1.t
; varying : Varying.Stable.V2.t
}
[@@deriving sexp, to_yojson]
let to_latest = Fn.id
end
end]
let burned_tokens : t -> Currency.Amount.t =
fun { varying; _ } ->
match varying with
| Command _ ->
Currency.Amount.zero
| Fee_transfer f ->
f.burned_tokens
| Coinbase c ->
c.burned_tokens
let new_accounts : t -> Account_id.t list =
fun { varying; _ } ->
match varying with
| Command c -> (
match c with
| Signed_command sc ->
Signed_command_applied.new_accounts sc
| Zkapp_command zc ->
zc.new_accounts )
| Fee_transfer f ->
f.new_accounts
| Coinbase c ->
c.new_accounts
let supply_increase : t -> Currency.Amount.Signed.t Or_error.t =
fun t ->
let open Or_error.Let_syntax in
let burned_tokens = Currency.Amount.Signed.of_unsigned (burned_tokens t) in
let account_creation_fees =
let account_creation_fee_int =
Genesis_constants.Constraint_constants.compiled.account_creation_fee
|> Currency.Fee.to_nanomina_int
in
let num_accounts_created = List.length @@ new_accounts t in
(* int type is OK, no danger of overflow *)
Currency.Amount.(
Signed.of_unsigned
@@ of_nanomina_int_exn (account_creation_fee_int * num_accounts_created))
in
let txn : Transaction.t =
match t.varying with
| Command
(Signed_command { common = { user_command = { data; _ }; _ }; _ }) ->
Command (Signed_command data)
| Command (Zkapp_command c) ->
Command (Zkapp_command c.command.data)
| Fee_transfer f ->
Fee_transfer f.fee_transfer.data
| Coinbase c ->
Coinbase c.coinbase.data
in
let%bind expected_supply_increase =
Transaction.expected_supply_increase txn
in
let rec process_decreases total = function
| [] ->
Some total
| amt :: amts ->
let%bind.Option sum =
Currency.Amount.Signed.(add @@ negate amt) total
in
process_decreases sum amts
in
let total =
process_decreases
(Currency.Amount.Signed.of_unsigned expected_supply_increase)
[ burned_tokens; account_creation_fees ]
in
Option.value_map total ~default:(Or_error.error_string "overflow")
~f:(fun v -> Ok v)
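(* Illustrative note, not part of the original module: [supply_increase] is the
   transaction's expected supply increase minus burned tokens and account
   creation fees. Assuming a hypothetical coinbase of 720 MINA creating one new
   account, an account creation fee of 1 MINA and no burned tokens, the fold
   above computes

     720 - 0 - (1 * 1) = 719 MINA (as a signed amount)

   and [process_decreases] only reports "overflow" if one of the signed
   additions itself overflows. *)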
let transaction_with_status : t -> Transaction.t With_status.t =
fun { varying; _ } ->
match varying with
| Command (Signed_command uc) ->
With_status.map uc.common.user_command ~f:(fun cmd ->
Transaction.Command (User_command.Signed_command cmd) )
| Command (Zkapp_command s) ->
With_status.map s.command ~f:(fun c ->
Transaction.Command (User_command.Zkapp_command c) )
| Fee_transfer f ->
With_status.map f.fee_transfer ~f:(fun f -> Transaction.Fee_transfer f)
| Coinbase c ->
With_status.map c.coinbase ~f:(fun c -> Transaction.Coinbase c)
let transaction_status : t -> Transaction_status.t =
fun { varying; _ } ->
match varying with
| Command
(Signed_command { common = { user_command = { status; _ }; _ }; _ }) ->
status
| Command (Zkapp_command c) ->
c.command.status
| Fee_transfer f ->
f.fee_transfer.status
| Coinbase c ->
c.coinbase.status
end
module type S = sig
type ledger
type location
module Transaction_applied : sig
module Signed_command_applied : sig
module Common : sig
type t = Transaction_applied.Signed_command_applied.Common.t =
{ user_command : Signed_command.t With_status.t }
[@@deriving sexp]
end
module Body : sig
type t = Transaction_applied.Signed_command_applied.Body.t =
| Payment of { new_accounts : Account_id.t list }
| Stake_delegation of
{ previous_delegate : Public_key.Compressed.t option }
| Failed
[@@deriving sexp]
end
type t = Transaction_applied.Signed_command_applied.t =
{ common : Common.t; body : Body.t }
[@@deriving sexp]
end
module Zkapp_command_applied : sig
type t = Transaction_applied.Zkapp_command_applied.t =
{ accounts : (Account_id.t * Account.t option) list
; command : Zkapp_command.t With_status.t
; new_accounts : Account_id.t list
}
[@@deriving sexp]
end
module Command_applied : sig
type t = Transaction_applied.Command_applied.t =
| Signed_command of Signed_command_applied.t
| Zkapp_command of Zkapp_command_applied.t
[@@deriving sexp]
end
module Fee_transfer_applied : sig
type t = Transaction_applied.Fee_transfer_applied.t =
{ fee_transfer : Fee_transfer.t With_status.t
; new_accounts : Account_id.t list
; burned_tokens : Currency.Amount.t
}
[@@deriving sexp]
end
module Coinbase_applied : sig
type t = Transaction_applied.Coinbase_applied.t =
{ coinbase : Coinbase.t With_status.t
; new_accounts : Account_id.t list
; burned_tokens : Currency.Amount.t
}
[@@deriving sexp]
end
module Varying : sig
type t = Transaction_applied.Varying.t =
| Command of Command_applied.t
| Fee_transfer of Fee_transfer_applied.t
| Coinbase of Coinbase_applied.t
[@@deriving sexp]
end
type t = Transaction_applied.t =
{ previous_hash : Ledger_hash.t; varying : Varying.t }
[@@deriving sexp]
val burned_tokens : t -> Currency.Amount.t
val supply_increase : t -> Currency.Amount.Signed.t Or_error.t
val transaction : t -> Transaction.t With_status.t
val transaction_status : t -> Transaction_status.t
end
module Global_state : sig
type t =
{ first_pass_ledger : ledger
; second_pass_ledger : ledger
; fee_excess : Amount.Signed.t
; supply_increase : Amount.Signed.t
; protocol_state : Zkapp_precondition.Protocol_state.View.t
; block_global_slot : Mina_numbers.Global_slot.t
          (** Slot of block when the transaction is applied.
              NOTE: This is at least 1 slot after the protocol_state's view,
              which is for the *previous* slot. *)
}
end
module Transaction_partially_applied : sig
module Zkapp_command_partially_applied : sig
type t =
{ command : Zkapp_command.t
; previous_hash : Ledger_hash.t
; original_first_pass_account_states :
(Account_id.t * (location * Account.t) option) list
; constraint_constants : Genesis_constants.Constraint_constants.t
; state_view : Zkapp_precondition.Protocol_state.View.t
; global_state : Global_state.t
; local_state :
( Stack_frame.value
, Stack_frame.value list
, Token_id.t
, Amount.Signed.t
, ledger
, bool
, Zkapp_command.Transaction_commitment.t
, Mina_numbers.Index.t
, Transaction_status.Failure.Collection.t )
Zkapp_command_logic.Local_state.t
}
end
type 'applied fully_applied =
{ previous_hash : Ledger_hash.t; applied : 'applied }
type t =
| Signed_command of
Transaction_applied.Signed_command_applied.t fully_applied
| Zkapp_command of Zkapp_command_partially_applied.t
| Fee_transfer of Transaction_applied.Fee_transfer_applied.t fully_applied
| Coinbase of Transaction_applied.Coinbase_applied.t fully_applied
val command : t -> Transaction.t
end
val apply_user_command :
constraint_constants:Genesis_constants.Constraint_constants.t
-> txn_global_slot:Global_slot.t
-> ledger
-> Signed_command.With_valid_signature.t
-> Transaction_applied.Signed_command_applied.t Or_error.t
val apply_user_command_unchecked :
constraint_constants:Genesis_constants.Constraint_constants.t
-> txn_global_slot:Global_slot.t
-> ledger
-> Signed_command.t
-> Transaction_applied.Signed_command_applied.t Or_error.t
val update_action_state :
Snark_params.Tick.Field.t Pickles_types.Vector.Vector_5.t
-> Zkapp_account.Actions.t
-> txn_global_slot:Global_slot.t
-> last_action_slot:Global_slot.t
-> Snark_params.Tick.Field.t Pickles_types.Vector.Vector_5.t * Global_slot.t
val apply_zkapp_command_unchecked :
constraint_constants:Genesis_constants.Constraint_constants.t
-> global_slot:Mina_numbers.Global_slot.t
-> state_view:Zkapp_precondition.Protocol_state.View.t
-> ledger
-> Zkapp_command.t
-> ( Transaction_applied.Zkapp_command_applied.t
* ( ( Stack_frame.value
, Stack_frame.value list
, Token_id.t
, Amount.Signed.t
, ledger
, bool
, Zkapp_command.Transaction_commitment.t
, Mina_numbers.Index.t
, Transaction_status.Failure.Collection.t )
Zkapp_command_logic.Local_state.t
* Amount.Signed.t ) )
Or_error.t
(** Apply all zkapp_command within a zkapp_command transaction. This behaves as
[apply_zkapp_command_unchecked], except that the [~init] and [~f] arguments
are provided to allow for the accumulation of the intermediate states.
Invariant: [f] is always applied at least once, so it is valid to use an
[_ option] as the initial state and call [Option.value_exn] on the
accumulated result.
This can be used to collect the intermediate states to make them
available for snark work. In particular, since the transaction snark has
a cap on the number of zkapp_command of each kind that may be included, we can
use this to retrieve the (source, target) pairs for each batch of
zkapp_command to include in the snark work spec / transaction snark witness.
*)
val apply_zkapp_command_unchecked_aux :
constraint_constants:Genesis_constants.Constraint_constants.t
-> global_slot:Mina_numbers.Global_slot.t
-> state_view:Zkapp_precondition.Protocol_state.View.t
-> init:'acc
-> f:
( 'acc
-> Global_state.t
* ( Stack_frame.value
, Stack_frame.value list
, Token_id.t
, Amount.Signed.t
, ledger
, bool
, Zkapp_command.Transaction_commitment.t
, Mina_numbers.Index.t
, Transaction_status.Failure.Collection.t )
Zkapp_command_logic.Local_state.t
-> 'acc )
-> ?fee_excess:Amount.Signed.t
-> ?supply_increase:Amount.Signed.t
-> ledger
-> Zkapp_command.t
-> (Transaction_applied.Zkapp_command_applied.t * 'acc) Or_error.t
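  (* Usage sketch (illustrative only, not part of the signature): a caller can
     collect every intermediate (global, local) state pair by folding them into
     a list, e.g.

       apply_zkapp_command_unchecked_aux ~constraint_constants ~global_slot
         ~state_view ~init:[] ~f:(fun acc state -> state :: acc)
         ledger command

     Because [f] is applied at least once, [~init:None] with
     [~f:(fun _ state -> Some state)] followed by [Option.value_exn] is also a
     valid pattern for keeping only the last intermediate state. *)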
val apply_zkapp_command_first_pass_aux :
constraint_constants:Genesis_constants.Constraint_constants.t
-> global_slot:Mina_numbers.Global_slot.t
-> state_view:Zkapp_precondition.Protocol_state.View.t
-> init:'acc
-> f:
( 'acc
-> Global_state.t
* ( Stack_frame.value
, Stack_frame.value list
, Token_id.t
, Amount.Signed.t
, ledger
, bool
, Zkapp_command.Transaction_commitment.t
, Mina_numbers.Index.t
, Transaction_status.Failure.Collection.t )
Zkapp_command_logic.Local_state.t
-> 'acc )
-> ?fee_excess:Amount.Signed.t
-> ?supply_increase:Amount.Signed.t
-> ledger
-> Zkapp_command.t
-> (Transaction_partially_applied.Zkapp_command_partially_applied.t * 'acc)
Or_error.t
val apply_zkapp_command_second_pass_aux :
init:'acc
-> f:
( 'acc
-> Global_state.t
* ( Stack_frame.value
, Stack_frame.value list
, Token_id.t
, Amount.Signed.t
, ledger
, bool
, Zkapp_command.Transaction_commitment.t
, Mina_numbers.Index.t
, Transaction_status.Failure.Collection.t )
Zkapp_command_logic.Local_state.t
-> 'acc )
-> ledger
-> Transaction_partially_applied.Zkapp_command_partially_applied.t
-> (Transaction_applied.Zkapp_command_applied.t * 'acc) Or_error.t
val apply_fee_transfer :
constraint_constants:Genesis_constants.Constraint_constants.t
-> txn_global_slot:Global_slot.t
-> ledger
-> Fee_transfer.t
-> Transaction_applied.Fee_transfer_applied.t Or_error.t
val apply_coinbase :
constraint_constants:Genesis_constants.Constraint_constants.t
-> txn_global_slot:Global_slot.t
-> ledger
-> Coinbase.t
-> Transaction_applied.Coinbase_applied.t Or_error.t
val apply_transaction_first_pass :
constraint_constants:Genesis_constants.Constraint_constants.t
-> global_slot:Global_slot.t
-> txn_state_view:Zkapp_precondition.Protocol_state.View.t
-> ledger
-> Transaction.t
-> Transaction_partially_applied.t Or_error.t
val apply_transaction_second_pass :
ledger
-> Transaction_partially_applied.t
-> Transaction_applied.t Or_error.t
val apply_transactions :
constraint_constants:Genesis_constants.Constraint_constants.t
-> global_slot:Mina_numbers.Global_slot.t
-> txn_state_view:Zkapp_precondition.Protocol_state.View.t
-> ledger
-> Transaction.t list
-> Transaction_applied.t list Or_error.t
val has_locked_tokens :
global_slot:Global_slot.t
-> account_id:Account_id.t
-> ledger
-> bool Or_error.t
module For_tests : sig
val validate_timing_with_min_balance :
account:Account.t
-> txn_amount:Amount.t
-> txn_global_slot:Global_slot.t
-> (Account.Timing.t * [> `Min_balance of Balance.t ]) Or_error.t
val validate_timing :
account:Account.t
-> txn_amount:Amount.t
-> txn_global_slot:Global_slot.t
-> Account.Timing.t Or_error.t
end
end
(* tags for timing validation errors *)
let nsf_tag = "nsf"
let min_balance_tag = "minbal"
let timing_error_to_user_command_status err =
match Error.Internal_repr.of_info err with
| Tag_t (tag, _) when String.equal tag nsf_tag ->
Transaction_status.Failure.Source_insufficient_balance
| Tag_t (tag, _) when String.equal tag min_balance_tag ->
Transaction_status.Failure.Source_minimum_balance_violation
| _ ->
failwith "Unexpected timed account validation error"
(** [validate_timing_with_min_balance' ~account ~txn_amount ~txn_global_slot]
    returns a tuple of 3 values:
    * [[`Insufficient_balance of bool | `Invalid_timing of bool]] encodes
      possible errors, with the invariant that the return value is always
      [`Invalid_timing false] if there was no error.
      - [`Insufficient_balance true] results if [txn_amount] is larger than the
        balance held in [account].
      - [`Invalid_timing true] results if [txn_amount] is larger than the
        balance available in [account] at global slot [txn_global_slot].
    * [Timing.t], the new timing for [account] calculated at [txn_global_slot].
    * [[`Min_balance of Balance.t]] returns the computed available balance at
      [txn_global_slot].
      - NOTE: We skip this calculation if the error is
        [`Insufficient_balance true]. In this scenario, this value MUST NOT be
        used, as it contains an incorrect placeholder value.
*)
let validate_timing_with_min_balance' ~account ~txn_amount ~txn_global_slot =
let open Account.Poly in
let open Account.Timing.Poly in
match account.timing with
| Untimed -> (
(* no time restrictions *)
match Balance.(account.balance - txn_amount) with
| None ->
(`Insufficient_balance true, Untimed, `Min_balance Balance.zero)
| _ ->
(`Invalid_timing false, Untimed, `Min_balance Balance.zero) )
| Timed
{ initial_minimum_balance
; cliff_time
; cliff_amount
; vesting_period
; vesting_increment
} ->
let invalid_balance, invalid_timing, curr_min_balance =
let account_balance = account.balance in
match Balance.(account_balance - txn_amount) with
| None ->
(* NB: The [initial_minimum_balance] here is the incorrect value,
but:
* we don't use it anywhere in this error case; and
* we don't want to waste time computing it if it will be unused.
*)
(true, false, initial_minimum_balance)
| Some proposed_new_balance ->
let curr_min_balance =
Account.min_balance_at_slot ~global_slot:txn_global_slot
~cliff_time ~cliff_amount ~vesting_period ~vesting_increment
~initial_minimum_balance
in
if Balance.(proposed_new_balance < curr_min_balance) then
(false, true, curr_min_balance)
else (false, false, curr_min_balance)
in
      (* Once the calculated minimum balance becomes zero, the account becomes
         untimed. *)
let possibly_error =
if invalid_balance then `Insufficient_balance invalid_balance
else `Invalid_timing invalid_timing
in
if Balance.(curr_min_balance > zero) then
(possibly_error, account.timing, `Min_balance curr_min_balance)
else (possibly_error, Untimed, `Min_balance Balance.zero)
let validate_timing_with_min_balance ~account ~txn_amount ~txn_global_slot =
let open Or_error.Let_syntax in
let nsf_error kind =
Or_error.errorf
!"For %s account, the requested transaction for amount %{sexp: Amount.t} \
at global slot %{sexp: Global_slot.t}, the balance %{sexp: Balance.t} \
is insufficient"
kind txn_amount txn_global_slot account.Account.Poly.balance
|> Or_error.tag ~tag:nsf_tag
in
let min_balance_error min_balance =
Or_error.errorf
!"For timed account, the requested transaction for amount %{sexp: \
Amount.t} at global slot %{sexp: Global_slot.t}, applying the \
transaction would put the balance below the calculated minimum balance \
of %{sexp: Balance.t}"
txn_amount txn_global_slot min_balance
|> Or_error.tag ~tag:min_balance_tag
in
let possibly_error, timing, (`Min_balance curr_min_balance as min_balance) =
validate_timing_with_min_balance' ~account ~txn_amount ~txn_global_slot
in
match possibly_error with
| `Insufficient_balance true ->
nsf_error "timed"
| `Invalid_timing true ->
min_balance_error curr_min_balance
| `Insufficient_balance false ->
failwith "Broken invariant in validate_timing_with_min_balance'"
| `Invalid_timing false ->
return (timing, min_balance)
let validate_timing ~account ~txn_amount ~txn_global_slot =
let open Result.Let_syntax in
let%map timing, `Min_balance _ =
validate_timing_with_min_balance ~account ~txn_amount ~txn_global_slot
in
timing
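(* Illustrative usage sketch (assumes a hypothetical [account] value in scope):
   timing validation composes with other checks through [Or_error], e.g.

     let open Or_error.Let_syntax in
     let%map timing =
       validate_timing ~account
         ~txn_amount:(Amount.of_nanomina_int_exn 5)
         ~txn_global_slot:Global_slot.zero
     in
     { account with timing }

   For an untimed account this returns [Untimed] unchanged; for a timed account
   it fails with the [nsf_tag] or [min_balance_tag] errors defined above when
   the balance or the slot-dependent minimum balance would be violated. *)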
module Make (L : Ledger_intf.S) :
S with type ledger := L.t and type location := L.location = struct
open L
let error s = Or_error.errorf "Ledger.apply_transaction: %s" s
let error_opt e = Option.value_map ~default:(error e) ~f:Or_error.return
let get_with_location ledger account_id =
match location_of_account ledger account_id with
| Some location -> (
match get ledger location with
| Some account ->
Ok (`Existing location, account)
| None ->
failwith "Ledger location with no account" )
| None ->
Ok (`New, Account.create account_id Balance.zero)
let set_with_location ledger location account =
match location with
| `Existing location ->
Ok (set ledger location account)
| `New ->
create_new_account ledger (Account.identifier account) account
let add_amount balance amount =
error_opt "overflow" (Balance.add_amount balance amount)
let sub_amount balance amount =
error_opt "insufficient funds" (Balance.sub_amount balance amount)
let sub_account_creation_fee
~(constraint_constants : Genesis_constants.Constraint_constants.t) action
amount =
let fee = constraint_constants.account_creation_fee in
if Ledger_intf.equal_account_state action `Added then
error_opt
(sprintf
!"Error subtracting account creation fee %{sexp: Currency.Fee.t}; \
transaction amount %{sexp: Currency.Amount.t} insufficient"
fee amount )
Amount.(sub amount (of_fee fee))
else Ok amount
let check b = ksprintf (fun s -> if b then Ok () else Or_error.error_string s)
let validate_nonces txn_nonce account_nonce =
check
(Account.Nonce.equal account_nonce txn_nonce)
!"Nonce in account %{sexp: Account.Nonce.t} different from nonce in \
transaction %{sexp: Account.Nonce.t}"
account_nonce txn_nonce
let validate_time ~valid_until ~current_global_slot =
check
Global_slot.(current_global_slot <= valid_until)
!"Current global slot %{sexp: Global_slot.t} greater than transaction \
expiry slot %{sexp: Global_slot.t}"
current_global_slot valid_until
module Transaction_applied = struct
include Transaction_applied
let transaction : t -> Transaction.t With_status.t =
fun { varying; _ } ->
match varying with
| Command (Signed_command uc) ->
With_status.map uc.common.user_command ~f:(fun cmd ->
Transaction.Command (User_command.Signed_command cmd) )
| Command (Zkapp_command s) ->
With_status.map s.command ~f:(fun c ->
Transaction.Command (User_command.Zkapp_command c) )
| Fee_transfer f ->
With_status.map f.fee_transfer ~f:(fun f ->
Transaction.Fee_transfer f )
| Coinbase c ->
With_status.map c.coinbase ~f:(fun c -> Transaction.Coinbase c)
let transaction_status : t -> Transaction_status.t =
fun { varying; _ } ->
match varying with
| Command
(Signed_command { common = { user_command = { status; _ }; _ }; _ })
->
status
| Command (Zkapp_command c) ->
c.command.status
| Fee_transfer f ->
f.fee_transfer.status
| Coinbase c ->
c.coinbase.status
end
let get_new_accounts action pk =
if Ledger_intf.equal_account_state action `Added then [ pk ] else []
let has_locked_tokens ~global_slot ~account_id ledger =
let open Or_error.Let_syntax in
let%map _, account = get_with_location ledger account_id in
Account.has_locked_tokens ~global_slot account
let failure (e : Transaction_status.Failure.t) = e
let incr_balance (acct : Account.t) amt =
match add_amount acct.balance amt with
| Ok balance ->
Ok { acct with balance }
| Error _ ->
Result.fail (failure Overflow)
(* Helper function for [apply_user_command_unchecked] *)
let pay_fee' ~command ~nonce ~fee_payer ~fee ~ledger ~current_global_slot =
let open Or_error.Let_syntax in
(* Fee-payer information *)
let%bind location, account = get_with_location ledger fee_payer in
let%bind () =
match location with
| `Existing _ ->
return ()
| `New ->
Or_error.errorf "The fee-payer account does not exist"
in
let fee = Amount.of_fee fee in
let%bind balance = sub_amount account.balance fee in
let%bind () = validate_nonces nonce account.nonce in
let%map timing =
validate_timing ~txn_amount:fee ~txn_global_slot:current_global_slot
~account
in
( location
, { account with
balance
; nonce = Account.Nonce.succ account.nonce
; receipt_chain_hash =
Receipt.Chain_hash.cons_signed_command_payload command
account.receipt_chain_hash
; timing
} )
(* Helper function for [apply_user_command_unchecked] *)
let pay_fee ~user_command ~signer_pk ~ledger ~current_global_slot =
let open Or_error.Let_syntax in
(* Fee-payer information *)
let nonce = Signed_command.nonce user_command in
let fee_payer = Signed_command.fee_payer user_command in
let%bind () =
let fee_token = Signed_command.fee_token user_command in
let%bind () =
(* TODO: Enable multi-sig. *)
if
Public_key.Compressed.equal
(Account_id.public_key fee_payer)
signer_pk
then return ()
else
Or_error.errorf
"Cannot pay fees from a public key that did not sign the \
transaction"
in
let%map () =
        (* TODO: Remove this check and update the transaction snark once we
           have an exchange rate mechanism. See issue #4447.
        *)
if Token_id.equal fee_token Token_id.default then return ()
else
Or_error.errorf
"Cannot create transactions with fee_token different from the \
default"
in
()
in
let%map loc, account' =
pay_fee' ~command:(Signed_command_payload user_command.payload) ~nonce
~fee_payer
~fee:(Signed_command.fee user_command)
~ledger ~current_global_slot
in
(loc, account')
  (* someday: It would probably be better if we didn't modify the receipt chain hash
     in the case that the sender is equal to the receiver, but it complicates the SNARK, so
     we don't for now. *)
let apply_user_command_unchecked
~(constraint_constants : Genesis_constants.Constraint_constants.t)
~txn_global_slot ledger
({ payload; signer; signature = _ } as user_command : Signed_command.t) =
let open Or_error.Let_syntax in
let signer_pk = Public_key.compress signer in
let current_global_slot = txn_global_slot in
let%bind () =
validate_time
~valid_until:(Signed_command.valid_until user_command)
~current_global_slot
in
(* Fee-payer information *)
let fee_payer = Signed_command.fee_payer user_command in
let%bind fee_payer_location, fee_payer_account =
pay_fee ~user_command ~signer_pk ~ledger ~current_global_slot
in
let%bind () =
if
Account.has_permission ~control:Control.Tag.Signature ~to_:`Access
fee_payer_account
&& Account.has_permission ~control:Control.Tag.Signature ~to_:`Send
fee_payer_account
then Ok ()
else
Or_error.error_string
Transaction_status.Failure.(describe Update_not_permitted_balance)
in
(* Charge the fee. This must happen, whether or not the command itself
succeeds, to ensure that the network is compensated for processing this
command.
*)
let%bind () =
set_with_location ledger fee_payer_location fee_payer_account
in
let source = Signed_command.source user_command in
let receiver = Signed_command.receiver user_command in
let exception Reject of Error.t in
let ok_or_reject = function Ok x -> x | Error err -> raise (Reject err) in
let compute_updates () =
let open Result.Let_syntax in
(* Compute the necessary changes to apply the command, failing if any of
the conditions are not met.
*)
match payload.body with
| Stake_delegation _ ->
let receiver_location, _receiver_account =
(* Check that receiver account exists. *)
get_with_location ledger receiver |> ok_or_reject
in
let source_location, source_account =
get_with_location ledger source |> ok_or_reject
in
let%bind () =
if
Account.has_permission ~control:Control.Tag.Signature ~to_:`Access
source_account
&& Account.has_permission ~control:Control.Tag.Signature
~to_:`Set_delegate source_account
then Ok ()
else Error Transaction_status.Failure.Update_not_permitted_delegate
in
let%bind () =
match (source_location, receiver_location) with
| `Existing _, `Existing _ ->
return ()
| `New, _ ->
Result.fail Transaction_status.Failure.Source_not_present
| _, `New ->
Result.fail Transaction_status.Failure.Receiver_not_present
in
let previous_delegate = source_account.delegate in
(* Timing is always valid, but we need to record any switch from
timed to untimed here to stay in sync with the snark.
*)
let%map timing =
validate_timing ~txn_amount:Amount.zero
~txn_global_slot:current_global_slot ~account:source_account
|> Result.map_error ~f:timing_error_to_user_command_status
in
let source_account =
{ source_account with
delegate = Some (Account_id.public_key receiver)
; timing
}
in
( [ (source_location, source_account) ]
, Transaction_applied.Signed_command_applied.Body.Stake_delegation
{ previous_delegate } )
| Payment { amount; _ } ->
let receiver_location, receiver_account =
get_with_location ledger receiver |> ok_or_reject
in
let%bind () =
if
Account.has_permission ~control:Control.Tag.None_given
~to_:`Access receiver_account
&& Account.has_permission ~control:Control.Tag.None_given
~to_:`Receive receiver_account
then Ok ()
else Error Transaction_status.Failure.Update_not_permitted_balance
in
let%bind source_location, source_account =
let ret =
if Account_id.equal source receiver then
(*just check if the timing needs updating*)
let%bind location, account =
match receiver_location with
| `Existing _ ->
return (receiver_location, receiver_account)
| `New ->
Result.fail Transaction_status.Failure.Source_not_present
in
let%map timing =
validate_timing ~txn_amount:amount
~txn_global_slot:current_global_slot ~account
|> Result.map_error ~f:timing_error_to_user_command_status
in
(location, { account with timing })
else
let location, account =
get_with_location ledger source |> ok_or_reject
in
let%bind () =
match location with
| `Existing _ ->
return ()
| `New ->
Result.fail Transaction_status.Failure.Source_not_present
in
let%bind timing =
validate_timing ~txn_amount:amount
~txn_global_slot:current_global_slot ~account
|> Result.map_error ~f:timing_error_to_user_command_status
in
let%map balance =
Result.map_error (sub_amount account.balance amount)
~f:(fun _ ->
Transaction_status.Failure.Source_insufficient_balance )
in
(location, { account with timing; balance })
in
if Account_id.equal fee_payer source then
(* Don't process transactions with insufficient balance from the
fee-payer.
*)
match ret with
| Ok x ->
Ok x
| Error failure ->
raise
(Reject
(Error.createf "%s"
(Transaction_status.Failure.describe failure) ) )
else ret
in
let%bind () =
if
Account.has_permission ~control:Control.Tag.Signature ~to_:`Access
source_account
&& Account.has_permission ~control:Control.Tag.Signature
~to_:`Send source_account
then Ok ()
else Error Transaction_status.Failure.Update_not_permitted_balance
in
(* Charge the account creation fee. *)
let%bind receiver_amount =
match receiver_location with
| `Existing _ ->
return amount
| `New ->
(* Subtract the creation fee from the transaction amount. *)
sub_account_creation_fee ~constraint_constants `Added amount
|> Result.map_error ~f:(fun _ ->
Transaction_status.Failure
.Amount_insufficient_to_create_account )
in
let%map receiver_account =
incr_balance receiver_account receiver_amount
in
let new_accounts =
match receiver_location with
| `Existing _ ->
[]
| `New ->
[ receiver ]
in
( [ (receiver_location, receiver_account)
; (source_location, source_account)
]
, Transaction_applied.Signed_command_applied.Body.Payment
{ new_accounts } )
in
match compute_updates () with
| Ok (located_accounts, applied_body) ->
(* Update the ledger. *)
let%bind () =
List.fold located_accounts ~init:(Ok ())
~f:(fun acc (location, account) ->
let%bind () = acc in
set_with_location ledger location account )
in
let applied_common : Transaction_applied.Signed_command_applied.Common.t
=
{ user_command = { data = user_command; status = Applied } }
in
return
( { common = applied_common; body = applied_body }
: Transaction_applied.Signed_command_applied.t )
| Error failure ->
(* Do not update the ledger. Except for the fee payer which is already updated *)
let applied_common : Transaction_applied.Signed_command_applied.Common.t
=
{ user_command =
{ data = user_command
; status =
Failed
(Transaction_status.Failure.Collection.of_single_failure
failure )
}
}
in
return
( { common = applied_common; body = Failed }
: Transaction_applied.Signed_command_applied.t )
| exception Reject err ->
(* TODO: These transactions should never reach this stage, this error
should be fatal.
*)
Error err
let apply_user_command ~constraint_constants ~txn_global_slot ledger
(user_command : Signed_command.With_valid_signature.t) =
apply_user_command_unchecked ~constraint_constants ~txn_global_slot ledger
(Signed_command.forget_check user_command)
module Global_state = struct
type t =
{ first_pass_ledger : L.t
; second_pass_ledger : L.t
; fee_excess : Amount.Signed.t
; supply_increase : Amount.Signed.t
; protocol_state : Zkapp_precondition.Protocol_state.View.t
; block_global_slot : Global_slot.t
}
let first_pass_ledger { first_pass_ledger; _ } =
L.create_masked first_pass_ledger
let set_first_pass_ledger ~should_update t ledger =
if should_update then L.apply_mask t.first_pass_ledger ~masked:ledger ;
t
let second_pass_ledger { second_pass_ledger; _ } =
L.create_masked second_pass_ledger
let set_second_pass_ledger ~should_update t ledger =
if should_update then L.apply_mask t.second_pass_ledger ~masked:ledger ;
t
let fee_excess { fee_excess; _ } = fee_excess
let set_fee_excess t fee_excess = { t with fee_excess }
let supply_increase { supply_increase; _ } = supply_increase
let set_supply_increase t supply_increase = { t with supply_increase }
let block_global_slot { block_global_slot; _ } = block_global_slot
end
module Transaction_partially_applied = struct
module Zkapp_command_partially_applied = struct
type t =
{ command : Zkapp_command.t
; previous_hash : Ledger_hash.t
; original_first_pass_account_states :
(Account_id.t * (location * Account.t) option) list
; constraint_constants : Genesis_constants.Constraint_constants.t
; state_view : Zkapp_precondition.Protocol_state.View.t
; global_state : Global_state.t
; local_state :
( Stack_frame.value
, Stack_frame.value list
, Token_id.t
, Amount.Signed.t
, L.t
, bool
, Zkapp_command.Transaction_commitment.t
, Mina_numbers.Index.t
, Transaction_status.Failure.Collection.t )
Zkapp_command_logic.Local_state.t
}
end
type 'applied fully_applied =
{ previous_hash : Ledger_hash.t; applied : 'applied }
(* TODO: lift previous_hash up in the types *)
type t =
| Signed_command of
Transaction_applied.Signed_command_applied.t fully_applied
| Zkapp_command of Zkapp_command_partially_applied.t
| Fee_transfer of Transaction_applied.Fee_transfer_applied.t fully_applied
| Coinbase of Transaction_applied.Coinbase_applied.t fully_applied
let command (t : t) : Transaction.t =
match t with
| Signed_command s ->
Transaction.Command
(User_command.Signed_command s.applied.common.user_command.data)
| Zkapp_command z ->
Command (User_command.Zkapp_command z.command)
| Fee_transfer f ->
Fee_transfer f.applied.fee_transfer.data
| Coinbase c ->
Coinbase c.applied.coinbase.data
end
module Inputs = struct
let with_label ~label:_ f = f ()
let value_if b ~then_ ~else_ = if b then then_ else else_
module Global_state = Global_state
module Field = struct
type t = Snark_params.Tick.Field.t
let if_ = value_if
let equal = Snark_params.Tick.Field.equal
end
module Bool = struct
type t = bool
module Assert = struct
let is_true ~pos b =
try assert b
with Assert_failure _ ->
let file, line, col, _ecol = pos in
raise (Assert_failure (file, line, col))
let any ~pos bs = List.exists ~f:Fn.id bs |> is_true ~pos
end
let if_ = value_if
let true_ = true
let false_ = false
let equal = Bool.equal
let not = not
let ( ||| ) = ( || )
let ( &&& ) = ( && )
let display b ~label = sprintf "%s: %b" label b
let all = List.for_all ~f:Fn.id
type failure_status = Transaction_status.Failure.t option
type failure_status_tbl = Transaction_status.Failure.Collection.t
let is_empty t = List.join t |> List.is_empty
let assert_with_failure_status_tbl ~pos b failure_status_tbl =
let file, line, col, ecol = pos in
if (not b) && not (is_empty failure_status_tbl) then
(* Raise a more useful error message if we have a failure
description. *)
let failure_msg =
Yojson.Safe.to_string
@@ Transaction_status.Failure.Collection.Display.to_yojson
@@ Transaction_status.Failure.Collection.to_display
failure_status_tbl
in
Error.raise @@ Error.of_string
@@ sprintf "File %S, line %d, characters %d-%d: %s" file line col ecol
failure_msg
else
try assert b
with Assert_failure _ -> raise (Assert_failure (file, line, col))
end
module Account_id = struct
include Account_id
let if_ = value_if
end
module Ledger = struct
type t = L.t
let if_ = value_if
let empty = L.empty
type inclusion_proof = [ `Existing of location | `New ]
let get_account p l =
let loc, acct =
Or_error.ok_exn (get_with_location l (Account_update.account_id p))
in
(acct, loc)
let set_account l (a, loc) =
Or_error.ok_exn (set_with_location l loc a) ;
l
let check_inclusion _ledger (_account, _loc) = ()
let check_account public_key token_id
((account, loc) : Account.t * inclusion_proof) =
assert (Public_key.Compressed.equal public_key account.public_key) ;
assert (Token_id.equal token_id account.token_id) ;
match loc with `Existing _ -> `Is_new false | `New -> `Is_new true
end
module Transaction_commitment = struct
type t = Field.t
let empty = Zkapp_command.Transaction_commitment.empty
let commitment ~account_updates =
let account_updates_hash =
Mina_base.Zkapp_command.Call_forest.hash account_updates
in
Zkapp_command.Transaction_commitment.create ~account_updates_hash
let full_commitment ~account_update ~memo_hash ~commitment =
(* when called from Zkapp_command_logic.apply, the account_update is the fee payer *)
let fee_payer_hash =
Zkapp_command.Digest.Account_update.create account_update
in
Zkapp_command.Transaction_commitment.create_complete commitment
~memo_hash ~fee_payer_hash
let if_ = value_if
end
module Index = struct
type t = Mina_numbers.Index.t
let zero, succ = Mina_numbers.Index.(zero, succ)
let if_ = value_if
end
module Public_key = struct
type t = Public_key.Compressed.t
let if_ = value_if
end
module Controller = struct
type t = Permissions.Auth_required.t
let if_ = value_if
let check ~proof_verifies ~signature_verifies perm =
(* Invariant: We either have a proof, a signature, or neither. *)
assert (not (proof_verifies && signature_verifies)) ;
let tag =
if proof_verifies then Control.Tag.Proof
else if signature_verifies then Control.Tag.Signature
else Control.Tag.None_given
in
Permissions.Auth_required.check perm tag
end
module Global_slot = struct
include Mina_numbers.Global_slot
let if_ = value_if
end
module Nonce = struct
type t = Account.Nonce.t
let if_ = value_if
let succ = Account.Nonce.succ
end
module Receipt_chain_hash = struct
type t = Receipt.Chain_hash.t
module Elt = struct
type t = Receipt.Zkapp_command_elt.t
let of_transaction_commitment tc =
Receipt.Zkapp_command_elt.Zkapp_command_commitment tc
end
let cons_zkapp_command_commitment =
Receipt.Chain_hash.cons_zkapp_command_commitment
let if_ = value_if
end
module State_hash = struct
include State_hash
let if_ = value_if
end
module Timing = struct
type t = Account_update.Update.Timing_info.t option
let if_ = value_if
let vesting_period (t : t) =
match t with
| Some t ->
t.vesting_period
| None ->
(Account_timing.to_record Untimed).vesting_period
end
module Balance = struct
include Balance
let if_ = value_if
end
module Verification_key = struct
type t = (Side_loaded_verification_key.t, Field.t) With_hash.t option
let if_ = value_if
end
module Verification_key_hash = struct
type t = Field.t option
let equal vk1 vk2 = Option.equal Field.equal vk1 vk2
end
module Actions = struct
type t = Zkapp_account.Actions.t
let is_empty = List.is_empty
let push_events = Account_update.Actions.push_events
end
module Zkapp_uri = struct
type t = string
let if_ = value_if
end
module Token_symbol = struct
type t = Account.Token_symbol.t
let if_ = value_if
end
module Account = struct
include Account
module Permissions = struct
let access : t -> Controller.t = fun a -> a.permissions.access
let edit_state : t -> Controller.t = fun a -> a.permissions.edit_state
let send : t -> Controller.t = fun a -> a.permissions.send
let receive : t -> Controller.t = fun a -> a.permissions.receive
let set_delegate : t -> Controller.t =
fun a -> a.permissions.set_delegate
let set_permissions : t -> Controller.t =
fun a -> a.permissions.set_permissions
let set_verification_key : t -> Controller.t =
fun a -> a.permissions.set_verification_key
let set_zkapp_uri : t -> Controller.t =
fun a -> a.permissions.set_zkapp_uri
let edit_action_state : t -> Controller.t =
fun a -> a.permissions.edit_action_state
let set_token_symbol : t -> Controller.t =
fun a -> a.permissions.set_token_symbol
let increment_nonce : t -> Controller.t =
fun a -> a.permissions.increment_nonce
let set_voting_for : t -> Controller.t =
fun a -> a.permissions.set_voting_for
let set_timing : t -> Controller.t = fun a -> a.permissions.set_timing
type t = Permissions.t
let if_ = value_if
end
type timing = Account_update.Update.Timing_info.t option
let timing (a : t) : timing =
Account_update.Update.Timing_info.of_account_timing a.timing
let set_timing (a : t) (timing : timing) : t =
{ a with
timing =
Option.value_map ~default:Account_timing.Untimed
~f:Account_update.Update.Timing_info.to_account_timing timing
}
let is_timed (a : t) =
match a.timing with Account_timing.Untimed -> false | _ -> true
let set_token_id (a : t) (id : Token_id.t) : t = { a with token_id = id }
let balance (a : t) : Balance.t = a.balance
let set_balance (balance : Balance.t) (a : t) : t = { a with balance }
let check_timing ~txn_global_slot account =
let invalid_timing, timing, _ =
validate_timing_with_min_balance' ~txn_amount:Amount.zero
~txn_global_slot ~account
in
( invalid_timing
, Account_update.Update.Timing_info.of_account_timing timing )
let receipt_chain_hash (a : t) : Receipt.Chain_hash.t =
a.receipt_chain_hash
let set_receipt_chain_hash (a : t) hash =
{ a with receipt_chain_hash = hash }
let make_zkapp (a : t) =
let zkapp =
match a.zkapp with
| None ->
Some Zkapp_account.default
| Some _ as zkapp ->
zkapp
in
{ a with zkapp }
let unmake_zkapp (a : t) : t =
let zkapp =
match a.zkapp with
| None ->
None
| Some zkapp ->
if Zkapp_account.(equal default zkapp) then None else Some zkapp
in
{ a with zkapp }
let get_zkapp (a : t) = Option.value_exn a.zkapp
let set_zkapp (a : t) ~f : t = { a with zkapp = Option.map a.zkapp ~f }
let proved_state (a : t) = (get_zkapp a).proved_state
let set_proved_state proved_state (a : t) =
set_zkapp a ~f:(fun zkapp -> { zkapp with proved_state })
let app_state (a : t) = (get_zkapp a).app_state
let set_app_state app_state (a : t) =
set_zkapp a ~f:(fun zkapp -> { zkapp with app_state })
let register_verification_key (_ : t) = ()
let verification_key (a : t) = (get_zkapp a).verification_key
let set_verification_key verification_key (a : t) =
set_zkapp a ~f:(fun zkapp -> { zkapp with verification_key })
let verification_key_hash (a : t) =
match a.zkapp with
| None ->
None
| Some zkapp ->
Option.map zkapp.verification_key ~f:With_hash.hash
let last_action_slot (a : t) = (get_zkapp a).last_action_slot
let set_last_action_slot last_action_slot (a : t) =
set_zkapp a ~f:(fun zkapp -> { zkapp with last_action_slot })
let action_state (a : t) = (get_zkapp a).action_state
let set_action_state action_state (a : t) =
set_zkapp a ~f:(fun zkapp -> { zkapp with action_state })
let zkapp_uri (a : t) =
Option.value_map a.zkapp ~default:"" ~f:(fun zkapp -> zkapp.zkapp_uri)
let set_zkapp_uri zkapp_uri (a : t) : t =
{ a with
zkapp = Option.map a.zkapp ~f:(fun zkapp -> { zkapp with zkapp_uri })
}
let token_symbol (a : t) = a.token_symbol
let set_token_symbol token_symbol (a : t) = { a with token_symbol }
let public_key (a : t) = a.public_key
let set_public_key public_key (a : t) = { a with public_key }
let delegate (a : t) = Account.delegate_opt a.delegate
let set_delegate delegate (a : t) =
let delegate =
if Signature_lib.Public_key.Compressed.(equal empty) delegate then
None
else Some delegate
in
{ a with delegate }
let nonce (a : t) = a.nonce
let set_nonce nonce (a : t) = { a with nonce }
let voting_for (a : t) = a.voting_for
let set_voting_for voting_for (a : t) = { a with voting_for }
let permissions (a : t) = a.permissions
let set_permissions permissions (a : t) = { a with permissions }
end
module Amount = struct
open Currency.Amount
type unsigned = t
type t = unsigned
let if_ = value_if
module Signed = struct
include Signed
let if_ = value_if
let is_pos (t : t) = Sgn.equal t.sgn Pos
let is_neg (t : t) = Sgn.equal t.sgn Neg
end
let zero = zero
let equal = equal
let add_flagged = add_flagged
let add_signed_flagged (x1 : t) (x2 : Signed.t) : t * [ `Overflow of bool ]
=
let y, `Overflow b = Signed.(add_flagged (of_unsigned x1) x2) in
match y.sgn with
| Pos ->
(y.magnitude, `Overflow b)
| Neg ->
(* We want to capture the accurate value so that this will match
with the values in the snarked logic.
*)
let magnitude =
Amount.to_uint64 y.magnitude
|> Unsigned.UInt64.(mul (sub zero one))
|> Amount.of_uint64
in
(magnitude, `Overflow true)
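      (* Illustrative note (not in the original source): when the signed sum is
         negative, the returned magnitude is the uint64 two's-complement wrap
         of that negative value, so it lines up with what the snarked logic
         computes. E.g., assuming x1 = 5 and x2 = -7, the result is
         (2^64 - 2, `Overflow true) rather than (2, `Overflow false). *)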
let of_constant_fee = of_fee
end
module Token_id = struct
include Token_id
let if_ = value_if
end
module Protocol_state_precondition = struct
include Zkapp_precondition.Protocol_state
end
module Valid_while_precondition = struct
include Zkapp_precondition.Valid_while
end
module Account_update = struct
include Account_update
module Account_precondition = struct
include Account_update.Account_precondition
let nonce (t : Account_update.t) = nonce t.body.preconditions.account
end
type 'a or_ignore = 'a Zkapp_basic.Or_ignore.t
type call_forest = Zkapp_call_forest.t
type transaction_commitment = Transaction_commitment.t
let may_use_parents_own_token (p : t) =
May_use_token.parents_own_token p.body.may_use_token
let may_use_token_inherited_from_parent (p : t) =
May_use_token.inherit_from_parent p.body.may_use_token
let check_authorization ~will_succeed:_ ~commitment:_ ~calls:_
(account_update : t) =
(* The transaction's validity should already have been checked before
this point.
*)
match account_update.authorization with
| Signature _ ->
(`Proof_verifies false, `Signature_verifies true)
| Proof _ ->
(`Proof_verifies true, `Signature_verifies false)
| None_given ->
(`Proof_verifies false, `Signature_verifies false)
let is_proved (account_update : t) =
match account_update.body.authorization_kind with
| Proof _ ->
true
| Signature | None_given ->
false
let is_signed (account_update : t) =
match account_update.body.authorization_kind with
| Signature ->
true
| Proof _ | None_given ->
false
let verification_key_hash (p : t) =
match p.body.authorization_kind with
| Proof vk_hash ->
Some vk_hash
| _ ->
None
module Update = struct
open Zkapp_basic
type 'a set_or_keep = 'a Zkapp_basic.Set_or_keep.t
let timing (account_update : t) : Account.timing set_or_keep =
Set_or_keep.map ~f:Option.some account_update.body.update.timing
let app_state (account_update : t) =
account_update.body.update.app_state
let verification_key (account_update : t) =
Zkapp_basic.Set_or_keep.map ~f:Option.some
account_update.body.update.verification_key
let actions (account_update : t) = account_update.body.actions
let zkapp_uri (account_update : t) =
account_update.body.update.zkapp_uri
let token_symbol (account_update : t) =
account_update.body.update.token_symbol
let delegate (account_update : t) = account_update.body.update.delegate
let voting_for (account_update : t) =
account_update.body.update.voting_for
let permissions (account_update : t) =
account_update.body.update.permissions
end
end
module Set_or_keep = struct
include Zkapp_basic.Set_or_keep
let set_or_keep ~if_:_ t x = set_or_keep t x
end
module Opt = struct
type 'a t = 'a option
let is_some = Option.is_some
let map = Option.map
let or_default ~if_ x ~default =
if_ (is_some x) ~then_:(Option.value ~default x) ~else_:default
let or_exn x = Option.value_exn x
end
module Stack (Elt : sig
type t
end) =
struct
type t = Elt.t list
let if_ = value_if
let empty () = []
let is_empty = List.is_empty
let pop_exn : t -> Elt.t * t = function
| [] ->
failwith "pop_exn"
| x :: xs ->
(x, xs)
let pop : t -> (Elt.t * t) option = function
| x :: xs ->
Some (x, xs)
| _ ->
None
let push x ~onto : t = x :: onto
end
module Call_forest = Zkapp_call_forest
module Stack_frame = struct
include Stack_frame
type t = value
let if_ = Zkapp_command.value_if
let make = Stack_frame.make
end
module Call_stack = Stack (Stack_frame)
module Local_state = struct
type t =
( Stack_frame.t
, Call_stack.t
, Token_id.t
, Amount.Signed.t
, Ledger.t
, Bool.t
, Transaction_commitment.t
, Index.t
, Bool.failure_status_tbl )
Zkapp_command_logic.Local_state.t
let add_check (t : t) failure b =
let failure_status_tbl =
match t.failure_status_tbl with
| hd :: tl when not b ->
(failure :: hd) :: tl
| old_failure_status_tbl ->
old_failure_status_tbl
in
{ t with failure_status_tbl; success = t.success && b }
let update_failure_status_tbl (t : t) failure_status b =
match failure_status with
| None ->
{ t with success = t.success && b }
| Some failure ->
add_check t failure b
let add_new_failure_status_bucket (t : t) =
{ t with failure_status_tbl = [] :: t.failure_status_tbl }
end
module Nonce_precondition = struct
let is_constant =
Zkapp_precondition.Numeric.is_constant
Zkapp_precondition.Numeric.Tc.nonce
end
end
module Env = struct
open Inputs
type t =
< account_update : Account_update.t
; zkapp_command : Zkapp_command.t
; account : Account.t
; ledger : Ledger.t
; amount : Amount.t
; signed_amount : Amount.Signed.t
; bool : Bool.t
; token_id : Token_id.t
; global_state : Global_state.t
; inclusion_proof : [ `Existing of location | `New ]
; local_state :
( Stack_frame.t
, Call_stack.t
, Token_id.t
, Amount.Signed.t
, L.t
, bool
, Transaction_commitment.t
, Index.t
, Transaction_status.Failure.Collection.t )
Zkapp_command_logic.Local_state.t
; protocol_state_precondition : Zkapp_precondition.Protocol_state.t
; valid_while_precondition : Zkapp_precondition.Valid_while.t
; transaction_commitment : Transaction_commitment.t
; full_transaction_commitment : Transaction_commitment.t
; field : Snark_params.Tick.Field.t
; failure : Transaction_status.Failure.t option >
let perform ~constraint_constants:_ (type r)
(eff : (r, t) Zkapp_command_logic.Eff.t) : r =
match eff with
| Check_valid_while_precondition (valid_while, global_state) ->
Zkapp_precondition.Valid_while.check valid_while
global_state.block_global_slot
|> Or_error.is_ok
| Check_protocol_state_precondition (pred, global_state) -> (
Zkapp_precondition.Protocol_state.check pred
global_state.protocol_state
|> fun or_err -> match or_err with Ok () -> true | Error _ -> false )
| Check_account_precondition
(account_update, account, new_account, local_state) -> (
match account_update.body.preconditions.account with
| Accept ->
local_state
| Nonce n ->
let nonce_matches = Account.Nonce.equal account.nonce n in
Inputs.Local_state.add_check local_state
Account_nonce_precondition_unsatisfied nonce_matches
| Full precondition_account ->
let local_state = ref local_state in
let check failure b =
local_state :=
Inputs.Local_state.add_check !local_state failure b
in
Zkapp_precondition.Account.check ~new_account ~check
precondition_account account ;
!local_state )
| Init_account { account_update = _; account = a } ->
a
end
module M = Zkapp_command_logic.Make (Inputs)
let update_action_state action_state actions ~txn_global_slot
~last_action_slot =
let action_state', last_action_slot' =
M.update_action_state action_state actions ~txn_global_slot
~last_action_slot
in
(action_state', last_action_slot')
  (* Apply the zkapp command's fee payer while stubbing out the second pass ledger.
     CAUTION: If you use the intermediate local states, you MUST update the
     [will_succeed] field to [false] if the [status] is [Failed]. *)
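  (* Composition sketch (illustrative only): the two passes are meant to be
     chained, e.g.

       let open Or_error.Let_syntax in
       let%bind partially_applied =
         apply_zkapp_command_first_pass ~constraint_constants ~global_slot
           ~state_view first_pass_ledger command
       in
       apply_zkapp_command_second_pass second_pass_ledger partially_applied

     which is essentially what [apply_zkapp_command_unchecked] below does, with
     a single ledger playing both roles. *)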
let apply_zkapp_command_first_pass_aux (type user_acc) ~constraint_constants
~global_slot ~(state_view : Zkapp_precondition.Protocol_state.View.t)
~(init : user_acc) ~f
?((* TODO: can this be ripped out from here? *)
fee_excess = Amount.Signed.zero)
      ?((* TODO: is this right? is it never used for zkapps? *)
        supply_increase = Amount.Signed.zero) (ledger : L.t)
(command : Zkapp_command.t) :
( Transaction_partially_applied.Zkapp_command_partially_applied.t
* user_acc )
Or_error.t =
let open Or_error.Let_syntax in
let previous_hash = merkle_root ledger in
let original_first_pass_account_states =
let id = Zkapp_command.fee_payer command in
[ ( id
, Option.Let_syntax.(
let%bind loc = L.location_of_account ledger id in
let%map a = L.get ledger loc in
(loc, a)) )
]
in
let perform eff = Env.perform ~constraint_constants eff in
let initial_state :
Inputs.Global_state.t * _ Zkapp_command_logic.Local_state.t =
( { protocol_state = state_view
; first_pass_ledger = ledger
; second_pass_ledger =
          (* We stub out the second_pass_ledger initially, and then poke the
             correct value in place after the first pass is finished.
          *)
L.empty ~depth:0 ()
; fee_excess
; supply_increase
; block_global_slot = global_slot
}
, { stack_frame =
({ calls = []
; caller = Token_id.default
; caller_caller = Token_id.default
} : Inputs.Stack_frame.t)
; call_stack = []
; transaction_commitment = Inputs.Transaction_commitment.empty
; full_transaction_commitment = Inputs.Transaction_commitment.empty
; token_id = Token_id.default
; excess = Currency.Amount.(Signed.of_unsigned zero)
; supply_increase = Currency.Amount.(Signed.of_unsigned zero)
; ledger = L.empty ~depth:0 ()
; success = true
; account_update_index = Inputs.Index.zero
; failure_status_tbl = []
; will_succeed = true
} )
in
let user_acc = f init initial_state in
let account_updates = Zkapp_command.all_account_updates command in
let%map global_state, local_state =
Or_error.try_with (fun () ->
M.start ~constraint_constants
{ account_updates
; memo_hash = Signed_command_memo.hash command.memo
; will_succeed =
(* It's always valid to set this value to true, and it will
have no effect outside of the snark.
*)
true
}
{ perform } initial_state )
in
( { Transaction_partially_applied.Zkapp_command_partially_applied.command
; previous_hash
; original_first_pass_account_states
; constraint_constants
; state_view
; global_state
; local_state
}
, user_acc )
let apply_zkapp_command_first_pass ~constraint_constants ~global_slot
~(state_view : Zkapp_precondition.Protocol_state.View.t)
?((* TODO: can this be ripped out from here? *)
fee_excess = Amount.Signed.zero)
      ?((* TODO: is this right? is it never used for zkapps? *)
        supply_increase = Amount.Signed.zero) (ledger : L.t)
(command : Zkapp_command.t) :
Transaction_partially_applied.Zkapp_command_partially_applied.t Or_error.t
=
let open Or_error.Let_syntax in
let%map partial_stmt, _user_acc =
apply_zkapp_command_first_pass_aux ~constraint_constants ~global_slot
~state_view ~fee_excess ~supply_increase ledger command ~init:None
~f:(fun _acc state -> Some state)
in
partial_stmt
let apply_zkapp_command_second_pass_aux (type user_acc) ~(init : user_acc) ~f
ledger
(c : Transaction_partially_applied.Zkapp_command_partially_applied.t) :
(Transaction_applied.Zkapp_command_applied.t * user_acc) Or_error.t =
let open Or_error.Let_syntax in
let perform eff =
Env.perform ~constraint_constants:c.constraint_constants eff
in
let original_account_states =
      (* Get the original states of all the accounts in each pass.
         If an account updated in the first pass is referenced in account
         updates, then retain the value before first pass application. *)
let account_states = Account_id.Table.create () in
List.iter
~f:(fun (id, acc_opt) ->
Account_id.Table.update account_states id
~f:(Option.value ~default:acc_opt) )
( c.original_first_pass_account_states
@ List.map (Zkapp_command.accounts_referenced c.command) ~f:(fun id ->
( id
, Option.Let_syntax.(
let%bind loc = L.location_of_account ledger id in
let%map a = L.get ledger loc in
(loc, a)) ) ) ) ;
Account_id.Table.to_alist account_states
in
let rec step_all (user_acc : user_acc)
( (g_state : Inputs.Global_state.t)
, (l_state : _ Zkapp_command_logic.Local_state.t) ) :
(user_acc * Transaction_status.Failure.Collection.t) Or_error.t =
if List.is_empty l_state.stack_frame.Stack_frame.calls then
Ok (user_acc, l_state.failure_status_tbl)
else
let%bind states =
Or_error.try_with (fun () ->
M.step ~constraint_constants:c.constraint_constants { perform }
(g_state, l_state) )
in
step_all (f user_acc states) states
in
let account_states_after_fee_payer =
      (* To check if the accounts remain unchanged in the event the transaction
         fails. First pass updates will remain even if the transaction fails to
         apply zkapp account updates. *)
List.map (Zkapp_command.accounts_referenced c.command) ~f:(fun id ->
( id
, Option.Let_syntax.(
let%bind loc = L.location_of_account ledger id in
let%map a = L.get ledger loc in
(loc, a)) ) )
in
let accounts () =
List.map original_account_states
~f:(Tuple2.map_snd ~f:(Option.map ~f:snd))
in
    (* update local and global state ledger to second pass ledger *)
let global_state = { c.global_state with second_pass_ledger = ledger } in
let local_state =
{ c.local_state with
ledger = Global_state.second_pass_ledger global_state
}
in
let start = (global_state, local_state) in
match step_all (f init start) start with
| Error e ->
Error e
| Ok (user_acc, reversed_failure_status_tbl) ->
let failure_status_tbl = List.rev reversed_failure_status_tbl in
let account_ids_originally_not_in_ledger =
List.filter_map original_account_states
~f:(fun (acct_id, loc_and_acct) ->
if Option.is_none loc_and_acct then Some acct_id else None )
in
let successfully_applied =
Transaction_status.Failure.Collection.is_empty failure_status_tbl
in
        (* If the zkapp command fails in at least 1 account update,
           then all the account updates would be cancelled except
           the fee payer one.
        *)
let failure_status_tbl =
if successfully_applied then failure_status_tbl
else
List.mapi failure_status_tbl ~f:(fun idx fs ->
if idx > 0 && List.is_empty fs then
[ Transaction_status.Failure.Cancelled ]
else fs )
in
(* accounts not originally in ledger, now present in ledger *)
let new_accounts =
List.filter account_ids_originally_not_in_ledger ~f:(fun acct_id ->
Option.is_some @@ L.location_of_account ledger acct_id )
in
let valid_result =
Ok
( { Transaction_applied.Zkapp_command_applied.accounts = accounts ()
; command =
{ With_status.data = c.command
; status =
( if successfully_applied then Applied
else Failed failure_status_tbl )
}
; new_accounts
}
, user_acc )
in
if successfully_applied then valid_result
else
let other_account_update_accounts_unchanged =
List.fold_until account_states_after_fee_payer ~init:true
~f:(fun acc (_, loc_opt) ->
match
let open Option.Let_syntax in
let%bind loc, a = loc_opt in
let%bind a' = L.get ledger loc in
Option.some_if (not (Account.equal a a')) ()
with
| None ->
Continue acc
| Some _ ->
Stop false )
~finish:Fn.id
in
(* Other zkapp_command failed, therefore, updates in those should not get applied *)
if
List.is_empty new_accounts
&& other_account_update_accounts_unchanged
then valid_result
else
Or_error.error_string
"Zkapp_command application failed but new accounts created or \
some of the other account_update updates applied"
let apply_zkapp_command_second_pass ledger c :
Transaction_applied.Zkapp_command_applied.t Or_error.t =
let open Or_error.Let_syntax in
let%map x, () =
apply_zkapp_command_second_pass_aux ~init:() ~f:Fn.const ledger c
in
x
let apply_zkapp_command_unchecked_aux ~constraint_constants ~global_slot
~state_view ~init ~f ?fee_excess ?supply_increase ledger command =
let open Or_error.Let_syntax in
apply_zkapp_command_first_pass_aux ~constraint_constants ~global_slot
~state_view ?fee_excess ?supply_increase ledger command ~init ~f
>>= fun (partial_stmt, user_acc) ->
apply_zkapp_command_second_pass_aux ~init:user_acc ~f ledger partial_stmt
let apply_zkapp_command_unchecked ~constraint_constants ~global_slot
~state_view ledger command =
let open Or_error.Let_syntax in
apply_zkapp_command_first_pass ~constraint_constants ~global_slot
~state_view ledger command
>>= apply_zkapp_command_second_pass_aux ledger ~init:None
~f:(fun _acc (global_state, local_state) ->
Some (local_state, global_state.fee_excess) )
|> Result.map ~f:(fun (account_update_applied, state_res) ->
(account_update_applied, Option.value_exn state_res) )
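(* [Option.value_exn] above is safe: the accumulator function runs at least
   once per command, so the [None] initial state is always replaced. *)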
let update_timing_when_no_deduction ~txn_global_slot account =
validate_timing ~txn_amount:Amount.zero ~txn_global_slot ~account
let has_permission_to_receive ~ledger receiver_account_id :
Account.t
* Ledger_intf.account_state
* [> `Has_permission_to_receive of bool ] =
let init_account = Account.initialize receiver_account_id in
match location_of_account ledger receiver_account_id with
| None ->
(* new account, check that default permissions allow receiving *)
( init_account
, `Added
, `Has_permission_to_receive
(Account.has_permission ~control:Control.Tag.None_given
~to_:`Receive init_account ) )
| Some loc -> (
match get ledger loc with
| None ->
failwith "Ledger location with no account"
| Some receiver_account ->
( receiver_account
, `Existed
, `Has_permission_to_receive
(Account.has_permission ~control:Control.Tag.None_given
~to_:`Receive receiver_account ) ) )
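(* Summary of the above: returns the (possibly freshly initialised) receiver
   account, whether it already existed, and whether its permissions allow
   receiving with no authorization given. *)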
let no_failure = []
let update_failed =
[ Transaction_status.Failure.Update_not_permitted_balance ]
let empty = Transaction_status.Failure.Collection.empty
let single_failure =
Transaction_status.Failure.Collection.of_single_failure
Update_not_permitted_balance
let append_entry f (s : Transaction_status.Failure.Collection.t) :
Transaction_status.Failure.Collection.t =
match s with [] -> [ f ] | h :: t -> h :: f :: t
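(* e.g. append_entry f [] = [f] and append_entry f [a; b] = [a; f; b]; the new
   entry is slotted in right after the head entry. *)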
(* Structure of the failure status:
   I. Only one fee transfer in the transaction (`One) and it fails:
      [[failure]]
   II. Two fee transfers in the transaction (`Two)-
      Both fee transfers fail:
        [[failure-of-first-fee-transfer]; [failure-of-second-fee-transfer]]
      First succeeds and second one fails:
        [[];[failure-of-second-fee-transfer]]
      First fails and second succeeds:
        [[failure-of-first-fee-transfer];[]]
*)
let process_fee_transfer t (transfer : Fee_transfer.t) ~modify_balance
~modify_timing =
let open Or_error.Let_syntax in
(* TODO(#4555): Allow token_id to vary from default. *)
let%bind () =
if
List.for_all
~f:Token_id.(equal default)
(One_or_two.to_list (Fee_transfer.fee_tokens transfer))
then return ()
else Or_error.errorf "Cannot pay fees in non-default tokens."
in
match Fee_transfer.to_singles transfer with
| `One ft ->
let account_id = Fee_transfer.Single.receiver ft in
let a, action, `Has_permission_to_receive can_receive =
has_permission_to_receive ~ledger:t account_id
in
let%bind timing = modify_timing a in
let%bind balance = modify_balance action account_id a.balance ft.fee in
if can_receive then (
let%map _action, a, loc = get_or_create t account_id in
let new_accounts = get_new_accounts action account_id in
set t loc { a with balance; timing } ;
(new_accounts, empty, Currency.Amount.zero) )
else Ok ([], single_failure, Currency.Amount.of_fee ft.fee)
| `Two (ft1, ft2) ->
let account_id1 = Fee_transfer.Single.receiver ft1 in
let a1, action1, `Has_permission_to_receive can_receive1 =
has_permission_to_receive ~ledger:t account_id1
in
let account_id2 = Fee_transfer.Single.receiver ft2 in
if Account_id.equal account_id1 account_id2 then
let%bind fee = error_opt "overflow" (Fee.add ft1.fee ft2.fee) in
let%bind timing = modify_timing a1 in
let%bind balance =
modify_balance action1 account_id1 a1.balance fee
in
if can_receive1 then (
let%map _action1, a1, l1 = get_or_create t account_id1 in
let new_accounts1 = get_new_accounts action1 account_id1 in
set t l1 { a1 with balance; timing } ;
(new_accounts1, empty, Currency.Amount.zero) )
else
(*failure for each fee transfer single*)
Ok
( []
, append_entry update_failed single_failure
, Currency.Amount.of_fee fee )
else
let a2, action2, `Has_permission_to_receive can_receive2 =
has_permission_to_receive ~ledger:t account_id2
in
let%bind balance1 =
modify_balance action1 account_id1 a1.balance ft1.fee
in
(* Note: Not updating the timing field of a1 to avoid additional check in
   transactions snark (check_timing for "receiver"). This is OK because
   timing rules will not be violated when balance increases and will be
   checked whenever an amount is deducted from the account. (#5973) *)
let%bind timing2 = modify_timing a2 in
let%bind balance2 =
modify_balance action2 account_id2 a2.balance ft2.fee
in
let%bind new_accounts1, failures, burned_tokens1 =
if can_receive1 then (
let%map _action1, a1, l1 = get_or_create t account_id1 in
let new_accounts1 = get_new_accounts action1 account_id1 in
set t l1 { a1 with balance = balance1 } ;
( new_accounts1
, append_entry no_failure empty
, Currency.Amount.zero ) )
else Ok ([], single_failure, Currency.Amount.of_fee ft1.fee)
in
let%bind new_accounts2, failures', burned_tokens2 =
if can_receive2 then (
let%map _action2, a2, l2 = get_or_create t account_id2 in
let new_accounts2 = get_new_accounts action2 account_id2 in
set t l2 { a2 with balance = balance2; timing = timing2 } ;
( new_accounts2
, append_entry no_failure failures
, Currency.Amount.zero ) )
else
Ok
( []
, append_entry update_failed failures
, Currency.Amount.of_fee ft2.fee )
in
let%map burned_tokens =
error_opt "burned tokens overflow"
(Currency.Amount.add burned_tokens1 burned_tokens2)
in
(new_accounts1 @ new_accounts2, failures', burned_tokens)
let apply_fee_transfer ~constraint_constants ~txn_global_slot t transfer =
let open Or_error.Let_syntax in
let%map new_accounts, failures, burned_tokens =
process_fee_transfer t transfer
~modify_balance:(fun action _ b f ->
let%bind amount =
let amount = Amount.of_fee f in
sub_account_creation_fee ~constraint_constants action amount
in
add_amount b amount )
~modify_timing:(fun acc ->
update_timing_when_no_deduction ~txn_global_slot acc )
in
let ft_with_status =
if Transaction_status.Failure.Collection.is_empty failures then
{ With_status.data = transfer; status = Applied }
else { data = transfer; status = Failed failures }
in
Transaction_applied.Fee_transfer_applied.
{ fee_transfer = ft_with_status; new_accounts; burned_tokens }
(*Structure of the failure status:
I. No fee transfer and coinbase transfer fails: [[failure]]
II. With fee transfer-
Both fee transfer and coinbase fails:
[[failure-of-fee-transfer]; [failure-of-coinbase]]
Fee transfer succeeds and coinbase fails:
[[];[failure-of-coinbase]]
Fee transfer fails and coinbase succeeds:
[[failure-of-fee-transfer];[]]
*)
let apply_coinbase ~constraint_constants ~txn_global_slot t
(* TODO: Better system needed for making atomic changes. Could use a monad. *)
({ receiver; fee_transfer; amount = coinbase_amount } as cb : Coinbase.t)
=
let open Or_error.Let_syntax in
let%bind ( receiver_reward
, new_accounts1
, transferee_update
, transferee_timing_prev
, failures1
, burned_tokens1 ) =
match fee_transfer with
| None ->
return (coinbase_amount, [], None, None, empty, Currency.Amount.zero)
| Some ({ receiver_pk = transferee; fee } as ft) ->
assert (not @@ Public_key.Compressed.equal transferee receiver) ;
let transferee_id = Coinbase.Fee_transfer.receiver ft in
let fee = Amount.of_fee fee in
let%bind receiver_reward =
error_opt "Coinbase fee transfer too large"
(Amount.sub coinbase_amount fee)
in
let transferee_account, action, `Has_permission_to_receive can_receive
=
has_permission_to_receive ~ledger:t transferee_id
in
let new_accounts = get_new_accounts action transferee_id in
let%bind timing =
update_timing_when_no_deduction ~txn_global_slot transferee_account
in
let%bind balance =
let%bind amount =
sub_account_creation_fee ~constraint_constants action fee
in
add_amount transferee_account.balance amount
in
if can_receive then
let%map _action, transferee_account, transferee_location =
get_or_create t transferee_id
in
( receiver_reward
, new_accounts
, Some
( transferee_location
, { transferee_account with balance; timing } )
, Some transferee_account.timing
, append_entry no_failure empty
, Currency.Amount.zero )
else return (receiver_reward, [], None, None, single_failure, fee)
in
let receiver_id = Account_id.create receiver Token_id.default in
let receiver_account, action2, `Has_permission_to_receive can_receive =
has_permission_to_receive ~ledger:t receiver_id
in
let new_accounts2 = get_new_accounts action2 receiver_id in
(* Note: Updating coinbase receiver timing only if there is no fee transfer.
   This is so as to not add any extra constraints in transaction snark for checking
   "receiver" timings. This is OK because timing rules will not be violated when
   balance increases and will be checked whenever an amount is deducted from the
   account (#5973) *)
let%bind coinbase_receiver_timing =
match transferee_timing_prev with
| None ->
let%map new_receiver_timing =
update_timing_when_no_deduction ~txn_global_slot receiver_account
in
new_receiver_timing
| Some _timing ->
Ok receiver_account.timing
in
let%bind receiver_balance =
let%bind amount =
sub_account_creation_fee ~constraint_constants action2 receiver_reward
in
add_amount receiver_account.balance amount
in
let%bind failures, burned_tokens2 =
if can_receive then (
let%map _action2, receiver_account, receiver_location =
get_or_create t receiver_id
in
set t receiver_location
{ receiver_account with
balance = receiver_balance
; timing = coinbase_receiver_timing
} ;
(append_entry no_failure failures1, Currency.Amount.zero) )
else return (append_entry update_failed failures1, receiver_reward)
in
Option.iter transferee_update ~f:(fun (l, a) -> set t l a) ;
let%map burned_tokens =
error_opt "burned tokens overflow"
(Amount.add burned_tokens1 burned_tokens2)
in
let coinbase_with_status =
if Transaction_status.Failure.Collection.is_empty failures then
{ With_status.data = cb; status = Applied }
else { With_status.data = cb; status = Failed failures }
in
Transaction_applied.Coinbase_applied.
{ coinbase = coinbase_with_status
; new_accounts = new_accounts1 @ new_accounts2
; burned_tokens
}
let apply_transaction_first_pass ~constraint_constants ~global_slot
~(txn_state_view : Zkapp_precondition.Protocol_state.View.t) ledger
(t : Transaction.t) : Transaction_partially_applied.t Or_error.t =
let open Or_error.Let_syntax in
let previous_hash = merkle_root ledger in
let txn_global_slot = global_slot in
match t with
| Command (Signed_command txn) ->
let%map applied =
apply_user_command_unchecked ~constraint_constants ~txn_global_slot
ledger txn
in
Transaction_partially_applied.Signed_command { previous_hash; applied }
| Command (Zkapp_command txn) ->
let%map partially_applied =
apply_zkapp_command_first_pass ~global_slot ~state_view:txn_state_view
~constraint_constants ledger txn
in
Transaction_partially_applied.Zkapp_command partially_applied
| Fee_transfer t ->
let%map applied =
apply_fee_transfer ~constraint_constants ~txn_global_slot ledger t
in
Transaction_partially_applied.Fee_transfer { previous_hash; applied }
| Coinbase t ->
let%map applied =
apply_coinbase ~constraint_constants ~txn_global_slot ledger t
in
Transaction_partially_applied.Coinbase { previous_hash; applied }
let apply_transaction_second_pass ledger (t : Transaction_partially_applied.t)
: Transaction_applied.t Or_error.t =
let open Or_error.Let_syntax in
let open Transaction_applied in
match t with
| Signed_command { previous_hash; applied } ->
return
{ previous_hash; varying = Varying.Command (Signed_command applied) }
| Zkapp_command partially_applied ->
(* TODO: either here or in second phase of apply, need to update the prior
   global state statement for the fee payer segment to add the second phase
   ledger at the end *)
let%map applied =
apply_zkapp_command_second_pass ledger partially_applied
in
{ previous_hash = partially_applied.previous_hash
; varying = Varying.Command (Zkapp_command applied)
}
| Fee_transfer { previous_hash; applied } ->
return { previous_hash; varying = Varying.Fee_transfer applied }
| Coinbase { previous_hash; applied } ->
return { previous_hash; varying = Varying.Coinbase applied }
let apply_transactions ~constraint_constants ~global_slot ~txn_state_view
ledger txns =
let open Or_error in
Mina_stdlib.Result.List.map txns
~f:
(apply_transaction_first_pass ~constraint_constants ~global_slot
~txn_state_view ledger )
>>= Mina_stdlib.Result.List.map ~f:(apply_transaction_second_pass ledger)
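(* Note the sequencing above: the first pass is applied to the whole list of
   transactions before any second pass runs, rather than interleaving the two
   passes per transaction. *)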
module For_tests = struct
let validate_timing_with_min_balance = validate_timing_with_min_balance
let validate_timing = validate_timing
end
end
module For_tests = struct
open Mina_numbers
open Currency
module Account_without_receipt_chain_hash = struct
type t =
( Public_key.Compressed.t
, Token_id.t
, Account.Token_symbol.t
, Balance.t
, Account_nonce.t
, unit
, Public_key.Compressed.t option
, State_hash.t
, Account_timing.t
, Permissions.t
, Zkapp_account.t option )
Account.Poly.t
[@@deriving sexp, compare]
end
let min_init_balance = Int64.of_string "8000000000"
let max_init_balance = Int64.of_string "8000000000000"
let num_accounts = 10
let num_transactions = 10
let depth = Int.ceil_log2 (num_accounts + num_transactions)
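(* e.g. with the defaults above, num_accounts + num_transactions = 20, so depth = 5 *)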
module Init_ledger = struct
type t = (Keypair.t * int64) array [@@deriving sexp]
let init ?(zkapp = true) (type l) (module L : Ledger_intf.S with type t = l)
(init_ledger : t) (l : L.t) =
Array.iter init_ledger ~f:(fun (kp, amount) ->
let _tag, account, loc =
L.get_or_create l
(Account_id.create
(Public_key.compress kp.public_key)
Token_id.default )
|> Or_error.ok_exn
in
let permissions : Permissions.t =
{ edit_state = Either
; send = Either
; receive = None
; set_delegate = Either
; set_permissions = Either
; set_verification_key = Either
; set_zkapp_uri = Either
; edit_action_state = Either
; set_token_symbol = Either
; increment_nonce = Either
; set_voting_for = Either
; access = None
; set_timing = Either
}
in
let zkapp =
if zkapp then
Some
{ Zkapp_account.default with
verification_key =
Some
{ With_hash.hash = Zkapp_basic.F.zero
; data = Side_loaded_verification_key.dummy
}
}
else None
in
L.set l loc
{ account with
balance =
Currency.Balance.of_uint64 (Unsigned.UInt64.of_int64 amount)
; permissions
; zkapp
} )
let gen () : t Quickcheck.Generator.t =
let tbl = Public_key.Compressed.Hash_set.create () in
let open Quickcheck.Generator in
let open Let_syntax in
let rec go acc n =
if n = 0 then return (Array.of_list acc)
else
let%bind kp =
filter Keypair.gen ~f:(fun kp ->
not (Hash_set.mem tbl (Public_key.compress kp.public_key)) )
and amount = Int64.gen_incl min_init_balance max_init_balance in
Hash_set.add tbl (Public_key.compress kp.public_key) ;
go ((kp, amount) :: acc) (n - 1)
in
go [] num_accounts
end
module Transaction_spec = struct
type t =
{ fee : Currency.Fee.t
; sender : Keypair.t * Account_nonce.t
; receiver : Public_key.Compressed.t
; amount : Currency.Amount.t
}
[@@deriving sexp]
let gen ~(init_ledger : Init_ledger.t) ~nonces =
let pk ((kp : Keypair.t), _) = Public_key.compress kp.public_key in
let open Quickcheck.Let_syntax in
let%bind receiver_is_new = Bool.quickcheck_generator in
let gen_index () = Int.gen_incl 0 (Array.length init_ledger - 1) in
let%bind receiver_index =
if receiver_is_new then return None else gen_index () >>| Option.return
in
let%bind receiver =
match receiver_index with
| None ->
Public_key.Compressed.gen
| Some i ->
return (pk init_ledger.(i))
in
let%bind sender =
let%map i =
match receiver_index with
| None ->
gen_index ()
| Some j ->
Quickcheck.Generator.filter (gen_index ()) ~f:(( <> ) j)
in
fst init_ledger.(i)
in
let gen_amount () =
Currency.Amount.(
gen_incl
(of_nanomina_int_exn 1_000_000)
(of_nanomina_int_exn 100_000_000))
in
let gen_fee () =
Currency.Fee.(
gen_incl
(of_nanomina_int_exn 1_000_000)
(of_nanomina_int_exn 100_000_000))
in
let nonce : Account_nonce.t = Map.find_exn nonces sender in
let%bind fee = gen_fee () in
let%bind amount = gen_amount () in
let nonces =
Map.set nonces ~key:sender ~data:(Account_nonce.succ nonce)
in
let spec = { fee; amount; receiver; sender = (sender, nonce) } in
return (spec, nonces)
end
module Test_spec = struct
type t = { init_ledger : Init_ledger.t; specs : Transaction_spec.t list }
[@@deriving sexp]
let mk_gen ?(num_transactions = num_transactions) () =
let open Quickcheck.Let_syntax in
let%bind init_ledger = Init_ledger.gen () in
let%bind specs =
let rec go acc n nonces =
if n = 0 then return (List.rev acc)
else
let%bind spec, nonces = Transaction_spec.gen ~init_ledger ~nonces in
go (spec :: acc) (n - 1) nonces
in
go [] num_transactions
(Keypair.Map.of_alist_exn
(List.map (Array.to_list init_ledger) ~f:(fun (pk, _) ->
(pk, Account_nonce.zero) ) ) )
in
return { init_ledger; specs }
let gen = mk_gen ~num_transactions ()
end
let command_send
{ Transaction_spec.fee; sender = sender, sender_nonce; receiver; amount }
: Signed_command.t =
let sender_pk = Public_key.compress sender.public_key in
Signed_command.sign sender
{ common =
{ fee
; fee_payer_pk = sender_pk
; nonce = sender_nonce
; valid_until = Global_slot.max_value
; memo = Signed_command_memo.dummy
}
; body = Payment { source_pk = sender_pk; receiver_pk = receiver; amount }
}
|> Signed_command.forget_check
let account_update_send ?(use_full_commitment = true)
?(double_sender_nonce = true)
{ Transaction_spec.fee; sender = sender, sender_nonce; receiver; amount }
: Zkapp_command.t =
let sender_pk = Public_key.compress sender.public_key in
let actual_nonce =
(* Here, we double the spec'd nonce, because we bump the nonce a second
   time for the 'sender' part of the payment. *)
(* TODO: We should make bumping the nonce for signed zkapp_command optional,
flagged by a field in the account_update (but always true for the fee payer).
This would also allow us to prevent replays of snapp proofs, by
allowing them to bump their nonce.
*)
if double_sender_nonce then
sender_nonce |> Account.Nonce.to_uint32
|> Unsigned.UInt32.(mul (of_int 2))
|> Account.Nonce.to_uint32
else sender_nonce
in
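(* e.g. with [double_sender_nonce] a spec'd nonce of 3 becomes 6 here, leaving
   room for the extra bump performed by the sender's account update below. *)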
let zkapp_command : Zkapp_command.Simple.t =
{ fee_payer =
{ Account_update.Fee_payer.body =
{ public_key = sender_pk
; fee
; valid_until = None
; nonce = actual_nonce
}
(* Real signature added in below *)
; authorization = Signature.dummy
}
; account_updates =
[ { body =
{ public_key = sender_pk
; update = Account_update.Update.noop
; token_id = Token_id.default
; balance_change = Amount.Signed.(negate (of_unsigned amount))
; increment_nonce = double_sender_nonce
; events = []
; actions = []
; call_data = Snark_params.Tick.Field.zero
; call_depth = 0
; preconditions =
{ Account_update.Preconditions.network =
Zkapp_precondition.Protocol_state.accept
; account = Accept
; valid_while = Ignore
}
; may_use_token = No
; use_full_commitment
; implicit_account_creation_fee = true
; authorization_kind =
( if use_full_commitment then Signature
else Proof Zkapp_basic.F.zero )
}
; authorization =
( if use_full_commitment then Signature Signature.dummy
else Proof Mina_base.Proof.transaction_dummy )
}
; { body =
{ public_key = receiver
; update = Account_update.Update.noop
; token_id = Token_id.default
; balance_change = Amount.Signed.of_unsigned amount
; increment_nonce = false
; events = []
; actions = []
; call_data = Snark_params.Tick.Field.zero
; call_depth = 0
; preconditions =
{ Account_update.Preconditions.network =
Zkapp_precondition.Protocol_state.accept
; account = Accept
; valid_while = Ignore
}
; may_use_token = No
; use_full_commitment = false
; implicit_account_creation_fee = true
; authorization_kind = None_given
}
; authorization = None_given
}
]
; memo = Signed_command_memo.empty
}
in
let zkapp_command = Zkapp_command.of_simple zkapp_command in
let commitment = Zkapp_command.commitment zkapp_command in
let full_commitment =
Zkapp_command.Transaction_commitment.create_complete commitment
~memo_hash:(Signed_command_memo.hash zkapp_command.memo)
~fee_payer_hash:
(Zkapp_command.Digest.Account_update.create
(Account_update.of_fee_payer zkapp_command.fee_payer) )
in
let account_updates_signature =
let c = if use_full_commitment then full_commitment else commitment in
Schnorr.Chunked.sign sender.private_key
(Random_oracle.Input.Chunked.field c)
in
let account_updates =
Zkapp_command.Call_forest.map zkapp_command.account_updates
~f:(fun (account_update : Account_update.t) ->
match account_update.body.authorization_kind with
| Signature ->
{ account_update with
authorization = Control.Signature account_updates_signature
}
| _ ->
account_update )
in
let signature =
Schnorr.Chunked.sign sender.private_key
(Random_oracle.Input.Chunked.field full_commitment)
in
{ zkapp_command with
fee_payer = { zkapp_command.fee_payer with authorization = signature }
; account_updates
}
let test_eq (type l) (module L : Ledger_intf.S with type t = l) accounts
(l1 : L.t) (l2 : L.t) =
List.map accounts ~f:(fun a ->
Or_error.try_with (fun () ->
let mismatch () =
failwithf
!"One ledger had the account %{sexp:Account_id.t} but the \
other did not"
a ()
in
let hide_rc (a : _ Account.Poly.t) =
{ a with receipt_chain_hash = () }
in
match L.(location_of_account l1 a, location_of_account l2 a) with
| None, None ->
()
| Some _, None | None, Some _ ->
mismatch ()
| Some x1, Some x2 -> (
match L.(get l1 x1, get l2 x2) with
| None, None ->
()
| Some _, None | None, Some _ ->
mismatch ()
| Some a1, Some a2 ->
[%test_eq: Account_without_receipt_chain_hash.t]
(hide_rc a1) (hide_rc a2) ) ) )
|> Or_error.combine_errors_unit
let txn_global_slot = Global_slot.zero
let iter_err ts ~f =
List.fold_until ts
~finish:(fun () -> Ok ())
~init:()
~f:(fun () t ->
match f t with Error e -> Stop (Error e) | Ok _ -> Continue () )
let view : Zkapp_precondition.Protocol_state.View.t =
let h = Frozen_ledger_hash.empty_hash in
let len = Length.zero in
let a = Currency.Amount.zero in
let epoch_data =
{ Epoch_data.Poly.ledger =
{ Epoch_ledger.Poly.hash = h; total_currency = a }
; seed = h
; start_checkpoint = h
; lock_checkpoint = h
; epoch_length = len
}
in
{ snarked_ledger_hash = h
; blockchain_length = len
; min_window_density = len
; last_vrf_output = ()
; total_currency = a
; global_slot_since_genesis = txn_global_slot
; staking_epoch_data = epoch_data
; next_epoch_data = epoch_data
}
(* Quickcheck generator for Zkapp_command.t, derived from Test_spec generator *)
let gen_zkapp_command_from_test_spec =
let open Quickcheck.Let_syntax in
let%bind use_full_commitment = Bool.quickcheck_generator in
match%map Test_spec.mk_gen ~num_transactions:1 () with
| { specs = [ spec ]; _ } ->
account_update_send ~use_full_commitment spec
| { specs; _ } ->
failwithf "gen_zkapp_command_from_test_spec: expected one spec, got %d"
(List.length specs) ()
end
| null | https://raw.githubusercontent.com/MinaProtocol/mina/9a97ea71909a802f2ade0305a8069f7cbace5619/src/lib/transaction_logic/mina_transaction_logic.ml | ocaml | int type is OK, no danger of overflow
* Apply all zkapp_command within a zkapp_command transaction. This behaves as
[apply_zkapp_command_unchecked], except that the [~init] and [~f] arguments
are provided to allow for the accumulation of the intermediate states.
Invariant: [f] is always applied at least once, so it is valid to use an
[_ option] as the initial state and call [Option.value_exn] on the
accumulated result.
This can be used to collect the intermediate states to make them
available for snark work. In particular, since the transaction snark has
a cap on the number of zkapp_command of each kind that may be included, we can
use this to retrieve the (source, target) pairs for each batch of
zkapp_command to include in the snark work spec / transaction snark witness.
tags for timing validation errors
no time restrictions
NB: The [initial_minimum_balance] here is the incorrect value,
but:
* we don't use it anywhere in this error case; and
* we don't want to waste time computing it if it will be unused.
Helper function for [apply_user_command_unchecked]
Fee-payer information
Helper function for [apply_user_command_unchecked]
Fee-payer information
TODO: Enable multi-sig.
Fee-payer information
Charge the fee. This must happen, whether or not the command itself
succeeds, to ensure that the network is compensated for processing this
command.
Compute the necessary changes to apply the command, failing if any of
the conditions are not met.
Check that receiver account exists.
Timing is always valid, but we need to record any switch from
timed to untimed here to stay in sync with the snark.
just check if the timing needs updating
Don't process transactions with insufficient balance from the
fee-payer.
Charge the account creation fee.
Subtract the creation fee from the transaction amount.
Update the ledger.
Do not update the ledger. Except for the fee payer which is already updated
TODO: These transactions should never reach this stage, this error
should be fatal.
TODO: lift previous_hash up in the types
Raise a more useful error message if we have a failure
description.
when called from Zkapp_command_logic.apply, the account_update is the fee payer
Invariant: We either have a proof, a signature, or neither.
We want to capture the accurate value so that this will match
with the values in the snarked logic.
The transaction's validity should already have been checked before
this point.
TODO: can this be ripped out from here?
It's always valid to set this value to true, and it will
have no effect outside of the snark.
TODO: can this be ripped out from here?
accounts not originally in ledger, now present in ledger
Other zkapp_command failed, therefore, updates in those should not get applied
new account, check that default permissions allow receiving
TODO(#4555): Allow token_id to vary from default.
failure for each fee transfer single
Structure of the failure status:
I. No fee transfer and coinbase transfer fails: [[failure]]
II. With fee transfer-
Both fee transfer and coinbase fails:
[[failure-of-fee-transfer]; [failure-of-coinbase]]
Fee transfer succeeds and coinbase fails:
[[];[failure-of-coinbase]]
Fee transfer fails and coinbase succeeds:
[[failure-of-fee-transfer];[]]
TODO: Better system needed for making atomic changes. Could use a monad.
TODO: We should make bumping the nonce for signed zkapp_command optional,
flagged by a field in the account_update (but always true for the fee payer).
This would also allow us to prevent replays of snapp proofs, by
allowing them to bump their nonce.
Real signature added in below | open Core_kernel
open Mina_base
open Currency
open Signature_lib
open Mina_transaction
module Zkapp_command_logic = Zkapp_command_logic
module Global_slot = Mina_numbers.Global_slot
module Transaction_applied = struct
module UC = Signed_command
module Signed_command_applied = struct
module Common = struct
[%%versioned
module Stable = struct
module V2 = struct
type t =
{ user_command : Signed_command.Stable.V2.t With_status.Stable.V2.t
}
[@@deriving sexp, to_yojson]
let to_latest = Fn.id
end
end]
end
module Body = struct
[%%versioned
module Stable = struct
module V2 = struct
type t =
| Payment of { new_accounts : Account_id.Stable.V2.t list }
| Stake_delegation of
{ previous_delegate : Public_key.Compressed.Stable.V1.t option }
| Failed
[@@deriving sexp, to_yojson]
let to_latest = Fn.id
end
end]
end
[%%versioned
module Stable = struct
module V2 = struct
type t = { common : Common.Stable.V2.t; body : Body.Stable.V2.t }
[@@deriving sexp, to_yojson]
let to_latest = Fn.id
end
end]
let new_accounts (t : t) =
match t.body with
| Payment { new_accounts; _ } ->
new_accounts
| Stake_delegation _ | Failed ->
[]
end
module Zkapp_command_applied = struct
[%%versioned
module Stable = struct
module V1 = struct
type t =
{ accounts :
(Account_id.Stable.V2.t * Account.Stable.V2.t option) list
; command : Zkapp_command.Stable.V1.t With_status.Stable.V2.t
; new_accounts : Account_id.Stable.V2.t list
}
[@@deriving sexp, to_yojson]
let to_latest = Fn.id
end
end]
end
module Command_applied = struct
[%%versioned
module Stable = struct
module V2 = struct
type t =
| Signed_command of Signed_command_applied.Stable.V2.t
| Zkapp_command of Zkapp_command_applied.Stable.V1.t
[@@deriving sexp, to_yojson]
let to_latest = Fn.id
end
end]
end
module Fee_transfer_applied = struct
[%%versioned
module Stable = struct
module V2 = struct
type t =
{ fee_transfer : Fee_transfer.Stable.V2.t With_status.Stable.V2.t
; new_accounts : Account_id.Stable.V2.t list
; burned_tokens : Currency.Amount.Stable.V1.t
}
[@@deriving sexp, to_yojson]
let to_latest = Fn.id
end
end]
end
module Coinbase_applied = struct
[%%versioned
module Stable = struct
module V2 = struct
type t =
{ coinbase : Coinbase.Stable.V1.t With_status.Stable.V2.t
; new_accounts : Account_id.Stable.V2.t list
; burned_tokens : Currency.Amount.Stable.V1.t
}
[@@deriving sexp, to_yojson]
let to_latest = Fn.id
end
end]
end
module Varying = struct
[%%versioned
module Stable = struct
module V2 = struct
type t =
| Command of Command_applied.Stable.V2.t
| Fee_transfer of Fee_transfer_applied.Stable.V2.t
| Coinbase of Coinbase_applied.Stable.V2.t
[@@deriving sexp, to_yojson]
let to_latest = Fn.id
end
end]
end
[%%versioned
module Stable = struct
module V2 = struct
type t =
{ previous_hash : Ledger_hash.Stable.V1.t
; varying : Varying.Stable.V2.t
}
[@@deriving sexp, to_yojson]
let to_latest = Fn.id
end
end]
let burned_tokens : t -> Currency.Amount.t =
fun { varying; _ } ->
match varying with
| Command _ ->
Currency.Amount.zero
| Fee_transfer f ->
f.burned_tokens
| Coinbase c ->
c.burned_tokens
let new_accounts : t -> Account_id.t list =
fun { varying; _ } ->
match varying with
| Command c -> (
match c with
| Signed_command sc ->
Signed_command_applied.new_accounts sc
| Zkapp_command zc ->
zc.new_accounts )
| Fee_transfer f ->
f.new_accounts
| Coinbase c ->
c.new_accounts
let supply_increase : t -> Currency.Amount.Signed.t Or_error.t =
fun t ->
let open Or_error.Let_syntax in
let burned_tokens = Currency.Amount.Signed.of_unsigned (burned_tokens t) in
let account_creation_fees =
let account_creation_fee_int =
Genesis_constants.Constraint_constants.compiled.account_creation_fee
|> Currency.Fee.to_nanomina_int
in
let num_accounts_created = List.length @@ new_accounts t in
Currency.Amount.(
Signed.of_unsigned
@@ of_nanomina_int_exn (account_creation_fee_int * num_accounts_created))
in
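(* The product above is computed on ints, so there is no danger of overflow:
   e.g. 2 new accounts at a 1 MINA (1_000_000_000 nanomina) creation fee is
   only 2_000_000_000 (illustrative figures). *)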
let txn : Transaction.t =
match t.varying with
| Command
(Signed_command { common = { user_command = { data; _ }; _ }; _ }) ->
Command (Signed_command data)
| Command (Zkapp_command c) ->
Command (Zkapp_command c.command.data)
| Fee_transfer f ->
Fee_transfer f.fee_transfer.data
| Coinbase c ->
Coinbase c.coinbase.data
in
let%bind expected_supply_increase =
Transaction.expected_supply_increase txn
in
let rec process_decreases total = function
| [] ->
Some total
| amt :: amts ->
let%bind.Option sum =
Currency.Amount.Signed.(add @@ negate amt) total
in
process_decreases sum amts
in
let total =
process_decreases
(Currency.Amount.Signed.of_unsigned expected_supply_increase)
[ burned_tokens; account_creation_fees ]
in
Option.value_map total ~default:(Or_error.error_string "overflow")
~f:(fun v -> Ok v)
let transaction_with_status : t -> Transaction.t With_status.t =
fun { varying; _ } ->
match varying with
| Command (Signed_command uc) ->
With_status.map uc.common.user_command ~f:(fun cmd ->
Transaction.Command (User_command.Signed_command cmd) )
| Command (Zkapp_command s) ->
With_status.map s.command ~f:(fun c ->
Transaction.Command (User_command.Zkapp_command c) )
| Fee_transfer f ->
With_status.map f.fee_transfer ~f:(fun f -> Transaction.Fee_transfer f)
| Coinbase c ->
With_status.map c.coinbase ~f:(fun c -> Transaction.Coinbase c)
let transaction_status : t -> Transaction_status.t =
fun { varying; _ } ->
match varying with
| Command
(Signed_command { common = { user_command = { status; _ }; _ }; _ }) ->
status
| Command (Zkapp_command c) ->
c.command.status
| Fee_transfer f ->
f.fee_transfer.status
| Coinbase c ->
c.coinbase.status
end
module type S = sig
type ledger
type location
module Transaction_applied : sig
module Signed_command_applied : sig
module Common : sig
type t = Transaction_applied.Signed_command_applied.Common.t =
{ user_command : Signed_command.t With_status.t }
[@@deriving sexp]
end
module Body : sig
type t = Transaction_applied.Signed_command_applied.Body.t =
| Payment of { new_accounts : Account_id.t list }
| Stake_delegation of
{ previous_delegate : Public_key.Compressed.t option }
| Failed
[@@deriving sexp]
end
type t = Transaction_applied.Signed_command_applied.t =
{ common : Common.t; body : Body.t }
[@@deriving sexp]
end
module Zkapp_command_applied : sig
type t = Transaction_applied.Zkapp_command_applied.t =
{ accounts : (Account_id.t * Account.t option) list
; command : Zkapp_command.t With_status.t
; new_accounts : Account_id.t list
}
[@@deriving sexp]
end
module Command_applied : sig
type t = Transaction_applied.Command_applied.t =
| Signed_command of Signed_command_applied.t
| Zkapp_command of Zkapp_command_applied.t
[@@deriving sexp]
end
module Fee_transfer_applied : sig
type t = Transaction_applied.Fee_transfer_applied.t =
{ fee_transfer : Fee_transfer.t With_status.t
; new_accounts : Account_id.t list
; burned_tokens : Currency.Amount.t
}
[@@deriving sexp]
end
module Coinbase_applied : sig
type t = Transaction_applied.Coinbase_applied.t =
{ coinbase : Coinbase.t With_status.t
; new_accounts : Account_id.t list
; burned_tokens : Currency.Amount.t
}
[@@deriving sexp]
end
module Varying : sig
type t = Transaction_applied.Varying.t =
| Command of Command_applied.t
| Fee_transfer of Fee_transfer_applied.t
| Coinbase of Coinbase_applied.t
[@@deriving sexp]
end
type t = Transaction_applied.t =
{ previous_hash : Ledger_hash.t; varying : Varying.t }
[@@deriving sexp]
val burned_tokens : t -> Currency.Amount.t
val supply_increase : t -> Currency.Amount.Signed.t Or_error.t
val transaction : t -> Transaction.t With_status.t
val transaction_status : t -> Transaction_status.t
end
module Global_state : sig
type t =
{ first_pass_ledger : ledger
; second_pass_ledger : ledger
; fee_excess : Amount.Signed.t
; supply_increase : Amount.Signed.t
; protocol_state : Zkapp_precondition.Protocol_state.View.t
; block_global_slot : Mina_numbers.Global_slot.t
(* Slot of block when the transaction is applied. NOTE: This is at least
   1 slot after the protocol_state's view, which is for the *previous* slot. *)
}
end
module Transaction_partially_applied : sig
module Zkapp_command_partially_applied : sig
type t =
{ command : Zkapp_command.t
; previous_hash : Ledger_hash.t
; original_first_pass_account_states :
(Account_id.t * (location * Account.t) option) list
; constraint_constants : Genesis_constants.Constraint_constants.t
; state_view : Zkapp_precondition.Protocol_state.View.t
; global_state : Global_state.t
; local_state :
( Stack_frame.value
, Stack_frame.value list
, Token_id.t
, Amount.Signed.t
, ledger
, bool
, Zkapp_command.Transaction_commitment.t
, Mina_numbers.Index.t
, Transaction_status.Failure.Collection.t )
Zkapp_command_logic.Local_state.t
}
end
type 'applied fully_applied =
{ previous_hash : Ledger_hash.t; applied : 'applied }
type t =
| Signed_command of
Transaction_applied.Signed_command_applied.t fully_applied
| Zkapp_command of Zkapp_command_partially_applied.t
| Fee_transfer of Transaction_applied.Fee_transfer_applied.t fully_applied
| Coinbase of Transaction_applied.Coinbase_applied.t fully_applied
val command : t -> Transaction.t
end
val apply_user_command :
constraint_constants:Genesis_constants.Constraint_constants.t
-> txn_global_slot:Global_slot.t
-> ledger
-> Signed_command.With_valid_signature.t
-> Transaction_applied.Signed_command_applied.t Or_error.t
val apply_user_command_unchecked :
constraint_constants:Genesis_constants.Constraint_constants.t
-> txn_global_slot:Global_slot.t
-> ledger
-> Signed_command.t
-> Transaction_applied.Signed_command_applied.t Or_error.t
val update_action_state :
Snark_params.Tick.Field.t Pickles_types.Vector.Vector_5.t
-> Zkapp_account.Actions.t
-> txn_global_slot:Global_slot.t
-> last_action_slot:Global_slot.t
-> Snark_params.Tick.Field.t Pickles_types.Vector.Vector_5.t * Global_slot.t
val apply_zkapp_command_unchecked :
constraint_constants:Genesis_constants.Constraint_constants.t
-> global_slot:Mina_numbers.Global_slot.t
-> state_view:Zkapp_precondition.Protocol_state.View.t
-> ledger
-> Zkapp_command.t
-> ( Transaction_applied.Zkapp_command_applied.t
* ( ( Stack_frame.value
, Stack_frame.value list
, Token_id.t
, Amount.Signed.t
, ledger
, bool
, Zkapp_command.Transaction_commitment.t
, Mina_numbers.Index.t
, Transaction_status.Failure.Collection.t )
Zkapp_command_logic.Local_state.t
* Amount.Signed.t ) )
Or_error.t
val apply_zkapp_command_unchecked_aux :
constraint_constants:Genesis_constants.Constraint_constants.t
-> global_slot:Mina_numbers.Global_slot.t
-> state_view:Zkapp_precondition.Protocol_state.View.t
-> init:'acc
-> f:
( 'acc
-> Global_state.t
* ( Stack_frame.value
, Stack_frame.value list
, Token_id.t
, Amount.Signed.t
, ledger
, bool
, Zkapp_command.Transaction_commitment.t
, Mina_numbers.Index.t
, Transaction_status.Failure.Collection.t )
Zkapp_command_logic.Local_state.t
-> 'acc )
-> ?fee_excess:Amount.Signed.t
-> ?supply_increase:Amount.Signed.t
-> ledger
-> Zkapp_command.t
-> (Transaction_applied.Zkapp_command_applied.t * 'acc) Or_error.t
val apply_zkapp_command_first_pass_aux :
constraint_constants:Genesis_constants.Constraint_constants.t
-> global_slot:Mina_numbers.Global_slot.t
-> state_view:Zkapp_precondition.Protocol_state.View.t
-> init:'acc
-> f:
( 'acc
-> Global_state.t
* ( Stack_frame.value
, Stack_frame.value list
, Token_id.t
, Amount.Signed.t
, ledger
, bool
, Zkapp_command.Transaction_commitment.t
, Mina_numbers.Index.t
, Transaction_status.Failure.Collection.t )
Zkapp_command_logic.Local_state.t
-> 'acc )
-> ?fee_excess:Amount.Signed.t
-> ?supply_increase:Amount.Signed.t
-> ledger
-> Zkapp_command.t
-> (Transaction_partially_applied.Zkapp_command_partially_applied.t * 'acc)
Or_error.t
val apply_zkapp_command_second_pass_aux :
init:'acc
-> f:
( 'acc
-> Global_state.t
* ( Stack_frame.value
, Stack_frame.value list
, Token_id.t
, Amount.Signed.t
, ledger
, bool
, Zkapp_command.Transaction_commitment.t
, Mina_numbers.Index.t
, Transaction_status.Failure.Collection.t )
Zkapp_command_logic.Local_state.t
-> 'acc )
-> ledger
-> Transaction_partially_applied.Zkapp_command_partially_applied.t
-> (Transaction_applied.Zkapp_command_applied.t * 'acc) Or_error.t
val apply_fee_transfer :
constraint_constants:Genesis_constants.Constraint_constants.t
-> txn_global_slot:Global_slot.t
-> ledger
-> Fee_transfer.t
-> Transaction_applied.Fee_transfer_applied.t Or_error.t
val apply_coinbase :
constraint_constants:Genesis_constants.Constraint_constants.t
-> txn_global_slot:Global_slot.t
-> ledger
-> Coinbase.t
-> Transaction_applied.Coinbase_applied.t Or_error.t
val apply_transaction_first_pass :
constraint_constants:Genesis_constants.Constraint_constants.t
-> global_slot:Global_slot.t
-> txn_state_view:Zkapp_precondition.Protocol_state.View.t
-> ledger
-> Transaction.t
-> Transaction_partially_applied.t Or_error.t
val apply_transaction_second_pass :
ledger
-> Transaction_partially_applied.t
-> Transaction_applied.t Or_error.t
val apply_transactions :
constraint_constants:Genesis_constants.Constraint_constants.t
-> global_slot:Mina_numbers.Global_slot.t
-> txn_state_view:Zkapp_precondition.Protocol_state.View.t
-> ledger
-> Transaction.t list
-> Transaction_applied.t list Or_error.t
val has_locked_tokens :
global_slot:Global_slot.t
-> account_id:Account_id.t
-> ledger
-> bool Or_error.t
module For_tests : sig
val validate_timing_with_min_balance :
account:Account.t
-> txn_amount:Amount.t
-> txn_global_slot:Global_slot.t
-> (Account.Timing.t * [> `Min_balance of Balance.t ]) Or_error.t
val validate_timing :
account:Account.t
-> txn_amount:Amount.t
-> txn_global_slot:Global_slot.t
-> Account.Timing.t Or_error.t
end
end
let nsf_tag = "nsf"
let min_balance_tag = "minbal"
let timing_error_to_user_command_status err =
match Error.Internal_repr.of_info err with
| Tag_t (tag, _) when String.equal tag nsf_tag ->
Transaction_status.Failure.Source_insufficient_balance
| Tag_t (tag, _) when String.equal tag min_balance_tag ->
Transaction_status.Failure.Source_minimum_balance_violation
| _ ->
failwith "Unexpected timed account validation error"
(** [validate_timing_with_min_balance' ~account ~txn_amount ~txn_global_slot]
    returns a tuple of 3 values:
    * [[`Insufficient_balance of bool | `Invalid_timing of bool]] encodes
      possible errors, with the invariant that the return value is always
      [`Invalid_timing false] if there was no error.
      - [`Insufficient_balance true] results if [txn_amount] is larger than the
        balance held in [account].
      - [`Invalid_timing true] results if [txn_amount] is larger than the
        balance available in [account] at global slot [txn_global_slot].
    * [Timing.t], the new timing for [account] calculated at [txn_global_slot].
    * [[`Min_balance of Balance.t]] returns the computed available balance at
      [txn_global_slot].
      - NOTE: We skip this calculation if the error is
        [`Insufficient_balance true]. In this scenario, this value MUST NOT be
        used, as it contains an incorrect placeholder value.
*)
let validate_timing_with_min_balance' ~account ~txn_amount ~txn_global_slot =
let open Account.Poly in
let open Account.Timing.Poly in
match account.timing with
| Untimed -> (
match Balance.(account.balance - txn_amount) with
| None ->
(`Insufficient_balance true, Untimed, `Min_balance Balance.zero)
| _ ->
(`Invalid_timing false, Untimed, `Min_balance Balance.zero) )
| Timed
{ initial_minimum_balance
; cliff_time
; cliff_amount
; vesting_period
; vesting_increment
} ->
let invalid_balance, invalid_timing, curr_min_balance =
let account_balance = account.balance in
match Balance.(account_balance - txn_amount) with
| None ->
(true, false, initial_minimum_balance)
| Some proposed_new_balance ->
let curr_min_balance =
Account.min_balance_at_slot ~global_slot:txn_global_slot
~cliff_time ~cliff_amount ~vesting_period ~vesting_increment
~initial_minimum_balance
in
if Balance.(proposed_new_balance < curr_min_balance) then
(false, true, curr_min_balance)
else (false, false, curr_min_balance)
in
(* once the calculated minimum balance becomes zero, the account becomes untimed *)
let possibly_error =
if invalid_balance then `Insufficient_balance invalid_balance
else `Invalid_timing invalid_timing
in
if Balance.(curr_min_balance > zero) then
(possibly_error, account.timing, `Min_balance curr_min_balance)
else (possibly_error, Untimed, `Min_balance Balance.zero)
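(* Illustrative readings of the above (not exhaustive): an [Untimed] account
   with balance 5 and [txn_amount] 7 yields [`Insufficient_balance true]; a
   [Timed] account whose balance covers the amount but would drop below the
   vesting minimum at [txn_global_slot] yields [`Invalid_timing true]. *)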
let validate_timing_with_min_balance ~account ~txn_amount ~txn_global_slot =
let open Or_error.Let_syntax in
let nsf_error kind =
Or_error.errorf
!"For %s account, the requested transaction for amount %{sexp: Amount.t} \
at global slot %{sexp: Global_slot.t}, the balance %{sexp: Balance.t} \
is insufficient"
kind txn_amount txn_global_slot account.Account.Poly.balance
|> Or_error.tag ~tag:nsf_tag
in
let min_balance_error min_balance =
Or_error.errorf
!"For timed account, the requested transaction for amount %{sexp: \
Amount.t} at global slot %{sexp: Global_slot.t}, applying the \
transaction would put the balance below the calculated minimum balance \
of %{sexp: Balance.t}"
txn_amount txn_global_slot min_balance
|> Or_error.tag ~tag:min_balance_tag
in
let possibly_error, timing, (`Min_balance curr_min_balance as min_balance) =
validate_timing_with_min_balance' ~account ~txn_amount ~txn_global_slot
in
match possibly_error with
| `Insufficient_balance true ->
nsf_error "timed"
| `Invalid_timing true ->
min_balance_error curr_min_balance
| `Insufficient_balance false ->
failwith "Broken invariant in validate_timing_with_min_balance'"
| `Invalid_timing false ->
return (timing, min_balance)
let validate_timing ~account ~txn_amount ~txn_global_slot =
let open Result.Let_syntax in
let%map timing, `Min_balance _ =
validate_timing_with_min_balance ~account ~txn_amount ~txn_global_slot
in
timing
module Make (L : Ledger_intf.S) :
S with type ledger := L.t and type location := L.location = struct
open L
let error s = Or_error.errorf "Ledger.apply_transaction: %s" s
let error_opt e = Option.value_map ~default:(error e) ~f:Or_error.return
let get_with_location ledger account_id =
match location_of_account ledger account_id with
| Some location -> (
match get ledger location with
| Some account ->
Ok (`Existing location, account)
| None ->
failwith "Ledger location with no account" )
| None ->
Ok (`New, Account.create account_id Balance.zero)
let set_with_location ledger location account =
match location with
| `Existing location ->
Ok (set ledger location account)
| `New ->
create_new_account ledger (Account.identifier account) account
let add_amount balance amount =
error_opt "overflow" (Balance.add_amount balance amount)
let sub_amount balance amount =
error_opt "insufficient funds" (Balance.sub_amount balance amount)
let sub_account_creation_fee
~(constraint_constants : Genesis_constants.Constraint_constants.t) action
amount =
let fee = constraint_constants.account_creation_fee in
if Ledger_intf.equal_account_state action `Added then
error_opt
(sprintf
!"Error subtracting account creation fee %{sexp: Currency.Fee.t}; \
transaction amount %{sexp: Currency.Amount.t} insufficient"
fee amount )
Amount.(sub amount (of_fee fee))
else Ok amount
let check b = ksprintf (fun s -> if b then Ok () else Or_error.error_string s)
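(* [check cond !"format" args...] returns [Ok ()] when [cond] holds and an
   error built from the formatted message otherwise; see [validate_nonces] and
   [validate_time] below. *)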
let validate_nonces txn_nonce account_nonce =
check
(Account.Nonce.equal account_nonce txn_nonce)
!"Nonce in account %{sexp: Account.Nonce.t} different from nonce in \
transaction %{sexp: Account.Nonce.t}"
account_nonce txn_nonce
let validate_time ~valid_until ~current_global_slot =
check
Global_slot.(current_global_slot <= valid_until)
!"Current global slot %{sexp: Global_slot.t} greater than transaction \
expiry slot %{sexp: Global_slot.t}"
current_global_slot valid_until
module Transaction_applied = struct
include Transaction_applied
let transaction : t -> Transaction.t With_status.t =
fun { varying; _ } ->
match varying with
| Command (Signed_command uc) ->
With_status.map uc.common.user_command ~f:(fun cmd ->
Transaction.Command (User_command.Signed_command cmd) )
| Command (Zkapp_command s) ->
With_status.map s.command ~f:(fun c ->
Transaction.Command (User_command.Zkapp_command c) )
| Fee_transfer f ->
With_status.map f.fee_transfer ~f:(fun f ->
Transaction.Fee_transfer f )
| Coinbase c ->
With_status.map c.coinbase ~f:(fun c -> Transaction.Coinbase c)
let transaction_status : t -> Transaction_status.t =
fun { varying; _ } ->
match varying with
| Command
(Signed_command { common = { user_command = { status; _ }; _ }; _ })
->
status
| Command (Zkapp_command c) ->
c.command.status
| Fee_transfer f ->
f.fee_transfer.status
| Coinbase c ->
c.coinbase.status
end
let get_new_accounts action pk =
if Ledger_intf.equal_account_state action `Added then [ pk ] else []
let has_locked_tokens ~global_slot ~account_id ledger =
let open Or_error.Let_syntax in
let%map _, account = get_with_location ledger account_id in
Account.has_locked_tokens ~global_slot account
let failure (e : Transaction_status.Failure.t) = e
let incr_balance (acct : Account.t) amt =
match add_amount acct.balance amt with
| Ok balance ->
Ok { acct with balance }
| Error _ ->
Result.fail (failure Overflow)
let pay_fee' ~command ~nonce ~fee_payer ~fee ~ledger ~current_global_slot =
let open Or_error.Let_syntax in
let%bind location, account = get_with_location ledger fee_payer in
let%bind () =
match location with
| `Existing _ ->
return ()
| `New ->
Or_error.errorf "The fee-payer account does not exist"
in
let fee = Amount.of_fee fee in
let%bind balance = sub_amount account.balance fee in
let%bind () = validate_nonces nonce account.nonce in
let%map timing =
validate_timing ~txn_amount:fee ~txn_global_slot:current_global_slot
~account
in
( location
, { account with
balance
; nonce = Account.Nonce.succ account.nonce
; receipt_chain_hash =
Receipt.Chain_hash.cons_signed_command_payload command
account.receipt_chain_hash
; timing
} )
let pay_fee ~user_command ~signer_pk ~ledger ~current_global_slot =
let open Or_error.Let_syntax in
let nonce = Signed_command.nonce user_command in
let fee_payer = Signed_command.fee_payer user_command in
let%bind () =
let fee_token = Signed_command.fee_token user_command in
let%bind () =
if
Public_key.Compressed.equal
(Account_id.public_key fee_payer)
signer_pk
then return ()
else
Or_error.errorf
"Cannot pay fees from a public key that did not sign the \
transaction"
in
let%map () =
(* TODO: Remove this check and update the transaction snark once we have
   an exchange rate mechanism. See issue #4447. *)
if Token_id.equal fee_token Token_id.default then return ()
else
Or_error.errorf
"Cannot create transactions with fee_token different from the \
default"
in
()
in
let%map loc, account' =
pay_fee' ~command:(Signed_command_payload user_command.payload) ~nonce
~fee_payer
~fee:(Signed_command.fee user_command)
~ledger ~current_global_slot
in
(loc, account')
(* someday: It would probably be better if we didn't modify the receipt chain hash
   in the case that the sender is equal to the receiver, but it complicates the SNARK, so
   we don't for now. *)
let apply_user_command_unchecked
~(constraint_constants : Genesis_constants.Constraint_constants.t)
~txn_global_slot ledger
({ payload; signer; signature = _ } as user_command : Signed_command.t) =
let open Or_error.Let_syntax in
let signer_pk = Public_key.compress signer in
let current_global_slot = txn_global_slot in
let%bind () =
validate_time
~valid_until:(Signed_command.valid_until user_command)
~current_global_slot
in
let fee_payer = Signed_command.fee_payer user_command in
let%bind fee_payer_location, fee_payer_account =
pay_fee ~user_command ~signer_pk ~ledger ~current_global_slot
in
let%bind () =
if
Account.has_permission ~control:Control.Tag.Signature ~to_:`Access
fee_payer_account
&& Account.has_permission ~control:Control.Tag.Signature ~to_:`Send
fee_payer_account
then Ok ()
else
Or_error.error_string
Transaction_status.Failure.(describe Update_not_permitted_balance)
in
let%bind () =
set_with_location ledger fee_payer_location fee_payer_account
in
let source = Signed_command.source user_command in
let receiver = Signed_command.receiver user_command in
let exception Reject of Error.t in
let ok_or_reject = function Ok x -> x | Error err -> raise (Reject err) in
let compute_updates () =
let open Result.Let_syntax in
match payload.body with
| Stake_delegation _ ->
let receiver_location, _receiver_account =
get_with_location ledger receiver |> ok_or_reject
in
let source_location, source_account =
get_with_location ledger source |> ok_or_reject
in
let%bind () =
if
Account.has_permission ~control:Control.Tag.Signature ~to_:`Access
source_account
&& Account.has_permission ~control:Control.Tag.Signature
~to_:`Set_delegate source_account
then Ok ()
else Error Transaction_status.Failure.Update_not_permitted_delegate
in
let%bind () =
match (source_location, receiver_location) with
| `Existing _, `Existing _ ->
return ()
| `New, _ ->
Result.fail Transaction_status.Failure.Source_not_present
| _, `New ->
Result.fail Transaction_status.Failure.Receiver_not_present
in
let previous_delegate = source_account.delegate in
let%map timing =
validate_timing ~txn_amount:Amount.zero
~txn_global_slot:current_global_slot ~account:source_account
|> Result.map_error ~f:timing_error_to_user_command_status
in
let source_account =
{ source_account with
delegate = Some (Account_id.public_key receiver)
; timing
}
in
( [ (source_location, source_account) ]
, Transaction_applied.Signed_command_applied.Body.Stake_delegation
{ previous_delegate } )
| Payment { amount; _ } ->
let receiver_location, receiver_account =
get_with_location ledger receiver |> ok_or_reject
in
let%bind () =
if
Account.has_permission ~control:Control.Tag.None_given
~to_:`Access receiver_account
&& Account.has_permission ~control:Control.Tag.None_given
~to_:`Receive receiver_account
then Ok ()
else Error Transaction_status.Failure.Update_not_permitted_balance
in
let%bind source_location, source_account =
let ret =
if Account_id.equal source receiver then
let%bind location, account =
match receiver_location with
| `Existing _ ->
return (receiver_location, receiver_account)
| `New ->
Result.fail Transaction_status.Failure.Source_not_present
in
let%map timing =
validate_timing ~txn_amount:amount
~txn_global_slot:current_global_slot ~account
|> Result.map_error ~f:timing_error_to_user_command_status
in
(location, { account with timing })
else
let location, account =
get_with_location ledger source |> ok_or_reject
in
let%bind () =
match location with
| `Existing _ ->
return ()
| `New ->
Result.fail Transaction_status.Failure.Source_not_present
in
let%bind timing =
validate_timing ~txn_amount:amount
~txn_global_slot:current_global_slot ~account
|> Result.map_error ~f:timing_error_to_user_command_status
in
let%map balance =
Result.map_error (sub_amount account.balance amount)
~f:(fun _ ->
Transaction_status.Failure.Source_insufficient_balance )
in
(location, { account with timing; balance })
in
if Account_id.equal fee_payer source then
match ret with
| Ok x ->
Ok x
| Error failure ->
raise
(Reject
(Error.createf "%s"
(Transaction_status.Failure.describe failure) ) )
else ret
in
let%bind () =
if
Account.has_permission ~control:Control.Tag.Signature ~to_:`Access
source_account
&& Account.has_permission ~control:Control.Tag.Signature
~to_:`Send source_account
then Ok ()
else Error Transaction_status.Failure.Update_not_permitted_balance
in
let%bind receiver_amount =
match receiver_location with
| `Existing _ ->
return amount
| `New ->
sub_account_creation_fee ~constraint_constants `Added amount
|> Result.map_error ~f:(fun _ ->
Transaction_status.Failure
.Amount_insufficient_to_create_account )
in
let%map receiver_account =
incr_balance receiver_account receiver_amount
in
let new_accounts =
match receiver_location with
| `Existing _ ->
[]
| `New ->
[ receiver ]
in
( [ (receiver_location, receiver_account)
; (source_location, source_account)
]
, Transaction_applied.Signed_command_applied.Body.Payment
{ new_accounts } )
in
match compute_updates () with
| Ok (located_accounts, applied_body) ->
let%bind () =
List.fold located_accounts ~init:(Ok ())
~f:(fun acc (location, account) ->
let%bind () = acc in
set_with_location ledger location account )
in
let applied_common : Transaction_applied.Signed_command_applied.Common.t
=
{ user_command = { data = user_command; status = Applied } }
in
return
( { common = applied_common; body = applied_body }
: Transaction_applied.Signed_command_applied.t )
| Error failure ->
let applied_common : Transaction_applied.Signed_command_applied.Common.t
=
{ user_command =
{ data = user_command
; status =
Failed
(Transaction_status.Failure.Collection.of_single_failure
failure )
}
}
in
return
( { common = applied_common; body = Failed }
: Transaction_applied.Signed_command_applied.t )
| exception Reject err ->
Error err
let apply_user_command ~constraint_constants ~txn_global_slot ledger
(user_command : Signed_command.With_valid_signature.t) =
apply_user_command_unchecked ~constraint_constants ~txn_global_slot ledger
(Signed_command.forget_check user_command)
module Global_state = struct
type t =
{ first_pass_ledger : L.t
; second_pass_ledger : L.t
; fee_excess : Amount.Signed.t
; supply_increase : Amount.Signed.t
; protocol_state : Zkapp_precondition.Protocol_state.View.t
; block_global_slot : Global_slot.t
}
let first_pass_ledger { first_pass_ledger; _ } =
L.create_masked first_pass_ledger
let set_first_pass_ledger ~should_update t ledger =
if should_update then L.apply_mask t.first_pass_ledger ~masked:ledger ;
t
let second_pass_ledger { second_pass_ledger; _ } =
L.create_masked second_pass_ledger
let set_second_pass_ledger ~should_update t ledger =
if should_update then L.apply_mask t.second_pass_ledger ~masked:ledger ;
t
let fee_excess { fee_excess; _ } = fee_excess
let set_fee_excess t fee_excess = { t with fee_excess }
let supply_increase { supply_increase; _ } = supply_increase
let set_supply_increase t supply_increase = { t with supply_increase }
let block_global_slot { block_global_slot; _ } = block_global_slot
end
module Transaction_partially_applied = struct
module Zkapp_command_partially_applied = struct
type t =
{ command : Zkapp_command.t
; previous_hash : Ledger_hash.t
; original_first_pass_account_states :
(Account_id.t * (location * Account.t) option) list
; constraint_constants : Genesis_constants.Constraint_constants.t
; state_view : Zkapp_precondition.Protocol_state.View.t
; global_state : Global_state.t
; local_state :
( Stack_frame.value
, Stack_frame.value list
, Token_id.t
, Amount.Signed.t
, L.t
, bool
, Zkapp_command.Transaction_commitment.t
, Mina_numbers.Index.t
, Transaction_status.Failure.Collection.t )
Zkapp_command_logic.Local_state.t
}
end
type 'applied fully_applied =
{ previous_hash : Ledger_hash.t; applied : 'applied }
type t =
| Signed_command of
Transaction_applied.Signed_command_applied.t fully_applied
| Zkapp_command of Zkapp_command_partially_applied.t
| Fee_transfer of Transaction_applied.Fee_transfer_applied.t fully_applied
| Coinbase of Transaction_applied.Coinbase_applied.t fully_applied
let command (t : t) : Transaction.t =
match t with
| Signed_command s ->
Transaction.Command
(User_command.Signed_command s.applied.common.user_command.data)
| Zkapp_command z ->
Command (User_command.Zkapp_command z.command)
| Fee_transfer f ->
Fee_transfer f.applied.fee_transfer.data
| Coinbase c ->
Coinbase c.applied.coinbase.data
end
module Inputs = struct
let with_label ~label:_ f = f ()
let value_if b ~then_ ~else_ = if b then then_ else else_
module Global_state = Global_state
module Field = struct
type t = Snark_params.Tick.Field.t
let if_ = value_if
let equal = Snark_params.Tick.Field.equal
end
module Bool = struct
type t = bool
module Assert = struct
let is_true ~pos b =
try assert b
with Assert_failure _ ->
let file, line, col, _ecol = pos in
raise (Assert_failure (file, line, col))
let any ~pos bs = List.exists ~f:Fn.id bs |> is_true ~pos
end
let if_ = value_if
let true_ = true
let false_ = false
let equal = Bool.equal
let not = not
let ( ||| ) = ( || )
let ( &&& ) = ( && )
let display b ~label = sprintf "%s: %b" label b
let all = List.for_all ~f:Fn.id
type failure_status = Transaction_status.Failure.t option
type failure_status_tbl = Transaction_status.Failure.Collection.t
let is_empty t = List.join t |> List.is_empty
let assert_with_failure_status_tbl ~pos b failure_status_tbl =
let file, line, col, ecol = pos in
if (not b) && not (is_empty failure_status_tbl) then
let failure_msg =
Yojson.Safe.to_string
@@ Transaction_status.Failure.Collection.Display.to_yojson
@@ Transaction_status.Failure.Collection.to_display
failure_status_tbl
in
Error.raise @@ Error.of_string
@@ sprintf "File %S, line %d, characters %d-%d: %s" file line col ecol
failure_msg
else
try assert b
with Assert_failure _ -> raise (Assert_failure (file, line, col))
end
module Account_id = struct
include Account_id
let if_ = value_if
end
module Ledger = struct
type t = L.t
let if_ = value_if
let empty = L.empty
type inclusion_proof = [ `Existing of location | `New ]
let get_account p l =
let loc, acct =
Or_error.ok_exn (get_with_location l (Account_update.account_id p))
in
(acct, loc)
let set_account l (a, loc) =
Or_error.ok_exn (set_with_location l loc a) ;
l
let check_inclusion _ledger (_account, _loc) = ()
let check_account public_key token_id
((account, loc) : Account.t * inclusion_proof) =
assert (Public_key.Compressed.equal public_key account.public_key) ;
assert (Token_id.equal token_id account.token_id) ;
match loc with `Existing _ -> `Is_new false | `New -> `Is_new true
end
module Transaction_commitment = struct
type t = Field.t
let empty = Zkapp_command.Transaction_commitment.empty
let commitment ~account_updates =
let account_updates_hash =
Mina_base.Zkapp_command.Call_forest.hash account_updates
in
Zkapp_command.Transaction_commitment.create ~account_updates_hash
let full_commitment ~account_update ~memo_hash ~commitment =
let fee_payer_hash =
Zkapp_command.Digest.Account_update.create account_update
in
Zkapp_command.Transaction_commitment.create_complete commitment
~memo_hash ~fee_payer_hash
let if_ = value_if
end
module Index = struct
type t = Mina_numbers.Index.t
let zero, succ = Mina_numbers.Index.(zero, succ)
let if_ = value_if
end
module Public_key = struct
type t = Public_key.Compressed.t
let if_ = value_if
end
module Controller = struct
type t = Permissions.Auth_required.t
let if_ = value_if
let check ~proof_verifies ~signature_verifies perm =
assert (not (proof_verifies && signature_verifies)) ;
let tag =
if proof_verifies then Control.Tag.Proof
else if signature_verifies then Control.Tag.Signature
else Control.Tag.None_given
in
Permissions.Auth_required.check perm tag
end
module Global_slot = struct
include Mina_numbers.Global_slot
let if_ = value_if
end
module Nonce = struct
type t = Account.Nonce.t
let if_ = value_if
let succ = Account.Nonce.succ
end
module Receipt_chain_hash = struct
type t = Receipt.Chain_hash.t
module Elt = struct
type t = Receipt.Zkapp_command_elt.t
let of_transaction_commitment tc =
Receipt.Zkapp_command_elt.Zkapp_command_commitment tc
end
let cons_zkapp_command_commitment =
Receipt.Chain_hash.cons_zkapp_command_commitment
let if_ = value_if
end
module State_hash = struct
include State_hash
let if_ = value_if
end
module Timing = struct
type t = Account_update.Update.Timing_info.t option
let if_ = value_if
let vesting_period (t : t) =
match t with
| Some t ->
t.vesting_period
| None ->
(Account_timing.to_record Untimed).vesting_period
end
module Balance = struct
include Balance
let if_ = value_if
end
module Verification_key = struct
type t = (Side_loaded_verification_key.t, Field.t) With_hash.t option
let if_ = value_if
end
module Verification_key_hash = struct
type t = Field.t option
let equal vk1 vk2 = Option.equal Field.equal vk1 vk2
end
module Actions = struct
type t = Zkapp_account.Actions.t
let is_empty = List.is_empty
let push_events = Account_update.Actions.push_events
end
module Zkapp_uri = struct
type t = string
let if_ = value_if
end
module Token_symbol = struct
type t = Account.Token_symbol.t
let if_ = value_if
end
module Account = struct
include Account
module Permissions = struct
let access : t -> Controller.t = fun a -> a.permissions.access
let edit_state : t -> Controller.t = fun a -> a.permissions.edit_state
let send : t -> Controller.t = fun a -> a.permissions.send
let receive : t -> Controller.t = fun a -> a.permissions.receive
let set_delegate : t -> Controller.t =
fun a -> a.permissions.set_delegate
let set_permissions : t -> Controller.t =
fun a -> a.permissions.set_permissions
let set_verification_key : t -> Controller.t =
fun a -> a.permissions.set_verification_key
let set_zkapp_uri : t -> Controller.t =
fun a -> a.permissions.set_zkapp_uri
let edit_action_state : t -> Controller.t =
fun a -> a.permissions.edit_action_state
let set_token_symbol : t -> Controller.t =
fun a -> a.permissions.set_token_symbol
let increment_nonce : t -> Controller.t =
fun a -> a.permissions.increment_nonce
let set_voting_for : t -> Controller.t =
fun a -> a.permissions.set_voting_for
let set_timing : t -> Controller.t = fun a -> a.permissions.set_timing
type t = Permissions.t
let if_ = value_if
end
type timing = Account_update.Update.Timing_info.t option
let timing (a : t) : timing =
Account_update.Update.Timing_info.of_account_timing a.timing
let set_timing (a : t) (timing : timing) : t =
{ a with
timing =
Option.value_map ~default:Account_timing.Untimed
~f:Account_update.Update.Timing_info.to_account_timing timing
}
let is_timed (a : t) =
match a.timing with Account_timing.Untimed -> false | _ -> true
let set_token_id (a : t) (id : Token_id.t) : t = { a with token_id = id }
let balance (a : t) : Balance.t = a.balance
let set_balance (balance : Balance.t) (a : t) : t = { a with balance }
let check_timing ~txn_global_slot account =
let invalid_timing, timing, _ =
validate_timing_with_min_balance' ~txn_amount:Amount.zero
~txn_global_slot ~account
in
( invalid_timing
, Account_update.Update.Timing_info.of_account_timing timing )
let receipt_chain_hash (a : t) : Receipt.Chain_hash.t =
a.receipt_chain_hash
let set_receipt_chain_hash (a : t) hash =
{ a with receipt_chain_hash = hash }
let make_zkapp (a : t) =
let zkapp =
match a.zkapp with
| None ->
Some Zkapp_account.default
| Some _ as zkapp ->
zkapp
in
{ a with zkapp }
let unmake_zkapp (a : t) : t =
let zkapp =
match a.zkapp with
| None ->
None
| Some zkapp ->
if Zkapp_account.(equal default zkapp) then None else Some zkapp
in
{ a with zkapp }
let get_zkapp (a : t) = Option.value_exn a.zkapp
let set_zkapp (a : t) ~f : t = { a with zkapp = Option.map a.zkapp ~f }
let proved_state (a : t) = (get_zkapp a).proved_state
let set_proved_state proved_state (a : t) =
set_zkapp a ~f:(fun zkapp -> { zkapp with proved_state })
let app_state (a : t) = (get_zkapp a).app_state
let set_app_state app_state (a : t) =
set_zkapp a ~f:(fun zkapp -> { zkapp with app_state })
let register_verification_key (_ : t) = ()
let verification_key (a : t) = (get_zkapp a).verification_key
let set_verification_key verification_key (a : t) =
set_zkapp a ~f:(fun zkapp -> { zkapp with verification_key })
let verification_key_hash (a : t) =
match a.zkapp with
| None ->
None
| Some zkapp ->
Option.map zkapp.verification_key ~f:With_hash.hash
let last_action_slot (a : t) = (get_zkapp a).last_action_slot
let set_last_action_slot last_action_slot (a : t) =
set_zkapp a ~f:(fun zkapp -> { zkapp with last_action_slot })
let action_state (a : t) = (get_zkapp a).action_state
let set_action_state action_state (a : t) =
set_zkapp a ~f:(fun zkapp -> { zkapp with action_state })
let zkapp_uri (a : t) =
Option.value_map a.zkapp ~default:"" ~f:(fun zkapp -> zkapp.zkapp_uri)
let set_zkapp_uri zkapp_uri (a : t) : t =
{ a with
zkapp = Option.map a.zkapp ~f:(fun zkapp -> { zkapp with zkapp_uri })
}
let token_symbol (a : t) = a.token_symbol
let set_token_symbol token_symbol (a : t) = { a with token_symbol }
let public_key (a : t) = a.public_key
let set_public_key public_key (a : t) = { a with public_key }
let delegate (a : t) = Account.delegate_opt a.delegate
let set_delegate delegate (a : t) =
let delegate =
if Signature_lib.Public_key.Compressed.(equal empty) delegate then
None
else Some delegate
in
{ a with delegate }
let nonce (a : t) = a.nonce
let set_nonce nonce (a : t) = { a with nonce }
let voting_for (a : t) = a.voting_for
let set_voting_for voting_for (a : t) = { a with voting_for }
let permissions (a : t) = a.permissions
let set_permissions permissions (a : t) = { a with permissions }
end
module Amount = struct
open Currency.Amount
type unsigned = t
type t = unsigned
let if_ = value_if
module Signed = struct
include Signed
let if_ = value_if
let is_pos (t : t) = Sgn.equal t.sgn Pos
let is_neg (t : t) = Sgn.equal t.sgn Neg
end
let zero = zero
let equal = equal
let add_flagged = add_flagged
let add_signed_flagged (x1 : t) (x2 : Signed.t) : t * [ `Overflow of bool ]
=
let y, `Overflow b = Signed.(add_flagged (of_unsigned x1) x2) in
match y.sgn with
| Pos ->
(y.magnitude, `Overflow b)
| Neg ->
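          (* Editor's note (added for clarity, not in the original source): a
             negative sum is reported as its two's-complement magnitude with
             the overflow flag forced to [true], mimicking wrap-around
             subtraction on unsigned 64-bit amounts. *)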
let magnitude =
Amount.to_uint64 y.magnitude
|> Unsigned.UInt64.(mul (sub zero one))
|> Amount.of_uint64
in
(magnitude, `Overflow true)
let of_constant_fee = of_fee
end
module Token_id = struct
include Token_id
let if_ = value_if
end
module Protocol_state_precondition = struct
include Zkapp_precondition.Protocol_state
end
module Valid_while_precondition = struct
include Zkapp_precondition.Valid_while
end
module Account_update = struct
include Account_update
module Account_precondition = struct
include Account_update.Account_precondition
let nonce (t : Account_update.t) = nonce t.body.preconditions.account
end
type 'a or_ignore = 'a Zkapp_basic.Or_ignore.t
type call_forest = Zkapp_call_forest.t
type transaction_commitment = Transaction_commitment.t
let may_use_parents_own_token (p : t) =
May_use_token.parents_own_token p.body.may_use_token
let may_use_token_inherited_from_parent (p : t) =
May_use_token.inherit_from_parent p.body.may_use_token
let check_authorization ~will_succeed:_ ~commitment:_ ~calls:_
(account_update : t) =
match account_update.authorization with
| Signature _ ->
(`Proof_verifies false, `Signature_verifies true)
| Proof _ ->
(`Proof_verifies true, `Signature_verifies false)
| None_given ->
(`Proof_verifies false, `Signature_verifies false)
let is_proved (account_update : t) =
match account_update.body.authorization_kind with
| Proof _ ->
true
| Signature | None_given ->
false
let is_signed (account_update : t) =
match account_update.body.authorization_kind with
| Signature ->
true
| Proof _ | None_given ->
false
let verification_key_hash (p : t) =
match p.body.authorization_kind with
| Proof vk_hash ->
Some vk_hash
| _ ->
None
module Update = struct
open Zkapp_basic
type 'a set_or_keep = 'a Zkapp_basic.Set_or_keep.t
let timing (account_update : t) : Account.timing set_or_keep =
Set_or_keep.map ~f:Option.some account_update.body.update.timing
let app_state (account_update : t) =
account_update.body.update.app_state
let verification_key (account_update : t) =
Zkapp_basic.Set_or_keep.map ~f:Option.some
account_update.body.update.verification_key
let actions (account_update : t) = account_update.body.actions
let zkapp_uri (account_update : t) =
account_update.body.update.zkapp_uri
let token_symbol (account_update : t) =
account_update.body.update.token_symbol
let delegate (account_update : t) = account_update.body.update.delegate
let voting_for (account_update : t) =
account_update.body.update.voting_for
let permissions (account_update : t) =
account_update.body.update.permissions
end
end
module Set_or_keep = struct
include Zkapp_basic.Set_or_keep
let set_or_keep ~if_:_ t x = set_or_keep t x
end
module Opt = struct
type 'a t = 'a option
let is_some = Option.is_some
let map = Option.map
let or_default ~if_ x ~default =
if_ (is_some x) ~then_:(Option.value ~default x) ~else_:default
let or_exn x = Option.value_exn x
end
module Stack (Elt : sig
type t
end) =
struct
type t = Elt.t list
let if_ = value_if
let empty () = []
let is_empty = List.is_empty
let pop_exn : t -> Elt.t * t = function
| [] ->
failwith "pop_exn"
| x :: xs ->
(x, xs)
let pop : t -> (Elt.t * t) option = function
| x :: xs ->
Some (x, xs)
| _ ->
None
let push x ~onto : t = x :: onto
end
module Call_forest = Zkapp_call_forest
module Stack_frame = struct
include Stack_frame
type t = value
let if_ = Zkapp_command.value_if
let make = Stack_frame.make
end
module Call_stack = Stack (Stack_frame)
module Local_state = struct
type t =
( Stack_frame.t
, Call_stack.t
, Token_id.t
, Amount.Signed.t
, Ledger.t
, Bool.t
, Transaction_commitment.t
, Index.t
, Bool.failure_status_tbl )
Zkapp_command_logic.Local_state.t
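    (* Editor's note (added for clarity, not in the original source):
       [add_check] prepends [failure] to the current failure bucket when the
       check [b] fails, and combines [b] into the overall [success] flag. *)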
let add_check (t : t) failure b =
let failure_status_tbl =
match t.failure_status_tbl with
| hd :: tl when not b ->
(failure :: hd) :: tl
| old_failure_status_tbl ->
old_failure_status_tbl
in
{ t with failure_status_tbl; success = t.success && b }
let update_failure_status_tbl (t : t) failure_status b =
match failure_status with
| None ->
{ t with success = t.success && b }
| Some failure ->
add_check t failure b
let add_new_failure_status_bucket (t : t) =
{ t with failure_status_tbl = [] :: t.failure_status_tbl }
end
module Nonce_precondition = struct
let is_constant =
Zkapp_precondition.Numeric.is_constant
Zkapp_precondition.Numeric.Tc.nonce
end
end
module Env = struct
open Inputs
type t =
< account_update : Account_update.t
; zkapp_command : Zkapp_command.t
; account : Account.t
; ledger : Ledger.t
; amount : Amount.t
; signed_amount : Amount.Signed.t
; bool : Bool.t
; token_id : Token_id.t
; global_state : Global_state.t
; inclusion_proof : [ `Existing of location | `New ]
; local_state :
( Stack_frame.t
, Call_stack.t
, Token_id.t
, Amount.Signed.t
, L.t
, bool
, Transaction_commitment.t
, Index.t
, Transaction_status.Failure.Collection.t )
Zkapp_command_logic.Local_state.t
; protocol_state_precondition : Zkapp_precondition.Protocol_state.t
; valid_while_precondition : Zkapp_precondition.Valid_while.t
; transaction_commitment : Transaction_commitment.t
; full_transaction_commitment : Transaction_commitment.t
; field : Snark_params.Tick.Field.t
; failure : Transaction_status.Failure.t option >
let perform ~constraint_constants:_ (type r)
(eff : (r, t) Zkapp_command_logic.Eff.t) : r =
match eff with
| Check_valid_while_precondition (valid_while, global_state) ->
Zkapp_precondition.Valid_while.check valid_while
global_state.block_global_slot
|> Or_error.is_ok
| Check_protocol_state_precondition (pred, global_state) -> (
Zkapp_precondition.Protocol_state.check pred
global_state.protocol_state
|> fun or_err -> match or_err with Ok () -> true | Error _ -> false )
| Check_account_precondition
(account_update, account, new_account, local_state) -> (
match account_update.body.preconditions.account with
| Accept ->
local_state
| Nonce n ->
let nonce_matches = Account.Nonce.equal account.nonce n in
Inputs.Local_state.add_check local_state
Account_nonce_precondition_unsatisfied nonce_matches
| Full precondition_account ->
let local_state = ref local_state in
let check failure b =
local_state :=
Inputs.Local_state.add_check !local_state failure b
in
Zkapp_precondition.Account.check ~new_account ~check
precondition_account account ;
!local_state )
| Init_account { account_update = _; account = a } ->
a
end
module M = Zkapp_command_logic.Make (Inputs)
let update_action_state action_state actions ~txn_global_slot
~last_action_slot =
let action_state', last_action_slot' =
M.update_action_state action_state actions ~txn_global_slot
~last_action_slot
in
(action_state', last_action_slot')
  (* apply zkapp command fee payer's while stubbing out the second pass ledger
     CAUTION: If you use the intermediate local states, you MUST update the
     [will_succeed] field to [false] if the [status] is [Failed].*)
let apply_zkapp_command_first_pass_aux (type user_acc) ~constraint_constants
~global_slot ~(state_view : Zkapp_precondition.Protocol_state.View.t)
      ~(init : user_acc) ~f ?(fee_excess = Amount.Signed.zero)
      (* TODO: is the right? is it never used for zkapps? *)
      ?(supply_increase = Amount.Signed.zero) (ledger : L.t)
(command : Zkapp_command.t) :
( Transaction_partially_applied.Zkapp_command_partially_applied.t
* user_acc )
Or_error.t =
let open Or_error.Let_syntax in
let previous_hash = merkle_root ledger in
let original_first_pass_account_states =
let id = Zkapp_command.fee_payer command in
[ ( id
, Option.Let_syntax.(
let%bind loc = L.location_of_account ledger id in
let%map a = L.get ledger loc in
(loc, a)) )
]
in
let perform eff = Env.perform ~constraint_constants eff in
let initial_state :
Inputs.Global_state.t * _ Zkapp_command_logic.Local_state.t =
( { protocol_state = state_view
; first_pass_ledger = ledger
; second_pass_ledger =
            (* We stub out the second_pass_ledger initially, and then poke the
               correct value in place after the first pass is finished. *)
L.empty ~depth:0 ()
; fee_excess
; supply_increase
; block_global_slot = global_slot
}
, { stack_frame =
({ calls = []
; caller = Token_id.default
; caller_caller = Token_id.default
} : Inputs.Stack_frame.t)
; call_stack = []
; transaction_commitment = Inputs.Transaction_commitment.empty
; full_transaction_commitment = Inputs.Transaction_commitment.empty
; token_id = Token_id.default
; excess = Currency.Amount.(Signed.of_unsigned zero)
; supply_increase = Currency.Amount.(Signed.of_unsigned zero)
; ledger = L.empty ~depth:0 ()
; success = true
; account_update_index = Inputs.Index.zero
; failure_status_tbl = []
; will_succeed = true
} )
in
let user_acc = f init initial_state in
let account_updates = Zkapp_command.all_account_updates command in
let%map global_state, local_state =
Or_error.try_with (fun () ->
M.start ~constraint_constants
{ account_updates
; memo_hash = Signed_command_memo.hash command.memo
; will_succeed =
true
}
{ perform } initial_state )
in
( { Transaction_partially_applied.Zkapp_command_partially_applied.command
; previous_hash
; original_first_pass_account_states
; constraint_constants
; state_view
; global_state
; local_state
}
, user_acc )
let apply_zkapp_command_first_pass ~constraint_constants ~global_slot
~(state_view : Zkapp_precondition.Protocol_state.View.t)
      ?(fee_excess = Amount.Signed.zero)
      (* TODO: is the right? is it never used for zkapps? *)
      ?(supply_increase = Amount.Signed.zero) (ledger : L.t)
(command : Zkapp_command.t) :
Transaction_partially_applied.Zkapp_command_partially_applied.t Or_error.t
=
let open Or_error.Let_syntax in
let%map partial_stmt, _user_acc =
apply_zkapp_command_first_pass_aux ~constraint_constants ~global_slot
~state_view ~fee_excess ~supply_increase ledger command ~init:None
~f:(fun _acc state -> Some state)
in
partial_stmt
let apply_zkapp_command_second_pass_aux (type user_acc) ~(init : user_acc) ~f
ledger
(c : Transaction_partially_applied.Zkapp_command_partially_applied.t) :
(Transaction_applied.Zkapp_command_applied.t * user_acc) Or_error.t =
let open Or_error.Let_syntax in
let perform eff =
Env.perform ~constraint_constants:c.constraint_constants eff
in
let original_account_states =
      (* get the original states of all the accounts in each pass.
         If an account updated in the first pass is referenced in account
         updates, then retain the value before first pass application *)
let account_states = Account_id.Table.create () in
List.iter
~f:(fun (id, acc_opt) ->
Account_id.Table.update account_states id
~f:(Option.value ~default:acc_opt) )
( c.original_first_pass_account_states
@ List.map (Zkapp_command.accounts_referenced c.command) ~f:(fun id ->
( id
, Option.Let_syntax.(
let%bind loc = L.location_of_account ledger id in
let%map a = L.get ledger loc in
(loc, a)) ) ) ) ;
Account_id.Table.to_alist account_states
in
let rec step_all (user_acc : user_acc)
( (g_state : Inputs.Global_state.t)
, (l_state : _ Zkapp_command_logic.Local_state.t) ) :
(user_acc * Transaction_status.Failure.Collection.t) Or_error.t =
if List.is_empty l_state.stack_frame.Stack_frame.calls then
Ok (user_acc, l_state.failure_status_tbl)
else
let%bind states =
Or_error.try_with (fun () ->
M.step ~constraint_constants:c.constraint_constants { perform }
(g_state, l_state) )
in
step_all (f user_acc states) states
in
let account_states_after_fee_payer =
      (* To check if the accounts remain unchanged in the event the transaction
         fails. First pass updates will remain even if the transaction fails to
         apply zkapp account updates *)
List.map (Zkapp_command.accounts_referenced c.command) ~f:(fun id ->
( id
, Option.Let_syntax.(
let%bind loc = L.location_of_account ledger id in
let%map a = L.get ledger loc in
(loc, a)) ) )
in
let accounts () =
List.map original_account_states
~f:(Tuple2.map_snd ~f:(Option.map ~f:snd))
in
    (* update local and global state ledger to second pass ledger *)
let global_state = { c.global_state with second_pass_ledger = ledger } in
let local_state =
{ c.local_state with
ledger = Global_state.second_pass_ledger global_state
}
in
let start = (global_state, local_state) in
match step_all (f init start) start with
| Error e ->
Error e
| Ok (user_acc, reversed_failure_status_tbl) ->
let failure_status_tbl = List.rev reversed_failure_status_tbl in
let account_ids_originally_not_in_ledger =
List.filter_map original_account_states
~f:(fun (acct_id, loc_and_acct) ->
if Option.is_none loc_and_acct then Some acct_id else None )
in
let successfully_applied =
Transaction_status.Failure.Collection.is_empty failure_status_tbl
in
        (* if the zkapp command fails in at least 1 account update,
           then all the account updates would be cancelled except
           the fee payer one *)
let failure_status_tbl =
if successfully_applied then failure_status_tbl
else
List.mapi failure_status_tbl ~f:(fun idx fs ->
if idx > 0 && List.is_empty fs then
[ Transaction_status.Failure.Cancelled ]
else fs )
in
let new_accounts =
List.filter account_ids_originally_not_in_ledger ~f:(fun acct_id ->
Option.is_some @@ L.location_of_account ledger acct_id )
in
let valid_result =
Ok
( { Transaction_applied.Zkapp_command_applied.accounts = accounts ()
; command =
{ With_status.data = c.command
; status =
( if successfully_applied then Applied
else Failed failure_status_tbl )
}
; new_accounts
}
, user_acc )
in
if successfully_applied then valid_result
else
let other_account_update_accounts_unchanged =
List.fold_until account_states_after_fee_payer ~init:true
~f:(fun acc (_, loc_opt) ->
match
let open Option.Let_syntax in
let%bind loc, a = loc_opt in
let%bind a' = L.get ledger loc in
Option.some_if (not (Account.equal a a')) ()
with
| None ->
Continue acc
| Some _ ->
Stop false )
~finish:Fn.id
in
if
List.is_empty new_accounts
&& other_account_update_accounts_unchanged
then valid_result
else
Or_error.error_string
"Zkapp_command application failed but new accounts created or \
some of the other account_update updates applied"
let apply_zkapp_command_second_pass ledger c :
Transaction_applied.Zkapp_command_applied.t Or_error.t =
let open Or_error.Let_syntax in
let%map x, () =
apply_zkapp_command_second_pass_aux ~init:() ~f:Fn.const ledger c
in
x
let apply_zkapp_command_unchecked_aux ~constraint_constants ~global_slot
~state_view ~init ~f ?fee_excess ?supply_increase ledger command =
let open Or_error.Let_syntax in
apply_zkapp_command_first_pass_aux ~constraint_constants ~global_slot
~state_view ?fee_excess ?supply_increase ledger command ~init ~f
>>= fun (partial_stmt, user_acc) ->
apply_zkapp_command_second_pass_aux ~init:user_acc ~f ledger partial_stmt
let apply_zkapp_command_unchecked ~constraint_constants ~global_slot
~state_view ledger command =
let open Or_error.Let_syntax in
apply_zkapp_command_first_pass ~constraint_constants ~global_slot
~state_view ledger command
>>= apply_zkapp_command_second_pass_aux ledger ~init:None
~f:(fun _acc (global_state, local_state) ->
Some (local_state, global_state.fee_excess) )
|> Result.map ~f:(fun (account_update_applied, state_res) ->
(account_update_applied, Option.value_exn state_res) )
let update_timing_when_no_deduction ~txn_global_slot account =
validate_timing ~txn_amount:Amount.zero ~txn_global_slot ~account
let has_permission_to_receive ~ledger receiver_account_id :
Account.t
* Ledger_intf.account_state
* [> `Has_permission_to_receive of bool ] =
let init_account = Account.initialize receiver_account_id in
match location_of_account ledger receiver_account_id with
| None ->
( init_account
, `Added
, `Has_permission_to_receive
(Account.has_permission ~control:Control.Tag.None_given
~to_:`Receive init_account ) )
| Some loc -> (
match get ledger loc with
| None ->
failwith "Ledger location with no account"
| Some receiver_account ->
( receiver_account
, `Existed
, `Has_permission_to_receive
(Account.has_permission ~control:Control.Tag.None_given
~to_:`Receive receiver_account ) ) )
let no_failure = []
let update_failed =
[ Transaction_status.Failure.Update_not_permitted_balance ]
let empty = Transaction_status.Failure.Collection.empty
let single_failure =
Transaction_status.Failure.Collection.of_single_failure
Update_not_permitted_balance
let append_entry f (s : Transaction_status.Failure.Collection.t) :
Transaction_status.Failure.Collection.t =
match s with [] -> [ f ] | h :: t -> h :: f :: t
  (* Structure of the failure status:
     I. Only one fee transfer in the transaction (`One) and it fails:
        [[failure]]
     II. Two fee transfers in the transaction (`Two)-
        Both fee transfers fail:
          [[failure-of-first-fee-transfer]; [failure-of-second-fee-transfer]]
        First succeeds and second one fails:
          [[];[failure-of-second-fee-transfer]]
        First fails and second succeeds:
          [[failure-of-first-fee-transfer];[]]
  *)
let process_fee_transfer t (transfer : Fee_transfer.t) ~modify_balance
~modify_timing =
let open Or_error.Let_syntax in
let%bind () =
if
List.for_all
~f:Token_id.(equal default)
(One_or_two.to_list (Fee_transfer.fee_tokens transfer))
then return ()
else Or_error.errorf "Cannot pay fees in non-default tokens."
in
match Fee_transfer.to_singles transfer with
| `One ft ->
let account_id = Fee_transfer.Single.receiver ft in
let a, action, `Has_permission_to_receive can_receive =
has_permission_to_receive ~ledger:t account_id
in
let%bind timing = modify_timing a in
let%bind balance = modify_balance action account_id a.balance ft.fee in
if can_receive then (
let%map _action, a, loc = get_or_create t account_id in
let new_accounts = get_new_accounts action account_id in
set t loc { a with balance; timing } ;
(new_accounts, empty, Currency.Amount.zero) )
else Ok ([], single_failure, Currency.Amount.of_fee ft.fee)
| `Two (ft1, ft2) ->
let account_id1 = Fee_transfer.Single.receiver ft1 in
let a1, action1, `Has_permission_to_receive can_receive1 =
has_permission_to_receive ~ledger:t account_id1
in
let account_id2 = Fee_transfer.Single.receiver ft2 in
if Account_id.equal account_id1 account_id2 then
let%bind fee = error_opt "overflow" (Fee.add ft1.fee ft2.fee) in
let%bind timing = modify_timing a1 in
let%bind balance =
modify_balance action1 account_id1 a1.balance fee
in
if can_receive1 then (
let%map _action1, a1, l1 = get_or_create t account_id1 in
let new_accounts1 = get_new_accounts action1 account_id1 in
set t l1 { a1 with balance; timing } ;
(new_accounts1, empty, Currency.Amount.zero) )
else
Ok
( []
, append_entry update_failed single_failure
, Currency.Amount.of_fee fee )
else
let a2, action2, `Has_permission_to_receive can_receive2 =
has_permission_to_receive ~ledger:t account_id2
in
let%bind balance1 =
modify_balance action1 account_id1 a1.balance ft1.fee
in
          (* Note: Not updating the timing field of a1 to avoid additional check in transactions snark (check_timing for "receiver"). This is OK because timing rules will not be violated when balance increases and will be checked whenever an amount is deducted from the account. (#5973) *)
let%bind timing2 = modify_timing a2 in
let%bind balance2 =
modify_balance action2 account_id2 a2.balance ft2.fee
in
let%bind new_accounts1, failures, burned_tokens1 =
if can_receive1 then (
let%map _action1, a1, l1 = get_or_create t account_id1 in
let new_accounts1 = get_new_accounts action1 account_id1 in
set t l1 { a1 with balance = balance1 } ;
( new_accounts1
, append_entry no_failure empty
, Currency.Amount.zero ) )
else Ok ([], single_failure, Currency.Amount.of_fee ft1.fee)
in
let%bind new_accounts2, failures', burned_tokens2 =
if can_receive2 then (
let%map _action2, a2, l2 = get_or_create t account_id2 in
let new_accounts2 = get_new_accounts action2 account_id2 in
set t l2 { a2 with balance = balance2; timing = timing2 } ;
( new_accounts2
, append_entry no_failure failures
, Currency.Amount.zero ) )
else
Ok
( []
, append_entry update_failed failures
, Currency.Amount.of_fee ft2.fee )
in
let%map burned_tokens =
error_opt "burned tokens overflow"
(Currency.Amount.add burned_tokens1 burned_tokens2)
in
(new_accounts1 @ new_accounts2, failures', burned_tokens)
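  (* Editor's note (added for clarity, not in the original source):
     [apply_fee_transfer] instantiates [process_fee_transfer] so that balances
     are credited net of the account-creation fee for newly created receivers,
     while timing is only re-validated since no amount is deducted here. *)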
let apply_fee_transfer ~constraint_constants ~txn_global_slot t transfer =
let open Or_error.Let_syntax in
let%map new_accounts, failures, burned_tokens =
process_fee_transfer t transfer
~modify_balance:(fun action _ b f ->
let%bind amount =
let amount = Amount.of_fee f in
sub_account_creation_fee ~constraint_constants action amount
in
add_amount b amount )
~modify_timing:(fun acc ->
update_timing_when_no_deduction ~txn_global_slot acc )
in
let ft_with_status =
if Transaction_status.Failure.Collection.is_empty failures then
{ With_status.data = transfer; status = Applied }
else { data = transfer; status = Failed failures }
in
Transaction_applied.Fee_transfer_applied.
{ fee_transfer = ft_with_status; new_accounts; burned_tokens }
let apply_coinbase ~constraint_constants ~txn_global_slot t
({ receiver; fee_transfer; amount = coinbase_amount } as cb : Coinbase.t)
=
let open Or_error.Let_syntax in
let%bind ( receiver_reward
, new_accounts1
, transferee_update
, transferee_timing_prev
, failures1
, burned_tokens1 ) =
match fee_transfer with
| None ->
return (coinbase_amount, [], None, None, empty, Currency.Amount.zero)
| Some ({ receiver_pk = transferee; fee } as ft) ->
assert (not @@ Public_key.Compressed.equal transferee receiver) ;
let transferee_id = Coinbase.Fee_transfer.receiver ft in
let fee = Amount.of_fee fee in
let%bind receiver_reward =
error_opt "Coinbase fee transfer too large"
(Amount.sub coinbase_amount fee)
in
let transferee_account, action, `Has_permission_to_receive can_receive
=
has_permission_to_receive ~ledger:t transferee_id
in
let new_accounts = get_new_accounts action transferee_id in
let%bind timing =
update_timing_when_no_deduction ~txn_global_slot transferee_account
in
let%bind balance =
let%bind amount =
sub_account_creation_fee ~constraint_constants action fee
in
add_amount transferee_account.balance amount
in
if can_receive then
let%map _action, transferee_account, transferee_location =
get_or_create t transferee_id
in
( receiver_reward
, new_accounts
, Some
( transferee_location
, { transferee_account with balance; timing } )
, Some transferee_account.timing
, append_entry no_failure empty
, Currency.Amount.zero )
else return (receiver_reward, [], None, None, single_failure, fee)
in
let receiver_id = Account_id.create receiver Token_id.default in
let receiver_account, action2, `Has_permission_to_receive can_receive =
has_permission_to_receive ~ledger:t receiver_id
in
let new_accounts2 = get_new_accounts action2 receiver_id in
    (* Note: Updating coinbase receiver timing only if there is no fee transfer.
       This is so as to not add any extra constraints in transaction snark for checking
       "receiver" timings. This is OK because timing rules will not be violated when
       balance increases and will be checked whenever an amount is deducted from the
       account (#5973) *)
let%bind coinbase_receiver_timing =
match transferee_timing_prev with
| None ->
let%map new_receiver_timing =
update_timing_when_no_deduction ~txn_global_slot receiver_account
in
new_receiver_timing
| Some _timing ->
Ok receiver_account.timing
in
let%bind receiver_balance =
let%bind amount =
sub_account_creation_fee ~constraint_constants action2 receiver_reward
in
add_amount receiver_account.balance amount
in
let%bind failures, burned_tokens2 =
if can_receive then (
let%map _action2, receiver_account, receiver_location =
get_or_create t receiver_id
in
set t receiver_location
{ receiver_account with
balance = receiver_balance
; timing = coinbase_receiver_timing
} ;
(append_entry no_failure failures1, Currency.Amount.zero) )
else return (append_entry update_failed failures1, receiver_reward)
in
Option.iter transferee_update ~f:(fun (l, a) -> set t l a) ;
let%map burned_tokens =
error_opt "burned tokens overflow"
(Amount.add burned_tokens1 burned_tokens2)
in
let coinbase_with_status =
if Transaction_status.Failure.Collection.is_empty failures then
{ With_status.data = cb; status = Applied }
else { With_status.data = cb; status = Failed failures }
in
Transaction_applied.Coinbase_applied.
{ coinbase = coinbase_with_status
; new_accounts = new_accounts1 @ new_accounts2
; burned_tokens
}
let apply_transaction_first_pass ~constraint_constants ~global_slot
~(txn_state_view : Zkapp_precondition.Protocol_state.View.t) ledger
(t : Transaction.t) : Transaction_partially_applied.t Or_error.t =
let open Or_error.Let_syntax in
let previous_hash = merkle_root ledger in
let txn_global_slot = global_slot in
match t with
| Command (Signed_command txn) ->
let%map applied =
apply_user_command_unchecked ~constraint_constants ~txn_global_slot
ledger txn
in
Transaction_partially_applied.Signed_command { previous_hash; applied }
| Command (Zkapp_command txn) ->
let%map partially_applied =
apply_zkapp_command_first_pass ~global_slot ~state_view:txn_state_view
~constraint_constants ledger txn
in
Transaction_partially_applied.Zkapp_command partially_applied
| Fee_transfer t ->
let%map applied =
apply_fee_transfer ~constraint_constants ~txn_global_slot ledger t
in
Transaction_partially_applied.Fee_transfer { previous_hash; applied }
| Coinbase t ->
let%map applied =
apply_coinbase ~constraint_constants ~txn_global_slot ledger t
in
Transaction_partially_applied.Coinbase { previous_hash; applied }
let apply_transaction_second_pass ledger (t : Transaction_partially_applied.t)
: Transaction_applied.t Or_error.t =
let open Or_error.Let_syntax in
let open Transaction_applied in
match t with
| Signed_command { previous_hash; applied } ->
return
{ previous_hash; varying = Varying.Command (Signed_command applied) }
| Zkapp_command partially_applied ->
        (* TODO: either here or in second phase of apply, need to update the prior global state statement for the fee payer segment to add the second phase ledger at the end *)
let%map applied =
apply_zkapp_command_second_pass ledger partially_applied
in
{ previous_hash = partially_applied.previous_hash
; varying = Varying.Command (Zkapp_command applied)
}
| Fee_transfer { previous_hash; applied } ->
return { previous_hash; varying = Varying.Fee_transfer applied }
| Coinbase { previous_hash; applied } ->
return { previous_hash; varying = Varying.Coinbase applied }
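  (* Editor's note (added for clarity, not in the original source):
     [apply_transactions] runs the first pass over the whole list of
     transactions before running any second pass, rather than interleaving the
     two passes per transaction. *)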
let apply_transactions ~constraint_constants ~global_slot ~txn_state_view
ledger txns =
let open Or_error in
Mina_stdlib.Result.List.map txns
~f:
(apply_transaction_first_pass ~constraint_constants ~global_slot
~txn_state_view ledger )
>>= Mina_stdlib.Result.List.map ~f:(apply_transaction_second_pass ledger)
module For_tests = struct
let validate_timing_with_min_balance = validate_timing_with_min_balance
let validate_timing = validate_timing
end
end
module For_tests = struct
open Mina_numbers
open Currency
module Account_without_receipt_chain_hash = struct
type t =
( Public_key.Compressed.t
, Token_id.t
, Account.Token_symbol.t
, Balance.t
, Account_nonce.t
, unit
, Public_key.Compressed.t option
, State_hash.t
, Account_timing.t
, Permissions.t
, Zkapp_account.t option )
Account.Poly.t
[@@deriving sexp, compare]
end
let min_init_balance = Int64.of_string "8000000000"
let max_init_balance = Int64.of_string "8000000000000"
let num_accounts = 10
let num_transactions = 10
let depth = Int.ceil_log2 (num_accounts + num_transactions)
module Init_ledger = struct
type t = (Keypair.t * int64) array [@@deriving sexp]
let init ?(zkapp = true) (type l) (module L : Ledger_intf.S with type t = l)
(init_ledger : t) (l : L.t) =
Array.iter init_ledger ~f:(fun (kp, amount) ->
let _tag, account, loc =
L.get_or_create l
(Account_id.create
(Public_key.compress kp.public_key)
Token_id.default )
|> Or_error.ok_exn
in
let permissions : Permissions.t =
{ edit_state = Either
; send = Either
; receive = None
; set_delegate = Either
; set_permissions = Either
; set_verification_key = Either
; set_zkapp_uri = Either
; edit_action_state = Either
; set_token_symbol = Either
; increment_nonce = Either
; set_voting_for = Either
; access = None
; set_timing = Either
}
in
let zkapp =
if zkapp then
Some
{ Zkapp_account.default with
verification_key =
Some
{ With_hash.hash = Zkapp_basic.F.zero
; data = Side_loaded_verification_key.dummy
}
}
else None
in
L.set l loc
{ account with
balance =
Currency.Balance.of_uint64 (Unsigned.UInt64.of_int64 amount)
; permissions
; zkapp
} )
let gen () : t Quickcheck.Generator.t =
let tbl = Public_key.Compressed.Hash_set.create () in
let open Quickcheck.Generator in
let open Let_syntax in
let rec go acc n =
if n = 0 then return (Array.of_list acc)
else
let%bind kp =
filter Keypair.gen ~f:(fun kp ->
not (Hash_set.mem tbl (Public_key.compress kp.public_key)) )
and amount = Int64.gen_incl min_init_balance max_init_balance in
Hash_set.add tbl (Public_key.compress kp.public_key) ;
go ((kp, amount) :: acc) (n - 1)
in
go [] num_accounts
end
module Transaction_spec = struct
type t =
{ fee : Currency.Fee.t
; sender : Keypair.t * Account_nonce.t
; receiver : Public_key.Compressed.t
; amount : Currency.Amount.t
}
[@@deriving sexp]
let gen ~(init_ledger : Init_ledger.t) ~nonces =
let pk ((kp : Keypair.t), _) = Public_key.compress kp.public_key in
let open Quickcheck.Let_syntax in
let%bind receiver_is_new = Bool.quickcheck_generator in
let gen_index () = Int.gen_incl 0 (Array.length init_ledger - 1) in
let%bind receiver_index =
if receiver_is_new then return None else gen_index () >>| Option.return
in
let%bind receiver =
match receiver_index with
| None ->
Public_key.Compressed.gen
| Some i ->
return (pk init_ledger.(i))
in
let%bind sender =
let%map i =
match receiver_index with
| None ->
gen_index ()
| Some j ->
Quickcheck.Generator.filter (gen_index ()) ~f:(( <> ) j)
in
fst init_ledger.(i)
in
let gen_amount () =
Currency.Amount.(
gen_incl
(of_nanomina_int_exn 1_000_000)
(of_nanomina_int_exn 100_000_000))
in
let gen_fee () =
Currency.Fee.(
gen_incl
(of_nanomina_int_exn 1_000_000)
(of_nanomina_int_exn 100_000_000))
in
let nonce : Account_nonce.t = Map.find_exn nonces sender in
let%bind fee = gen_fee () in
let%bind amount = gen_amount () in
let nonces =
Map.set nonces ~key:sender ~data:(Account_nonce.succ nonce)
in
let spec = { fee; amount; receiver; sender = (sender, nonce) } in
return (spec, nonces)
end
module Test_spec = struct
type t = { init_ledger : Init_ledger.t; specs : Transaction_spec.t list }
[@@deriving sexp]
let mk_gen ?(num_transactions = num_transactions) () =
let open Quickcheck.Let_syntax in
let%bind init_ledger = Init_ledger.gen () in
let%bind specs =
let rec go acc n nonces =
if n = 0 then return (List.rev acc)
else
let%bind spec, nonces = Transaction_spec.gen ~init_ledger ~nonces in
go (spec :: acc) (n - 1) nonces
in
go [] num_transactions
(Keypair.Map.of_alist_exn
(List.map (Array.to_list init_ledger) ~f:(fun (pk, _) ->
(pk, Account_nonce.zero) ) ) )
in
return { init_ledger; specs }
let gen = mk_gen ~num_transactions ()
end
let command_send
{ Transaction_spec.fee; sender = sender, sender_nonce; receiver; amount }
: Signed_command.t =
let sender_pk = Public_key.compress sender.public_key in
Signed_command.sign sender
{ common =
{ fee
; fee_payer_pk = sender_pk
; nonce = sender_nonce
; valid_until = Global_slot.max_value
; memo = Signed_command_memo.dummy
}
; body = Payment { source_pk = sender_pk; receiver_pk = receiver; amount }
}
|> Signed_command.forget_check
let account_update_send ?(use_full_commitment = true)
?(double_sender_nonce = true)
{ Transaction_spec.fee; sender = sender, sender_nonce; receiver; amount }
: Zkapp_command.t =
let sender_pk = Public_key.compress sender.public_key in
let actual_nonce =
      (* Here, we double the spec'd nonce, because we bump the nonce a second
         time for the 'sender' part of the payment. *)
if double_sender_nonce then
sender_nonce |> Account.Nonce.to_uint32
|> Unsigned.UInt32.(mul (of_int 2))
|> Account.Nonce.to_uint32
else sender_nonce
in
let zkapp_command : Zkapp_command.Simple.t =
{ fee_payer =
{ Account_update.Fee_payer.body =
{ public_key = sender_pk
; fee
; valid_until = None
; nonce = actual_nonce
}
; authorization = Signature.dummy
}
; account_updates =
[ { body =
{ public_key = sender_pk
; update = Account_update.Update.noop
; token_id = Token_id.default
; balance_change = Amount.Signed.(negate (of_unsigned amount))
; increment_nonce = double_sender_nonce
; events = []
; actions = []
; call_data = Snark_params.Tick.Field.zero
; call_depth = 0
; preconditions =
{ Account_update.Preconditions.network =
Zkapp_precondition.Protocol_state.accept
; account = Accept
; valid_while = Ignore
}
; may_use_token = No
; use_full_commitment
; implicit_account_creation_fee = true
; authorization_kind =
( if use_full_commitment then Signature
else Proof Zkapp_basic.F.zero )
}
; authorization =
( if use_full_commitment then Signature Signature.dummy
else Proof Mina_base.Proof.transaction_dummy )
}
; { body =
{ public_key = receiver
; update = Account_update.Update.noop
; token_id = Token_id.default
; balance_change = Amount.Signed.of_unsigned amount
; increment_nonce = false
; events = []
; actions = []
; call_data = Snark_params.Tick.Field.zero
; call_depth = 0
; preconditions =
{ Account_update.Preconditions.network =
Zkapp_precondition.Protocol_state.accept
; account = Accept
; valid_while = Ignore
}
; may_use_token = No
; use_full_commitment = false
; implicit_account_creation_fee = true
; authorization_kind = None_given
}
; authorization = None_given
}
]
; memo = Signed_command_memo.empty
}
in
let zkapp_command = Zkapp_command.of_simple zkapp_command in
let commitment = Zkapp_command.commitment zkapp_command in
let full_commitment =
Zkapp_command.Transaction_commitment.create_complete commitment
~memo_hash:(Signed_command_memo.hash zkapp_command.memo)
~fee_payer_hash:
(Zkapp_command.Digest.Account_update.create
(Account_update.of_fee_payer zkapp_command.fee_payer) )
in
let account_updates_signature =
let c = if use_full_commitment then full_commitment else commitment in
Schnorr.Chunked.sign sender.private_key
(Random_oracle.Input.Chunked.field c)
in
let account_updates =
Zkapp_command.Call_forest.map zkapp_command.account_updates
~f:(fun (account_update : Account_update.t) ->
match account_update.body.authorization_kind with
| Signature ->
{ account_update with
authorization = Control.Signature account_updates_signature
}
| _ ->
account_update )
in
let signature =
Schnorr.Chunked.sign sender.private_key
(Random_oracle.Input.Chunked.field full_commitment)
in
{ zkapp_command with
fee_payer = { zkapp_command.fee_payer with authorization = signature }
; account_updates
}
let test_eq (type l) (module L : Ledger_intf.S with type t = l) accounts
(l1 : L.t) (l2 : L.t) =
List.map accounts ~f:(fun a ->
Or_error.try_with (fun () ->
let mismatch () =
failwithf
!"One ledger had the account %{sexp:Account_id.t} but the \
other did not"
a ()
in
let hide_rc (a : _ Account.Poly.t) =
{ a with receipt_chain_hash = () }
in
match L.(location_of_account l1 a, location_of_account l2 a) with
| None, None ->
()
| Some _, None | None, Some _ ->
mismatch ()
| Some x1, Some x2 -> (
match L.(get l1 x1, get l2 x2) with
| None, None ->
()
| Some _, None | None, Some _ ->
mismatch ()
| Some a1, Some a2 ->
[%test_eq: Account_without_receipt_chain_hash.t]
(hide_rc a1) (hide_rc a2) ) ) )
|> Or_error.combine_errors_unit
let txn_global_slot = Global_slot.zero
let iter_err ts ~f =
List.fold_until ts
~finish:(fun () -> Ok ())
~init:()
~f:(fun () t ->
match f t with Error e -> Stop (Error e) | Ok _ -> Continue () )
let view : Zkapp_precondition.Protocol_state.View.t =
let h = Frozen_ledger_hash.empty_hash in
let len = Length.zero in
let a = Currency.Amount.zero in
let epoch_data =
{ Epoch_data.Poly.ledger =
{ Epoch_ledger.Poly.hash = h; total_currency = a }
; seed = h
; start_checkpoint = h
; lock_checkpoint = h
; epoch_length = len
}
in
{ snarked_ledger_hash = h
; blockchain_length = len
; min_window_density = len
; last_vrf_output = ()
; total_currency = a
; global_slot_since_genesis = txn_global_slot
; staking_epoch_data = epoch_data
; next_epoch_data = epoch_data
}
  (* Quickcheck generator for Zkapp_command.t, derived from Test_spec generator *)
let gen_zkapp_command_from_test_spec =
let open Quickcheck.Let_syntax in
let%bind use_full_commitment = Bool.quickcheck_generator in
match%map Test_spec.mk_gen ~num_transactions:1 () with
| { specs = [ spec ]; _ } ->
account_update_send ~use_full_commitment spec
| { specs; _ } ->
failwithf "gen_zkapp_command_from_test_spec: expected one spec, got %d"
(List.length specs) ()
end
|
614b1f3b645fbbe5d6aa2b69e08ea5bee2f40964521e2dea0b523bbc5bd4caf8 | ml4tp/tcoq | ind_tables.mli | (************************************************************************)
(*  v      *   The Coq Proof Assistant  /  The Coq Development Team     *)
(* <O___,, *   INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2017     *)
(*   \VV/  **************************************************************)
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
open Term
open Names
open Declare
(** This module provides support for registering inductive scheme builders,
declaring schemes and generating schemes on demand *)
(** A scheme is either a "mutual scheme_kind" or an "individual scheme_kind" *)
type mutual
type individual
type 'a scheme_kind
type mutual_scheme_object_function =
internal_flag -> mutual_inductive -> constr array Evd.in_evar_universe_context * Safe_typing.private_constants
type individual_scheme_object_function =
internal_flag -> inductive -> constr Evd.in_evar_universe_context * Safe_typing.private_constants
(** Main functions to register a scheme builder *)
val declare_mutual_scheme_object : string -> ?aux:string ->
mutual_scheme_object_function -> mutual scheme_kind
val declare_individual_scheme_object : string -> ?aux:string ->
individual_scheme_object_function ->
individual scheme_kind
(** Force generation of a (mutually) scheme with possibly user-level names *)
val define_individual_scheme : individual scheme_kind ->
internal_flag (** internal *) ->
Id.t option -> inductive -> constant * Safe_typing.private_constants
val define_mutual_scheme : mutual scheme_kind -> internal_flag (** internal *) ->
(int * Id.t) list -> mutual_inductive -> constant array * Safe_typing.private_constants
(** Main function to retrieve a scheme in the cache or to generate it *)
val find_scheme : ?mode:internal_flag -> 'a scheme_kind -> inductive -> constant * Safe_typing.private_constants
val check_scheme : 'a scheme_kind -> inductive -> bool
val pr_scheme_kind : 'a scheme_kind -> Pp.std_ppcmds
| null | https://raw.githubusercontent.com/ml4tp/tcoq/7a78c31df480fba721648f277ab0783229c8bece/toplevel/ind_tables.mli | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
* This module provides support for registering inductive scheme builders,
declaring schemes and generating schemes on demand
* A scheme is either a "mutual scheme_kind" or an "individual scheme_kind"
* Main functions to register a scheme builder
* Force generation of a (mutually) scheme with possibly user-level names
* internal
* internal
* Main function to retrieve a scheme in the cache or to generate it | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2017
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
open Term
open Names
open Declare
type mutual
type individual
type 'a scheme_kind
type mutual_scheme_object_function =
internal_flag -> mutual_inductive -> constr array Evd.in_evar_universe_context * Safe_typing.private_constants
type individual_scheme_object_function =
internal_flag -> inductive -> constr Evd.in_evar_universe_context * Safe_typing.private_constants
val declare_mutual_scheme_object : string -> ?aux:string ->
mutual_scheme_object_function -> mutual scheme_kind
val declare_individual_scheme_object : string -> ?aux:string ->
individual_scheme_object_function ->
individual scheme_kind
val define_individual_scheme : individual scheme_kind ->
Id.t option -> inductive -> constant * Safe_typing.private_constants
(int * Id.t) list -> mutual_inductive -> constant array * Safe_typing.private_constants
val find_scheme : ?mode:internal_flag -> 'a scheme_kind -> inductive -> constant * Safe_typing.private_constants
val check_scheme : 'a scheme_kind -> inductive -> bool
val pr_scheme_kind : 'a scheme_kind -> Pp.std_ppcmds
|
6f4aaf2302ab4efe068952a5fb88d75904d3a330d3d17de385181d62b6e16009 | well-typed/visualize-cbn | Pretty.hs | {-# LANGUAGE CPP #-}
module CBN.Pretty (ToDoc, toDoc, heapToDoc) where
#if !(MIN_VERSION_base(4,11,0))
import Data.Monoid
#endif
import Data.List (intersperse)
import Data.Set (Set)
import qualified Data.Map as Map
import qualified Data.Set as Set
import CBN.Closure
import CBN.Eval
import CBN.Heap
import CBN.Language
import CBN.Pretty.Precedence as P
import CBN.Util.Doc
import CBN.Util.Doc.Style
class ToDoc a where
toDoc :: a -> Doc Style String
toDoc = toDoc' Top
toDoc' :: FixityContext -> a -> Doc Style String
toDoc' _fc = toDoc
-- | For convenience, 'ToDoc' is idempotent
instance ToDoc (Doc Style String) where
toDoc = id
instance ToDoc Var where
toDoc (Var x) = style (\st -> st { styleItalic = True }) $ doc x
instance ToDoc Con where
toDoc (Con "Nil") = doc "[]"
toDoc (Con "Unit") = doc "()"
toDoc (Con c) = style (\st -> st { styleForeground = Just Red }) $ doc c
instance ToDoc Prim where
toDoc (PInt n) = doc (show n)
toDoc PIAdd = doc "add"
toDoc PISub = doc "sub"
toDoc PIMul = doc "mul"
toDoc PIEq = doc "eq"
toDoc PILt = doc "lt"
toDoc PILe = doc "le"
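-- Editor's note (added for clarity, not part of the original source): the
-- 'FixityContext' threaded through 'toDoc'' below decides when parentheses
-- are needed, so binary primitives render as infix operators only where
-- precedence allows it.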
instance ToDoc PrimApp where
toDoc' fc (PrimApp PIAdd [a, b]) = parensIf (needsParens fc Add) $
toDoc' (L Add) a <+> doc "+" <+> toDoc' (R Add) b
toDoc' fc (PrimApp PISub [a, b]) = parensIf (needsParens fc Sub) $
toDoc' (L Sub) a <+> doc "-" <+> toDoc' (R Sub) b
toDoc' fc (PrimApp PIMul [a, b]) = parensIf (needsParens fc Mul) $
toDoc' (L Mul) a <+> doc "*" <+> toDoc' (R Mul) b
toDoc' fc (PrimApp PILe [a, b]) = parensIf (needsParens fc Le) $
toDoc' (L Le) a <+> doc "<=" <+> toDoc' (R Le) b
toDoc' fc (PrimApp PILt [a, b]) = parensIf (needsParens fc Lt) $
toDoc' (L Lt) a <+> doc "<" <+> toDoc' (R Lt) b
toDoc' fc (PrimApp PIEq [a, b]) = parensIf (needsParens fc Eq) $
toDoc' (L Eq) a <+> doc "==" <+> toDoc' (R Eq) b
toDoc' fc (PrimApp p es) = parensIf (needsParens fc P.Ap && not (null es)) $
hsep (toDoc p : map (toDoc' (R P.Ap)) es)
instance ToDoc ConApp where
toDoc' fc (ConApp (Con "Cons") [x, xs]) = parensIf (needsParens fc Cons) $
toDoc' (L Cons) x <+> doc ":" <+> toDoc' (R Cons) xs
toDoc' _fc (ConApp (Con "Pair") [x, xs]) = parensIf True $
toDoc' Top x <> doc "," <+> toDoc' Top xs
toDoc' fc (ConApp c es) = parensIf (needsParens fc P.Ap && not (null es)) $
hsep (toDoc c : map (toDoc' (R P.Ap)) es)
instance ToDoc Pat where
toDoc (Pat (Con "Cons") [x, xs]) =
toDoc x <> doc ":" <> toDoc xs
toDoc (Pat (Con "Pair") [x, xs]) = parensIf True $
toDoc x <> doc "," <> toDoc xs
toDoc (Pat c xs) =
hsep (toDoc c : map toDoc xs)
instance ToDoc Match where
toDoc' fc = mconcat . matchRow fc
-- | Table-row for a match statement
--
-- Used when using a vertical layout for a case statement
matchRow :: FixityContext -> Match -> [Doc Style String]
matchRow fc (Match p rhs) = [toDoc p, doc " -> ", toDoc' fc rhs]
-- | We make elements from the prelude blue
instance ToDoc Ptr where
toDoc (Ptr Nothing Nothing) = error "invalid pointer"
toDoc (Ptr (Just n) Nothing) = doc (show n)
toDoc (Ptr Nothing (Just name)) = style (\st -> st { styleForeground = Just Blue })
$ doc name
toDoc (Ptr (Just n) (Just name)) = doc name <> doc "_" <> doc (show n)
instance ToDoc Term where
toDoc' _ (TVar x) = toDoc x
toDoc' _ (TPtr n) = toDoc n
toDoc' fc (TPrim pes ) = toDoc' fc pes
toDoc' fc (TCon ces) = toDoc' fc ces
-- special case for @bind e1 (\x -> e2)@
toDoc' fc (TApp (TApp (TPtr bind@(Ptr Nothing (Just "bind"))) e1) (TLam x e2)) =
parensIfChoice (needsParens fc P.Ap) $ [
stack [
toDoc bind <+> toDoc' (R P.Ap) e1 <+> doc "(\\" <> toDoc x <+> doc "->"
, toDoc' (R Lam) e2 <> doc ")"
]
]
-- standard rendering
toDoc' fc (TApp e1 e2) = parensIf (needsParens fc P.Ap) $
toDoc' (L P.Ap) e1 <+> toDoc' (R P.Ap) e2
toDoc' fc (TSeq e1 e2) = parensIf (needsParens fc P.Ap) $
kw "seq" <+> toDoc' (R P.Ap) e1 <+> toDoc' (R P.Ap) e2
toDoc' fc (TLam x e) = parensIf (needsParens fc Lam) $
doc "\\" <> hsep (map toDoc (x:xs)) <+> doc "->" <+> toDoc' (R Lam) e'
where
(xs, e') = collectArgs e
toDoc' fc (TLet x e1 e2) = parensIfChoice (needsParens fc Let) [
stack [
kw "let" <+> x' <+> doc "=" <+> e1' <+> kw "in"
, e2'
]
, kw "let" <+> x' <+> doc "=" <+> e1' <+> kw "in" <+> e2'
]
where
x' = toDoc x
e1' = toDoc' Top e1
e2' = toDoc' (R Let) e2
toDoc' fc (TCase e ms) = parensIfChoice (needsParens fc Case) [
stack [
kw "case" <+> e' <+> kw "of" <+> doc "{"
, indent $ table $ map (matchRow (R Case)) ms
, doc "}"
]
, kw "case" <+> e' <+> kw "of" <+> wrap "{ " " }" (punctuate " ; " ms')
]
where
e' = toDoc' (L Case) e
ms' = map (toDoc' (R Case)) ms
toDoc' fc (TIf c t f) = parensIfChoice (needsParens fc If) [
stack [
kw "if" <+> c'
, indent $ stack [
kw "then" <+> t'
, kw "else" <+> f'
]
]
, kw "if" <+> c' <+> kw "then" <+> t' <+> kw "else" <+> f'
]
where
c' = toDoc' Top c
t' = toDoc' (R If) t
f' = toDoc' (R If) f
instance ToDoc Closure where
toDoc cl = case cl of
ErrorClosure str -> doc "Error :" <+> doc str
FunClosure term _ -> doc "Function :" <+> toDoc term
ConClosure con _ -> doc "Constructor :" <+> toDoc con
IndirectionClosure _ -> doc "Indirection " -- <+> toDoc ptr
ThunkClosure term _ -> doc "Thunk :" <+> toDoc term
PrimClosure prim _ -> doc "Primary :" <+> toDoc prim
instance ToDoc Description where
toDoc StepAlloc = doc "allocate"
toDoc StepBeta = doc "beta reduction"
toDoc (StepApply f) = doc "apply" <+> toDoc f
toDoc (StepDelta pes) = doc "delta:" <+> toDoc pes
toDoc (StepMatch c) = doc "match" <+> toDoc c
toDoc (StepIf b) = doc "if" <+> doc (show b)
toDoc StepSeq = doc "seq"
-- | Based on purescript implementation
mintersperse :: (Monoid m) => m -> [m] -> m
mintersperse _ [] = mempty
mintersperse _ [x] = x
mintersperse sep (x:xs) = x <> sep <> mintersperse sep xs
instance ToDoc DescriptionWithContext where
toDoc (DescriptionWithContext descr []) = toDoc descr
toDoc (DescriptionWithContext descr context) = mconcat [
toDoc descr
, doc " in ["
, mintersperse (doc ", ") $ map toDoc context
, doc "]"
]
-- | For the heap we need to know which pointers we are about to collect
heapToDoc :: forall a. ToDoc a => Set Ptr -> Heap a -> Doc Style String
heapToDoc garbage (Heap _next heap) =
table $ map go (Map.toList heap)
where
go :: (Ptr, a) -> [Doc Style String]
go (ptr, a) = [markGarbage ptr $ toDoc ptr, doc " = ", toDoc a]
markGarbage :: Ptr -> Doc Style String -> Doc Style String
markGarbage ptr
| ptr `Set.member` garbage = style $ \st -> st { styleBackground = Just Red }
| otherwise = id
{-------------------------------------------------------------------------------
Auxiliary
-------------------------------------------------------------------------------}
kw :: String -> Doc Style String
kw = style (\st -> st { styleBold = True }) . doc
parensIf :: Bool -> Doc Style String -> Doc Style String
parensIf False = id
parensIf True = wrap "(" ")"
-- | Swap the order of the choices if we need parentheses
--
-- The idea is that we prefer a multi-line layout normally, but if we
-- need to insert parentheses we prefer a single-line layout.
parensIfChoice :: Bool -> [Doc Style String] -> Doc Style String
parensIfChoice p ds = parensIf p $ choice $ (if p then reverse else id) ds
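-- For example (illustrative, not in the original source): with
-- @parensIfChoice False [multi, single]@ the multi-line layout remains the
-- preferred choice, whereas @parensIfChoice True [multi, single]@ reverses
-- the list first, so the single-line layout is preferred inside the added
-- parentheses.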
wrap :: String -> String -> Doc Style String -> Doc Style String
wrap lft rgt d = doc lft <> d <> doc rgt
punctuate :: String -> [Doc Style String] -> Doc Style String
punctuate sep = mconcat . intersperse (doc sep)
hsep :: [Doc Style String] -> Doc Style String
hsep = punctuate " "
indent :: Doc Style String -> Doc Style String
indent = (doc " " <>)
(<+>) :: Doc Style String -> Doc Style String -> Doc Style String
(<+>) d1 d2 = d1 <> doc " " <> d2
| null | https://raw.githubusercontent.com/well-typed/visualize-cbn/499a8bc806ce03ce6d1bc12df1ea6e6df7768817/src/CBN/Pretty.hs | haskell | | Table-row for a match statement
Used when using a vertical layout for a case statement
| We make elements from the prelude blue
standard rendering
<+> toDoc ptr
| Based on purescript implementation
| For the heap we need to know which pointers we are about to collect
------------------------------------------------------------------------------
Auxiliary
------------------------------------------------------------------------------
| Swap the order of the choices if we need parentheses
The idea is that we prefer a multi-line layout normally, but if we
need to insert parentheses we prefer a single-line layout. | # LANGUAGE CPP #
module CBN.Pretty (ToDoc, toDoc, heapToDoc) where
#if !(MIN_VERSION_base(4,11,0))
import Data.Monoid
#endif
import Data.List (intersperse)
import Data.Set (Set)
import qualified Data.Map as Map
import qualified Data.Set as Set
import CBN.Closure
import CBN.Eval
import CBN.Heap
import CBN.Language
import CBN.Pretty.Precedence as P
import CBN.Util.Doc
import CBN.Util.Doc.Style
class ToDoc a where
toDoc :: a -> Doc Style String
toDoc = toDoc' Top
toDoc' :: FixityContext -> a -> Doc Style String
toDoc' _fc = toDoc
-- | For convenience, 'ToDoc' is idempotent
instance ToDoc (Doc Style String) where
toDoc = id
instance ToDoc Var where
toDoc (Var x) = style (\st -> st { styleItalic = True }) $ doc x
instance ToDoc Con where
toDoc (Con "Nil") = doc "[]"
toDoc (Con "Unit") = doc "()"
toDoc (Con c) = style (\st -> st { styleForeground = Just Red }) $ doc c
instance ToDoc Prim where
toDoc (PInt n) = doc (show n)
toDoc PIAdd = doc "add"
toDoc PISub = doc "sub"
toDoc PIMul = doc "mul"
toDoc PIEq = doc "eq"
toDoc PILt = doc "lt"
toDoc PILe = doc "le"
instance ToDoc PrimApp where
toDoc' fc (PrimApp PIAdd [a, b]) = parensIf (needsParens fc Add) $
toDoc' (L Add) a <+> doc "+" <+> toDoc' (R Add) b
toDoc' fc (PrimApp PISub [a, b]) = parensIf (needsParens fc Sub) $
toDoc' (L Sub) a <+> doc "-" <+> toDoc' (R Sub) b
toDoc' fc (PrimApp PIMul [a, b]) = parensIf (needsParens fc Mul) $
toDoc' (L Mul) a <+> doc "*" <+> toDoc' (R Mul) b
toDoc' fc (PrimApp PILe [a, b]) = parensIf (needsParens fc Le) $
toDoc' (L Le) a <+> doc "<=" <+> toDoc' (R Le) b
toDoc' fc (PrimApp PILt [a, b]) = parensIf (needsParens fc Lt) $
toDoc' (L Lt) a <+> doc "<" <+> toDoc' (R Lt) b
toDoc' fc (PrimApp PIEq [a, b]) = parensIf (needsParens fc Eq) $
toDoc' (L Eq) a <+> doc "==" <+> toDoc' (R Eq) b
toDoc' fc (PrimApp p es) = parensIf (needsParens fc P.Ap && not (null es)) $
hsep (toDoc p : map (toDoc' (R P.Ap)) es)
instance ToDoc ConApp where
toDoc' fc (ConApp (Con "Cons") [x, xs]) = parensIf (needsParens fc Cons) $
toDoc' (L Cons) x <+> doc ":" <+> toDoc' (R Cons) xs
toDoc' _fc (ConApp (Con "Pair") [x, xs]) = parensIf True $
toDoc' Top x <> doc "," <+> toDoc' Top xs
toDoc' fc (ConApp c es) = parensIf (needsParens fc P.Ap && not (null es)) $
hsep (toDoc c : map (toDoc' (R P.Ap)) es)
instance ToDoc Pat where
toDoc (Pat (Con "Cons") [x, xs]) =
toDoc x <> doc ":" <> toDoc xs
toDoc (Pat (Con "Pair") [x, xs]) = parensIf True $
toDoc x <> doc "," <> toDoc xs
toDoc (Pat c xs) =
hsep (toDoc c : map toDoc xs)
instance ToDoc Match where
toDoc' fc = mconcat . matchRow fc
matchRow :: FixityContext -> Match -> [Doc Style String]
matchRow fc (Match p rhs) = [toDoc p, doc " -> ", toDoc' fc rhs]
instance ToDoc Ptr where
toDoc (Ptr Nothing Nothing) = error "invalid pointer"
toDoc (Ptr (Just n) Nothing) = doc (show n)
toDoc (Ptr Nothing (Just name)) = style (\st -> st { styleForeground = Just Blue })
$ doc name
toDoc (Ptr (Just n) (Just name)) = doc name <> doc "_" <> doc (show n)
instance ToDoc Term where
toDoc' _ (TVar x) = toDoc x
toDoc' _ (TPtr n) = toDoc n
toDoc' fc (TPrim pes ) = toDoc' fc pes
toDoc' fc (TCon ces) = toDoc' fc ces
-- special case for @bind e1 (\x -> e2)@
toDoc' fc (TApp (TApp (TPtr bind@(Ptr Nothing (Just "bind"))) e1) (TLam x e2)) =
parensIfChoice (needsParens fc P.Ap) $ [
stack [
toDoc bind <+> toDoc' (R P.Ap) e1 <+> doc "(\\" <> toDoc x <+> doc "->"
, toDoc' (R Lam) e2 <> doc ")"
]
]
toDoc' fc (TApp e1 e2) = parensIf (needsParens fc P.Ap) $
toDoc' (L P.Ap) e1 <+> toDoc' (R P.Ap) e2
toDoc' fc (TSeq e1 e2) = parensIf (needsParens fc P.Ap) $
kw "seq" <+> toDoc' (R P.Ap) e1 <+> toDoc' (R P.Ap) e2
toDoc' fc (TLam x e) = parensIf (needsParens fc Lam) $
doc "\\" <> hsep (map toDoc (x:xs)) <+> doc "->" <+> toDoc' (R Lam) e'
where
(xs, e') = collectArgs e
toDoc' fc (TLet x e1 e2) = parensIfChoice (needsParens fc Let) [
stack [
kw "let" <+> x' <+> doc "=" <+> e1' <+> kw "in"
, e2'
]
, kw "let" <+> x' <+> doc "=" <+> e1' <+> kw "in" <+> e2'
]
where
x' = toDoc x
e1' = toDoc' Top e1
e2' = toDoc' (R Let) e2
toDoc' fc (TCase e ms) = parensIfChoice (needsParens fc Case) [
stack [
kw "case" <+> e' <+> kw "of" <+> doc "{"
, indent $ table $ map (matchRow (R Case)) ms
, doc "}"
]
, kw "case" <+> e' <+> kw "of" <+> wrap "{ " " }" (punctuate " ; " ms')
]
where
e' = toDoc' (L Case) e
ms' = map (toDoc' (R Case)) ms
toDoc' fc (TIf c t f) = parensIfChoice (needsParens fc If) [
stack [
kw "if" <+> c'
, indent $ stack [
kw "then" <+> t'
, kw "else" <+> f'
]
]
, kw "if" <+> c' <+> kw "then" <+> t' <+> kw "else" <+> f'
]
where
c' = toDoc' Top c
t' = toDoc' (R If) t
f' = toDoc' (R If) f
instance ToDoc Closure where
toDoc cl = case cl of
ErrorClosure str -> doc "Error :" <+> doc str
FunClosure term _ -> doc "Function :" <+> toDoc term
ConClosure con _ -> doc "Constructor :" <+> toDoc con
ThunkClosure term _ -> doc "Thunk :" <+> toDoc term
PrimClosure prim _ -> doc "Primary :" <+> toDoc prim
instance ToDoc Description where
toDoc StepAlloc = doc "allocate"
toDoc StepBeta = doc "beta reduction"
toDoc (StepApply f) = doc "apply" <+> toDoc f
toDoc (StepDelta pes) = doc "delta:" <+> toDoc pes
toDoc (StepMatch c) = doc "match" <+> toDoc c
toDoc (StepIf b) = doc "if" <+> doc (show b)
toDoc StepSeq = doc "seq"
mintersperse :: (Monoid m) => m -> [m] -> m
mintersperse _ [] = mempty
mintersperse _ [x] = x
mintersperse sep (x:xs) = x <> sep <> mintersperse sep xs
instance ToDoc DescriptionWithContext where
toDoc (DescriptionWithContext descr []) = toDoc descr
toDoc (DescriptionWithContext descr context) = mconcat [
toDoc descr
, doc " in ["
, mintersperse (doc ", ") $ map toDoc context
, doc "]"
]
heapToDoc :: forall a. ToDoc a => Set Ptr -> Heap a -> Doc Style String
heapToDoc garbage (Heap _next heap) =
table $ map go (Map.toList heap)
where
go :: (Ptr, a) -> [Doc Style String]
go (ptr, a) = [markGarbage ptr $ toDoc ptr, doc " = ", toDoc a]
markGarbage :: Ptr -> Doc Style String -> Doc Style String
markGarbage ptr
| ptr `Set.member` garbage = style $ \st -> st { styleBackground = Just Red }
| otherwise = id
kw :: String -> Doc Style String
kw = style (\st -> st { styleBold = True }) . doc
parensIf :: Bool -> Doc Style String -> Doc Style String
parensIf False = id
parensIf True = wrap "(" ")"
parensIfChoice :: Bool -> [Doc Style String] -> Doc Style String
parensIfChoice p ds = parensIf p $ choice $ (if p then reverse else id) ds
wrap :: String -> String -> Doc Style String -> Doc Style String
wrap lft rgt d = doc lft <> d <> doc rgt
punctuate :: String -> [Doc Style String] -> Doc Style String
punctuate sep = mconcat . intersperse (doc sep)
hsep :: [Doc Style String] -> Doc Style String
hsep = punctuate " "
indent :: Doc Style String -> Doc Style String
indent = (doc " " <>)
(<+>) :: Doc Style String -> Doc Style String -> Doc Style String
(<+>) d1 d2 = d1 <> doc " " <> d2
|
c2fbcaa79a428e6cf7436b938fb9a15c232dcd76d8a0d639586501ede9454ad2 | tweag/lagoon | Logging.hs | Copyright 2020 Pfizer Inc.
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--     http://www.apache.org/licenses/LICENSE-2.0
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
{-# LANGUAGE DeriveAnyClass #-}
module Lagoon.Interface.Logging (
LogLevel(..)
, Logger(..)
, filterLogMessages
) where
import Control.Monad
import Data.Functor.Contravariant
import GHC.Generics (Generic)
import Text.Show.Pretty (PrettyVal)
-- | Level of a log message: how important is it?
--
-- Order of the constructors is important for the derived 'Ord' instance
data LogLevel =
Debug
| Notice
| Warning
| Error
deriving (Show, Eq, Ord, Generic, PrettyVal)
data Logger m a = Logger {
logMessage :: LogLevel -> a -> m ()
}
instance Contravariant (Logger m) where
contramap f Logger{..} = Logger $ \l -> logMessage l . f
-- | Filter log messages
--
-- This is a helper function for constructing 'Logger' instances; we only
-- pass log messages to the provided 'Logger' instance if their log level
-- is at or above the level we want to see.
filterLogMessages :: LogLevel -> Logger IO a -> Logger IO a
filterLogMessages minLogLevel logger = Logger {
logMessage = \logLevel msg ->
when (logLevel >= minLogLevel) $ logMessage logger logLevel msg
}
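-- Illustrative usage sketch (not part of the original module). The helper
-- names 'stdoutLogger', 'appLogger' and 'intLogger' are hypothetical:
--
-- > stdoutLogger :: Logger IO String
-- > stdoutLogger = Logger $ \level msg -> putStrLn (show level ++ ": " ++ msg)
-- >
-- > -- Only Warning and Error messages reach stdout:
-- > appLogger :: Logger IO String
-- > appLogger = filterLogMessages Warning stdoutLogger
-- >
-- > -- 'contramap' adapts the message type via the Contravariant instance:
-- > intLogger :: Logger IO Int
-- > intLogger = contramap show appLogger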
| null | https://raw.githubusercontent.com/tweag/lagoon/2ef0440db810f4f45dbed160b369daf41d92bfa4/src/interface/src/Lagoon/Interface/Logging.hs | haskell | you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
# LANGUAGE DeriveAnyClass #
| Level of a log message: how important is it?
| Filter log messages
This is a helper function for constructing 'Logger' instances; we only
pass log messages to the provided 'Logger' instance if their log level
is at or above the level we want to see. | Copyright 2020 Pfizer Inc.
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
module Lagoon.Interface.Logging (
LogLevel(..)
, Logger(..)
, filterLogMessages
) where
import Control.Monad
import Data.Functor.Contravariant
import GHC.Generics (Generic)
import Text.Show.Pretty (PrettyVal)
-- Order of the constructors is important for the derived 'Ord' instance
data LogLevel =
Debug
| Notice
| Warning
| Error
deriving (Show, Eq, Ord, Generic, PrettyVal)
data Logger m a = Logger {
logMessage :: LogLevel -> a -> m ()
}
instance Contravariant (Logger m) where
contramap f Logger{..} = Logger $ \l -> logMessage l . f
filterLogMessages :: LogLevel -> Logger IO a -> Logger IO a
filterLogMessages minLogLevel logger = Logger {
logMessage = \logLevel msg ->
when (logLevel >= minLogLevel) $ logMessage logger logLevel msg
}
|
e62672f9522d9955d96f1b615f0e3d25aebf2e4aa70ec18ff7eef02ce042c36d | MinaProtocol/mina | staged_ledger.mli | open Core_kernel
open Async_kernel
open Mina_base
open Mina_transaction
open Signature_lib
module Ledger = Mina_ledger.Ledger
type t [@@deriving sexp]
module Scan_state : sig
[%%versioned:
module Stable : sig
module V2 : sig
type t [@@deriving sexp]
val hash : t -> Staged_ledger_hash.Aux_hash.t
end
end]
module Job_view : sig
type t [@@deriving sexp, to_yojson]
end
module Space_partition : sig
type t = { first : int * int; second : (int * int) option }
[@@deriving sexp]
end
module Transactions_ordered : sig
module Poly : sig
type 'a t =
{ first_pass : 'a list
; second_pass : 'a list
; previous_incomplete : 'a list
; current_incomplete : 'a list
}
[@@deriving sexp, to_yojson]
end
type t = Transaction_snark_scan_state.Transaction_with_witness.t Poly.t
[@@deriving sexp, to_yojson]
end
val hash : t -> Staged_ledger_hash.Aux_hash.t
val empty :
constraint_constants:Genesis_constants.Constraint_constants.t -> unit -> t
val snark_job_list_json : t -> string
(** All the transactions with hash of the parent block in which they were included in the order in which they were applied*)
val staged_transactions_with_state_hash :
t
-> (Transaction.t With_status.t * State_hash.t * Mina_numbers.Global_slot.t)
Transactions_ordered.Poly.t
list
val all_work_statements_exn : t -> Transaction_snark_work.Statement.t list
(** Hashes of the protocol states required for proving pending transactions*)
val required_state_hashes : t -> State_hash.Set.t
(** Validate protocol states required for proving the transactions. Returns an association list of state_hash and the corresponding state*)
val check_required_protocol_states :
t
-> protocol_states:
Mina_state.Protocol_state.value State_hash.With_state_hashes.t list
-> Mina_state.Protocol_state.value State_hash.With_state_hashes.t list
Or_error.t
(** Apply transactions corresponding to the last emitted proof based on the
two-pass system to get snarked ledger- first pass includes legacy transactions and zkapp payments and the second pass includes account updates. This ignores any account updates if a blocks transactions were split among two trees.
*)
val get_snarked_ledger_sync :
ledger:Ledger.t
-> get_protocol_state:
(State_hash.t -> Mina_state.Protocol_state.Value.t Or_error.t)
-> apply_first_pass:
( global_slot:Mina_numbers.Global_slot.t
-> txn_state_view:Mina_base.Zkapp_precondition.Protocol_state.View.t
-> Ledger.t
-> Transaction.t
-> Ledger.Transaction_partially_applied.t Or_error.t )
-> apply_second_pass:
( Ledger.t
-> Ledger.Transaction_partially_applied.t
-> Ledger.Transaction_applied.t Or_error.t )
-> apply_first_pass_sparse_ledger:
( global_slot:Mina_numbers.Global_slot.t
-> txn_state_view:Mina_base.Zkapp_precondition.Protocol_state.View.t
-> Mina_ledger.Sparse_ledger.t
-> Mina_transaction.Transaction.t
-> Mina_ledger.Sparse_ledger.T.Transaction_partially_applied.t
Or_error.t )
-> t
-> unit Or_error.t
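(* Illustrative call shape (added for clarity; not part of the original
interface). The callbacks [first_pass], [second_pass] and [first_pass_sparse]
are hypothetical stand-ins for the ledger application functions a caller
supplies; only the labels come from the signature above.
let replay ~scan_state ~snarked_ledger ~get_protocol_state =
Scan_state.get_snarked_ledger_sync
~ledger:snarked_ledger
~get_protocol_state
~apply_first_pass:first_pass
~apply_second_pass:second_pass
~apply_first_pass_sparse_ledger:first_pass_sparse
scan_state
*)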
(** Apply transactions corresponding to the last emitted proof based on the
two-pass system to get snarked ledger- first pass includes legacy transactions and zkapp payments and the second pass includes account updates. This ignores any account updates if a blocks transactions were split among two trees.
*)
val get_snarked_ledger_async :
?async_batch_size:int
-> ledger:Ledger.t
-> get_protocol_state:
(State_hash.t -> Mina_state.Protocol_state.Value.t Or_error.t)
-> apply_first_pass:
( global_slot:Mina_numbers.Global_slot.t
-> txn_state_view:Mina_base.Zkapp_precondition.Protocol_state.View.t
-> Ledger.t
-> Transaction.t
-> Ledger.Transaction_partially_applied.t Or_error.t )
-> apply_second_pass:
( Ledger.t
-> Ledger.Transaction_partially_applied.t
-> Ledger.Transaction_applied.t Or_error.t )
-> apply_first_pass_sparse_ledger:
( global_slot:Mina_numbers.Global_slot.t
-> txn_state_view:Mina_base.Zkapp_precondition.Protocol_state.View.t
-> Mina_ledger.Sparse_ledger.t
-> Mina_transaction.Transaction.t
-> Mina_ledger.Sparse_ledger.T.Transaction_partially_applied.t
Or_error.t )
-> t
-> unit Deferred.Or_error.t
end
module Pre_diff_info : Pre_diff_info.S
module Staged_ledger_error : sig
type t =
| Non_zero_fee_excess of
Scan_state.Space_partition.t * Transaction.t With_status.t list
| Invalid_proofs of
( Ledger_proof.t
* Transaction_snark.Statement.t
* Mina_base.Sok_message.t )
list
* Error.t
| Couldn't_reach_verifier of Error.t
| Pre_diff of Pre_diff_info.Error.t
| Insufficient_work of string
| Mismatched_statuses of Transaction.t With_status.t * Transaction_status.t
| Invalid_public_key of Public_key.Compressed.t
| Unexpected of Error.t
[@@deriving sexp]
val to_string : t -> string
val to_error : t -> Error.t
end
val ledger : t -> Ledger.t
val scan_state : t -> Scan_state.t
val pending_coinbase_collection : t -> Pending_coinbase.t
val create_exn :
constraint_constants:Genesis_constants.Constraint_constants.t
-> ledger:Ledger.t
-> t
val replace_ledger_exn : t -> Ledger.t -> t
val proof_txns_with_state_hashes :
t
-> (Transaction.t With_status.t * State_hash.t * Mina_numbers.Global_slot.t)
Scan_state.Transactions_ordered.Poly.t
Mina_stdlib.Nonempty_list.t
option
val copy : t -> t
val hash : t -> Staged_ledger_hash.t
val apply :
?skip_verification:[ `Proofs | `All ]
-> constraint_constants:Genesis_constants.Constraint_constants.t
-> global_slot:Mina_numbers.Global_slot.t
-> t
-> Staged_ledger_diff.t
-> logger:Logger.t
-> verifier:Verifier.t
-> current_state_view:Zkapp_precondition.Protocol_state.View.t
-> state_and_body_hash:State_hash.t * State_body_hash.t
-> coinbase_receiver:Public_key.Compressed.t
-> supercharge_coinbase:bool
-> ( [ `Hash_after_applying of Staged_ledger_hash.t ]
* [ `Ledger_proof of
( Ledger_proof.t
* ( Transaction.t With_status.t
* State_hash.t
* Mina_numbers.Global_slot.t )
Scan_state.Transactions_ordered.Poly.t
list )
option ]
* [ `Staged_ledger of t ]
* [ `Pending_coinbase_update of bool * Pending_coinbase.Update.t ]
, Staged_ledger_error.t )
Deferred.Result.t
val apply_diff_unchecked :
constraint_constants:Genesis_constants.Constraint_constants.t
-> global_slot:Mina_numbers.Global_slot.t
-> t
-> Staged_ledger_diff.With_valid_signatures_and_proofs.t
-> logger:Logger.t
-> current_state_view:Zkapp_precondition.Protocol_state.View.t
-> state_and_body_hash:State_hash.t * State_body_hash.t
-> coinbase_receiver:Public_key.Compressed.t
-> supercharge_coinbase:bool
-> ( [ `Hash_after_applying of Staged_ledger_hash.t ]
* [ `Ledger_proof of
( Ledger_proof.t
* ( Transaction.t With_status.t
* State_hash.t
* Mina_numbers.Global_slot.t )
Scan_state.Transactions_ordered.Poly.t
list )
option ]
* [ `Staged_ledger of t ]
* [ `Pending_coinbase_update of bool * Pending_coinbase.Update.t ]
, Staged_ledger_error.t )
Deferred.Result.t
val current_ledger_proof : t -> Ledger_proof.t option
(* This should memoize the snark verifications *)
val create_diff :
constraint_constants:Genesis_constants.Constraint_constants.t
-> global_slot:Mina_numbers.Global_slot.t
-> ?log_block_creation:bool
-> t
-> coinbase_receiver:Public_key.Compressed.t
-> logger:Logger.t
-> current_state_view:Zkapp_precondition.Protocol_state.View.t
-> transactions_by_fee:User_command.Valid.t Sequence.t
-> get_completed_work:
( Transaction_snark_work.Statement.t
-> Transaction_snark_work.Checked.t option )
-> supercharge_coinbase:bool
-> ( Staged_ledger_diff.With_valid_signatures_and_proofs.t
* (User_command.Valid.t * Error.t) list
, Pre_diff_info.Error.t )
Result.t
val can_apply_supercharged_coinbase_exn :
winner:Public_key.Compressed.t
-> epoch_ledger:Mina_ledger.Sparse_ledger.t
-> global_slot:Mina_numbers.Global_slot.t
-> bool
val of_scan_state_pending_coinbases_and_snarked_ledger :
logger:Logger.t
-> constraint_constants:Genesis_constants.Constraint_constants.t
-> verifier:Verifier.t
-> scan_state:Scan_state.t
-> snarked_ledger:Ledger.t
-> snarked_local_state:Mina_state.Local_state.t
-> expected_merkle_root:Ledger_hash.t
-> pending_coinbases:Pending_coinbase.t
-> get_state:(State_hash.t -> Mina_state.Protocol_state.value Or_error.t)
-> t Or_error.t Deferred.t
val of_scan_state_pending_coinbases_and_snarked_ledger_unchecked :
constraint_constants:Genesis_constants.Constraint_constants.t
-> scan_state:Scan_state.t
-> snarked_ledger:Ledger.t
-> snarked_local_state:Mina_state.Local_state.t
-> expected_merkle_root:Ledger_hash.t
-> pending_coinbases:Pending_coinbase.t
-> get_state:(State_hash.t -> Mina_state.Protocol_state.value Or_error.t)
-> t Or_error.t Deferred.t
val all_work_pairs :
t
-> get_state:(State_hash.t -> Mina_state.Protocol_state.value Or_error.t)
-> (Transaction_witness.t, Ledger_proof.t) Snark_work_lib.Work.Single.Spec.t
One_or_two.t
list
Or_error.t
val all_work_statements_exn : t -> Transaction_snark_work.Statement.t list
val check_commands :
Ledger.t
-> verifier:Verifier.t
-> User_command.t With_status.t list
-> (User_command.Valid.t list, Verifier.Failure.t) Result.t
Deferred.Or_error.t
(** account ids created in the latest block, taken from the new_accounts
in the latest and next-to-latest trees of the scan state
*)
val latest_block_accounts_created :
t -> previous_block_state_hash:State_hash.t -> Account_id.t list
| null | https://raw.githubusercontent.com/MinaProtocol/mina/fcbcca1b34414e52642661352588420af906cdf4/src/lib/staged_ledger/staged_ledger.mli | ocaml | * All the transactions with hash of the parent block in which they were included in the order in which they were applied
* Hashes of the protocol states required for proving pending transactions
* Validate protocol states required for proving the transactions. Returns an association list of state_hash and the corresponding state
This should memoize the snark verifications | open Core_kernel
open Async_kernel
open Mina_base
open Mina_transaction
open Signature_lib
module Ledger = Mina_ledger.Ledger
type t [@@deriving sexp]
module Scan_state : sig
[%%versioned:
module Stable : sig
module V2 : sig
type t [@@deriving sexp]
val hash : t -> Staged_ledger_hash.Aux_hash.t
end
end]
module Job_view : sig
type t [@@deriving sexp, to_yojson]
end
module Space_partition : sig
type t = { first : int * int; second : (int * int) option }
[@@deriving sexp]
end
module Transactions_ordered : sig
module Poly : sig
type 'a t =
{ first_pass : 'a list
; second_pass : 'a list
; previous_incomplete : 'a list
; current_incomplete : 'a list
}
[@@deriving sexp, to_yojson]
end
type t = Transaction_snark_scan_state.Transaction_with_witness.t Poly.t
[@@deriving sexp, to_yojson]
end
val hash : t -> Staged_ledger_hash.Aux_hash.t
val empty :
constraint_constants:Genesis_constants.Constraint_constants.t -> unit -> t
val snark_job_list_json : t -> string
val staged_transactions_with_state_hash :
t
-> (Transaction.t With_status.t * State_hash.t * Mina_numbers.Global_slot.t)
Transactions_ordered.Poly.t
list
val all_work_statements_exn : t -> Transaction_snark_work.Statement.t list
val required_state_hashes : t -> State_hash.Set.t
val check_required_protocol_states :
t
-> protocol_states:
Mina_state.Protocol_state.value State_hash.With_state_hashes.t list
-> Mina_state.Protocol_state.value State_hash.With_state_hashes.t list
Or_error.t
(** Apply transactions corresponding to the last emitted proof based on the
two-pass system to get snarked ledger- first pass includes legacy transactions and zkapp payments and the second pass includes account updates. This ignores any account updates if a blocks transactions were split among two trees.
*)
val get_snarked_ledger_sync :
ledger:Ledger.t
-> get_protocol_state:
(State_hash.t -> Mina_state.Protocol_state.Value.t Or_error.t)
-> apply_first_pass:
( global_slot:Mina_numbers.Global_slot.t
-> txn_state_view:Mina_base.Zkapp_precondition.Protocol_state.View.t
-> Ledger.t
-> Transaction.t
-> Ledger.Transaction_partially_applied.t Or_error.t )
-> apply_second_pass:
( Ledger.t
-> Ledger.Transaction_partially_applied.t
-> Ledger.Transaction_applied.t Or_error.t )
-> apply_first_pass_sparse_ledger:
( global_slot:Mina_numbers.Global_slot.t
-> txn_state_view:Mina_base.Zkapp_precondition.Protocol_state.View.t
-> Mina_ledger.Sparse_ledger.t
-> Mina_transaction.Transaction.t
-> Mina_ledger.Sparse_ledger.T.Transaction_partially_applied.t
Or_error.t )
-> t
-> unit Or_error.t
(** Apply transactions corresponding to the last emitted proof based on the
two-pass system to get snarked ledger- first pass includes legacy transactions and zkapp payments and the second pass includes account updates. This ignores any account updates if a blocks transactions were split among two trees.
*)
val get_snarked_ledger_async :
?async_batch_size:int
-> ledger:Ledger.t
-> get_protocol_state:
(State_hash.t -> Mina_state.Protocol_state.Value.t Or_error.t)
-> apply_first_pass:
( global_slot:Mina_numbers.Global_slot.t
-> txn_state_view:Mina_base.Zkapp_precondition.Protocol_state.View.t
-> Ledger.t
-> Transaction.t
-> Ledger.Transaction_partially_applied.t Or_error.t )
-> apply_second_pass:
( Ledger.t
-> Ledger.Transaction_partially_applied.t
-> Ledger.Transaction_applied.t Or_error.t )
-> apply_first_pass_sparse_ledger:
( global_slot:Mina_numbers.Global_slot.t
-> txn_state_view:Mina_base.Zkapp_precondition.Protocol_state.View.t
-> Mina_ledger.Sparse_ledger.t
-> Mina_transaction.Transaction.t
-> Mina_ledger.Sparse_ledger.T.Transaction_partially_applied.t
Or_error.t )
-> t
-> unit Deferred.Or_error.t
end
module Pre_diff_info : Pre_diff_info.S
module Staged_ledger_error : sig
type t =
| Non_zero_fee_excess of
Scan_state.Space_partition.t * Transaction.t With_status.t list
| Invalid_proofs of
( Ledger_proof.t
* Transaction_snark.Statement.t
* Mina_base.Sok_message.t )
list
* Error.t
| Couldn't_reach_verifier of Error.t
| Pre_diff of Pre_diff_info.Error.t
| Insufficient_work of string
| Mismatched_statuses of Transaction.t With_status.t * Transaction_status.t
| Invalid_public_key of Public_key.Compressed.t
| Unexpected of Error.t
[@@deriving sexp]
val to_string : t -> string
val to_error : t -> Error.t
end
val ledger : t -> Ledger.t
val scan_state : t -> Scan_state.t
val pending_coinbase_collection : t -> Pending_coinbase.t
val create_exn :
constraint_constants:Genesis_constants.Constraint_constants.t
-> ledger:Ledger.t
-> t
val replace_ledger_exn : t -> Ledger.t -> t
val proof_txns_with_state_hashes :
t
-> (Transaction.t With_status.t * State_hash.t * Mina_numbers.Global_slot.t)
Scan_state.Transactions_ordered.Poly.t
Mina_stdlib.Nonempty_list.t
option
val copy : t -> t
val hash : t -> Staged_ledger_hash.t
val apply :
?skip_verification:[ `Proofs | `All ]
-> constraint_constants:Genesis_constants.Constraint_constants.t
-> global_slot:Mina_numbers.Global_slot.t
-> t
-> Staged_ledger_diff.t
-> logger:Logger.t
-> verifier:Verifier.t
-> current_state_view:Zkapp_precondition.Protocol_state.View.t
-> state_and_body_hash:State_hash.t * State_body_hash.t
-> coinbase_receiver:Public_key.Compressed.t
-> supercharge_coinbase:bool
-> ( [ `Hash_after_applying of Staged_ledger_hash.t ]
* [ `Ledger_proof of
( Ledger_proof.t
* ( Transaction.t With_status.t
* State_hash.t
* Mina_numbers.Global_slot.t )
Scan_state.Transactions_ordered.Poly.t
list )
option ]
* [ `Staged_ledger of t ]
* [ `Pending_coinbase_update of bool * Pending_coinbase.Update.t ]
, Staged_ledger_error.t )
Deferred.Result.t
val apply_diff_unchecked :
constraint_constants:Genesis_constants.Constraint_constants.t
-> global_slot:Mina_numbers.Global_slot.t
-> t
-> Staged_ledger_diff.With_valid_signatures_and_proofs.t
-> logger:Logger.t
-> current_state_view:Zkapp_precondition.Protocol_state.View.t
-> state_and_body_hash:State_hash.t * State_body_hash.t
-> coinbase_receiver:Public_key.Compressed.t
-> supercharge_coinbase:bool
-> ( [ `Hash_after_applying of Staged_ledger_hash.t ]
* [ `Ledger_proof of
( Ledger_proof.t
* ( Transaction.t With_status.t
* State_hash.t
* Mina_numbers.Global_slot.t )
Scan_state.Transactions_ordered.Poly.t
list )
option ]
* [ `Staged_ledger of t ]
* [ `Pending_coinbase_update of bool * Pending_coinbase.Update.t ]
, Staged_ledger_error.t )
Deferred.Result.t
val current_ledger_proof : t -> Ledger_proof.t option
val create_diff :
constraint_constants:Genesis_constants.Constraint_constants.t
-> global_slot:Mina_numbers.Global_slot.t
-> ?log_block_creation:bool
-> t
-> coinbase_receiver:Public_key.Compressed.t
-> logger:Logger.t
-> current_state_view:Zkapp_precondition.Protocol_state.View.t
-> transactions_by_fee:User_command.Valid.t Sequence.t
-> get_completed_work:
( Transaction_snark_work.Statement.t
-> Transaction_snark_work.Checked.t option )
-> supercharge_coinbase:bool
-> ( Staged_ledger_diff.With_valid_signatures_and_proofs.t
* (User_command.Valid.t * Error.t) list
, Pre_diff_info.Error.t )
Result.t
val can_apply_supercharged_coinbase_exn :
winner:Public_key.Compressed.t
-> epoch_ledger:Mina_ledger.Sparse_ledger.t
-> global_slot:Mina_numbers.Global_slot.t
-> bool
val of_scan_state_pending_coinbases_and_snarked_ledger :
logger:Logger.t
-> constraint_constants:Genesis_constants.Constraint_constants.t
-> verifier:Verifier.t
-> scan_state:Scan_state.t
-> snarked_ledger:Ledger.t
-> snarked_local_state:Mina_state.Local_state.t
-> expected_merkle_root:Ledger_hash.t
-> pending_coinbases:Pending_coinbase.t
-> get_state:(State_hash.t -> Mina_state.Protocol_state.value Or_error.t)
-> t Or_error.t Deferred.t
val of_scan_state_pending_coinbases_and_snarked_ledger_unchecked :
constraint_constants:Genesis_constants.Constraint_constants.t
-> scan_state:Scan_state.t
-> snarked_ledger:Ledger.t
-> snarked_local_state:Mina_state.Local_state.t
-> expected_merkle_root:Ledger_hash.t
-> pending_coinbases:Pending_coinbase.t
-> get_state:(State_hash.t -> Mina_state.Protocol_state.value Or_error.t)
-> t Or_error.t Deferred.t
val all_work_pairs :
t
-> get_state:(State_hash.t -> Mina_state.Protocol_state.value Or_error.t)
-> (Transaction_witness.t, Ledger_proof.t) Snark_work_lib.Work.Single.Spec.t
One_or_two.t
list
Or_error.t
val all_work_statements_exn : t -> Transaction_snark_work.Statement.t list
val check_commands :
Ledger.t
-> verifier:Verifier.t
-> User_command.t With_status.t list
-> (User_command.Valid.t list, Verifier.Failure.t) Result.t
Deferred.Or_error.t
(** account ids created in the latest block, taken from the new_accounts
in the latest and next-to-latest trees of the scan state
*)
val latest_block_accounts_created :
t -> previous_block_state_hash:State_hash.t -> Account_id.t list
|
318a1c920c7fc0f85fe4dff3ade589ca5cee01296682f720deda02723ccbf8ee | snowleopard/alga | Arbitrary.hs | # LANGUAGE CPP #
{-# OPTIONS_GHC -fno-warn-orphans #-}
-----------------------------------------------------------------------------
-- |
-- Module : Algebra.Graph.Test.Arbitrary
-- Copyright : (c) 2016-2022
-- License : MIT (see the file LICENSE)
-- Maintainer :
-- Stability : experimental
--
-- Generators and orphan Arbitrary instances for various data types.
-----------------------------------------------------------------------------
module Algebra.Graph.Test.Arbitrary (
-- * Generators of arbitrary graph instances
arbitraryGraph, arbitraryRelation, arbitraryAdjacencyMap,
) where
import Control.Monad
import Data.List.NonEmpty (NonEmpty (..), toList)
import Data.Maybe (catMaybes)
import Data.Tree
import Test.QuickCheck
import Algebra.Graph
import Algebra.Graph.Export
import Algebra.Graph.Label
import qualified Algebra.Graph.Undirected as UG
import qualified Algebra.Graph.Acyclic.AdjacencyMap as AAM
import qualified Algebra.Graph.AdjacencyIntMap as AIM
import qualified Algebra.Graph.AdjacencyMap as AM
import qualified Algebra.Graph.Bipartite.AdjacencyMap as BAM
import qualified Algebra.Graph.Bipartite.AdjacencyMap.Algorithm as BAMA
import qualified Algebra.Graph.NonEmpty.AdjacencyMap as NAM
import qualified Algebra.Graph.Class as C
import qualified Algebra.Graph.Labelled as LG
import qualified Algebra.Graph.Labelled.AdjacencyMap as LAM
import qualified Algebra.Graph.NonEmpty as NonEmpty
import qualified Algebra.Graph.Relation as Relation
import qualified Algebra.Graph.Relation.Preorder as Preorder
import qualified Algebra.Graph.Relation.Reflexive as Reflexive
import qualified Algebra.Graph.Relation.Symmetric as Symmetric
import qualified Algebra.Graph.Relation.Transitive as Transitive
-- | Generate an arbitrary 'C.Graph' value of a specified size.
arbitraryGraph :: (C.Graph g, Arbitrary (C.Vertex g)) => Gen g
arbitraryGraph = sized expr
where
expr 0 = return C.empty
expr 1 = C.vertex <$> arbitrary
expr n = do
left <- choose (0, n)
oneof [ C.overlay <$> expr left <*> expr (n - left)
, C.connect <$> expr left <*> expr (n - left) ]
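-- Usage sketch (added for illustration; 'sample', 'generate' and 'resize'
-- come from Test.QuickCheck, imported above):
--
-- > sample (arbitraryGraph :: Gen (Graph Int))
-- > generate (resize 10 (arbitraryGraph :: Gen (Graph Int)))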
instance Arbitrary a => Arbitrary (Graph a) where
arbitrary = arbitraryGraph
shrink Empty = []
shrink (Vertex _) = [Empty]
shrink (Overlay x y) = [Empty, x, y]
++ [Overlay x' y' | (x', y') <- shrink (x, y) ]
shrink (Connect x y) = [Empty, x, y, Overlay x y]
++ [Connect x' y' | (x', y') <- shrink (x, y) ]
-- An Arbitrary instance for 'UG.Graph'.
instance Arbitrary a => Arbitrary (UG.Graph a) where
arbitrary = arbitraryGraph
-- An Arbitrary instance for Acyclic.AdjacencyMap
instance (Ord a, Arbitrary a) => Arbitrary (AAM.AdjacencyMap a) where
arbitrary = AAM.shrink <$> arbitrary
shrink g = shrinkVertices ++ shrinkEdges
where
shrinkVertices =
let vertices = AAM.vertexList g
in [ AAM.removeVertex x g | x <- vertices ]
shrinkEdges =
let edges = AAM.edgeList g
in [ AAM.removeEdge x y g | (x, y) <- edges ]
-- | Generate an arbitrary 'NonEmpty.Graph' value of a specified size.
arbitraryNonEmptyGraph :: Arbitrary a => Gen (NonEmpty.Graph a)
arbitraryNonEmptyGraph = sized expr
where
-- can't generate non-empty graph of size 0
expr 1 = NonEmpty.vertex <$> arbitrary
expr n = do
left <- choose (1, n)
oneof [ NonEmpty.overlay <$> expr left <*> expr (n - left)
, NonEmpty.connect <$> expr left <*> expr (n - left) ]
instance Arbitrary a => Arbitrary (NonEmpty.Graph a) where
arbitrary = arbitraryNonEmptyGraph
shrink (NonEmpty.Vertex _) = []
shrink (NonEmpty.Overlay x y) = [x, y]
++ [NonEmpty.Overlay x' y' | (x', y') <- shrink (x, y) ]
shrink (NonEmpty.Connect x y) = [x, y, NonEmpty.Overlay x y]
++ [NonEmpty.Connect x' y' | (x', y') <- shrink (x, y) ]
-- | Generate an arbitrary 'Relation'.
arbitraryRelation :: (Arbitrary a, Ord a) => Gen (Relation.Relation a)
arbitraryRelation = Relation.stars <$> arbitrary
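-- Design note (added for clarity, not in the original): 'Relation.stars'
-- assembles the relation from a list of star subgraphs, e.g.
-- @Relation.stars [(1, [2, 3]), (2, [4])]@ yields the relation with edges
-- 1->2, 1->3 and 2->4, so the generated value is consistent by construction.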
-- TODO: Implement a custom shrink method.
instance (Arbitrary a, Ord a) => Arbitrary (Relation.Relation a) where
arbitrary = arbitraryRelation
shrink g = shrinkVertices ++ shrinkEdges
where
shrinkVertices =
let vertices = Relation.vertexList g
in [ Relation.removeVertex v g | v <- vertices ]
shrinkEdges =
let edges = Relation.edgeList g
in [ Relation.removeEdge v w g | (v, w) <- edges ]
-- TODO: Simplify.
instance (Arbitrary a, Ord a) => Arbitrary (Reflexive.ReflexiveRelation a) where
arbitrary = Reflexive.fromRelation . Relation.reflexiveClosure
<$> arbitraryRelation
instance (Arbitrary a, Ord a) => Arbitrary (Symmetric.Relation a) where
arbitrary = Symmetric.toSymmetric <$> arbitraryRelation
instance (Arbitrary a, Ord a) => Arbitrary (Transitive.TransitiveRelation a) where
arbitrary = Transitive.fromRelation . Relation.transitiveClosure
<$> arbitraryRelation
instance (Arbitrary a, Ord a) => Arbitrary (Preorder.PreorderRelation a) where
arbitrary = Preorder.fromRelation . Relation.closure
<$> arbitraryRelation
-- | Generate an arbitrary 'AdjacencyMap'. It is guaranteed that the
-- resulting adjacency map is 'consistent'.
arbitraryAdjacencyMap :: (Arbitrary a, Ord a) => Gen (AM.AdjacencyMap a)
arbitraryAdjacencyMap = AM.stars <$> arbitrary
instance (Arbitrary a, Ord a) => Arbitrary (AM.AdjacencyMap a) where
arbitrary = arbitraryAdjacencyMap
shrink g = shrinkVertices ++ shrinkEdges
where
shrinkVertices = [ AM.removeVertex v g | v <- AM.vertexList g ]
shrinkEdges = [ AM.removeEdge v w g | (v, w) <- AM.edgeList g ]
-- | Generate an arbitrary non-empty 'NAM.AdjacencyMap'. It is guaranteed that
-- the resulting adjacency map is 'consistent'.
arbitraryNonEmptyAdjacencyMap :: (Arbitrary a, Ord a) => Gen (NAM.AdjacencyMap a)
arbitraryNonEmptyAdjacencyMap = NAM.stars1 <$> nonEmpty
where
nonEmpty = do
xs <- arbitrary
case xs of
[] -> do
x <- arbitrary
return (x :| []) -- There must be at least one vertex
(x:xs) -> return (x :| xs)
instance (Arbitrary a, Ord a) => Arbitrary (NAM.AdjacencyMap a) where
arbitrary = arbitraryNonEmptyAdjacencyMap
shrink g = shrinkVertices ++ shrinkEdges
where
shrinkVertices =
let vertices = toList $ NAM.vertexList1 g
in catMaybes [ NAM.removeVertex1 v g | v <- vertices ]
shrinkEdges =
let edges = NAM.edgeList g
in [ NAM.removeEdge v w g | (v, w) <- edges ]
instance Arbitrary AIM.AdjacencyIntMap where
arbitrary = AIM.stars <$> arbitrary
shrink g = shrinkVertices ++ shrinkEdges
where
shrinkVertices = [ AIM.removeVertex x g | x <- AIM.vertexList g ]
shrinkEdges = [ AIM.removeEdge x y g | (x, y) <- AIM.edgeList g ]
-- | Generate an arbitrary labelled 'LAM.AdjacencyMap'. It is guaranteed
-- that the resulting adjacency map is 'consistent'.
arbitraryLabelledAdjacencyMap :: (Arbitrary a, Ord a, Eq e, Arbitrary e, Monoid e) => Gen (LAM.AdjacencyMap e a)
arbitraryLabelledAdjacencyMap = LAM.fromAdjacencyMaps <$> arbitrary
instance (Arbitrary a, Ord a, Eq e, Arbitrary e, Monoid e) => Arbitrary (LAM.AdjacencyMap e a) where
arbitrary = arbitraryLabelledAdjacencyMap
shrink g = shrinkVertices ++ shrinkEdges
where
shrinkVertices =
let vertices = LAM.vertexList g
in [ LAM.removeVertex v g | v <- vertices ]
shrinkEdges =
let edges = LAM.edgeList g
in [ LAM.removeEdge v w g | (_, v, w) <- edges ]
-- | Generate an arbitrary labelled 'LAM.Graph' value of a specified size.
arbitraryLabelledGraph :: (Arbitrary a, Arbitrary e) => Gen (LG.Graph e a)
arbitraryLabelledGraph = sized expr
where
expr 0 = return LG.empty
expr 1 = LG.vertex <$> arbitrary
expr n = do
label <- arbitrary
left <- choose (0, n)
LG.connect label <$> expr left <*> expr (n - left)
instance (Arbitrary a, Arbitrary e, Monoid e) => Arbitrary (LG.Graph e a) where
arbitrary = arbitraryLabelledGraph
shrink LG.Empty = []
shrink (LG.Vertex _) = [LG.Empty]
shrink (LG.Connect e x y) = [LG.Empty, x, y, LG.Connect mempty x y]
++ [LG.Connect e x' y' | (x', y') <- shrink (x, y) ]
-- TODO: Implement a custom shrink method.
instance Arbitrary s => Arbitrary (Doc s) where
arbitrary = mconcat . map literal <$> arbitrary
instance (Arbitrary a, Num a, Ord a) => Arbitrary (Distance a) where
arbitrary = (\x -> if x < 0 then distance infinite else distance (unsafeFinite x)) <$> arbitrary
instance (Arbitrary a, Num a, Ord a) => Arbitrary (Capacity a) where
arbitrary = (\x -> if x < 0 then capacity infinite else capacity (unsafeFinite x)) <$> arbitrary
instance (Arbitrary a, Num a, Ord a) => Arbitrary (Count a) where
arbitrary = (\x -> if x < 0 then count infinite else count (unsafeFinite x)) <$> arbitrary
instance Arbitrary a => Arbitrary (Minimum a) where
arbitrary = frequency [(10, pure <$> arbitrary), (1, pure noMinimum)]
instance (Arbitrary a, Ord a) => Arbitrary (PowerSet a) where
arbitrary = PowerSet <$> arbitrary
instance (Arbitrary o, Arbitrary a) => Arbitrary (Optimum o a) where
arbitrary = Optimum <$> arbitrary <*> arbitrary
instance (Arbitrary a, Arbitrary b, Ord a, Ord b) => Arbitrary (BAM.AdjacencyMap a b) where
arbitrary = BAM.toBipartite <$> arbitrary
shrink = map BAM.toBipartite . shrink . BAM.fromBipartite
instance (Arbitrary a, Arbitrary b) => Arbitrary (BAM.List a b) where
arbitrary = sized go
where
go 0 = return BAM.Nil
go 1 = do h <- arbitrary
return $ BAM.Cons h BAM.Nil
go n = do f <- arbitrary
s <- arbitrary
(BAM.Cons f . BAM.Cons s) <$> go (n - 2)
instance (Arbitrary a, Arbitrary b, Ord a, Ord b) => Arbitrary (BAMA.Matching a b) where
arbitrary = BAMA.matching <$> arbitrary
| null | https://raw.githubusercontent.com/snowleopard/alga/399b5dc538a2496b67f9322dc7247e06a4ec326b/test/Algebra/Graph/Test/Arbitrary.hs | haskell | ---------------------------------------------------------------------------
|
Module : Algebra.Graph.Test.Arbitrary
Maintainer :
Stability : experimental
Generators and orphan Arbitrary instances for various data types.
---------------------------------------------------------------------------
* Generators of arbitrary graph instances
| Generate an arbitrary 'C.Graph' value of a specified size.
| Generate an arbitrary 'Relation'.
TODO: Implement a custom shrink method.
TODO: Simplify.
resulting adjacency map is 'consistent'.
| Generate an arbitrary non-empty 'NAM.AdjacencyMap'. It is guaranteed that
the resulting adjacency map is 'consistent'.
| Generate an arbitrary labelled 'LAM.AdjacencyMap'. It is guaranteed
that the resulting adjacency map is 'consistent'.
TODO: Implement a custom shrink method. | # LANGUAGE CPP #
# OPTIONS_GHC -fno - warn - orphans #
Copyright : ( c ) 2016 - 2022
License : MIT ( see the file LICENSE )
module Algebra.Graph.Test.Arbitrary (
arbitraryGraph, arbitraryRelation, arbitraryAdjacencyMap,
) where
import Control.Monad
import Data.List.NonEmpty (NonEmpty (..), toList)
import Data.Maybe (catMaybes)
import Data.Tree
import Test.QuickCheck
import Algebra.Graph
import Algebra.Graph.Export
import Algebra.Graph.Label
import qualified Algebra.Graph.Undirected as UG
import qualified Algebra.Graph.Acyclic.AdjacencyMap as AAM
import qualified Algebra.Graph.AdjacencyIntMap as AIM
import qualified Algebra.Graph.AdjacencyMap as AM
import qualified Algebra.Graph.Bipartite.AdjacencyMap as BAM
import qualified Algebra.Graph.Bipartite.AdjacencyMap.Algorithm as BAMA
import qualified Algebra.Graph.NonEmpty.AdjacencyMap as NAM
import qualified Algebra.Graph.Class as C
import qualified Algebra.Graph.Labelled as LG
import qualified Algebra.Graph.Labelled.AdjacencyMap as LAM
import qualified Algebra.Graph.NonEmpty as NonEmpty
import qualified Algebra.Graph.Relation as Relation
import qualified Algebra.Graph.Relation.Preorder as Preorder
import qualified Algebra.Graph.Relation.Reflexive as Reflexive
import qualified Algebra.Graph.Relation.Symmetric as Symmetric
import qualified Algebra.Graph.Relation.Transitive as Transitive
arbitraryGraph :: (C.Graph g, Arbitrary (C.Vertex g)) => Gen g
arbitraryGraph = sized expr
where
expr 0 = return C.empty
expr 1 = C.vertex <$> arbitrary
expr n = do
left <- choose (0, n)
oneof [ C.overlay <$> expr left <*> expr (n - left)
, C.connect <$> expr left <*> expr (n - left) ]
instance Arbitrary a => Arbitrary (Graph a) where
arbitrary = arbitraryGraph
shrink Empty = []
shrink (Vertex _) = [Empty]
shrink (Overlay x y) = [Empty, x, y]
++ [Overlay x' y' | (x', y') <- shrink (x, y) ]
shrink (Connect x y) = [Empty, x, y, Overlay x y]
++ [Connect x' y' | (x', y') <- shrink (x, y) ]
-- An Arbitrary instance for 'UG.Graph'.
instance Arbitrary a => Arbitrary (UG.Graph a) where
arbitrary = arbitraryGraph
-- An Arbitrary instance for Acyclic.AdjacencyMap
instance (Ord a, Arbitrary a) => Arbitrary (AAM.AdjacencyMap a) where
arbitrary = AAM.shrink <$> arbitrary
shrink g = shrinkVertices ++ shrinkEdges
where
shrinkVertices =
let vertices = AAM.vertexList g
in [ AAM.removeVertex x g | x <- vertices ]
shrinkEdges =
let edges = AAM.edgeList g
in [ AAM.removeEdge x y g | (x, y) <- edges ]
-- | Generate an arbitrary 'NonEmpty.Graph' value of a specified size.
arbitraryNonEmptyGraph :: Arbitrary a => Gen (NonEmpty.Graph a)
arbitraryNonEmptyGraph = sized expr
where
-- can't generate non-empty graph of size 0
expr 1 = NonEmpty.vertex <$> arbitrary
expr n = do
left <- choose (1, n)
oneof [ NonEmpty.overlay <$> expr left <*> expr (n - left)
, NonEmpty.connect <$> expr left <*> expr (n - left) ]
instance Arbitrary a => Arbitrary (NonEmpty.Graph a) where
arbitrary = arbitraryNonEmptyGraph
shrink (NonEmpty.Vertex _) = []
shrink (NonEmpty.Overlay x y) = [x, y]
++ [NonEmpty.Overlay x' y' | (x', y') <- shrink (x, y) ]
shrink (NonEmpty.Connect x y) = [x, y, NonEmpty.Overlay x y]
++ [NonEmpty.Connect x' y' | (x', y') <- shrink (x, y) ]
arbitraryRelation :: (Arbitrary a, Ord a) => Gen (Relation.Relation a)
arbitraryRelation = Relation.stars <$> arbitrary
instance (Arbitrary a, Ord a) => Arbitrary (Relation.Relation a) where
arbitrary = arbitraryRelation
shrink g = shrinkVertices ++ shrinkEdges
where
shrinkVertices =
let vertices = Relation.vertexList g
in [ Relation.removeVertex v g | v <- vertices ]
shrinkEdges =
let edges = Relation.edgeList g
in [ Relation.removeEdge v w g | (v, w) <- edges ]
instance (Arbitrary a, Ord a) => Arbitrary (Reflexive.ReflexiveRelation a) where
arbitrary = Reflexive.fromRelation . Relation.reflexiveClosure
<$> arbitraryRelation
instance (Arbitrary a, Ord a) => Arbitrary (Symmetric.Relation a) where
arbitrary = Symmetric.toSymmetric <$> arbitraryRelation
instance (Arbitrary a, Ord a) => Arbitrary (Transitive.TransitiveRelation a) where
arbitrary = Transitive.fromRelation . Relation.transitiveClosure
<$> arbitraryRelation
instance (Arbitrary a, Ord a) => Arbitrary (Preorder.PreorderRelation a) where
arbitrary = Preorder.fromRelation . Relation.closure
<$> arbitraryRelation
-- | Generate an arbitrary 'AdjacencyMap'. It is guaranteed that the
arbitraryAdjacencyMap :: (Arbitrary a, Ord a) => Gen (AM.AdjacencyMap a)
arbitraryAdjacencyMap = AM.stars <$> arbitrary
instance (Arbitrary a, Ord a) => Arbitrary (AM.AdjacencyMap a) where
arbitrary = arbitraryAdjacencyMap
shrink g = shrinkVertices ++ shrinkEdges
where
shrinkVertices = [ AM.removeVertex v g | v <- AM.vertexList g ]
shrinkEdges = [ AM.removeEdge v w g | (v, w) <- AM.edgeList g ]
arbitraryNonEmptyAdjacencyMap :: (Arbitrary a, Ord a) => Gen (NAM.AdjacencyMap a)
arbitraryNonEmptyAdjacencyMap = NAM.stars1 <$> nonEmpty
where
nonEmpty = do
xs <- arbitrary
case xs of
[] -> do
x <- arbitrary
return (x :| []) -- There must be at least one vertex
(x:xs) -> return (x :| xs)
instance (Arbitrary a, Ord a) => Arbitrary (NAM.AdjacencyMap a) where
arbitrary = arbitraryNonEmptyAdjacencyMap
shrink g = shrinkVertices ++ shrinkEdges
where
shrinkVertices =
let vertices = toList $ NAM.vertexList1 g
in catMaybes [ NAM.removeVertex1 v g | v <- vertices ]
shrinkEdges =
let edges = NAM.edgeList g
in [ NAM.removeEdge v w g | (v, w) <- edges ]
instance Arbitrary AIM.AdjacencyIntMap where
arbitrary = AIM.stars <$> arbitrary
shrink g = shrinkVertices ++ shrinkEdges
where
shrinkVertices = [ AIM.removeVertex x g | x <- AIM.vertexList g ]
shrinkEdges = [ AIM.removeEdge x y g | (x, y) <- AIM.edgeList g ]
arbitraryLabelledAdjacencyMap :: (Arbitrary a, Ord a, Eq e, Arbitrary e, Monoid e) => Gen (LAM.AdjacencyMap e a)
arbitraryLabelledAdjacencyMap = LAM.fromAdjacencyMaps <$> arbitrary
instance (Arbitrary a, Ord a, Eq e, Arbitrary e, Monoid e) => Arbitrary (LAM.AdjacencyMap e a) where
arbitrary = arbitraryLabelledAdjacencyMap
shrink g = shrinkVertices ++ shrinkEdges
where
shrinkVertices =
let vertices = LAM.vertexList g
in [ LAM.removeVertex v g | v <- vertices ]
shrinkEdges =
let edges = LAM.edgeList g
in [ LAM.removeEdge v w g | (_, v, w) <- edges ]
-- | Generate an arbitrary labelled 'LAM.Graph' value of a specified size.
arbitraryLabelledGraph :: (Arbitrary a, Arbitrary e) => Gen (LG.Graph e a)
arbitraryLabelledGraph = sized expr
where
expr 0 = return LG.empty
expr 1 = LG.vertex <$> arbitrary
expr n = do
label <- arbitrary
left <- choose (0, n)
LG.connect label <$> expr left <*> expr (n - left)
instance (Arbitrary a, Arbitrary e, Monoid e) => Arbitrary (LG.Graph e a) where
arbitrary = arbitraryLabelledGraph
shrink LG.Empty = []
shrink (LG.Vertex _) = [LG.Empty]
shrink (LG.Connect e x y) = [LG.Empty, x, y, LG.Connect mempty x y]
++ [LG.Connect e x' y' | (x', y') <- shrink (x, y) ]
instance Arbitrary s => Arbitrary (Doc s) where
arbitrary = mconcat . map literal <$> arbitrary
instance (Arbitrary a, Num a, Ord a) => Arbitrary (Distance a) where
arbitrary = (\x -> if x < 0 then distance infinite else distance (unsafeFinite x)) <$> arbitrary
instance (Arbitrary a, Num a, Ord a) => Arbitrary (Capacity a) where
arbitrary = (\x -> if x < 0 then capacity infinite else capacity (unsafeFinite x)) <$> arbitrary
instance (Arbitrary a, Num a, Ord a) => Arbitrary (Count a) where
arbitrary = (\x -> if x < 0 then count infinite else count (unsafeFinite x)) <$> arbitrary
instance Arbitrary a => Arbitrary (Minimum a) where
arbitrary = frequency [(10, pure <$> arbitrary), (1, pure noMinimum)]
instance (Arbitrary a, Ord a) => Arbitrary (PowerSet a) where
arbitrary = PowerSet <$> arbitrary
instance (Arbitrary o, Arbitrary a) => Arbitrary (Optimum o a) where
arbitrary = Optimum <$> arbitrary <*> arbitrary
instance (Arbitrary a, Arbitrary b, Ord a, Ord b) => Arbitrary (BAM.AdjacencyMap a b) where
arbitrary = BAM.toBipartite <$> arbitrary
shrink = map BAM.toBipartite . shrink . BAM.fromBipartite
instance (Arbitrary a, Arbitrary b) => Arbitrary (BAM.List a b) where
arbitrary = sized go
where
go 0 = return BAM.Nil
go 1 = do h <- arbitrary
return $ BAM.Cons h BAM.Nil
go n = do f <- arbitrary
s <- arbitrary
(BAM.Cons f . BAM.Cons s) <$> go (n - 2)
instance (Arbitrary a, Arbitrary b, Ord a, Ord b) => Arbitrary (BAMA.Matching a b) where
arbitrary = BAMA.matching <$> arbitrary
|
f75a79c3911ac15a94fb3e30b4a2b0efab4f6c5e4cc17af23f5cb70d0ab5a4a4 | OCamlPro/ocp-build | buildSubst.ml | (**************************************************************************)
(* *)
(* Typerex Tools *)
(* *)
(*   Copyright 2011-2017 OCamlPro SAS                                    *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
(*   the GNU General Public License version 3 described in the file     *)
(* LICENSE. *)
(* *)
(**************************************************************************)
open OcpSubst
(*
let global_subst = empty_subst ()
let add_to_subst env var vv =
(* Printf.eprintf "BuildSubst.add %S -> %S\n%!" v vv; *)
add_to_subst env (Printf.sprintf "%%{%s}%%" var) vv
let add_to_global_subst var var_value =
add_to_subst global_subst var var_value
let _ =
Array.iter (fun s ->
let var, var_value = OcpString.cut_at s '=' in
add_to_global_subst var var_value;
) (MinUnix.environment ())
let subst env_subst s =
let ss = snd (iter_subst env_subst s) in
Printf.eprintf " BuildSubst.subst % S - > % S\n% ! " s ss ;
ss
let subst_global = subst global_subst
let add_to_local_subst env var vv =
add_to_copy env (Printf.sprintf "%%{%s}%%" var) vv
let create_substituter list =
let subst = M.empty_subst () in
List.iter (fun (name, f) ->
M.add_to_subst subst ("%{" ^ name ^ "}%") f
) list;
subst
let apply_substituter subst s info =
let _, s1 = M.iter_subst subst s info in
Printf.eprintf " apply_substituter : % S - > % S\n% ! " s s1 ;
s1
*)
type 'context t = ('context -> string -> string)
let substitute f context s =
Printf.eprintf " BuildSubst.substitute % S\n% ! " s ;
let len = String.length s in
let b = Buffer.create len in
let rec iter b stack i =
if i = len then
match stack with
| [] -> ()
| b1 :: stack ->
Buffer.add_string b1 "%{";
Buffer.add_string b1 (Buffer.contents b);
iter b1 stack i
else
match s.[i] with
| '%' -> iter1 b stack (i+1)
| '}' when stack != [] -> iter2 b stack (i+1)
| c ->
Buffer.add_char b c;
iter b stack (i+1)
and iter1 b stack i =
if i = len then begin
Buffer.add_char b '%';
iter b stack i
end
else
match s.[i] with
| '%' ->
Buffer.add_char b '%';
iter b stack (i+1)
| '{' ->
iter (Buffer.create len) (b :: stack) (i+1)
| c ->
Buffer.add_char b '%';
Buffer.add_char b c;
iter b stack (i+1)
and iter2 b stack i =
if i = len then
match stack with
| [] -> assert false
| b1 :: stack ->
Buffer.add_string b1 ("%{" ^ Buffer.contents b ^ "}");
iter b1 stack i
else
match s.[i] with
| '%' -> begin
match stack with
| [] -> assert false
| b1 :: stack ->
let ident = Buffer.contents b in
let replacement = f context ident in
Buffer.add_string b1 replacement;
iter b1 stack (i+1)
end
| _ ->
Buffer.add_char b '}';
iter b stack i
in
iter b [] 0;
let s1 = Buffer.contents b in
Printf.eprintf " subst % S = % S\n% ! " s s1 ;
s1
let sub = substitute (fun context s ->
match s with
| "toto" -> "TOTO"
| "tutu" -> context
| x -> "<" ^ x ^ ">") "to"
let () =
assert (sub "%{toto}%" = "TOTO");
assert (sub "%{tutu}%" = "to");
assert (sub "%{toto}%%{tutu}%%{toto}%" = "TOTOtoTOTO");
assert (sub "%{to%{tutu}%}%%{tutu}%%{toto}%" = "TOTOtoTOTO");
assert (sub "%{toto" = "%{toto");
assert (sub "%{toto}" = "%{toto}");
assert (sub "%{toto}{}%" = "<toto}{>");
assert (sub "%{" = "%{");
assert (sub "%%" = "%");
()
open OcpCompat
let map_subst map s =
substitute (fun map s ->
try
StringMap.find s map
with Not_found -> "%{" ^ s ^ "}%") map s
let global_subst = ref StringMap.empty
(*
let add_to_subst env var vv = StringMap.add var vv
*)
let add_to_global_subst var var_value =
global_subst := StringMap.add var var_value !global_subst
let _ =
Array.iter (fun s ->
let var, var_value = OcpString.cut_at s '=' in
add_to_global_subst var var_value;
) (MinUnix.environment ())
let subst_global s = map_subst !global_subst s
(*
let add_to_local_subst env var vv =
  add_to_copy env (Printf.sprintf "%%{%s}%%" var) vv
*)
let create_substituter list =
let map = ref StringMap.empty in
List.iter (fun (name, f) ->
map := StringMap.add name f !map
) list;
fun context s ->
try
(StringMap.find s !map) context
with Not_found -> "%{" ^ s ^ "}%"
let apply_substituter subst s context =
substitute subst context s
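(* Illustrative sketch, using a hypothetical "version" key: the substituter
   looks the key up in the closed-over map and applies the stored function to
   the context, keeping unknown keys verbatim.

     let subst = create_substituter [ "version", (fun ctx -> ctx) ] in
     apply_substituter subst "ocp-%{version}%" "1.99"

   evaluates to "ocp-1.99", while "%{unknown}%" would be left unchanged. *)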
let putenv var var_value =
MinUnix.putenv var var_value;
add_to_global_subst var var_value
let global_subst () = !global_subst
| null | https://raw.githubusercontent.com/OCamlPro/ocp-build/56aff560bb438c12b2929feaf8379bc6f31b9840/tools/ocp-build/misc/buildSubst.ml | ocaml | ************************************************************************
Typerex Tools
All rights reserved. This file is distributed under the terms of
LICENSE.
************************************************************************
Printf.eprintf "BuildSubst.add %S -> %S\n%!" v vv;
let add_to_subst env var vv = StringMap.add var vv
| Copyright 2011 - 2017 OCamlPro SAS
the GNU General Public License version 3 described in the file
open OcpSubst
let global_subst = empty_subst ( )
let add_to_subst env var vv =
( * Printf.eprintf " BuildSubst.add % S - > % S\n% ! " v vv ;
let global_subst = empty_subst ()
let add_to_subst env var vv =
add_to_subst env (Printf.sprintf "%%{%s}%%" var) vv
let add_to_global_subst var var_value =
add_to_subst global_subst var var_value
let _ =
Array.iter (fun s ->
let var, var_value = OcpString.cut_at s '=' in
add_to_global_subst var var_value;
) (MinUnix.environment ())
let subst env_subst s =
let ss = snd (iter_subst env_subst s) in
Printf.eprintf " BuildSubst.subst % S - > % S\n% ! " s ss ;
ss
let subst_global = subst global_subst
let add_to_local_subst env var vv =
add_to_copy env (Printf.sprintf "%%{%s}%%" var) vv
let create_substituter list =
let subst = M.empty_subst () in
List.iter (fun (name, f) ->
M.add_to_subst subst ("%{" ^ name ^ "}%") f
) list;
subst
let apply_substituter subst s info =
let _, s1 = M.iter_subst subst s info in
Printf.eprintf " apply_substituter : % S - > % S\n% ! " s s1 ;
s1
*)
type 'context t = ('context -> string -> string)
let substitute f context s =
Printf.eprintf " BuildSubst.substitute % S\n% ! " s ;
let len = String.length s in
let b = Buffer.create len in
let rec iter b stack i =
if i = len then
match stack with
| [] -> ()
| b1 :: stack ->
Buffer.add_string b1 "%{";
Buffer.add_string b1 (Buffer.contents b);
iter b1 stack i
else
match s.[i] with
| '%' -> iter1 b stack (i+1)
| '}' when stack != [] -> iter2 b stack (i+1)
| c ->
Buffer.add_char b c;
iter b stack (i+1)
and iter1 b stack i =
if i = len then begin
Buffer.add_char b '%';
iter b stack i
end
else
match s.[i] with
| '%' ->
Buffer.add_char b '%';
iter b stack (i+1)
| '{' ->
iter (Buffer.create len) (b :: stack) (i+1)
| c ->
Buffer.add_char b '%';
Buffer.add_char b c;
iter b stack (i+1)
and iter2 b stack i =
if i = len then
match stack with
| [] -> assert false
| b1 :: stack ->
Buffer.add_string b1 ("%{" ^ Buffer.contents b ^ "}");
iter b1 stack i
else
match s.[i] with
| '%' -> begin
match stack with
| [] -> assert false
| b1 :: stack ->
let ident = Buffer.contents b in
let replacement = f context ident in
Buffer.add_string b1 replacement;
iter b1 stack (i+1)
end
| _ ->
Buffer.add_char b '}';
iter b stack i
in
iter b [] 0;
let s1 = Buffer.contents b in
Printf.eprintf " subst % S = % S\n% ! " s s1 ;
s1
let sub = substitute (fun context s ->
match s with
| "toto" -> "TOTO"
| "tutu" -> context
| x -> "<" ^ x ^ ">") "to"
let () =
assert (sub "%{toto}%" = "TOTO");
assert (sub "%{tutu}%" = "to");
assert (sub "%{toto}%%{tutu}%%{toto}%" = "TOTOtoTOTO");
assert (sub "%{to%{tutu}%}%%{tutu}%%{toto}%" = "TOTOtoTOTO");
assert (sub "%{toto" = "%{toto");
assert (sub "%{toto}" = "%{toto}");
assert (sub "%{toto}{}%" = "<toto}{>");
assert (sub "%{" = "%{");
assert (sub "%%" = "%");
()
open OcpCompat
let map_subst map s =
substitute (fun map s ->
try
StringMap.find s map
with Not_found -> "%{" ^ s ^ "}%") map s
let global_subst = ref StringMap.empty
let add_to_global_subst var var_value =
global_subst := StringMap.add var var_value !global_subst
let _ =
Array.iter (fun s ->
let var, var_value = OcpString.cut_at s '=' in
add_to_global_subst var var_value;
) (MinUnix.environment ())
let subst_global s = map_subst !global_subst s
let add_to_local_subst env var vv =
add_to_copy env ( Printf.sprintf " % % { % s}%% " var ) vv
let add_to_local_subst env var vv =
add_to_copy env (Printf.sprintf "%%{%s}%%" var) vv
*)
let create_substituter list =
let map = ref StringMap.empty in
List.iter (fun (name, f) ->
map := StringMap.add name f !map
) list;
fun context s ->
try
(StringMap.find s !map) context
with Not_found -> "%{" ^ s ^ "}%"
let apply_substituter subst s context =
substitute subst context s
let putenv var var_value =
MinUnix.putenv var var_value;
add_to_global_subst var var_value
let global_subst () = !global_subst
|
375e34bac34f64bdbee0c0ba6e63fc291fdb0559893720d50e3240f56f2d49d0 | jacekschae/learn-datomic-course-files | datomic.clj | (ns cheffy.components.datomic
(:require [datomic.client.api :as d]
[cheffy.validation :as validation]
[clojure.edn :as edn]))
(defn ident-has-attr?
[db ident attr]
(contains? (d/pull db {:eid ident :selector '[*]}) attr))
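;; Example of intended usage: once the schema has been transacted,
;; (ident-has-attr? db :account/account-id :db/ident) returns true, which is
;; how load-dataset below decides whether the seed steps still need to run.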
(defn load-dataset
[conn]
(let [db (d/db conn)
tx #(d/transact conn {:tx-data %})]
(when-not (ident-has-attr? db :account/account-id :db/ident)
(tx (-> "src/resources/cheffy/schema.edn" slurp edn/read-string))
(tx (-> "src/resources/cheffy/seed.edn" slurp edn/read-string)))
(when-not (ident-has-attr? db :account/account-id :db.attr/preds)
(tx validation/attr-pred))
(when-not (ident-has-attr? db :account/validate :db.entity/attrs)
(tx validation/entity-attrs)))) | null | https://raw.githubusercontent.com/jacekschae/learn-datomic-course-files/f2378c84bade5cb64018f72aa9179a8c8bb25df4/increments/58-endpoint/src/main/cheffy/components/datomic.clj | clojure | (ns cheffy.components.datomic
(:require [datomic.client.api :as d]
[cheffy.validation :as validation]
[clojure.edn :as edn]))
(defn ident-has-attr?
[db ident attr]
(contains? (d/pull db {:eid ident :selector '[*]}) attr))
(defn load-dataset
[conn]
(let [db (d/db conn)
tx #(d/transact conn {:tx-data %})]
(when-not (ident-has-attr? db :account/account-id :db/ident)
(tx (-> "src/resources/cheffy/schema.edn" slurp edn/read-string))
(tx (-> "src/resources/cheffy/seed.edn" slurp edn/read-string)))
(when-not (ident-has-attr? db :account/account-id :db.attr/preds)
(tx validation/attr-pred))
(when-not (ident-has-attr? db :account/validate :db.entity/attrs)
(tx validation/entity-attrs)))) |
|
5f3c1e49710e9e6bafaf397c7263406812467bde9029b8e48bf8bb087eae2f95 | seereason/logic-classes | FirstOrder.hs | {-# LANGUAGE DeriveDataTypeable, FlexibleContexts, FlexibleInstances, MultiParamTypeClasses, TemplateHaskell, TypeFamilies, UndecidableInstances #-}
module Data.Logic.Types.FirstOrder
( withUnivQuants
, NFormula(..)
, NTerm(..)
, NPredicate(..)
) where
import Data.Data (Data)
import Data.Logic.ATP.Apply (HasApply(..), IsPredicate, prettyApply)
import Data.Logic.ATP.Equate (associativityEquate, HasEquate(equate, foldEquate), overtermsEq, ontermsEq, precedenceEquate, prettyEquate)
import Data.Logic.ATP.FOL (IsFirstOrder)
import Data.Logic.ATP.Formulas (IsAtom, IsFormula(..))
import Data.Logic.ATP.Lit (IsLiteral(..))
import Data.Logic.ATP.Pretty (HasFixity(..), Pretty(pPrintPrec), Side(Top))
import Data.Logic.ATP.Prop (BinOp(..), IsPropositional(..))
import Data.Logic.ATP.Quantified (associativityQuantified, exists, IsQuantified(..), precedenceQuantified, prettyQuantified, Quant(..))
import Data.Logic.ATP.Term (IsFunction, IsTerm(..), IsVariable(..), prettyTerm, V)
import Data.SafeCopy (base, deriveSafeCopy)
import Data.String (IsString(fromString))
import Data.Typeable (Typeable)
-- | Examine the formula to find the list of outermost universally
-- quantified variables, and call a function with that list and the
-- formula after the quantifiers are removed.
withUnivQuants :: IsQuantified formula => ([VarOf formula] -> formula -> r) -> formula -> r
withUnivQuants fn formula =
doFormula [] formula
where
doFormula vs f =
foldQuantified
(doQuant vs)
(\ _ _ _ -> fn (reverse vs) f)
(\ _ -> fn (reverse vs) f)
(\ _ -> fn (reverse vs) f)
(\ _ -> fn (reverse vs) f)
f
doQuant vs (:!:) v f = doFormula (v : vs) f
doQuant vs (:?:) v f = fn (reverse vs) (exists v f)
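-- Informal example: for a formula shaped like
-- @forall x. forall y. exists z. p x y z@ the callback receives the variable
-- list @[x, y]@ together with the remaining body @exists z. p x y z@; the
-- walk stops at the first quantifier that is not universal.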
-- | The range of a formula is {True, False} when it has no free variables.
data NFormula v p f
= Predicate (NPredicate p (NTerm v f))
| Combine (NFormula v p f) BinOp (NFormula v p f)
| Negate (NFormula v p f)
| Quant Quant v (NFormula v p f)
| TT
| FF
    -- Note that a derived Eq instance is not going to tell us that
    -- a&b is equal to b&a, let alone that ~(a&b) equals (~a)|(~b).
deriving (Eq, Ord, Data, Typeable, Show)
-- |A temporary type used in the fold method to represent the
-- combination of a predicate and its arguments. This reduces the
-- number of arguments to foldFirstOrder and makes it easier to manage the
-- mapping of the different instances to the class methods.
data NPredicate p term
= Equal term term
| Apply p [term]
deriving (Eq, Ord, Data, Typeable, Show)
-- | The range of a term is an element of a set.
data NTerm v f
= NVar v -- ^ A variable, either free or
-- bound by an enclosing quantifier.
| FunApp f [NTerm v f] -- ^ Function application.
-- Constants are encoded as
-- nullary functions. The result
-- is another term.
deriving (Eq, Ord, Data, Typeable, Show)
instance IsVariable v => IsString (NTerm v f) where
fromString = NVar . fromString
instance (IsVariable v, Pretty v, IsFunction f, Pretty f) => Pretty (NTerm v f) where
pPrintPrec = prettyTerm
instance (IsPredicate p, IsTerm term) => HasFixity (NPredicate p term) where
precedence = precedenceEquate
associativity = associativityEquate
instance (IsPredicate p, IsTerm term) => IsAtom (NPredicate p term)
instance HasFixity (NTerm v f) where
instance (IsVariable v, IsPredicate p, IsFunction f, atom ~ NPredicate p (NTerm v f), Pretty atom
) => IsPropositional (NFormula v p f) where
foldPropositional' ho _ _ _ _ fm@(Quant _ _ _) = ho fm
foldPropositional' _ co _ _ _ (Combine x op y) = co x op y
foldPropositional' _ _ ne _ _ (Negate x) = ne x
foldPropositional' _ _ _ tf _ TT = tf True
foldPropositional' _ _ _ tf _ FF = tf False
foldPropositional' _ _ _ _ at (Predicate x) = at x
a .|. b = Combine a (:|:) b
a .&. b = Combine a (:&:) b
a .=>. b = Combine a (:=>:) b
a .<=>. b = Combine a (:<=>:) b
foldCombination = error "FIXME foldCombination"
instance (IsVariable v, IsPredicate p, IsFunction f) => HasFixity (NFormula v p f) where
precedence = precedenceQuantified
associativity = associativityQuantified
-- instance (IsVariable v, IsPredicate p, IsFunction f) => Pretty (NPredicate p (NTerm v f)) where
-- pPrint p = foldEquate prettyEquate prettyApply p
instance (IsPredicate p, IsTerm term) => Pretty (NPredicate p term) where
pPrintPrec d r = foldEquate (prettyEquate d r) prettyApply
instance (IsVariable v, IsPredicate p, IsFunction f) => Pretty (NFormula v p f) where
pPrintPrec = prettyQuantified Top
instance (IsPredicate p, IsTerm term) => HasApply (NPredicate p term) where
type PredOf (NPredicate p term) = p
type TermOf (NPredicate p term) = term
applyPredicate = Apply
foldApply' _ f (Apply p ts) = f p ts
foldApply' d _ x = d x
overterms = overtermsEq
onterms = ontermsEq
instance (IsPredicate p, IsTerm term) => HasEquate (NPredicate p term) where
equate = Equal
foldEquate eq _ (Equal t1 t2) = eq t1 t2
foldEquate _ ap (Apply p ts) = ap p ts
{-
instance HasBoolean p => HasBoolean (NPredicate p (NTerm v f)) where
fromBool x = Apply (fromBool x) []
asBool (Apply p []) = asBool p
asBool _ = Nothing
-}
instance (IsVariable v, IsPredicate p, IsFunction f
) => IsFormula (NFormula v p f) where
type AtomOf (NFormula v p f) = NPredicate p (NTerm v f)
atomic = Predicate
onatoms f (Negate fm) = Negate (onatoms f fm)
onatoms _ TT = TT
onatoms _ FF = FF
onatoms f (Combine lhs op rhs) = Combine (onatoms f lhs) op (onatoms f rhs)
onatoms f (Quant op v fm) = Quant op v (onatoms f fm)
onatoms f (Predicate p) = Predicate (f p)
overatoms f (Negate fm) b = overatoms f fm b
overatoms _ TT b = b
overatoms _ FF b = b
overatoms f (Combine lhs _ rhs) b = overatoms f lhs (overatoms f rhs b)
overatoms f (Quant _ _ fm) b = overatoms f fm b
overatoms f (Predicate p) b = f p b
asBool TT = Just True
asBool FF = Just False
asBool _ = Nothing
true = TT
false = FF
instance (IsVariable v, IsPredicate p, IsFunction f
, atom ~ NPredicate p (NTerm v f) -- , Pretty atom
) => IsQuantified (NFormula v p f) where
type VarOf (NFormula v p f) = v
foldQuantified qu _ _ _ _ (Quant op v fm) = qu op v fm
foldQuantified _ co ne tf at fm = foldPropositional' (error "FIXME - need other function in case of embedded quantifiers") co ne tf at fm
quant = Quant
instance (IsVariable v, IsPredicate p, IsFunction f
, atom ~ NPredicate p (NTerm v f) -- , Pretty atom
) => IsLiteral (NFormula v p f) where
foldLiteral' ho ne _tf at fm =
case fm of
Negate fm' -> ne fm'
Predicate x -> at x
_ -> ho fm
naiveNegate = Negate
foldNegation _ ne (Negate x) = ne x
foldNegation other _ fm = other fm
{-
instance (IsPredicate p, IsVariable v, IsFunction f, IsAtom (NPredicate p (NTerm v f))
) => HasEquate (NPredicate p (NTerm v f)) p (NTerm v f) where
overterms = overtermsEq
onterms = ontermsEq
-}
instance (IsVariable v, IsPredicate p, IsFunction f, IsAtom (NPredicate p (NTerm v f))
) => IsFirstOrder (NFormula v p f)
instance (IsVariable v, IsFunction f) => IsTerm (NTerm v f) where
type TVarOf (NTerm v f) = v
type FunOf (NTerm v f) = f
vt = NVar
fApp = FunApp
foldTerm vf _ (NVar v) = vf v
foldTerm _ ff (FunApp f ts) = ff f ts
$(deriveSafeCopy 1 'base ''BinOp)
$(deriveSafeCopy 1 'base ''Quant)
$(deriveSafeCopy 1 'base ''NFormula)
$(deriveSafeCopy 1 'base ''NPredicate)
$(deriveSafeCopy 1 'base ''NTerm)
$(deriveSafeCopy 1 'base ''V)
| null | https://raw.githubusercontent.com/seereason/logic-classes/819059218027c3ee77bd2ef3a873fcca230d54b4/Data/Logic/Types/FirstOrder.hs | haskell | | Examine the formula to find the list of outermost universally
quantified variables, and call a function with that list and the
formula after the quantifiers are removed.
| The range of a formula is {True, False} when it has no free variables.
|A temporary type used in the fold method to represent the
combination of a predicate and its arguments. This reduces the
number of arguments to foldFirstOrder and makes it easier to manage the
mapping of the different instances to the class methods.
| The range of a term is an element of a set.
^ A variable, either free or
bound by an enclosing quantifier.
^ Function application.
Constants are encoded as
nullary functions. The result
is another term.
pPrint p = foldEquate prettyEquate prettyApply p
, Pretty atom
, Pretty atom | # LANGUAGE DeriveDataTypeable , FlexibleContexts , FlexibleInstances , MultiParamTypeClasses , TemplateHaskell , TypeFamilies , UndecidableInstances #
module Data.Logic.Types.FirstOrder
( withUnivQuants
, NFormula(..)
, NTerm(..)
, NPredicate(..)
) where
import Data.Data (Data)
import Data.Logic.ATP.Apply (HasApply(..), IsPredicate, prettyApply)
import Data.Logic.ATP.Equate (associativityEquate, HasEquate(equate, foldEquate), overtermsEq, ontermsEq, precedenceEquate, prettyEquate)
import Data.Logic.ATP.FOL (IsFirstOrder)
import Data.Logic.ATP.Formulas (IsAtom, IsFormula(..))
import Data.Logic.ATP.Lit (IsLiteral(..))
import Data.Logic.ATP.Pretty (HasFixity(..), Pretty(pPrintPrec), Side(Top))
import Data.Logic.ATP.Prop (BinOp(..), IsPropositional(..))
import Data.Logic.ATP.Quantified (associativityQuantified, exists, IsQuantified(..), precedenceQuantified, prettyQuantified, Quant(..))
import Data.Logic.ATP.Term (IsFunction, IsTerm(..), IsVariable(..), prettyTerm, V)
import Data.SafeCopy (base, deriveSafeCopy)
import Data.String (IsString(fromString))
import Data.Typeable (Typeable)
withUnivQuants :: IsQuantified formula => ([VarOf formula] -> formula -> r) -> formula -> r
withUnivQuants fn formula =
doFormula [] formula
where
doFormula vs f =
foldQuantified
(doQuant vs)
(\ _ _ _ -> fn (reverse vs) f)
(\ _ -> fn (reverse vs) f)
(\ _ -> fn (reverse vs) f)
(\ _ -> fn (reverse vs) f)
f
doQuant vs (:!:) v f = doFormula (v : vs) f
doQuant vs (:?:) v f = fn (reverse vs) (exists v f)
data NFormula v p f
= Predicate (NPredicate p (NTerm v f))
| Combine (NFormula v p f) BinOp (NFormula v p f)
| Negate (NFormula v p f)
| Quant Quant v (NFormula v p f)
| TT
| FF
Note that a derived Eq instance is not going to tell us that
a&b is equal to b&a , let alone that ~(a&b ) equals ( ~a)|(~b ) .
deriving (Eq, Ord, Data, Typeable, Show)
data NPredicate p term
= Equal term term
| Apply p [term]
deriving (Eq, Ord, Data, Typeable, Show)
data NTerm v f
deriving (Eq, Ord, Data, Typeable, Show)
instance IsVariable v => IsString (NTerm v f) where
fromString = NVar . fromString
instance (IsVariable v, Pretty v, IsFunction f, Pretty f) => Pretty (NTerm v f) where
pPrintPrec = prettyTerm
instance (IsPredicate p, IsTerm term) => HasFixity (NPredicate p term) where
precedence = precedenceEquate
associativity = associativityEquate
instance (IsPredicate p, IsTerm term) => IsAtom (NPredicate p term)
instance HasFixity (NTerm v f) where
instance (IsVariable v, IsPredicate p, IsFunction f, atom ~ NPredicate p (NTerm v f), Pretty atom
) => IsPropositional (NFormula v p f) where
foldPropositional' ho _ _ _ _ fm@(Quant _ _ _) = ho fm
foldPropositional' _ co _ _ _ (Combine x op y) = co x op y
foldPropositional' _ _ ne _ _ (Negate x) = ne x
foldPropositional' _ _ _ tf _ TT = tf True
foldPropositional' _ _ _ tf _ FF = tf False
foldPropositional' _ _ _ _ at (Predicate x) = at x
a .|. b = Combine a (:|:) b
a .&. b = Combine a (:&:) b
a .=>. b = Combine a (:=>:) b
a .<=>. b = Combine a (:<=>:) b
foldCombination = error "FIXME foldCombination"
instance (IsVariable v, IsPredicate p, IsFunction f) => HasFixity (NFormula v p f) where
precedence = precedenceQuantified
associativity = associativityQuantified
instance ( v , IsPredicate p , IsFunction f ) = > Pretty ( NPredicate p ( NTerm v f ) ) where
instance (IsPredicate p, IsTerm term) => Pretty (NPredicate p term) where
pPrintPrec d r = foldEquate (prettyEquate d r) prettyApply
instance (IsVariable v, IsPredicate p, IsFunction f) => Pretty (NFormula v p f) where
pPrintPrec = prettyQuantified Top
instance (IsPredicate p, IsTerm term) => HasApply (NPredicate p term) where
type PredOf (NPredicate p term) = p
type TermOf (NPredicate p term) = term
applyPredicate = Apply
foldApply' _ f (Apply p ts) = f p ts
foldApply' d _ x = d x
overterms = overtermsEq
onterms = ontermsEq
instance (IsPredicate p, IsTerm term) => HasEquate (NPredicate p term) where
equate = Equal
foldEquate eq _ (Equal t1 t2) = eq t1 t2
foldEquate _ ap (Apply p ts) = ap p ts
instance HasBoolean p = > HasBoolean ( NPredicate p ( NTerm v f ) ) where
fromBool x = Apply ( fromBool x ) [ ]
asBool ( Apply p [ ] ) = asBool p
asBool _ = Nothing
instance HasBoolean p => HasBoolean (NPredicate p (NTerm v f)) where
fromBool x = Apply (fromBool x) []
asBool (Apply p []) = asBool p
asBool _ = Nothing
-}
instance (IsVariable v, IsPredicate p, IsFunction f
) => IsFormula (NFormula v p f) where
type AtomOf (NFormula v p f) = NPredicate p (NTerm v f)
atomic = Predicate
onatoms f (Negate fm) = Negate (onatoms f fm)
onatoms _ TT = TT
onatoms _ FF = FF
onatoms f (Combine lhs op rhs) = Combine (onatoms f lhs) op (onatoms f rhs)
onatoms f (Quant op v fm) = Quant op v (onatoms f fm)
onatoms f (Predicate p) = Predicate (f p)
overatoms f (Negate fm) b = overatoms f fm b
overatoms _ TT b = b
overatoms _ FF b = b
overatoms f (Combine lhs _ rhs) b = overatoms f lhs (overatoms f rhs b)
overatoms f (Quant _ _ fm) b = overatoms f fm b
overatoms f (Predicate p) b = f p b
asBool TT = Just True
asBool FF = Just False
asBool _ = Nothing
true = TT
false = FF
instance (IsVariable v, IsPredicate p, IsFunction f
) => IsQuantified (NFormula v p f) where
type VarOf (NFormula v p f) = v
foldQuantified qu _ _ _ _ (Quant op v fm) = qu op v fm
foldQuantified _ co ne tf at fm = foldPropositional' (error "FIXME - need other function in case of embedded quantifiers") co ne tf at fm
quant = Quant
instance (IsVariable v, IsPredicate p, IsFunction f
) => IsLiteral (NFormula v p f) where
foldLiteral' ho ne _tf at fm =
case fm of
Negate fm' -> ne fm'
Predicate x -> at x
_ -> ho fm
naiveNegate = Negate
foldNegation _ ne (Negate x) = ne x
foldNegation other _ fm = other fm
instance ( IsPredicate p , v , , IsAtom ( NPredicate p ( NTerm v f ) )
) = > HasEquate ( NPredicate p ( NTerm v f ) ) p ( NTerm v f ) where
overterms = overtermsEq
onterms = ontermsEq
instance (IsPredicate p, IsVariable v, IsFunction f, IsAtom (NPredicate p (NTerm v f))
) => HasEquate (NPredicate p (NTerm v f)) p (NTerm v f) where
overterms = overtermsEq
onterms = ontermsEq
-}
instance (IsVariable v, IsPredicate p, IsFunction f, IsAtom (NPredicate p (NTerm v f))
) => IsFirstOrder (NFormula v p f)
instance (IsVariable v, IsFunction f) => IsTerm (NTerm v f) where
type TVarOf (NTerm v f) = v
type FunOf (NTerm v f) = f
vt = NVar
fApp = FunApp
foldTerm vf _ (NVar v) = vf v
foldTerm _ ff (FunApp f ts) = ff f ts
$(deriveSafeCopy 1 'base ''BinOp)
$(deriveSafeCopy 1 'base ''Quant)
$(deriveSafeCopy 1 'base ''NFormula)
$(deriveSafeCopy 1 'base ''NPredicate)
$(deriveSafeCopy 1 'base ''NTerm)
$(deriveSafeCopy 1 'base ''V)
|
629079c8dcd156508dc6f03311539ca1d322dabf13980aaf472ebeabeca1a83f | lambdageek/unbound-generics | PropOpenClose.hs | {-# LANGUAGE DeriveGeneric, DeriveDataTypeable #-}
module PropOpenClose (test_openClose) where
import Control.Applicative (Applicative(..), (<$>))
import Data.Monoid (Any(..))
import Data.Typeable (Typeable)
import GHC.Generics (Generic)
import Test.QuickCheck
import Test.Tasty (testGroup, TestTree)
import Test.Tasty.QuickCheck (testProperty)
import Unbound.Generics.LocallyNameless
import Unbound.Generics.LocallyNameless.Internal.Fold (foldMapOf, toListOf)
import AlphaProperties
-- Wrapper around 'Name a' that has an Arbitrary instance that generates free names.
-- Note that this doesn't guarantee /freshness/. The name may clash with some other one.
-- But it will never be a bound name.
newtype FreeName a = FreeName {getFreeName :: Name a}
deriving (Show)
instance Arbitrary (FreeName a) where
arbitrary = do
s <- listOf1 (elements ['a'..'z'])
n <- arbitrary
return $ FreeName $ makeName s n
shrink = const []
----------------------------------------
-- example data structure, with no bound names.
data T a = Leaf !a
| V !(Name (T a))
| B !(T a) !(T a)
deriving (Show, Typeable, Generic)
instance (Typeable a, Alpha a) => Alpha (T a)
instance Arbitrary a => Arbitrary (T a) where
arbitrary =
oneof
[
Leaf <$> arbitrary
,(V . getFreeName) <$> arbitrary
, B <$> arbitrary <*> arbitrary
]
-- generator that picks out one of the free variables of a tree
arbVarsOf :: (Alpha a, Typeable a) => T a -> Gen (Name (T a))
arbVarsOf t =
let vs = toListOf fv t
in elements vs
-- spec for free variables of a tree.
-- fvSpec :: Traversal' (T a) (Name (T a))
fvSpec :: Applicative f => (Name (T a) -> f (Name (T a))) -> T a -> f (T a)
fvSpec f t =
case t of
Leaf {} -> pure t
V v -> V <$> f v
B t1 t2 -> B <$> fvSpec f t1 <*> fvSpec f t2
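-- Informal example: for a tree such as @B (V (s2n "x")) (Leaf 1)@ both 'fv'
-- and 'fvSpec' should list exactly the free name "x"; prop_fv_spec below
-- checks that the two agree on arbitrary trees.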
----------------------------------------
-- Properties
-- every tree is alpha-equivalent to itself
prop_refl :: T Int -> Property
prop_refl x = x =~= x
-- generic fv gives the same answer as fvSpec
prop_fv_spec :: T Int -> Property
prop_fv_spec t = toListOf fv t === toListOf fvSpec t
-- if a name is already free opening it has no effect
prop_open_idempotent :: T Int -> Property
prop_open_idempotent t =
forAll (arbVarsOf t) $ \v -> open initialCtx (nthPatFind v) t =~= t
-- if you close over a variable, then it is no longer free.
prop_close_binds :: T Int -> Property
prop_close_binds t =
(not $ null $ toListOf fvAny t) ==>
forAll (arbVarsOf t) $ \v -> v /~@ close initialCtx (namePatFind v) t
----------------------------------------
-- Test group
test_openClose :: TestTree
test_openClose =
testGroup "QuickCheck properties"
[
testProperty "reflexivity" prop_refl
, testProperty "fv specification" prop_fv_spec
, testProperty "open idempotency" prop_open_idempotent
, testProperty "closing binds variables" prop_close_binds
]
| null | https://raw.githubusercontent.com/lambdageek/unbound-generics/54096156b06dda2fcc523939de6da64b43737ea4/test/PropOpenClose.hs | haskell | Wrapper around 'Name a' that has an Arbitrary instance that generates free names.
Note that this doesn't guarantee /freshness/. The name may clash with some other one.
But it will never be a bound name.
--------------------------------------
example data structure, with no bound names.
spec for free variables of a tree.
fvSpec :: Traversal' (T a) (Name (T a))
--------------------------------------
Properties
every tree is alpha-equivalent to itself
generic fv gives the same answer as fvSpec
if a name is already free opening it has no effect
if you close over a variable, then it is no longer free.
--------------------------------------
Test group | # LANGUAGE DeriveGeneric , DeriveDataTypeable #
module PropOpenClose (test_openClose) where
import Control.Applicative (Applicative(..), (<$>))
import Data.Monoid (Any(..))
import Data.Typeable (Typeable)
import GHC.Generics (Generic)
import Test.QuickCheck
import Test.Tasty (testGroup, TestTree)
import Test.Tasty.QuickCheck (testProperty)
import Unbound.Generics.LocallyNameless
import Unbound.Generics.LocallyNameless.Internal.Fold (foldMapOf, toListOf)
import AlphaProperties
newtype FreeName a = FreeName {getFreeName :: Name a}
deriving (Show)
instance Arbitrary (FreeName a) where
arbitrary = do
s <- listOf1 (elements ['a'..'z'])
n <- arbitrary
return $ FreeName $ makeName s n
shrink = const []
data T a = Leaf !a
| V !(Name (T a))
| B !(T a) !(T a)
deriving (Show, Typeable, Generic)
instance (Typeable a, Alpha a) => Alpha (T a)
instance Arbitrary a => Arbitrary (T a) where
arbitrary =
oneof
[
Leaf <$> arbitrary
,(V . getFreeName) <$> arbitrary
, B <$> arbitrary <*> arbitrary
]
generator that picks out one of the free variables of a tree
arbVarsOf :: (Alpha a, Typeable a) => T a -> Gen (Name (T a))
arbVarsOf t =
let vs = toListOf fv t
in elements vs
fvSpec :: Applicative f => (Name (T a) -> f (Name (T a))) -> T a -> f (T a)
fvSpec f t =
case t of
Leaf {} -> pure t
V v -> V <$> f v
B t1 t2 -> B <$> fvSpec f t1 <*> fvSpec f t2
prop_refl :: T Int -> Property
prop_refl x = x =~= x
prop_fv_spec :: T Int -> Property
prop_fv_spec t = toListOf fv t === toListOf fvSpec t
prop_open_idempotent :: T Int -> Property
prop_open_idempotent t =
forAll (arbVarsOf t) $ \v -> open initialCtx (nthPatFind v) t =~= t
prop_close_binds :: T Int -> Property
prop_close_binds t =
(not $ null $ toListOf fvAny t) ==>
forAll (arbVarsOf t) $ \v -> v /~@ close initialCtx (namePatFind v) t
test_openClose :: TestTree
test_openClose =
testGroup "QuickCheck properties"
[
testProperty "reflexivity" prop_refl
, testProperty "fv specification" prop_fv_spec
, testProperty "open idempotency" prop_open_idempotent
, testProperty "closing binds variables" prop_close_binds
]
|
40a5663833e0cdf5edf80e20bc64c9848b16be44a359ddd29d5f28ebf2609fee | FlowForwarding/loom | tap_yaws_sup.erl | -module(tap_yaws_sup).
-behavior(supervisor).
-export([start_link/0]).
-export([init/1]).
start_link() ->
supervisor:start_link({local, ?MODULE}, ?MODULE, []).
init([]) ->
YBed = {tap_yaws,
{tap_yaws, start_link, [self()]},
permanent, 5000, worker, [tap_yaws]},
{ok, {{one_for_all, 0, 1}, [YBed]}}.
| null | https://raw.githubusercontent.com/FlowForwarding/loom/86a9c5aa8b7d4776062365716c9a3dbbf3330bc5/tapestry/apps/tapestry/src/tap_yaws_sup.erl | erlang | -module(tap_yaws_sup).
-behavior(supervisor).
-export([start_link/0]).
-export([init/1]).
start_link() ->
supervisor:start_link({local, ?MODULE}, ?MODULE, []).
init([]) ->
YBed = {tap_yaws,
{tap_yaws, start_link, [self()]},
permanent, 5000, worker, [tap_yaws]},
{ok, {{one_for_all, 0, 1}, [YBed]}}.
|
|
13041a3292fe87e57c64df4381acfbf6880bdd82168740a3fa59ed6499f402cb | ddssff/refact-global-hse | SrcLoc.hs | -- | Utility functions for the haskell-src-exts type SrcLoc.
{-# LANGUAGE BangPatterns, CPP, FlexibleInstances, PackageImports, ScopedTypeVariables, TemplateHaskell, UndecidableInstances #-}
{-# OPTIONS_GHC -Wall -fno-warn-orphans #-}
module Refactor.SrcLoc
    ( -- * SpanInfo queries
srcLoc
, EndLoc(endLoc)
-- * Location and span info for a piece of text
, spanOfText
, endLocOfText
-- * Split text at a location
, splitText
, splits
, splits'
-- * Use span info to extract text
, textTripleOfSpan
, textOfSpan
    -- * Repair spans that have column set to 0
, fixSpan
, testSpan
, fixEnds
, mapTopAnnotations
, locSum
, locDiff
, endOfPragmas
, endOfHeader
, endOfImports
, endOfImportSpecs
, endOfDecls
, endOfModule
, startOfModule
, startOfPragmas
, startOfHeader
, startOfImports
, startOfDecls
) where
import Control.Monad.State (get, put, runState, State)
import Data.Char (isSpace)
import Data.List (dropWhileEnd)
import Data.Monoid ((<>))
import Language.Haskell.Exts.Syntax -- (Annotated(ann), Module(..))
import Language.Haskell.Exts.Comments (Comment(..))
import Language.Haskell.Exts.SrcLoc (mkSrcSpan, SrcInfo(..), SrcLoc(..), SrcSpan(..), SrcSpanInfo(..))
import Language.Haskell.Names
import Refactor.ModuleInfo
import Refactor.SrcSpan
import Refactor.Utils (EZPrint(ezPrint), lines')
import Text.PrettyPrint.HughesPJClass (Pretty(pPrint), prettyShow, text)
{-
srcLoc :: SpanInfo a => a -> SrcLoc
srcLoc x = let (SrcSpan f b e _ _) = srcSpan x in SrcLoc f b e
endLoc :: SpanInfo a => a -> SrcLoc
endLoc x = let (SrcSpan f _ _ b e) = srcSpan x in SrcLoc f b e
-}
locDiff :: SrcLoc -> SrcLoc -> SrcLoc
locDiff (SrcLoc file l1 c1) (SrcLoc _ l2 c2) =
if l1 == l2
then SrcLoc file 1 (c1 - c2 + 1)
else SrcLoc file (l1 - l2 + 1) c1
spanDiff :: SrcSpan -> SrcLoc -> SrcSpan
spanDiff sp l = mkSrcSpan (locDiff (srcLoc sp) l) (locDiff (endLoc sp) l)
locSum :: SrcLoc -> SrcLoc -> SrcLoc
locSum (SrcLoc f l1 c1) (SrcLoc _ l2 c2) =
if l2 == 1
then SrcLoc f (l1 + l2 - 1) (c1 + c2 - 1)
else SrcLoc f (l1 + l2 - 1) c2
endLocOfText :: FilePath -> String -> SrcLoc
endLocOfText path x =
case ls of
[] -> SrcLoc {srcFilename = path, srcLine = 1, srcColumn = 1}
_ -> SrcLoc {srcFilename = path, srcLine = length ls, srcColumn = length (last ls) + 1}
where ls = lines' x
-- | Return a span that exactly covers the string s
spanOfText :: FilePath -> String -> SrcSpanInfo
spanOfText path s =
let end = endLocOfText path s in
SrcSpanInfo {srcInfoSpan = mkSrcSpan (SrcLoc path 1 1) (SrcLoc path (srcLine end) (srcColumn end)),
srcInfoPoints = []}
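-- For instance, @spanOfText "Foo.hs" "ab\ncd"@ yields a span from line 1,
-- column 1 to line 2, column 3, covering the whole two-line string.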
-- | Return the text before, within, and after a span
textTripleOfSpan :: (SrcInfo a, EndLoc a) => a -> String -> (String, String, String)
textTripleOfSpan sp s =
let (pref, s') = splitText (srcLoc sp) s in
let (s'', suff) = splitText (locDiff (endLoc sp) (srcLoc sp)) s' in
(pref, s'', suff)
textOfSpan :: (SrcInfo a, EndLoc a) => a -> String -> String
textOfSpan sp s =
let (_, s') = splitText (srcLoc sp) s in
let (s'', _) = splitText (locDiff (endLoc sp) (srcLoc sp)) s' in
s''
testSpan :: (SrcInfo a, EndLoc a) => String -> a -> a
testSpan msg sp =
case (srcLoc sp, endLoc sp) of
(SrcLoc _ l1 c1, SrcLoc _ l2 c2) | c1 < 1 || c2 < 1 || l1 < 1 || l2 < 1 ||
l2 < l1 || (l2 == l1 && c2 < c1) -> error ("testSpan - " ++ msg)
_ -> sp
splitText :: SrcLoc -> String -> (String, String)
splitText loc@(SrcLoc _ l0 c0) s0 =
fst $ runState f (1, 1, "", s0)
where
f :: State (Int, Int, String, String) (String, String)
f = do (l, c, r, s) <- get
case (compare l l0, compare c c0) of
(LT, _) ->
case span (/= '\n') s of
(r', '\n' : s') ->
put (l + 1, 1, r ++ r' ++ "\n", s') >> f
(_, "") -> case s of
-- This should not happen, but if the last line
                           -- lacks a newline terminator, this
-- will set the end location as if the terminator
-- was present.
"" -> pure (r, s)
(ch : s') -> put (l, c + 1, r ++ [ch], s') >> f
_ -> error "splitText"
(_, LT) ->
case s of
[] -> error ("splitText " ++ ", loc=" ++ show loc ++ ", s=" ++ show s)
(ch : s') -> put (l, c + 1, r ++ [ch], s') >> f
(EQ, EQ) -> pure (r, s)
_ -> error ("splitText - invalid arguments: loc=" ++ show loc ++ ", s=" ++ show s0)
-- | Using n locations split a string into n + 1 segments.
-- splits (SrcLoc "" 80 20) [SrcLoc "" 80 22, SrcLoc "" 80 25, SrcLoc "" 81 4] "first line\nsecond line" ->
--   [("fi",SrcSpan "" 80 20 80 22),
--    ("rst",SrcSpan "" 80 22 80 25),
--    (" line\nsec",SrcSpan "" 80 25 81 4),
--    ("ond line",SrcSpan "" 81 4 81 12)]
splits :: SrcLoc -> [SrcLoc] -> String -> [(String, SrcSpan)]
splits offset0@(SrcLoc file _ _) locs0@(_ : _) s0 =
zip (f offset0 locs0 s0) (map (uncurry mkSrcSpan) (zip (offset0 : locs0) (locs0 ++ [locSum offset0 (endLocOfText file s0)])))
where
f _ [] s = [s]
f offset (loc : locs) s =
let (pre, suf) = splitText (locDiff loc offset) s in
pre : f loc locs suf
splits (SrcLoc _ _ _) [] _ = error "splits"
data Seg
= Span (SrcLoc, SrcLoc) String
| Between (SrcLoc, SrcLoc) String
deriving Show
-- splits' (SrcLoc "" 80 20) [SrcSpan "" 80 20 80 22, SrcSpan "" 80 25 81 4] "first line\nsecond line"
--   [Span (SrcLoc "" 80 20) "fi", Between (SrcLoc "" 80 22) "rst",
--    Span (SrcLoc "" 80 25 81 4) " line\nsec", Between (SrcLoc "" 81 4) "ond line"]
splits' :: FilePath -> [SrcSpan] -> String -> [Seg]
splits' file spans s =
f (SrcLoc file 1 1) spans s
where
f :: SrcLoc -> [SrcSpan] -> String -> [Seg]
f offset [] s' = [Between (offset, locSum offset (endLocOfText file s')) s']
f offset (sp : sps) s''' =
let (pre, s') = splitText (locDiff (srcLoc sp) offset) s''' in
let (seg, s'') = splitText (locDiff (endLoc sp) (srcLoc sp)) s' in
-- trace ("offset=" ++ show offset ++ ", sp=" ++ show sp ++ ", pre=" ++ show pre ++ ", seg=" ++ show seg) $
(if null pre then [] else [Between (offset, srcLoc sp) pre]) ++ [Span (srcLoc sp, endLoc sp) seg] ++ f (endLoc sp) sps s''
-- t1 r = trace ("splits' " ++ show file ++ " " ++ show spans ++ " " ++ show s ++ " -> " ++ show r) r
    -- t2 offset el b = trace ("splits' final: offset=" ++ show offset ++ ", el=" ++ show el ++ ", seg=" ++ show b) b
-- | Make sure every SrcSpan in the parsed module refers to existing
-- text.  They could still be in the wrong places, so this doesn't
-- guarantee the parse is valid, but it's a pretty good bet.
#if 0
validateParseResults :: Module SrcSpanInfo -> String -> IO ()
validateParseResults modul t =
mapM_ validateSpan (nub (sort (gFind modul :: [SrcSpan])))
where
-- validateSpan :: SrcSpan -> IO ()
validateSpan x =
let s = srcLoc x
e = endLoc x in
putStrLn ("span " ++ prettyShow s ++ "->" ++ prettyShow e ++ "=" ++ show (textOfSpan x t))
#endif
instance Pretty SrcLoc where
pPrint l = text ("(l" <> show (srcLine l) ++ ",c" ++ show (srcColumn l) ++ ")")
instance Pretty SrcSpan where
pPrint (SrcSpan _ bl bc el ec) = text ("(l" <> show bl ++ ",c" ++ show bc ++ ")->" ++
"(l" <> show el ++ ",c" ++ show ec ++ ")")
instance Pretty SrcSpanInfo where
pPrint = pPrint . srcInfoSpan
-- This happens, a span with end column 0, even though column
-- numbering begins at 1.  Is it a bug in haskell-src-exts?
fixSpan :: SrcSpanInfo -> SrcSpanInfo
fixSpan sp =
if srcSpanEndColumn (srcInfoSpan sp) == 0
then t1 $ sp {srcInfoSpan = (srcInfoSpan sp) {srcSpanEndColumn = 1}}
else sp
where
t1 sp' = {-trace ("fixSpan " ++ show (srcInfoSpan sp) ++ " -> " ++ show (srcInfoSpan sp'))-} sp'
instance EZPrint SrcLoc where
ezPrint = prettyShow
instance EZPrint SrcSpanInfo where
ezPrint = prettyShow
-- | Tighten the start and end points of a span to exclude any leading
-- and trailing whitespace and comments.
-- | Move the endpoint of a span to before any trailing whitespace and comments.
fixEnds :: [Comment] -> String -> SrcSpanInfo -> SrcSpanInfo
fixEnds cs s si@(SrcSpanInfo {srcInfoSpan = sp}) =
let b@(SrcLoc _ bl bc) = realBegin si cs s in
let e@(SrcLoc _ el ec) = realEnd si cs s in
case (b < srcLoc sp || b > endLoc sp || e < srcLoc sp || e > endLoc sp) of
True -> error "fixEnds returned position outside span"
_ -> si {srcInfoSpan = sp {srcSpanStartLine = bl, srcSpanStartColumn = bc,
srcSpanEndLine = el, srcSpanEndColumn = ec}}
-- | Given a SrcSpanInfo, find the "real" end of the object it covers,
-- meaning the position beyond which lies only whitespace and comments.
realEnd :: SrcSpanInfo -> [Comment] -> String -> SrcLoc
realEnd sp cs s =
let b@(SrcLoc file _ _) = srcLoc sp
e = endLoc sp
s'' = textOfSpan sp s
commentSpans = map (flip spanDiff b) .
takeWhile ((<= e) . endLoc) .
dropWhile ((< b) . srcLoc) .
map (\(Comment _ sp' _) -> sp') $ cs
segs = splits' file commentSpans s'' in
-- Use the end of the last nonspace segment
let e' = case dropWhile isWhite (reverse segs) of
[] -> endLocOfText file s''
(Span (_, x) _ : _) -> x
(Between (_, x) _ : _) -> x
(s''', _) = splitText e' s''
s'''' = dropWhileEnd isSpace s''' in
locSum b (endLocOfText file s'''')
-- e'' = locSum b e' in
-- if r < b || r > e then error ("realEnd: sp=" ++ show sp ++ ", segs=" ++ show segs ++ " -> " ++ show e'') else e''
where
isWhite (Between _ s') | all isSpace s' = True
isWhite (Span _ _) = True
isWhite _ = False
realBegin :: SrcSpanInfo -> [Comment] -> String -> SrcLoc
realBegin sp cs s =
let b@(SrcLoc file _ _) = srcLoc sp
e = endLoc sp
s'' = textOfSpan sp s
commentSpans = map (flip spanDiff b) .
takeWhile ((<= e) . endLoc) .
dropWhile ((< b) . srcLoc) .
map (\(Comment _ sp' _) -> sp') $ cs
segs = splits' file commentSpans s'' in
let b' = case dropWhile isWhite segs of
[] -> b
(Span (x, _) _ : _) -> {-locSum b-} x
(Between (x, _) _ : _) -> {-locSum b-} x
(_, s''') = splitText b' s''
b'' = endLocOfText "" (takeWhile isSpace s''') in
foldr1 locSum [b, b', b'']
-- if r < b || r > e then error ("realEnd: sp=" ++ show sp ++ ", segs=" ++ show segs ++ " -> " ++ show r) else r
where
isWhite (Between _ s') | all isSpace s' = True
isWhite (Span _ _) = True
isWhite _ = False
-- | Modify end locations so they precede any trailing whitespace
mapTopAnnotations :: forall a. (a -> a) -> Module a -> Module a
mapTopAnnotations fn (Module loc mh ps is ds) =
Module loc (fmap fixMH mh) ps (map fixImport is) (map fixDecl ds)
where
fixMH :: ModuleHead a -> ModuleHead a
fixMH (ModuleHead sp name warn specs) = ModuleHead (fn sp) name warn specs
fixImport :: ImportDecl a -> ImportDecl a
fixImport i = i {importAnn = fn (importAnn i)}
fixDecl :: Decl a -> Decl a
fixDecl (TypeDecl l a b) = (TypeDecl (fn l) a b)
fixDecl (TypeFamDecl l a b c) = (TypeFamDecl (fn l) a b c)
fixDecl (ClosedTypeFamDecl l a b c d) = (ClosedTypeFamDecl (fn l) a b c d)
fixDecl (DataDecl l a b c d e) = (DataDecl (fn l) a b c d e)
fixDecl (GDataDecl l a b c d e f) = GDataDecl (fn l) a b c d e f
fixDecl (DataFamDecl l a b c) = (DataFamDecl (fn l) a b c)
fixDecl (TypeInsDecl l a b) = (TypeInsDecl (fn l) a b)
fixDecl (DataInsDecl l a b c d) = (DataInsDecl (fn l) a b c d)
fixDecl (GDataInsDecl l a b c d e) = (GDataInsDecl (fn l) a b c d e)
fixDecl (ClassDecl l a b c d) = (ClassDecl (fn l) a b c d)
fixDecl (InstDecl l a b c) = (InstDecl (fn l) a b c)
fixDecl (DerivDecl l a b c) = (DerivDecl (fn l) a b c)
fixDecl (InfixDecl l a b c) = (InfixDecl (fn l) a b c)
fixDecl (DefaultDecl l a) = (DefaultDecl (fn l) a)
fixDecl (SpliceDecl l a) = (SpliceDecl (fn l) a)
fixDecl (TypeSig l a b) = (TypeSig (fn l) a b)
fixDecl (PatSynSig l a b c d e) = (PatSynSig (fn l) a b c d e)
fixDecl (FunBind l a) = (FunBind (fn l) a)
fixDecl (PatBind l a b c) = (PatBind (fn l) a b c)
fixDecl (PatSyn l a b c) = (PatSyn (fn l) a b c)
fixDecl (ForImp l a b c d e) = (ForImp (fn l) a b c d e)
fixDecl (ForExp l a b c d) = (ForExp (fn l) a b c d)
fixDecl (RulePragmaDecl l a) = (RulePragmaDecl (fn l) a)
fixDecl (DeprPragmaDecl l a) = (DeprPragmaDecl (fn l) a)
fixDecl (WarnPragmaDecl l a) = (WarnPragmaDecl (fn l) a)
fixDecl (CompletePragma l a b) = CompletePragma (fn l) a b
fixDecl (InlineSig l a b c) = (InlineSig (fn l) a b c)
fixDecl (InlineConlikeSig l a b) = (InlineConlikeSig (fn l) a b)
fixDecl (SpecSig l a b c) = (SpecSig (fn l) a b c)
fixDecl (SpecInlineSig l a b c d) = (SpecInlineSig (fn l) a b c d)
fixDecl (InstSig l a) = (InstSig (fn l) a)
fixDecl (AnnPragma l a) = (AnnPragma (fn l) a)
fixDecl (MinimalPragma l a) = (MinimalPragma (fn l) a)
fixDecl (RoleAnnotDecl l a b) = (RoleAnnotDecl (fn l) a b)
mapTopAnnotations _ _ = error "mapTopAnnotations"
#if 0
endOfDecls :: EndLoc l => Module l -> SrcLoc
endOfDecls m@(Module _l _mh _ps _ []) = endOfImports m
endOfDecls (Module _l _mh _ps _is ds) = endLoc (ann (last ds))
endOfDecls _ = error "endOfDecls"
endOfImports :: EndLoc l => Module l -> SrcLoc
endOfImports m@(Module _l _mh _ps [] _) = endOfHeader m
endOfImports (Module _l _mh _ps is _) = endLoc (ann (last is))
endOfImports _ = error "endOfImports"
endOfImportSpecs :: (EndLoc l, Show l) => ImportDecl l -> SrcLoc
endOfImportSpecs (ImportDecl {importSpecs = Just i}) =
case srcPoints (ann i) of
[] -> error $ "endOfImportSpecs: " ++ show i
pts -> srcLoc (last pts)
endOfImportSpecs (ImportDecl {importSpecs = Nothing}) = error "endOfImportSpecs"
endOfHeader :: EndLoc l => Module l -> SrcLoc
endOfHeader m@(Module _l Nothing _ps _ _) = endOfPragmas m
endOfHeader (Module _l (Just h) _ps _is _) = endLoc (ann h)
endOfHeader _ = error "endOfHeader"
endOfPragmas :: EndLoc l => Module l -> SrcLoc
endOfPragmas (Module l _ [] _ _) = endLoc l
endOfPragmas (Module _l _ ps _ _) = endLoc (ann (last ps))
endOfPragmas _ = error "endOfPragmas"
endOfModule :: ModuleInfo l -> SrcLoc
endOfModule mi = endLocOfText (_modulePath mi) (_moduleText mi)
#endif
startOfModule :: ModuleInfo l -> SrcLoc
startOfModule mi = SrcLoc (_modulePath mi) 1 1
-- | The beginning of the first thing after the imports
startOfDecls :: SrcInfo l => ModuleInfo l -> SrcLoc
startOfDecls mi@(ModuleInfo {_module = Module _l _mh _ps _is []}) = endLocOfText (_modulePath mi) (_moduleText mi)
startOfDecls (ModuleInfo {_module = Module _l _mh _ps _is (d : _)}) = srcLoc (ann d)
startOfDecls _ = error "startOfDecls"
-- | The beginning of the first thing after the header.
startOfImports :: SrcInfo l => ModuleInfo l -> SrcLoc
startOfImports mi@(ModuleInfo {_module = Module _l _mh _ps [] _}) = startOfDecls mi
startOfImports (ModuleInfo {_module = Module _l _mh _ps (i : _) _}) = srcLoc (ann i)
startOfImports _ = error "startOfImports"
-- | The beginning of the first thing after the pragmas.
startOfHeader :: SrcInfo l => ModuleInfo l -> SrcLoc
startOfHeader mi@(ModuleInfo {_module = Module _l Nothing _ps _ _}) = startOfImports mi
startOfHeader (ModuleInfo {_module = Module _l (Just h) _ps _is _}) = srcLoc (ann h)
startOfHeader _ = error "startOfHeader"
-- | The beginning of the first thing
startOfPragmas :: SrcInfo l => ModuleInfo l -> SrcLoc
startOfPragmas (ModuleInfo {_module = m@(Module _l _ [] _ _)}) = SrcLoc (fileName (ann m)) 1 1
startOfPragmas (ModuleInfo {_module = Module _l _ (p : _) _ _}) = srcLoc (ann p)
startOfPragmas _ = error "startOfPragmas"
| null | https://raw.githubusercontent.com/ddssff/refact-global-hse/519a017009cae8aa1a3db1b46eb560d76bd9895d/src/Refactor/SrcLoc.hs | haskell | | Utility functions for the haskell-src-exts type SrcLoc.
* Location and span info for a piece of text
* Split text at a location
* Use span info to extract text
(Annotated(ann), Module(..))
| Return a span that exactly covers the string s
| Return the text before, within, and after a span
This should not happen, but if the last line
will set the end location as if the terminator
was present.
| Using n locations split a string into n + 1 segments.
[("fi",SrcSpan "" 80 20 80 22),
trace ("offset=" ++ show offset ++ ", sp=" ++ show sp ++ ", pre=" ++ show pre ++ ", seg=" ++ show seg) $
t1 r = trace ("splits' " ++ show file ++ " " ++ show spans ++ " " ++ show s ++ " -> " ++ show r) r
text. They could still be in the wrong places, so this doesn't
guarantee the parse is valid, but its a pretty good bet.
validateSpan :: SrcSpan -> IO ()
trace ("fixSpan " ++ show (srcInfoSpan sp) ++ " -> " ++ show (srcInfoSpan sp'))
| Tighten the start and end points of a span to exclude any leading
and trailing whitespace and comments.
| Move the endpoint of a span to before any trailing whitespace and comments.
| Given a SrcSpanInfo, find the "real" end of the object it covers,
meaning the position beyond which lies only whitespace and comments.
Use the end of the last nonspace segment
e'' = locSum b e' in
if r < b || r > e then error ("realEnd: sp=" ++ show sp ++ ", segs=" ++ show segs ++ " -> " ++ show e'') else e''
locSum b
locSum b
if r < b || r > e then error ("realEnd: sp=" ++ show sp ++ ", segs=" ++ show segs ++ " -> " ++ show r) else r
| Modify end locations so they precede any trailing whitespace |
# LANGUAGE BangPatterns , CPP , FlexibleInstances , PackageImports , ScopedTypeVariables , TemplateHaskell , UndecidableInstances #
# OPTIONS_GHC -Wall -fno - warn - orphans #
module Refactor.SrcLoc
* SpanInfo queries
srcLoc
, EndLoc(endLoc)
, spanOfText
, endLocOfText
, splitText
, splits
, splits'
, textTripleOfSpan
, textOfSpan
* Repair spans that have column set to 0
, fixSpan
, testSpan
, fixEnds
, mapTopAnnotations
, locSum
, locDiff
, endOfPragmas
, endOfHeader
, endOfImports
, endOfImportSpecs
, endOfDecls
, endOfModule
, startOfModule
, startOfPragmas
, startOfHeader
, startOfImports
, startOfDecls
) where
import Control.Monad.State (get, put, runState, State)
import Data.Char (isSpace)
import Data.List (dropWhileEnd)
import Data.Monoid ((<>))
import Language.Haskell.Exts.Comments (Comment(..))
import Language.Haskell.Exts.SrcLoc (mkSrcSpan, SrcInfo(..), SrcLoc(..), SrcSpan(..), SrcSpanInfo(..))
import Language.Haskell.Names
import Refactor.ModuleInfo
import Refactor.SrcSpan
import Refactor.Utils (EZPrint(ezPrint), lines')
import Text.PrettyPrint.HughesPJClass (Pretty(pPrint), prettyShow, text)
srcLoc : : SpanInfo a = > a - > SrcLoc
srcLoc x = let ( SrcSpan f b e _ _ ) = srcSpan x in SrcLoc f b e
endLoc : : SpanInfo a = > a - > SrcLoc
endLoc x = let ( SrcSpan f _ _ b e ) = srcSpan x in SrcLoc f b e
srcLoc :: SpanInfo a => a -> SrcLoc
srcLoc x = let (SrcSpan f b e _ _) = srcSpan x in SrcLoc f b e
endLoc :: SpanInfo a => a -> SrcLoc
endLoc x = let (SrcSpan f _ _ b e) = srcSpan x in SrcLoc f b e
-}
locDiff :: SrcLoc -> SrcLoc -> SrcLoc
locDiff (SrcLoc file l1 c1) (SrcLoc _ l2 c2) =
if l1 == l2
then SrcLoc file 1 (c1 - c2 + 1)
else SrcLoc file (l1 - l2 + 1) c1
spanDiff :: SrcSpan -> SrcLoc -> SrcSpan
spanDiff sp l = mkSrcSpan (locDiff (srcLoc sp) l) (locDiff (endLoc sp) l)
locSum :: SrcLoc -> SrcLoc -> SrcLoc
locSum (SrcLoc f l1 c1) (SrcLoc _ l2 c2) =
if l2 == 1
then SrcLoc f (l1 + l2 - 1) (c1 + c2 - 1)
else SrcLoc f (l1 + l2 - 1) c2
endLocOfText :: FilePath -> String -> SrcLoc
endLocOfText path x =
case ls of
[] -> SrcLoc {srcFilename = path, srcLine = 1, srcColumn = 1}
_ -> SrcLoc {srcFilename = path, srcLine = length ls, srcColumn = length (last ls) + 1}
where ls = lines' x
spanOfText :: FilePath -> String -> SrcSpanInfo
spanOfText path s =
let end = endLocOfText path s in
SrcSpanInfo {srcInfoSpan = mkSrcSpan (SrcLoc path 1 1) (SrcLoc path (srcLine end) (srcColumn end)),
srcInfoPoints = []}
textTripleOfSpan :: (SrcInfo a, EndLoc a) => a -> String -> (String, String, String)
textTripleOfSpan sp s =
let (pref, s') = splitText (srcLoc sp) s in
let (s'', suff) = splitText (locDiff (endLoc sp) (srcLoc sp)) s' in
(pref, s'', suff)
textOfSpan :: (SrcInfo a, EndLoc a) => a -> String -> String
textOfSpan sp s =
let (_, s') = splitText (srcLoc sp) s in
let (s'', _) = splitText (locDiff (endLoc sp) (srcLoc sp)) s' in
s''
testSpan :: (SrcInfo a, EndLoc a) => String -> a -> a
testSpan msg sp =
case (srcLoc sp, endLoc sp) of
(SrcLoc _ l1 c1, SrcLoc _ l2 c2) | c1 < 1 || c2 < 1 || l1 < 1 || l2 < 1 ||
l2 < l1 || (l2 == l1 && c2 < c1) -> error ("testSpan - " ++ msg)
_ -> sp
splitText :: SrcLoc -> String -> (String, String)
splitText loc@(SrcLoc _ l0 c0) s0 =
fst $ runState f (1, 1, "", s0)
where
f :: State (Int, Int, String, String) (String, String)
f = do (l, c, r, s) <- get
case (compare l l0, compare c c0) of
(LT, _) ->
case span (/= '\n') s of
(r', '\n' : s') ->
put (l + 1, 1, r ++ r' ++ "\n", s') >> f
(_, "") -> case s of
lacks a newline terminator ,
"" -> pure (r, s)
(ch : s') -> put (l, c + 1, r ++ [ch], s') >> f
_ -> error "splitText"
(_, LT) ->
case s of
[] -> error ("splitText " ++ ", loc=" ++ show loc ++ ", s=" ++ show s)
(ch : s') -> put (l, c + 1, r ++ [ch], s') >> f
(EQ, EQ) -> pure (r, s)
_ -> error ("splitText - invalid arguments: loc=" ++ show loc ++ ", s=" ++ show s0)
splits ( SrcLoc " " 80 20 ) [ SrcLoc " " 80 22 , SrcLoc " " 80 25 , SrcLoc " " 81 4 ] " first line\nsecond line " - >
( " rst",SrcSpan " " 80 22 80 25 ) ,
( " line\nsec",SrcSpan " " 80 25 81 4 ) ,
( " ond line",SrcSpan " " 81 4 81 12 ) ]
splits :: SrcLoc -> [SrcLoc] -> String -> [(String, SrcSpan)]
splits offset0@(SrcLoc file _ _) locs0@(_ : _) s0 =
zip (f offset0 locs0 s0) (map (uncurry mkSrcSpan) (zip (offset0 : locs0) (locs0 ++ [locSum offset0 (endLocOfText file s0)])))
where
f _ [] s = [s]
f offset (loc : locs) s =
let (pre, suf) = splitText (locDiff loc offset) s in
pre : f loc locs suf
splits (SrcLoc _ _ _) [] _ = error "splits"
data Seg
= Span (SrcLoc, SrcLoc) String
| Between (SrcLoc, SrcLoc) String
deriving Show
splits ' ( SrcLoc " " 80 20 ) [ SrcSpan " " 80 20 80 22 , SrcSpan " " 80 25 81 4 ] " first line\nsecond line "
[ Span ( SrcLoc " " 80 20 ) , " fi " , Between ( SrcLoc " " 80 22 ) " rst " ,
( SrcLoc " " 80 25 81 4 ) " line\nsec " , Between ( SrcLoc " " 81 4 ) " ond line " ]
splits' :: FilePath -> [SrcSpan] -> String -> [Seg]
splits' file spans s =
f (SrcLoc file 1 1) spans s
where
f :: SrcLoc -> [SrcSpan] -> String -> [Seg]
f offset [] s' = [Between (offset, locSum offset (endLocOfText file s')) s']
f offset (sp : sps) s''' =
let (pre, s') = splitText (locDiff (srcLoc sp) offset) s''' in
let (seg, s'') = splitText (locDiff (endLoc sp) (srcLoc sp)) s' in
(if null pre then [] else [Between (offset, srcLoc sp) pre]) ++ [Span (srcLoc sp, endLoc sp) seg] ++ f (endLoc sp) sps s''
t2 offset el b = trace ( " splits ' final : offset= " + + show offset + + " , el= " + + show el + + " , seg= " + + show b ) b
| Make sure every SrcSpan in the parsed module refers to existing
#if 0
validateParseResults :: Module SrcSpanInfo -> String -> IO ()
validateParseResults modul t =
mapM_ validateSpan (nub (sort (gFind modul :: [SrcSpan])))
where
validateSpan x =
let s = srcLoc x
e = endLoc x in
putStrLn ("span " ++ prettyShow s ++ "->" ++ prettyShow e ++ "=" ++ show (textOfSpan x t))
#endif
instance Pretty SrcLoc where
pPrint l = text ("(l" <> show (srcLine l) ++ ",c" ++ show (srcColumn l) ++ ")")
instance Pretty SrcSpan where
pPrint (SrcSpan _ bl bc el ec) = text ("(l" <> show bl ++ ",c" ++ show bc ++ ")->" ++
"(l" <> show el ++ ",c" ++ show ec ++ ")")
instance Pretty SrcSpanInfo where
pPrint = pPrint . srcInfoSpan
This happens , a span with end column 0 , even though column
numbering begins at 1 . Is it a bug in haskell - src - exts ?
fixSpan :: SrcSpanInfo -> SrcSpanInfo
fixSpan sp =
if srcSpanEndColumn (srcInfoSpan sp) == 0
then t1 $ sp {srcInfoSpan = (srcInfoSpan sp) {srcSpanEndColumn = 1}}
else sp
where
instance EZPrint SrcLoc where
ezPrint = prettyShow
instance EZPrint SrcSpanInfo where
ezPrint = prettyShow
fixEnds :: [Comment] -> String -> SrcSpanInfo -> SrcSpanInfo
fixEnds cs s si@(SrcSpanInfo {srcInfoSpan = sp}) =
let b@(SrcLoc _ bl bc) = realBegin si cs s in
let e@(SrcLoc _ el ec) = realEnd si cs s in
case (b < srcLoc sp || b > endLoc sp || e < srcLoc sp || e > endLoc sp) of
True -> error "fixEnds returned position outside span"
_ -> si {srcInfoSpan = sp {srcSpanStartLine = bl, srcSpanStartColumn = bc,
srcSpanEndLine = el, srcSpanEndColumn = ec}}
realEnd :: SrcSpanInfo -> [Comment] -> String -> SrcLoc
realEnd sp cs s =
let b@(SrcLoc file _ _) = srcLoc sp
e = endLoc sp
s'' = textOfSpan sp s
commentSpans = map (flip spanDiff b) .
takeWhile ((<= e) . endLoc) .
dropWhile ((< b) . srcLoc) .
map (\(Comment _ sp' _) -> sp') $ cs
segs = splits' file commentSpans s'' in
let e' = case dropWhile isWhite (reverse segs) of
[] -> endLocOfText file s''
(Span (_, x) _ : _) -> x
(Between (_, x) _ : _) -> x
(s''', _) = splitText e' s''
s'''' = dropWhileEnd isSpace s''' in
locSum b (endLocOfText file s'''')
where
isWhite (Between _ s') | all isSpace s' = True
isWhite (Span _ _) = True
isWhite _ = False
realBegin :: SrcSpanInfo -> [Comment] -> String -> SrcLoc
realBegin sp cs s =
let b@(SrcLoc file _ _) = srcLoc sp
e = endLoc sp
s'' = textOfSpan sp s
commentSpans = map (flip spanDiff b) .
takeWhile ((<= e) . endLoc) .
dropWhile ((< b) . srcLoc) .
map (\(Comment _ sp' _) -> sp') $ cs
segs = splits' file commentSpans s'' in
let b' = case dropWhile isWhite segs of
[] -> b
(_, s''') = splitText b' s''
b'' = endLocOfText "" (takeWhile isSpace s''') in
foldr1 locSum [b, b', b'']
where
isWhite (Between _ s') | all isSpace s' = True
isWhite (Span _ _) = True
isWhite _ = False
mapTopAnnotations :: forall a. (a -> a) -> Module a -> Module a
mapTopAnnotations fn (Module loc mh ps is ds) =
Module loc (fmap fixMH mh) ps (map fixImport is) (map fixDecl ds)
where
fixMH :: ModuleHead a -> ModuleHead a
fixMH (ModuleHead sp name warn specs) = ModuleHead (fn sp) name warn specs
fixImport :: ImportDecl a -> ImportDecl a
fixImport i = i {importAnn = fn (importAnn i)}
fixDecl :: Decl a -> Decl a
fixDecl (TypeDecl l a b) = (TypeDecl (fn l) a b)
fixDecl (TypeFamDecl l a b c) = (TypeFamDecl (fn l) a b c)
fixDecl (ClosedTypeFamDecl l a b c d) = (ClosedTypeFamDecl (fn l) a b c d)
fixDecl (DataDecl l a b c d e) = (DataDecl (fn l) a b c d e)
fixDecl (GDataDecl l a b c d e f) = GDataDecl (fn l) a b c d e f
fixDecl (DataFamDecl l a b c) = (DataFamDecl (fn l) a b c)
fixDecl (TypeInsDecl l a b) = (TypeInsDecl (fn l) a b)
fixDecl (DataInsDecl l a b c d) = (DataInsDecl (fn l) a b c d)
fixDecl (GDataInsDecl l a b c d e) = (GDataInsDecl (fn l) a b c d e)
fixDecl (ClassDecl l a b c d) = (ClassDecl (fn l) a b c d)
fixDecl (InstDecl l a b c) = (InstDecl (fn l) a b c)
fixDecl (DerivDecl l a b c) = (DerivDecl (fn l) a b c)
fixDecl (InfixDecl l a b c) = (InfixDecl (fn l) a b c)
fixDecl (DefaultDecl l a) = (DefaultDecl (fn l) a)
fixDecl (SpliceDecl l a) = (SpliceDecl (fn l) a)
fixDecl (TypeSig l a b) = (TypeSig (fn l) a b)
fixDecl (PatSynSig l a b c d e) = (PatSynSig (fn l) a b c d e)
fixDecl (FunBind l a) = (FunBind (fn l) a)
fixDecl (PatBind l a b c) = (PatBind (fn l) a b c)
fixDecl (PatSyn l a b c) = (PatSyn (fn l) a b c)
fixDecl (ForImp l a b c d e) = (ForImp (fn l) a b c d e)
fixDecl (ForExp l a b c d) = (ForExp (fn l) a b c d)
fixDecl (RulePragmaDecl l a) = (RulePragmaDecl (fn l) a)
fixDecl (DeprPragmaDecl l a) = (DeprPragmaDecl (fn l) a)
fixDecl (WarnPragmaDecl l a) = (WarnPragmaDecl (fn l) a)
fixDecl (CompletePragma l a b) = CompletePragma (fn l) a b
fixDecl (InlineSig l a b c) = (InlineSig (fn l) a b c)
fixDecl (InlineConlikeSig l a b) = (InlineConlikeSig (fn l) a b)
fixDecl (SpecSig l a b c) = (SpecSig (fn l) a b c)
fixDecl (SpecInlineSig l a b c d) = (SpecInlineSig (fn l) a b c d)
fixDecl (InstSig l a) = (InstSig (fn l) a)
fixDecl (AnnPragma l a) = (AnnPragma (fn l) a)
fixDecl (MinimalPragma l a) = (MinimalPragma (fn l) a)
fixDecl (RoleAnnotDecl l a b) = (RoleAnnotDecl (fn l) a b)
mapTopAnnotations _ _ = error "mapTopAnnotations"
#if 0
endOfDecls :: EndLoc l => Module l -> SrcLoc
endOfDecls m@(Module _l _mh _ps _ []) = endOfImports m
endOfDecls (Module _l _mh _ps _is ds) = endLoc (ann (last ds))
endOfDecls _ = error "endOfDecls"
endOfImports :: EndLoc l => Module l -> SrcLoc
endOfImports m@(Module _l _mh _ps [] _) = endOfHeader m
endOfImports (Module _l _mh _ps is _) = endLoc (ann (last is))
endOfImports _ = error "endOfImports"
endOfImportSpecs :: (EndLoc l, Show l) => ImportDecl l -> SrcLoc
endOfImportSpecs (ImportDecl {importSpecs = Just i}) =
case srcPoints (ann i) of
[] -> error $ "endOfImportSpecs: " ++ show i
pts -> srcLoc (last pts)
endOfImportSpecs (ImportDecl {importSpecs = Nothing}) = error "endOfImportSpecs"
endOfHeader :: EndLoc l => Module l -> SrcLoc
endOfHeader m@(Module _l Nothing _ps _ _) = endOfPragmas m
endOfHeader (Module _l (Just h) _ps _is _) = endLoc (ann h)
endOfHeader _ = error "endOfHeader"
endOfPragmas :: EndLoc l => Module l -> SrcLoc
endOfPragmas (Module l _ [] _ _) = endLoc l
endOfPragmas (Module _l _ ps _ _) = endLoc (ann (last ps))
endOfPragmas _ = error "endOfPragmas"
endOfModule :: ModuleInfo l -> SrcLoc
endOfModule mi = endLocOfText (_modulePath mi) (_moduleText mi)
#endif
startOfModule :: ModuleInfo l -> SrcLoc
startOfModule mi = SrcLoc (_modulePath mi) 1 1
-- | The beginning of the first thing after the imports
startOfDecls :: SrcInfo l => ModuleInfo l -> SrcLoc
startOfDecls mi@(ModuleInfo {_module = Module _l _mh _ps _is []}) = endLocOfText (_modulePath mi) (_moduleText mi)
startOfDecls (ModuleInfo {_module = Module _l _mh _ps _is (d : _)}) = srcLoc (ann d)
startOfDecls _ = error "startOfDecls"
-- | The beginning of the first thing after the header.
startOfImports :: SrcInfo l => ModuleInfo l -> SrcLoc
startOfImports mi@(ModuleInfo {_module = Module _l _mh _ps [] _}) = startOfDecls mi
startOfImports (ModuleInfo {_module = Module _l _mh _ps (i : _) _}) = srcLoc (ann i)
startOfImports _ = error "startOfImports"
-- | The beginning of the first thing after the pragmas.
startOfHeader :: SrcInfo l => ModuleInfo l -> SrcLoc
startOfHeader mi@(ModuleInfo {_module = Module _l Nothing _ps _ _}) = startOfImports mi
startOfHeader (ModuleInfo {_module = Module _l (Just h) _ps _is _}) = srcLoc (ann h)
startOfHeader _ = error "startOfHeader"
-- | The beginning of the first thing
startOfPragmas :: SrcInfo l => ModuleInfo l -> SrcLoc
startOfPragmas (ModuleInfo {_module = m@(Module _l _ [] _ _)}) = SrcLoc (fileName (ann m)) 1 1
startOfPragmas (ModuleInfo {_module = Module _l _ (p : _) _ _}) = srcLoc (ann p)
startOfPragmas _ = error "startOfPragmas"
|
40bf87d916a1ee06be9d1e03fb16abe61838b0a7b26bbb4a992ea9810b3f9978 | mvaldesdeleon/aoc18 | Day9.hs | # LANGUAGE RecordWildCards #
module Day9
( day9
) where
import Control.Monad.Loops
import Control.Monad.State
import qualified Data.List.Zipper as Z
import qualified Data.Map.Strict as M
import Paths_aoc18 (getDataFileName)
import Text.Parsec (Parsec, digit, many1, newline, optional,
parse, string)
loadInput :: IO String
loadInput = getDataFileName "inputs/day-9.txt" >>= readFile
data GameConfig = GameConfig
{ cfgPlayers :: Integer
, cfgMarbles :: Integer
} deriving (Show)
number :: Parsec String () Integer
number = read <$> many1 digit
gameConfig :: Parsec String () GameConfig
gameConfig =
GameConfig <$> (number <* string " players; last marble is worth ") <*>
(number <* string " points") <*
optional newline
parseInput :: String -> GameConfig
parseInput input =
case result of
Left e -> error $ show e
Right game -> game
where
result = parse gameConfig "" input
data Game = Game
{ gConfig :: GameConfig
, gNextMarble :: Integer
, gPlayers :: Z.Zipper Integer
, gCircle :: Z.Zipper Integer
} deriving (Show)
startGame :: GameConfig -> Game
startGame gc@GameConfig {..} =
Game
gc
1
(Z.fromList . replicate (fromIntegral cfgPlayers) $ 0)
(Z.fromList [0])
left :: Eq a => Z.Zipper a -> Z.Zipper a
left z =
if lz == z
then Z.left . Z.end $ z
else lz
where
lz = Z.left z
right :: Z.Zipper a -> Z.Zipper a
right z =
if Z.endp rz
then Z.start z
else rz
where
rz = Z.right z
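-- Note: 'left' and 'right' above make the zipper behave like a circular
-- buffer, wrapping around at either end, which is what the marble circle
-- needs. Illustrative behaviour (not from the original file):
--
--   Z.cursor (left  (Z.fromList [1,2,3]))  ==  3   -- wraps to the end
--   Z.cursor (right (Z.fromList [1,2,3]))  ==  2   -- plain step right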
playTurn :: State Game ()
playTurn = do
nextMarble <- gets gNextMarble
if (nextMarble `mod` 23) == 0
then do
circle <- gets (fpow left 7 . gCircle)
modify
(\g@Game {..} ->
g
{ gPlayers =
Z.replace
(Z.cursor gPlayers + Z.cursor circle +
nextMarble)
gPlayers
, gCircle = Z.delete circle
})
else modify
(\g@Game {..} ->
g {gCircle = Z.insert nextMarble (fpow right 2 gCircle)})
modify
(\g@Game {..} ->
g {gPlayers = right gPlayers, gNextMarble = gNextMarble + 1})
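-- Rules encoded in 'playTurn': normally the circle is rotated two positions
-- clockwise and the new marble is inserted there, becoming the current
-- marble. When the marble number is a multiple of 23, the current player
-- instead scores it plus the marble seven positions counter-clockwise,
-- which is removed (Z.delete leaves the cursor on its clockwise neighbour,
-- making that marble current). Either way, play then advances to the next
-- player and the next marble number.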
fpow :: (a -> a) -> Integer -> a -> a
fpow f n a = iterate f a !! fromInteger n
gameOver :: State Game Bool
gameOver = do
lastMarble <- gets (cfgMarbles . gConfig)
nextMarble <- gets gNextMarble
return $ nextMarble > lastMarble
playGame :: State Game ()
playGame = playTurn `untilM_` gameOver
highScore :: GameConfig -> Integer
highScore gc = maximum . Z.toList . gPlayers $ endGame
where
endGame = execState playGame (startGame gc)
alter :: GameConfig -> GameConfig
alter gc@GameConfig {..} = gc {cfgMarbles = cfgMarbles * 100}
day9 :: IO ()
day9 = do
input <- parseInput <$> loadInput
print $ highScore input
print $ highScore (alter input)
| null | https://raw.githubusercontent.com/mvaldesdeleon/aoc18/1a6f6de7c482e5de264360e36f97a3c7487e2457/src/Day9.hs | haskell | # LANGUAGE RecordWildCards #
module Day9
( day9
) where
import Control.Monad.Loops
import Control.Monad.State
import qualified Data.List.Zipper as Z
import qualified Data.Map.Strict as M
import Paths_aoc18 (getDataFileName)
import Text.Parsec (Parsec, digit, many1, newline, optional,
parse, string)
loadInput :: IO String
loadInput = getDataFileName "inputs/day-9.txt" >>= readFile
data GameConfig = GameConfig
{ cfgPlayers :: Integer
, cfgMarbles :: Integer
} deriving (Show)
number :: Parsec String () Integer
number = read <$> many1 digit
gameConfig :: Parsec String () GameConfig
gameConfig =
GameConfig <$> (number <* string " players; last marble is worth ") <*>
(number <* string " points") <*
optional newline
parseInput :: String -> GameConfig
parseInput input =
case result of
Left e -> error $ show e
Right game -> game
where
result = parse gameConfig "" input
data Game = Game
{ gConfig :: GameConfig
, gNextMarble :: Integer
, gPlayers :: Z.Zipper Integer
, gCircle :: Z.Zipper Integer
} deriving (Show)
startGame :: GameConfig -> Game
startGame gc@GameConfig {..} =
Game
gc
1
(Z.fromList . replicate (fromIntegral cfgPlayers) $ 0)
(Z.fromList [0])
left :: Eq a => Z.Zipper a -> Z.Zipper a
left z =
if lz == z
then Z.left . Z.end $ z
else lz
where
lz = Z.left z
right :: Z.Zipper a -> Z.Zipper a
right z =
if Z.endp rz
then Z.start z
else rz
where
rz = Z.right z
playTurn :: State Game ()
playTurn = do
nextMarble <- gets gNextMarble
if (nextMarble `mod` 23) == 0
then do
circle <- gets (fpow left 7 . gCircle)
modify
(\g@Game {..} ->
g
{ gPlayers =
Z.replace
(Z.cursor gPlayers + Z.cursor circle +
nextMarble)
gPlayers
, gCircle = Z.delete circle
})
else modify
(\g@Game {..} ->
g {gCircle = Z.insert nextMarble (fpow right 2 gCircle)})
modify
(\g@Game {..} ->
g {gPlayers = right gPlayers, gNextMarble = gNextMarble + 1})
fpow :: (a -> a) -> Integer -> a -> a
fpow f n a = iterate f a !! fromInteger n
gameOver :: State Game Bool
gameOver = do
lastMarble <- gets (cfgMarbles . gConfig)
nextMarble <- gets gNextMarble
return $ nextMarble > lastMarble
playGame :: State Game ()
playGame = playTurn `untilM_` gameOver
highScore :: GameConfig -> Integer
highScore gc = maximum . Z.toList . gPlayers $ endGame
where
endGame = execState playGame (startGame gc)
alter :: GameConfig -> GameConfig
alter gc@GameConfig {..} = gc {cfgMarbles = cfgMarbles * 100}
day9 :: IO ()
day9 = do
input <- parseInput <$> loadInput
print $ highScore input
print $ highScore (alter input)
|
|
811401d67d63cb0595477d446a831a99a13a3fbd918a9769ea39b6759c6c1606 | mirage/ocaml-openflow | imperative.mli | (**************************************************************************)
(* *)
: a generic graph library for OCaml
Copyright ( C ) 2004 - 2010
, and
(* *)
(* This software is free software; you can redistribute it and/or *)
modify it under the terms of the GNU Library General Public
License version 2.1 , with the special exception on linking
(* described in file LICENSE. *)
(* *)
(* This software is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. *)
(* *)
(**************************************************************************)
(** Imperative Graph Implementations. *)
open Sig
(** Signature of imperative graphs. *)
module type S = sig
(** <b>Edges may be labeled or not</b>:
    - Unlabeled: there is no label on edges
    - Labeled: you have to provide a label implementation as a functor
      parameter.

    <b>Vertices may be concrete or abstract</b>:
    - Concrete: type of vertex labels and type of vertices are identified.
    - Abstract: type of vertices is abstract (in particular it is not equal
      to type of vertex labels

    <b>How to choose between concrete and abstract vertices for my graph
    implementation</b>?

    Usually, if you fall into one of the following cases, use abstract
    vertices:
    - you cannot provide efficient comparison/hash functions for vertices; or
    - you wish to get two different vertices with the same label.

    In other cases, it is certainly easier to use concrete vertices. *)
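(* Illustrative sketch (not part of the original interface): with the usual
   ocamlgraph conventions, a concrete instantiation could look like

     module V = struct
       type t = int
       let compare = Stdlib.compare
       let hash = Hashtbl.hash
       let equal = (=)
     end
     module G = Imperative.Graph.Concrete (V)
     let g = G.create ()
     let () = G.add_edge g 1 2

   while the abstract variant, Imperative.Graph.Abstract (struct type t = int end),
   keeps vertices opaque and requires creating them explicitly with G.V.create. *)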
(** Imperative Unlabeled Graphs. *)
module Concrete (V: COMPARABLE) :
Sig.I with type V.t = V.t and type V.label = V.t and type E.t = V.t * V.t
and type E.label = unit
(** Abstract Imperative Unlabeled Graphs. *)
module Abstract(V: ANY_TYPE) :
Sig.IM with type V.label = V.t and type E.label = unit
(** Imperative Labeled Graphs. *)
module ConcreteLabeled (V: COMPARABLE)(E: ORDERED_TYPE_DFT) :
Sig.I with type V.t = V.t and type V.label = V.t
and type E.t = V.t * E.t * V.t and type E.label = E.t
(** Abstract Imperative Labeled Graphs. *)
module AbstractLabeled (V: ANY_TYPE)(E: ORDERED_TYPE_DFT) :
Sig.IM with type V.label = V.t and type E.label = E.t
end
(** Imperative Directed Graphs. *)
module Digraph : sig
include S
(** {2 Bidirectional graphs}

    Bidirectional graphs use more memory space (at worst the double) than
    standard concrete directional graphs. But accessing predecessors is in
    O(1) amortized instead of O(max(|V|,|E|)) and removing a vertex is in
    O(D*ln(D)) instead of O(|V|*ln(D)). D is the maximal degree of the
    graph. *)
(** Imperative Unlabeled, bidirectional graph. *)
module ConcreteBidirectional (V: COMPARABLE) :
Sig.I with type V.t = V.t and type V.label = V.t and type E.t = V.t * V.t
and type E.label = unit
(** Imperative Labeled and bidirectional graph. *)
module ConcreteBidirectionalLabeled(V:COMPARABLE)(E:ORDERED_TYPE_DFT) :
Sig.I with type V.t = V.t and type V.label = V.t
and type E.t = V.t * E.t * V.t and type E.label = E.t
end
(** Imperative Undirected Graphs. *)
module Graph : S
(** Imperative graphs implemented as adjacency matrices. *)
module Matrix : sig
module type S = sig
(** Vertices are integers in [0..n-1].
A vertex label is the vertex itself.
Edges are unlabeled. *)
include Sig.I with type V.t = int and type V.label = int
and type E.t = int * int
(** Creation. graphs are not resizeable: size is given at creation time.
Thus [make] must be used instead of [create]. *)
val make : int -> t
(** Note: [add_vertex] and [remove_vertex] have no effect.
[clear] only removes edges, not vertices. *)
end
module Digraph : S
(** Imperative Directed Graphs implemented with adjacency matrices. *)
module Graph : S
(** Imperative Undirected Graphs implemented with adjacency matrices. *)
end
(****
(** Faster implementations for abstract (un)labeled (di)graphs
    when vertices are _not shared_ between different graphs.
    This means that, when using the following implementations, two different
    graphs (created with two calls to [create]) must have disjoint sets of
    vertices. *)
module UV : sig
(** directed graphs *)
module Digraph : sig
module Abstract(V: ANY_TYPE) :
Sig.IM with type V.label = V.t and type E.label = unit
module AbstractLabeled (V: ANY_TYPE)(E: ORDERED_TYPE_DFT) :
Sig.IM with type V.label = V.t and type E.label = E.t
end
(** undirected graphs *)
module Graph : sig
module Abstract(V: ANY_TYPE) :
Sig.IM with type V.label = V.t and type E.label = unit
module AbstractLabeled (V: ANY_TYPE)(E: ORDERED_TYPE_DFT) :
Sig.IM with type V.label = V.t and type E.label = E.t
end
end
****)
(*
Local Variables:
compile-command: "make -C .."
End:
*)
| null | https://raw.githubusercontent.com/mirage/ocaml-openflow/dcda113745e8edc61b5508eb8ac2d1e864e1a2df/lib/imperative.mli | ocaml | ************************************************************************
This software is free software; you can redistribute it and/or
described in file LICENSE.
This software is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
************************************************************************
* Imperative Graph Implementations.
* Signature of imperative graphs.
* Imperative Unlabeled Graphs.
* Abstract Imperative Unlabeled Graphs.
* Imperative Labeled Graphs.
* Abstract Imperative Labeled Graphs.
* Imperative Directed Graphs.
* Imperative Unlabeled, bidirectional graph.
* Imperative Labeled and bidirectional graph.
* Imperative Undirected Graphs.
* Imperative graphs implemented as adjacency matrices.
* Vertices are integers in [0..n-1].
A vertex label is the vertex itself.
Edges are unlabeled.
* Creation. graphs are not resizeable: size is given at creation time.
Thus [make] must be used instead of [create].
* Note: [add_vertex] and [remove_vertex] have no effect.
[clear] only removes edges, not vertices.
* Imperative Directed Graphs implemented with adjacency matrices.
* Imperative Undirected Graphs implemented with adjacency matrices.
* Faster implementations for abstract (un)labeled (di)graphs
when vertices are _not shared_ between different graphs.
This means that, when using the following implementations, two different
graphs (created with two calls to [create]) must have disjoint sets of
vertices.
* directed graphs
* undirected graphs
Local Variables:
compile-command: "make -C .."
End:
|
open Sig
module type S = sig
(** <b>Edges may be labeled or not</b>:
    - Unlabeled: there is no label on edges
    - Labeled: you have to provide a label implementation as a functor
      parameter.

    <b>Vertices may be concrete or abstract</b>:
    - Concrete: type of vertex labels and type of vertices are identified.
    - Abstract: type of vertices is abstract (in particular it is not equal
      to type of vertex labels

    <b>How to choose between concrete and abstract vertices for my graph
    implementation</b>?

    Usually, if you fall into one of the following cases, use abstract
    vertices:
    - you cannot provide efficient comparison/hash functions for vertices; or
    - you wish to get two different vertices with the same label.

    In other cases, it is certainly easier to use concrete vertices. *)
module Concrete (V: COMPARABLE) :
Sig.I with type V.t = V.t and type V.label = V.t and type E.t = V.t * V.t
and type E.label = unit
module Abstract(V: ANY_TYPE) :
Sig.IM with type V.label = V.t and type E.label = unit
module ConcreteLabeled (V: COMPARABLE)(E: ORDERED_TYPE_DFT) :
Sig.I with type V.t = V.t and type V.label = V.t
and type E.t = V.t * E.t * V.t and type E.label = E.t
module AbstractLabeled (V: ANY_TYPE)(E: ORDERED_TYPE_DFT) :
Sig.IM with type V.label = V.t and type E.label = E.t
end
module Digraph : sig
include S
(** {2 Bidirectional graphs}

    Bidirectional graphs use more memory space (at worst the double) than
    standard concrete directional graphs. But accessing predecessors is in
    O(1) amortized instead of O(max(|V|,|E|)) and removing a vertex is in
    O(D*ln(D)) instead of O(|V|*ln(D)). D is the maximal degree of the
    graph. *)
module ConcreteBidirectional (V: COMPARABLE) :
Sig.I with type V.t = V.t and type V.label = V.t and type E.t = V.t * V.t
and type E.label = unit
module ConcreteBidirectionalLabeled(V:COMPARABLE)(E:ORDERED_TYPE_DFT) :
Sig.I with type V.t = V.t and type V.label = V.t
and type E.t = V.t * E.t * V.t and type E.label = E.t
end
module Graph : S
module Matrix : sig
module type S = sig
include Sig.I with type V.t = int and type V.label = int
and type E.t = int * int
val make : int -> t
end
module Digraph : S
module Graph : S
end
(****
(** Faster implementations for abstract (un)labeled (di)graphs
    when vertices are _not shared_ between different graphs.
    This means that, when using the following implementations, two different
    graphs (created with two calls to [create]) must have disjoint sets of
    vertices. *)
module UV : sig
module Digraph : sig
module Abstract(V: ANY_TYPE) :
Sig.IM with type V.label = V.t and type E.label = unit
module AbstractLabeled (V: ANY_TYPE)(E: ORDERED_TYPE_DFT) :
Sig.IM with type V.label = V.t and type E.label = E.t
end
module Graph : sig
module Abstract(V: ANY_TYPE) :
Sig.IM with type V.label = V.t and type E.label = unit
module AbstractLabeled (V: ANY_TYPE)(E: ORDERED_TYPE_DFT) :
Sig.IM with type V.label = V.t and type E.label = E.t
end
end
****)
|
703c0a5d17a2c505d40e05bba2761c22e1a6162adf2a900c405dcb2b4acc8935 | RedHatQE/katello.auto | conf.clj | (ns katello.conf
(:require [clojure.java.io :as io]
[clojure.string :as string]
clojure.tools.cli
[fn.trace :refer [all-fns]]
[ovirt.client :as ovirt]
katello
[katello.tasks :refer [unique-names]])
(:import [java.io PushbackReader FileNotFoundException]
[java.util.logging Level Logger]))
;;config layer
(def options
[["-h" "--help" "Print usage guide"
:default false :flag true]
["-s" "--server-url" "URL of the Katello server to test. Should use https URL if https is enabled."]
["-u" "--admin-user" "The admin username of the Katello server"
:default "admin"]
["-p" "--admin-password" "The admin password of the Katello server"
:default "admin"]
["-o" "--admin-org" "Name of Katello's admin organization"
:default "ACME_Corporation"]
["-y" "--sync-repo" "The url for a test repo to sync"
:default "/"]
["-m" "--fake-manifest-url" "URL that points to a fake test manifest"
;;:default "-cli/raw/fake-manifests-signed/system-test/fake-manifest-syncable.zip"]
:default "-manifest-syncable.zip"]
["-r" "--fake-repo-url" "A Fake content delivery url to be used with --fake-manifest-url"
:default "/"]
["--redhat-manifest-url" "URL that points to a Red Hat test manifest"
:default "-manifest.zip"]
["--redhat-repo-url" "A Red Hat content delivery url to be used with --redhat-manifest-url"
:default "/"]
["--upgraded" "Running upgrade tests without setup, because setup already ran previous run."
:default false :flag true]
["--key-url" "A private key used to sign the cloned manifests"
;;:default "-misc/sign_manifest.sh/scripts/test/manifest_generation/fake_key.pem"]
:default ""]
["-e" "--environments" "A comma separated list of environment names to test with (need not already exist)"
:parse-fn #(seq (string/split % #",")) :default '("Development" "Q-eh") ]
["--ovirt-url" "A URL to ovirt (or RHEVM) API that can be used to provision client machines for tests that require them"]
["--ovirt-user" "The username to log in to ovirt api."]
["--ovirt-password" "The password for the ovirt user."]
["--ovirt-template" "The template to use to provision clients."]
["--ovirt-cluster" "The cluster to use to deploy clients on."]
["--sauce-user" "The username to log in to sauce api."]
["--sauce-key" "The api key used to log in to sauce."]
["--sauce-browser" "The browser to use on sauce for tests. "]
["--sauce-browser-version" "The version number of the browser to be used on sauce."]
["--sauce-os" "The OS to use on sauce for tests."]
["-a" "--selenium-address" "Address of the selenium server to connect to. eg 'host.com:4444' If none specified, an embedded selenium server is used."]
["-k" "--client-ssh-key" "The location of a (passwordless) ssh private key that can be used to access client machines."
:default (format "%s/.ssh/id_auto_dsa" (System/getProperty "user.home"))]
["-n" "--num-threads" "Number of threads to run tests with"
:parse-fn #(Integer. %) :default 5]
["-b" "--browser-types" "Selenium browser types, eg '*firefox' or '*firefox,*googlechrome' (multiple values only used when threads > 1"
:default ["*firefox"] :parse-fn #(string/split % #",")]
["--locale" "A locale to set the browser to for all the tests (if not set, will default to browser's default. Firefox only. eg 'fr' for french. Note, if using a remote selenium server, that server must already have a profile set up where the profile name equals the locale name."]
["-c" "--config" "Config files (containing a clojure map of config options) to read and overlay other command line options on top of - a list of comma separated places to look - first existing file is used and rest are ignored."
:default ["automation-properties.clj" (format "%s/automation-properties.clj" (System/getProperty "user.home"))]
:parse-fn #(string/split % #",")]
["--trace" "Namespaces and functions to trace"
:parse-fn #(->> (string/split % #",") (map symbol) vec)]
["--trace-excludes" "Functions to exclude from tracing"
:parse-fn #(->> (string/split % #",") (map symbol) (into #{}))]
["--sethostname" "URL of a script which can set the hostname of newly created VM"
:default "-scripts/master/jenkins/sethostname.sh"]
["--agent-repo" "URL of a .repo file to point to where katello-agent can be installed from."
:default "-devel.repo"]
["--gpg-key" "URL of a GPG-Key"
:default "-GPG-KEY-dummy-packages-generator"]])
(def defaults (first (apply clojure.tools.cli/cli [] options)))
(def config (atom {}))
;; Tracing setup
(def ^{:doc "Some pre-set trace settings. Don't trace too deeply into some
functions (or not at all into others)"}
trace-depths
'{katello.menu/fmap 0
katello.ui/component-deployment-dispatch 0
katello.ui/current-session-deployment 0
katello.notifications/success? 0
katello.tasks/uniqueify 0
katello.tasks/uniques 0
katello.conf/client-defs 0
katello.setup/conf-selenium 0
katello.rest/read-json-safe 0
katello.rest/get-id 1
katello/chain 1
katello/instance-or-nil? 0
webdriver/locator-finder-fn 1
webdriver/click 1
webdriver/input-text 1
webdriver/select-by-text 1
webdriver/move-to 1
webdriver/exists? 1
webdriver/visible? 1})
(defn record-contructor-depths
"Returns trace setting to not trace record constructors."
[]
(zipmap (filter (fn [fsym]
(re-find #"/map->|/new" (str fsym)))
(all-fns '(katello)))
(repeat 0)))
(defn trace-list
"Creates a list of functions to trace. Includes all katello
namespaces (except a few functions), and some of the API and
underlying lib namespaces."
[]
(-> (->> (loaded-libs)
(filter (fn [sym] (->> sym str (re-find #"^katello|^webdriver"))))
all-fns
(concat '(clj-http.client/get
clj-http.client/put
clj-http.client/post
clj-http.client/delete)))
(zipmap (repeat nil)) ;; default no limit to trace depth
(merge trace-depths (record-contructor-depths))))
(declare ^:dynamic *session-user*
^:dynamic *session-org*
^:dynamic *browsers*
^:dynamic *cloud-conn*
^:dynamic *environments*
^:dynamic *upgraded*)
(defn- try-read-configs
"try to read a config from filename, if file doesn't exist, return nil"
[filenames]
(for [f filenames]
(try
(with-open [r (io/reader f)]
(read (PushbackReader. r)))
(catch FileNotFoundException fnfe
nil))) )
(defn init
"Read in properties and set some defaults. This function should be
called before selenium client is created or any tests are run."
([] (init {}))
([opts]
;;bid adeiu to j.u.l logging
(-> (Logger/getLogger "") (.setLevel Level/OFF))
(swap! config merge defaults opts)
(swap! config merge (->> (:config @config)
try-read-configs
(drop-while nil?)
first))
(let [non-defaults (into {}
(filter (fn [[k v]] (not= v (k defaults)))
opts))]
(swap! config merge non-defaults)) ;; merge 2nd time to override anything in
; config files
(def ^:dynamic *session-user* (katello/newUser {:name (@config :admin-user)
:password (@config :admin-password)
:email ""}))
(def ^:dynamic *session-org* (katello/newOrganization {:name (@config :admin-org)}))
(def ^:dynamic *cloud-conn* (try (when-let [ovirt-url (@config :ovirt-url)]
{:api (org.ovirt.engine.sdk.Api. ovirt-url
(@config :ovirt-user)
(@config :ovirt-password))
:cluster (@config :ovirt-cluster)})
(catch Exception e (.printStackTrace e))))
(def ^:dynamic *browsers* (@config :browser-types))
(def ^:dynamic *upgraded* (@config :upgraded))
(def ^:dynamic *environments* (for [e (@config :environments)]
(katello/newEnvironment {:name e
:org *session-org*})))))
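;; Illustrative usage sketch (not part of the original file): command line
;; options parsed with the `options` spec above are handed straight to init,
;; e.g. (init {:server-url "https://katello.example" :num-threads 2}),
;; after which *session-user*, *session-org*, *environments* etc. are bound
;; for the test run. The URL here is a made-up placeholder.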
(def promotion-deletion-lock nil) ;; var to lock on for promotions
(defn no-clients-defined "Blocks a test if no client machines are accessible." [_]
(boolean *cloud-conn*))
(defn client-defs "Return an infinite seq of client instance property definitions."
[basename]
(for [instname (unique-names basename)]
(ovirt/map->InstanceDefinition {:name instname
:template-name (@config :ovirt-template)
:memory (* 512 1024 1024)
:sockets 2
:cores 1})))
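;; Illustrative note: client-defs returns a lazy, infinite sequence, so
;; callers take only as many definitions as they need, e.g.
;; (take 2 (client-defs "client")) yields two InstanceDefinitions with
;; uniquified names based on "client".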
| null | https://raw.githubusercontent.com/RedHatQE/katello.auto/79fec96581044bce5db5350d0da325e517024962/src/katello/conf.clj | clojure | config layer
:default "-misc/sign_manifest.sh/scripts/test/manifest_generation/fake_key.pem"]
Tracing setup
default no limit to trace depth
bid adeiu to j.u.l logging
config files
var to lock on for promotions | (ns katello.conf
(:require [clojure.java.io :as io]
[clojure.string :as string]
clojure.tools.cli
[fn.trace :refer [all-fns]]
[ovirt.client :as ovirt]
katello
[katello.tasks :refer [unique-names]])
(:import [java.io PushbackReader FileNotFoundException]
[java.util.logging Level Logger]))
(def options
[["-h" "--help" "Print usage guide"
:default false :flag true]
["-s" "--server-url" "URL of the Katello server to test. Should use https URL if https is enabled."]
["-u" "--admin-user" "The admin username of the Katello server"
:default "admin"]
["-p" "--admin-password" "The admin password of the Katello server"
:default "admin"]
["-o" "--admin-org" "Name of Katello's admin organization"
:default "ACME_Corporation"]
["-y" "--sync-repo" "The url for a test repo to sync"
:default "/"]
["-m" "--fake-manifest-url" "URL that points to a fake test manifest"
: default " -cli/raw/fake-manifests-signed/system-test/fake-manifest-syncable.zip " ]
:default "-manifest-syncable.zip"]
["-r" "--fake-repo-url" "A Fake content delivery url to be used with --fake-manifest-url"
:default "/"]
["--redhat-manifest-url" "URL that points to a Red Hat test manifest"
:default "-manifest.zip"]
["--redhat-repo-url" "A Red Hat content delivery url to be used with --redhat-manifest-url"
:default "/"]
["--upgraded" "Running upgrade tests without setup, because setup already ran previous run."
:default false :flag true]
["--key-url" "A private key used to sign the cloned manifests"
:default ""]
["-e" "--environments" "A comma separated list of environment names to test with (need not already exist)"
:parse-fn #(seq (string/split % #",")) :default '("Development" "Q-eh") ]
["--ovirt-url" "A URL to ovirt (or RHEVM) API that can be used to provision client machines for tests that require them"]
["--ovirt-user" "The username to log in to ovirt api."]
["--ovirt-password" "The password for the ovirt user."]
["--ovirt-template" "The template to use to provision clients."]
["--ovirt-cluster" "The cluster to use to deploy clients on."]
["--sauce-user" "The username to log in to sauce api."]
["--sauce-key" "The api key used to log in to sauce."]
["--sauce-browser" "The browser to use on sauce for tests. "]
["--sauce-browser-version" "The version number of the browser to be used on sauce."]
["--sauce-os" "The OS to use on sauce for tests."]
["-a" "--selenium-address" "Address of the selenium server to connect to. eg 'host.com:4444' If none specified, an embedded selenium server is used."]
["-k" "--client-ssh-key" "The location of a (passwordless) ssh private key that can be used to access client machines."
:default (format "%s/.ssh/id_auto_dsa" (System/getProperty "user.home"))]
["-n" "--num-threads" "Number of threads to run tests with"
:parse-fn #(Integer. %) :default 5]
["-b" "--browser-types" "Selenium browser types, eg '*firefox' or '*firefox,*googlechrome' (multiple values only used when threads > 1"
:default ["*firefox"] :parse-fn #(string/split % #",")]
["--locale" "A locale to set the browser to for all the tests (if not set, will default to browser's default. Firefox only. eg 'fr' for french. Note, if using a remote selenium server, that server must already have a profile set up where the profile name equals the locale name."]
["-c" "--config" "Config files (containing a clojure map of config options) to read and overlay other command line options on top of - a list of comma separated places to look - first existing file is used and rest are ignored."
:default ["automation-properties.clj" (format "%s/automation-properties.clj" (System/getProperty "user.home"))]
:parse-fn #(string/split % #",")]
["--trace" "Namespaces and functions to trace"
:parse-fn #(->> (string/split % #",") (map symbol) vec)]
["--trace-excludes" "Functions to exclude from tracing"
:parse-fn #(->> (string/split % #",") (map symbol) (into #{}))]
["--sethostname" "URL of a script which can set the hostname of newly created VM"
:default "-scripts/master/jenkins/sethostname.sh"]
["--agent-repo" "URL of a .repo file to point to where katello-agent can be installed from."
:default "-devel.repo"]
["--gpg-key" "URL of a GPG-Key"
:default "-GPG-KEY-dummy-packages-generator"]])
(def defaults (first (apply clojure.tools.cli/cli [] options)))
(def config (atom {}))
(def ^{:doc "Some pre-set trace settings. Don't trace too deeply into some
functions (or not at all into others)"}
trace-depths
'{katello.menu/fmap 0
katello.ui/component-deployment-dispatch 0
katello.ui/current-session-deployment 0
katello.notifications/success? 0
katello.tasks/uniqueify 0
katello.tasks/uniques 0
katello.conf/client-defs 0
katello.setup/conf-selenium 0
katello.rest/read-json-safe 0
katello.rest/get-id 1
katello/chain 1
katello/instance-or-nil? 0
webdriver/locator-finder-fn 1
webdriver/click 1
webdriver/input-text 1
webdriver/select-by-text 1
webdriver/move-to 1
webdriver/exists? 1
webdriver/visible? 1})
(defn record-contructor-depths
"Returns trace setting to not trace record constructors."
[]
(zipmap (filter (fn [fsym]
(re-find #"/map->|/new" (str fsym)))
(all-fns '(katello)))
(repeat 0)))
(defn trace-list
"Creates a list of functions to trace. Includes all katello
namespaces (except a few functions), and some of the API and
underlying lib namespaces."
[]
(-> (->> (loaded-libs)
(filter (fn [sym] (->> sym str (re-find #"^katello|^webdriver"))))
all-fns
(concat '(clj-http.client/get
clj-http.client/put
clj-http.client/post
clj-http.client/delete)))
(merge trace-depths (record-contructor-depths))))
(declare ^:dynamic *session-user*
^:dynamic *session-org*
^:dynamic *browsers*
^:dynamic *cloud-conn*
^:dynamic *environments*
^:dynamic *upgraded*)
(defn- try-read-configs
"try to read a config from filename, if file doesn't exist, return nil"
[filenames]
(for [f filenames]
(try
(with-open [r (io/reader f)]
(read (PushbackReader. r)))
(catch FileNotFoundException fnfe
nil))) )
(defn init
"Read in properties and set some defaults. This function should be
called before selenium client is created or any tests are run."
([] (init {}))
([opts]
(-> (Logger/getLogger "") (.setLevel Level/OFF))
(swap! config merge defaults opts)
(swap! config merge (->> (:config @config)
try-read-configs
(drop-while nil?)
first))
(let [non-defaults (into {}
(filter (fn [[k v]] (not= v (k defaults)))
opts))]
merge 2nd time to override anything in
(def ^:dynamic *session-user* (katello/newUser {:name (@config :admin-user)
:password (@config :admin-password)
:email ""}))
(def ^:dynamic *session-org* (katello/newOrganization {:name (@config :admin-org)}))
(def ^:dynamic *cloud-conn* (try (when-let [ovirt-url (@config :ovirt-url)]
{:api (org.ovirt.engine.sdk.Api. ovirt-url
(@config :ovirt-user)
(@config :ovirt-password))
:cluster (@config :ovirt-cluster)})
(catch Exception e (.printStackTrace e))))
(def ^:dynamic *browsers* (@config :browser-types))
(def ^:dynamic *upgraded* (@config :upgraded))
(def ^:dynamic *environments* (for [e (@config :environments)]
(katello/newEnvironment {:name e
:org *session-org*})))))
(defn no-clients-defined "Blocks a test if no client machines are accessible." [_]
(boolean *cloud-conn*))
(defn client-defs "Return an infinite seq of client instance property definitions."
[basename]
(for [instname (unique-names basename)]
(ovirt/map->InstanceDefinition {:name instname
:template-name (@config :ovirt-template)
:memory (* 512 1024 1024)
:sockets 2
:cores 1})))
|
7b4161c62fd31f68e222e5694a306c3b86988a160055ebea60bcf807e77041c4 | facebookarchive/pfff | gtkThread.ml | (**************************************************************************)
(* Lablgtk *)
(* *)
(* This program is free software; you can redistribute it *)
and/or modify it under the terms of the GNU Library General
Public License as published by the Free Software Foundation
version 2 , with the exception described in file COPYING which
(* comes with the library. *)
(* *)
(* This program is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
GNU Library General Public License for more details .
(* *)
You should have received a copy of the GNU Library General
Public License along with this program ; if not , write to the
Free Software Foundation , Inc. , 59 Temple Place , Suite 330 ,
Boston , MA 02111 - 1307 USA
(* *)
(* *)
(**************************************************************************)
(* $Id: gtkThread.ml 1518 2010-06-25 09:23:44Z garrigue $ *)
open GtkMain
(* Job handling for Windows *)
let jobs : (unit -> unit) Queue.t = Queue.create ()
let m = Mutex.create ()
let with_jobs f =
Mutex.lock m; let y = f jobs in Mutex.unlock m; y
let loop_id = ref None
let reset () = loop_id := None
let cannot_sync () =
match !loop_id with None -> true
| Some id -> Thread.id (Thread.self ()) = id
let gui_safe () =
not (Sys.os_type = "Win32") || !loop_id = Some(Thread.id (Thread.self ()))
let has_jobs () = not (with_jobs Queue.is_empty)
let n_jobs () = with_jobs Queue.length
let do_next_job () = with_jobs Queue.take ()
let async j x = with_jobs
(Queue.add (fun () ->
GtkSignal.safe_call j x ~where:"asynchronous call"))
type 'a result = Val of 'a | Exn of exn | NA
let sync f x =
if cannot_sync () then f x else
let m = Mutex.create () in
let res = ref NA in
Mutex.lock m;
let c = Condition.create () in
let j x =
let y = try Val (f x) with e -> Exn e in
Mutex.lock m; res := y; Mutex.unlock m;
Condition.signal c
in
async j x;
while !res = NA do Condition.wait c m done;
match !res with Val y -> y | Exn e -> raise e | NA -> assert false
let do_jobs () =
Thread.delay 0.0001;
for i = 1 to n_jobs () do do_next_job () done;
true
(* We check first whether there are some event pending, and run
   some iterations. We then need to delay, thus forcing a thread switch. *)
let thread_main_real () =
try
let loop = (Glib.Main.create true) in
Main.loops := loop :: !Main.loops;
loop_id := Some (Thread.id (Thread.self ()));
while Glib.Main.is_running loop do
let i = ref 0 in
while !i < 100 && Glib.Main.pending () do
Glib.Main.iteration true;
incr i
done;
do_jobs ()
done;
Main.loops := List.tl !Main.loops;
with exn ->
Main.loops := List.tl !Main.loops;
raise exn
let thread_main () =
sync thread_main_real ()
let main () =
GtkMain.Main.main_func := thread_main;
thread_main ()
let start () =
reset ();
Thread.create main ()
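(* Illustrative usage sketch (not part of the original file): a program with
   worker threads would typically run the GTK loop via this module and route
   every UI access through sync/async, e.g.

     let () =
       let gui = start () in
       ignore (Thread.create (fun () -> sync update_some_widget ()) ());
       Thread.join gui

   where update_some_widget stands in for the caller's own function. *)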
(* The code below would do nothing...
let _ =
  let mutex = Mutex.create () in
  let depth = ref 0 in
  GtkSignal.enter_callback :=
    (fun () -> if !depth = 0 then Mutex.lock mutex; incr depth);
  GtkSignal.exit_callback :=
    (fun () -> decr depth; if !depth = 0 then Mutex.unlock mutex)
*)
| null | https://raw.githubusercontent.com/facebookarchive/pfff/ec21095ab7d445559576513a63314e794378c367/external/ocamlgtk/src/gtkThread.ml | ocaml | ************************************************************************
Lablgtk
This program is free software; you can redistribute it
comes with the library.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
************************************************************************
Job handling for Windows | and/or modify it under the terms of the GNU Library General
Public License as published by the Free Software Foundation
version 2 , with the exception described in file COPYING which
GNU Library General Public License for more details .
You should have received a copy of the GNU Library General
Public License along with this program ; if not , write to the
Free Software Foundation , Inc. , 59 Temple Place , Suite 330 ,
Boston , MA 02111 - 1307 USA
$ I d : gtkThread.ml 1518 2010 - 06 - 25 09:23:44Z garrigue $
open GtkMain
let jobs : (unit -> unit) Queue.t = Queue.create ()
let m = Mutex.create ()
let with_jobs f =
Mutex.lock m; let y = f jobs in Mutex.unlock m; y
let loop_id = ref None
let reset () = loop_id := None
let cannot_sync () =
match !loop_id with None -> true
| Some id -> Thread.id (Thread.self ()) = id
let gui_safe () =
not (Sys.os_type = "Win32") || !loop_id = Some(Thread.id (Thread.self ()))
let has_jobs () = not (with_jobs Queue.is_empty)
let n_jobs () = with_jobs Queue.length
let do_next_job () = with_jobs Queue.take ()
let async j x = with_jobs
(Queue.add (fun () ->
GtkSignal.safe_call j x ~where:"asynchronous call"))
type 'a result = Val of 'a | Exn of exn | NA
let sync f x =
if cannot_sync () then f x else
let m = Mutex.create () in
let res = ref NA in
Mutex.lock m;
let c = Condition.create () in
let j x =
let y = try Val (f x) with e -> Exn e in
Mutex.lock m; res := y; Mutex.unlock m;
Condition.signal c
in
async j x;
while !res = NA do Condition.wait c m done;
match !res with Val y -> y | Exn e -> raise e | NA -> assert false
let do_jobs () =
Thread.delay 0.0001;
for i = 1 to n_jobs () do do_next_job () done;
true
We check first whether there are some event pending , and run
some iterations . We then need to delay , thus focing a thread switch .
some iterations. We then need to delay, thus focing a thread switch. *)
let thread_main_real () =
try
let loop = (Glib.Main.create true) in
Main.loops := loop :: !Main.loops;
loop_id := Some (Thread.id (Thread.self ()));
while Glib.Main.is_running loop do
let i = ref 0 in
while !i < 100 && Glib.Main.pending () do
Glib.Main.iteration true;
incr i
done;
do_jobs ()
done;
Main.loops := List.tl !Main.loops;
with exn ->
Main.loops := List.tl !Main.loops;
raise exn
let thread_main () =
sync thread_main_real ()
let main () =
GtkMain.Main.main_func := thread_main;
thread_main ()
let start () =
reset ();
Thread.create main ()
The code below would do nothing ...
let _ =
let mutex = Mutex.create ( ) in
let depth = ref 0 in
GtkSignal.enter_callback : =
( fun ( ) - > if ! depth = 0 then Mutex.lock mutex ; incr depth ) ;
GtkSignal.exit_callback : =
( fun ( ) - > decr depth ; if ! depth = 0 then Mutex.unlock mutex )
let _ =
let mutex = Mutex.create () in
let depth = ref 0 in
GtkSignal.enter_callback :=
(fun () -> if !depth = 0 then Mutex.lock mutex; incr depth);
GtkSignal.exit_callback :=
(fun () -> decr depth; if !depth = 0 then Mutex.unlock mutex)
*)
|
f36b670e3ac1e709b13aac26988b09385f00b0d4aae70ae903bec2a9b98101e4 | jvf/scalaris | prbr_SUITE.erl | 2012 - 2016 Zuse Institute Berlin
% Licensed under the Apache License, Version 2.0 (the "License");
% you may not use this file except in compliance with the License.
% You may obtain a copy of the License at
%
% -2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS,
% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
% See the License for the specific language governing permissions and
% limitations under the License.
%% @author < >
%% @author < >
%% @doc Unit tests for prbr
%% @end
%% @version $Id$
-module(prbr_SUITE).
-author('').
-vsn('$Id$').
-compile(export_all).
-include("scalaris.hrl").
-include("unittest.hrl").
-include("client_types.hrl").
all() -> [
tester_type_check_rbr,
rbr_concurrency_kv,
rbr_concurrency_leases,
rbr_consistency,
rbr_consistency_delete
].
suite() -> [ {timetrap, {seconds, 400}} ].
init_per_suite(Config) ->
Config.
end_per_suite(_Config) ->
ok.
init_per_testcase(TestCase, Config) ->
case TestCase of
rbr_concurrency_kv ->
{priv_dir, PrivDir} = lists:keyfind(priv_dir, 1, Config),
Size = randoms:rand_uniform(3, 14),
unittest_helper:make_ring(Size, [{config, [{log_path, PrivDir}]}]),
%% necessary for the consistency check:
unittest_helper:check_ring_size_fully_joined(Size),
ok;
rbr_concurrency_leases ->
{priv_dir, PrivDir} = lists:keyfind(priv_dir, 1, Config),
Size = 1, %% larger rings not supported by leases yet,
%% Size = randoms:rand_uniform(2, 14),
unittest_helper:make_ring(Size, [{config, [{log_path, PrivDir},
{leases, true}]}]),
%% necessary for the consistency check:
unittest_helper:check_ring_size_fully_joined(Size),
ok;
rbr_consistency ->
{priv_dir, PrivDir} = lists:keyfind(priv_dir, 1, Config),
unittest_helper:make_symmetric_ring([{config, [{log_path, PrivDir}]}]),
%% necessary for the consistency check:
unittest_helper:check_ring_size_fully_joined(config:read(replication_factor)),
ok;
_ ->
{priv_dir, PrivDir} = lists:keyfind(priv_dir, 1, Config),
Size = randoms:rand_uniform(1, 9),
unittest_helper:make_ring(Size, [{config, [{log_path, PrivDir}]}]),
ok
end,
[{stop_ring, true} | Config].
end_per_testcase(_TestCase, _Config) ->
ok.
%% TODO: unittest for: retrigger on read works
%% TODO: unittest for: retrigger on write works
rbr_concurrency_kv(_Config) ->
%% start random number of nodes
%% select a key to operate on
%% start random number of writers (increment operations
%% / use increment as write filter)
%% start random number of readers
%% only observe increasing values in reads
Key = randoms:getRandomString(),
{ok} = kv_on_cseq:write(Key, 1),
Parallel = randoms:rand_uniform(1, 11),
Count = 1000 div Parallel,
ct:pal("Starting concurrent writers: ~p~n"
"Performing iterations: ~p~n",
[Parallel, Count]),
UnitTestPid = self(),
_Pids = [ spawn(fun() ->
_ = [ begin
{ok, V} = kv_on_cseq:read(Key),
{ok} = kv_on_cseq:write(Key, V+1)
%% if 0 == I rem 100 ->
%%        ct:pal("~p performed write ~p.~n",
%%               [_, I]);
%%    true -> ok
%% end
end
|| _I <- lists:seq(1, Count)],
UnitTestPid ! {done}
end)
|| _Nth <- lists:seq(1, Parallel)],
_ = [ receive {done} ->
ct:pal("Finished ~p/~p.~n", [Nth, Parallel]),
ok
end || Nth <- lists:seq(1, Parallel)],
ct:pal("Planned ~p increments, done ~p - discrepancy is ok~n",
[Count*Parallel, kv_on_cseq:read(Key)]),
ok.
rbr_concurrency_leases(_Config) ->
%% start random number of nodes
%% select a key to operate on
%% start random number of writers (increment operations
%% / use increment as write filter)
%% start random number of readers
%% only observe increasing values in reads
Key = ?RT:get_random_node_id(),
ContentCheck =
fun (Current, _WriteFilter, _Next) ->
case Current == prbr_bottom of
true ->
{true, null};
false ->
{false, lease_already_exists}
end
end,
New = l_on_cseq:unittest_create_lease(Key),
DB = rbrcseq:get_db_for_id(lease_db, Key),
rbrcseq:qwrite(DB, self(), Key, l_on_cseq,
ContentCheck,
New),
receive
{qwrite_done, _ReqId, _Round, _, _} -> ok
end,
Parallel = randoms:rand_uniform(4, 11),
Count = 1000 div Parallel,
ct:pal("Starting concurrent writers: ~p~n"
"Performing iterations: ~p~n",
[Parallel, Count]),
UnitTestPid = self(),
DHTNodeGroups = pid_groups:groups_with(dht_node),
DHTNodeGroupsLen = length(DHTNodeGroups),
_Pids =
[ spawn(
fun() ->
Group = lists:nth(1 + Nth rem DHTNodeGroupsLen,
DHTNodeGroups),
pid_groups:join(Group),
_ = [ begin
F = fun(X) ->
{ok, V} = l_on_cseq:read(Key),
Update =
l_on_cseq:unittest_lease_update_unsafe(
V,
l_on_cseq:set_version(
V, l_on_cseq:get_version(V)+1), passive),
case Update of
ok -> ok;
failed ->
%% ct:pal("~p retry ~p.~n",
%%        [_, l_on_cseq:get_version(V)+1]),
X(X)
end
end,
F(F)
%% ct:pal("~p performed write.~n", [_Nth])
end
|| _I <- lists:seq(1, Count)],
UnitTestPid ! {done}
end)
|| Nth <- lists:seq(1, Parallel)],
_ = [ receive {done} ->
ct:pal("Finished ~p/~p.~n", [Nth, Parallel]),
ok
end || Nth <- lists:seq(1, Parallel)],
ct:pal("Planned ~p increments, done ~p, discrepancy is ok~n",
[Count*Parallel, l_on_cseq:read(Key)]),
ok.
rbr_consistency(_Config) ->
%% create an rbr entry
%% update 1 to 3 of its replicas
%% perform read in all quorum permutations
%% (intercept read on a single dht node)
%% output must be the old value or the new value
%% if the new value was seen once, the old must not be readable again.
Nodes = pid_groups:find_all(dht_node),
Key = "a",
%% initialize key
{ok} = kv_on_cseq:write(Key, 1),
%% select a replica
Replicas = ?RT:get_replica_keys(?RT:hash_key(Key)),
%% print modified rbr entries
%% api_tx_proto_sched_SUITE:rbr_invariant(a,b,c),
_ = [ begin
New = N+100,
{ok, Old} = kv_on_cseq:read(Key),
modify_rbr_at_key(R, N+100),
%% ct:pal("After modification:"),
%% print modified rbr entries
%% api_tx_proto_sched_SUITE:rbr_invariant(a,b,c),
%% intercept and drop a message at r1
_ = lists:foldl(read_quorum_without(Key), {Old, New}, Nodes),
ok
end || {R,N} <- lists:zip(Replicas, lists:seq(1, config:read(replication_factor)))],
ok.
rbr_consistency_delete(_Config) ->
%% create an rbr entry
%% update 1 to 3 of its replicas
%% perform read in all quorum permutations
%% (intercept read on a single dht node)
%% output must be the old value or the new value
%% if the new value was seen once, the old must not be readable again.
%% Nodes = pid_groups:find_all(dht_node),
Key = "a",
%% initialize key
{ok} = kv_on_cseq:write(Key, 1),
%% select a replica
Replicas = ?RT:get_replica_keys(?RT:hash_key(Key)),
%% print modified rbr entries
%% api_tx_proto_sched_SUITE:rbr_invariant(a,b,c),
ct:pal("Starting delete test~n"),
Res = [ begin
ct:pal("Read iteration: ~p~n", [R]),
{ok, Old} = kv_on_cseq:read(Key),
delete_rbr_entry_at_key(R),
Next = Old + 1,
ct:pal("Write in iteration: ~p~n", [R]),
_ = kv_on_cseq:write(Key, Next),
%% ct:pal("After modification:"),
%% print modified rbr entries
%% api_tx_proto_sched_SUITE:rbr_invariant(a,b,c),
ct:pal("Reread in iteration: ~p~n", [R]),
{ok, Next} = kv_on_cseq:read(Key)
end || R <- Replicas],
ct:pal("Result: ~p~n", [Res]),
ok.
tester_type_check_rbr(_Config) ->
Count = 250,
config:write(no_print_ring_data, true),
tester:register_value_creator({typedef, prbr, write_filter, []},
prbr, tester_create_write_filter, 1),
%% [{modulename, [excludelist = {fun, arity}]}]
Modules =
[ {txid_on_cseq,
[ {is_valid_new, 3}, %% cannot create funs
{is_valid_decide, 3}, %% cannot create funs
{is_valid_delete, 3}, %% cannot create funs
{decide, 5}, %% cannot create pids
{delete, 2}, %% cannot create pids
{read, 2} %% cannot create pids
],
[ ]
},
{tx_tm,
[{start_link, 2}, %% starts processes
{start_gen_component,5}, %% unsupported types
%% needs to be pid_group member
{on, 2}, %% needs valid messages
{on_init, 2}, %% needs valid messages
{commit, 4}, %% needs valid clients pid
{msg_commit_reply, 3} %% needs valid clients pid
],
[ {get_entry, 2}, %% could read arb, entries
%% guessing keys of tx entries ...
{tx_state_add_nextround_writtenval_for_commit, 4}
]
},
{kv_on_cseq,
[ {commit_read, 5}, %% tested via feeder
{commit_write, 5}, %% tested via feeder
{abort_read, 5}, %% tested via feeder
{abort_write, 5}], %% tested via feeder
[]},
{pr,
[
],
[]},
{prbr,
[ {init, 1}, %% needs to be in a pidgroup for db_name
{close, 1}, %% needs valid ets:tid()
{close_and_delete, 1}, %% needs valid ets:tid()
{on, 2}, %% sends messages
{get_load, 1}, %% needs valid ets:tid()
{set_entry, 2}, %% needs valid ets:tid()
{get_entry, 2}, %% needs valid ets:tid()
{delete_entry, 2}, %% needs valid ets:tid()
{tab2list, 1}, %% needs valid ets:tid()
{tab2list_raw_unittest, 1} %% needs valid ets:tid()
],
[ {msg_read_reply, 5}, %% sends messages
{msg_write_reply, 6}, %% sends messages
{msg_write_deny, 4}, %% sends messages
{tab2list_raw, 1} %% needs valid ets:tid()
]},
{rbrcseq,
[ {on, 2}, %% sends messages
{qread, 4}, %% tries to create envelopes
{qread, 5}, %% needs fun as input
{start_link, 3}, %% needs fun as input
{start_gen_component,5}, %% unsupported types
{qwrite, 6}, %% needs funs as input
{qwrite, 8}, %% needs funs as input
{qwrite_fast, 8}, %% needs funs as input
{qwrite_fast, 10} %% needs funs as input
],
[ {inform_client, 2}, %% cannot create valid envelopes
{get_entry, 2}, %% needs valid ets:tid()
{set_entry, 2}, %% needs valid ets:tid()
{add_read_reply, 6},%% needs client_value matching db_type
{add_write_reply, 3}%% needs valid entry()
]},
{replication,
[ {get_read_value, 2}, %% cannot create funs
{collect_read_value, 3} %% needs client_value matching datatype
],
[]}
],
_ = [ tester:type_check_module(Mod, Excl, ExclPriv, Count)
|| {Mod, Excl, ExclPriv} <- Modules ],
tester:unregister_value_creator({typedef, prbr, write_filter, []}),
true.
modify_rbr_at_key(R, N) ->
%% get a valid round number
%% we ask all replicas to not get an outdated round number (select
%% the highest one.
Rounds = [ begin
%% let fill in whether lookup was consistent
LookupReadEnvelope = dht_node_lookup:envelope(
4,
{prbr, read, kv_db, '_', comm:this(),
Repl, kv_on_cseq, unittest_rbr_consistency1_id,
fun prbr:noop_read_filter/1}),
comm:send_local(pid_groups:find_a(dht_node),
{?lookup_aux, Repl, 0, LookupReadEnvelope}),
receive
{read_reply, _, AssignedRound, _, _} ->
AssignedRound
end
end || Repl <- ?RT:get_replica_keys(R) ],
HighestRound = lists:max(Rounds),
%% perform a write
%% let fill in whether lookup was consistent
LookupWriteEnvelope = dht_node_lookup:envelope(
4,
{prbr, write, kv_db, '_', comm:this(),
R, kv_on_cseq, HighestRound,
{[], false, _Version = N-100, _Value = N},
null,
fun prbr:noop_write_filter/3}),
%% modify the replica at key R, therefore we use a lookup...
comm:send_local(pid_groups:find_a(dht_node),
{?lookup_aux, R, 0, LookupWriteEnvelope}),
receive
{write_reply, _, R, _, _NextRound, _} ->
ok
end.
delete_rbr_entry_at_key(R) ->
comm:send_local(pid_groups:find_a(dht_node),
{?lookup_aux, R, 0,
{prbr, delete_key, kv_db, self(), R}}),
receive {delete_key_reply, R} -> ok end.
drop_prbr_read_request(Client, Tag) ->
fun (Message, _State) ->
case Message of
%% {prbr, _, _, ReqClient, Key, _Round, _RF} ->
_ when element(1, Message) =:= prbr
andalso element(3, Message) =:= kv_db ->
ct:pal("Detected read, dropping it ~p, key ~p~n",
[self(), element(5, Message)]),
comm:send_local(Client, {Tag, done}),
drop_single;
_ when element(1, Message) =:= prbr ->
false;
_ -> false
end
end.
read_quorum_without(Key) ->
fun (ExcludedDHTNode, {Old, New}) ->
gen_component:bp_set_cond(
ExcludedDHTNode,
drop_prbr_read_request(self(), drop_prbr_read),
drop_prbr_read),
{ok, Val} = kv_on_cseq:read(Key),
io:format("Old: ~p, Val: ~p New: ~p", [Old, Val, New]),
receive
{drop_prbr_read, done} ->
gen_component:bp_del(ExcludedDHTNode, drop_prbr_read),
ok
end,
cleanup({drop_prbr_read, done}),
%% print modified rbr entries:
%% api_tx_proto_sched_SUITE:rbr_invariant(a,b,c),
case Val of
Old ->
{Old, New}; %% valid for next read
New ->
{New, New}; %% old is no longer acceptable
X ->
%% maybe an update was not propagated at all in the previous round
case X > Old andalso X < New of
true -> {X, New};
_ -> ?equals(Val, New)
end
end
end.
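%% Note on read_quorum_without/1: the fold threads {Old, New} through the
%% nodes as the window of legal read results. Once New has been observed,
%% the window collapses to {New, New}, i.e. the old value must never be
%% seen again; a value strictly between Old and New is taken as a missed
%% intermediate update and becomes the new lower bound.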
cleanup(Msg) ->
receive Msg -> cleanup(Msg)
after 0 -> ok
end.
| null | https://raw.githubusercontent.com/jvf/scalaris/c069f44cf149ea6c69e24bdb08714bda242e7ee0/test/prbr_SUITE.erl | erlang | you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@doc Unit tests for prbr
@end
@version $Id$
necessary for the consistency check:
larger rings not supported by leases yet,
necessary for the consistency check:
necessary for the consistency check:
TODO: unittest for: retrigger on read works
TODO: unittest for: retrigger on write works
start random number of nodes
select a key to operate on
start random number of writers (increment operations
/ use increment as write filter)
start random number of readers
only observe increasing values in reads
ct:pal("~p performed write ~p.~n",
true -> ok
end
start random number of nodes
select a key to operate on
start random number of writers (increment operations
/ use increment as write filter)
start random number of readers
only observe increasing values in reads
ct:pal("~p retry ~p.~n",
ct:pal("~p performed write.~n", [_Nth])
create an rbr entry
perform read in all quorum permutations
(intercept read on a single dht node)
output must be the old value or the new value
if the new value was seen once, the old must not be readable again.
initialize key
select a replica
print modified rbr entries
api_tx_proto_sched_SUITE:rbr_invariant(a,b,c),
ct:pal("After modification:"),
print modified rbr entries
api_tx_proto_sched_SUITE:rbr_invariant(a,b,c),
intercept and drop a message at r1
create an rbr entry
perform read in all quorum permutations
(intercept read on a single dht node)
output must be the old value or the new value
if the new value was seen once, the old must not be readable again.
Nodes = pid_groups:find_all(dht_node),
initialize key
select a replica
print modified rbr entries
api_tx_proto_sched_SUITE:rbr_invariant(a,b,c),
ct:pal("After modification:"),
print modified rbr entries
api_tx_proto_sched_SUITE:rbr_invariant(a,b,c),
[{modulename, [excludelist = {fun, arity}]}]
cannot create funs
cannot create funs
cannot create funs
cannot create pids
cannot create pids
cannot create pids
starts processes
unsupported types
needs valid messages
needs valid messages
needs valid clients pid
needs valid clients pid
could read arb, entries
tested via feeder
tested via feeder
tested via feeder
tested via feeder
needs to be in a pidgroup for db_name
needs valid ets:tid()
needs valid ets:tid()
sends messages
needs valid ets:tid()
needs valid ets:tid()
needs valid ets:tid()
needs valid ets:tid()
needs valid ets:tid()
needs valid ets:tid()
sends messages
sends messages
sends messages
needs valid ets:tid()
sends messages
tries to create envelopes
needs fun as input
needs fun as input
unsupported types
needs funs as input
needs funs as input
needs funs as input
needs funs as input
cannot create valid envelopes
needs valid ets:tid()
needs valid ets:tid()
needs client_value matching db_type
needs valid entry()
cannot create funs
needs client_value matching datatype
get a valid round number
we ask all replicas to not get an outdated round number (select
the highest one.
let fill in whether lookup was consistent
perform a write
let fill in whether lookup was consistent
modify the replica at key R, therefore we use a lookup...
print modified rbr entries:
api_tx_proto_sched_SUITE:rbr_invariant(a,b,c),
valid for next read
old is no longer acceptable
maybe an update was not propagated at all in the previous round | 2012 - 2016 Zuse Institute Berlin
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
@author < >
%% @author
-module(prbr_SUITE).
-author('').
-vsn('$Id$').
-compile(export_all).
-include("scalaris.hrl").
-include("unittest.hrl").
-include("client_types.hrl").
all() -> [
tester_type_check_rbr,
rbr_concurrency_kv,
rbr_concurrency_leases,
rbr_consistency,
rbr_consistency_delete
].
suite() -> [ {timetrap, {seconds, 400}} ].
init_per_suite(Config) ->
Config.
end_per_suite(_Config) ->
ok.
init_per_testcase(TestCase, Config) ->
case TestCase of
rbr_concurrency_kv ->
{priv_dir, PrivDir} = lists:keyfind(priv_dir, 1, Config),
Size = randoms:rand_uniform(3, 14),
unittest_helper:make_ring(Size, [{config, [{log_path, PrivDir}]}]),
unittest_helper:check_ring_size_fully_joined(Size),
ok;
rbr_concurrency_leases ->
{priv_dir, PrivDir} = lists:keyfind(priv_dir, 1, Config),
Size = randoms:rand_uniform(2, 14),
unittest_helper:make_ring(Size, [{config, [{log_path, PrivDir},
{leases, true}]}]),
unittest_helper:check_ring_size_fully_joined(Size),
ok;
rbr_consistency ->
{priv_dir, PrivDir} = lists:keyfind(priv_dir, 1, Config),
unittest_helper:make_symmetric_ring([{config, [{log_path, PrivDir}]}]),
unittest_helper:check_ring_size_fully_joined(config:read(replication_factor)),
ok;
_ ->
{priv_dir, PrivDir} = lists:keyfind(priv_dir, 1, Config),
Size = randoms:rand_uniform(1, 9),
unittest_helper:make_ring(Size, [{config, [{log_path, PrivDir}]}]),
ok
end,
[{stop_ring, true} | Config].
end_per_testcase(_TestCase, _Config) ->
ok.
rbr_concurrency_kv(_Config) ->
Key = randoms:getRandomString(),
{ok} = kv_on_cseq:write(Key, 1),
Parallel = randoms:rand_uniform(1, 11),
Count = 1000 div Parallel,
ct:pal("Starting concurrent writers: ~p~n"
"Performing iterations: ~p~n",
[Parallel, Count]),
UnitTestPid = self(),
_Pids = [ spawn(fun() ->
_ = [ begin
{ok, V} = kv_on_cseq:read(Key),
{ok} = kv_on_cseq:write(Key, V+1)
%% if 0 =:= I rem 100 ->
%%        ct:pal("~p performed write ~p.~n", [_Nth, I]);
%%    true -> ok
%% end
end
|| _I <- lists:seq(1, Count)],
UnitTestPid ! {done}
end)
|| _Nth <- lists:seq(1, Parallel)],
_ = [ receive {done} ->
ct:pal("Finished ~p/~p.~n", [Nth, Parallel]),
ok
end || Nth <- lists:seq(1, Parallel)],
ct:pal("Planned ~p increments, done ~p - discrepancy is ok~n",
[Count*Parallel, kv_on_cseq:read(Key)]),
ok.
rbr_concurrency_leases(_Config) ->
Key = ?RT:get_random_node_id(),
ContentCheck =
fun (Current, _WriteFilter, _Next) ->
case Current == prbr_bottom of
true ->
{true, null};
false ->
{false, lease_already_exists}
end
end,
New = l_on_cseq:unittest_create_lease(Key),
DB = rbrcseq:get_db_for_id(lease_db, Key),
rbrcseq:qwrite(DB, self(), Key, l_on_cseq,
ContentCheck,
New),
receive
{qwrite_done, _ReqId, _Round, _, _} -> ok
end,
Parallel = randoms:rand_uniform(4, 11),
Count = 1000 div Parallel,
ct:pal("Starting concurrent writers: ~p~n"
"Performing iterations: ~p~n",
[Parallel, Count]),
UnitTestPid = self(),
DHTNodeGroups = pid_groups:groups_with(dht_node),
DHTNodeGroupsLen = length(DHTNodeGroups),
_Pids =
[ spawn(
fun() ->
Group = lists:nth(1 + Nth rem DHTNodeGroupsLen,
DHTNodeGroups),
pid_groups:join(Group),
_ = [ begin
F = fun(X) ->
{ok, V} = l_on_cseq:read(Key),
Update =
l_on_cseq:unittest_lease_update_unsafe(
V,
l_on_cseq:set_version(
V, l_on_cseq:get_version(V)+1), passive),
case Update of
ok -> ok;
failed ->
%% ct:pal("~p retry ~p.~n",
%%        [_Nth, l_on_cseq:get_version(V)+1]),
X(X)
end
end,
F(F)
end
|| _I <- lists:seq(1, Count)],
UnitTestPid ! {done}
end)
|| Nth <- lists:seq(1, Parallel)],
_ = [ receive {done} ->
ct:pal("Finished ~p/~p.~n", [Nth, Parallel]),
ok
end || Nth <- lists:seq(1, Parallel)],
ct:pal("Planned ~p increments, done ~p, discrepancy is ok~n",
[Count*Parallel, l_on_cseq:read(Key)]),
ok.
rbr_consistency(_Config) ->
%% update 1 to 3 of its replicas
Nodes = pid_groups:find_all(dht_node),
Key = "a",
{ok} = kv_on_cseq:write(Key, 1),
Replicas = ?RT:get_replica_keys(?RT:hash_key(Key)),
_ = [ begin
New = N+100,
{ok, Old} = kv_on_cseq:read(Key),
modify_rbr_at_key(R, N+100),
_ = lists:foldl(read_quorum_without(Key), {Old, New}, Nodes),
ok
end || {R,N} <- lists:zip(Replicas, lists:seq(1, config:read(replication_factor)))],
ok.
rbr_consistency_delete(_Config) ->
%% update 1 to 3 of its replicas
Key = "a",
{ok} = kv_on_cseq:write(Key, 1),
Replicas = ?RT:get_replica_keys(?RT:hash_key(Key)),
ct:pal("Starting delete test~n"),
Res = [ begin
ct:pal("Read iteration: ~p~n", [R]),
{ok, Old} = kv_on_cseq:read(Key),
delete_rbr_entry_at_key(R),
Next = Old + 1,
ct:pal("Write in iteration: ~p~n", [R]),
_ = kv_on_cseq:write(Key, Next),
ct:pal("Reread in iteration: ~p~n", [R]),
{ok, Next} = kv_on_cseq:read(Key)
end || R <- Replicas],
ct:pal("Result: ~p~n", [Res]),
ok.
tester_type_check_rbr(_Config) ->
Count = 250,
config:write(no_print_ring_data, true),
tester:register_value_creator({typedef, prbr, write_filter, []},
prbr, tester_create_write_filter, 1),
Modules =
[ {txid_on_cseq,
],
[ ]
},
{tx_tm,
%% needs to be pid_group member
],
%% guessing keys of tx entries ...
{tx_state_add_nextround_writtenval_for_commit, 4}
]
},
{kv_on_cseq,
[]},
{pr,
[
],
[]},
{prbr,
],
]},
{rbrcseq,
],
]},
{replication,
],
[]}
],
_ = [ tester:type_check_module(Mod, Excl, ExclPriv, Count)
|| {Mod, Excl, ExclPriv} <- Modules ],
tester:unregister_value_creator({typedef, prbr, write_filter, []}),
true.
modify_rbr_at_key(R, N) ->
Rounds = [ begin
LookupReadEnvelope = dht_node_lookup:envelope(
4,
{prbr, read, kv_db, '_', comm:this(),
Repl, kv_on_cseq, unittest_rbr_consistency1_id,
fun prbr:noop_read_filter/1}),
comm:send_local(pid_groups:find_a(dht_node),
{?lookup_aux, Repl, 0, LookupReadEnvelope}),
receive
{read_reply, _, AssignedRound, _, _} ->
AssignedRound
end
end || Repl <- ?RT:get_replica_keys(R) ],
HighestRound = lists:max(Rounds),
LookupWriteEnvelope = dht_node_lookup:envelope(
4,
{prbr, write, kv_db, '_', comm:this(),
R, kv_on_cseq, HighestRound,
{[], false, _Version = N-100, _Value = N},
null,
fun prbr:noop_write_filter/3}),
comm:send_local(pid_groups:find_a(dht_node),
{?lookup_aux, R, 0, LookupWriteEnvelope}),
receive
{write_reply, _, R, _, _NextRound, _} ->
ok
end.
delete_rbr_entry_at_key(R) ->
comm:send_local(pid_groups:find_a(dht_node),
{?lookup_aux, R, 0,
{prbr, delete_key, kv_db, self(), R}}),
receive {delete_key_reply, R} -> ok end.
drop_prbr_read_request(Client, Tag) ->
fun (Message, _State) ->
case Message of
%% {prbr, _, _, ReqClient, Key, _Round, _RF} ->
_ when element(1, Message) =:= prbr
andalso element(3, Message) =:= kv_db ->
ct:pal("Detected read, dropping it ~p, key ~p~n",
[self(), element(5, Message)]),
comm:send_local(Client, {Tag, done}),
drop_single;
_ when element(1, Message) =:= prbr ->
false;
_ -> false
end
end.
read_quorum_without(Key) ->
fun (ExcludedDHTNode, {Old, New}) ->
gen_component:bp_set_cond(
ExcludedDHTNode,
drop_prbr_read_request(self(), drop_prbr_read),
drop_prbr_read),
{ok, Val} = kv_on_cseq:read(Key),
io:format("Old: ~p, Val: ~p New: ~p", [Old, Val, New]),
receive
{drop_prbr_read, done} ->
gen_component:bp_del(ExcludedDHTNode, drop_prbr_read),
ok
end,
cleanup({drop_prbr_read, done}),
case Val of
Old ->
New ->
X ->
case X > Old andalso X < New of
true -> {X, New};
_ -> ?equals(Val, New)
end
end
end.
cleanup(Msg) ->
receive Msg -> cleanup(Msg)
after 0 -> ok
end.
|
7582b0130fa5b51a6934dd4fa82914f8c4bc9fe03079e3baeac8885467aec8a5 | Datomic/dev.datafy | java.clj | Copyright ( c ) Cognitect , Inc.
;; All rights reserved.
;;
;; Licensed under the Apache License, Version 2.0 (the "License");
;; you may not use this file except in compliance with the License.
;; You may obtain a copy of the License at
;;
;;   http://www.apache.org/licenses/LICENSE-2.0
;;
;; Unless required by applicable law or agreed to in writing, software
;; distributed under the License is distributed on an "AS-IS" BASIS,
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
;; See the License for the specific language governing permissions and
;; limitations under the License.
(ns datomic.dev.datafy.java
(:require [clojure.core.protocols :as p]))
(set! *warn-on-reflection* true)
(defn hexify
"Convert byte array to hex string"
([^bytes bs]
(hexify bs 0 (alength bs)))
([^bytes bs pos len]
(let [hex [\0 \1 \2 \3 \4 \5 \6 \7 \8 \9 \A \B \C \D \E \F]
^chars buf (char-array (* 2 len))]
(loop [idx pos
out-idx 0
ct 0]
(if (< ct len)
(let [b (bit-and 0xff (aget bs idx))]
(aset-char buf out-idx (hex (bit-shift-right b 4)))
(aset-char buf (inc out-idx) (hex (bit-and b 0x0F)))
(recur (inc idx) (+ 2 out-idx) (inc ct)))
(String. buf))))))
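;; Minimal usage sketch (illustrative only; the sample bytes are made up):
;; the bytes 0x00 0x0F 0xFF hex-encode to "000FFF".
(comment
  (hexify (byte-array [0 15 -1])))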
(defn- byte-buffer-prefix-array
^bytes [^java.nio.ByteBuffer bb n]
(when (pos? (.remaining bb))
(let [cursor (.duplicate bb)
arr (byte-array (min (.remaining cursor) n))]
(.get cursor arr)
arr)))
(defn datafy!
"Datafies Java machinery. Currently mostly I/O."
[]
(extend-protocol p/Datafiable
java.net.ServerSocket
(datafy
[this]
{:channel (.getChannel this)
:inetAddress (.getInetAddress this)
:localPort (.getLocalPort this)
:localSocketAddress (.getLocalSocketAddress this)
:receiveBufferSize (.getReceiveBufferSize this)
:reuseAddress (.getReuseAddress this)
:soTimeout (.getSoTimeout this)
:isBound (.isBound this)
:isClosed (.isClosed this)})
java.lang.ThreadGroup
(datafy
[this]
{:activeCount (.activeCount this)
:maxPriority (.getMaxPriority this)
:name (.getName this)
:parent (.getParent this)
:isDaemon (.isDaemon this)
:isDestroyed (.isDestroyed this)
;; could navify '...' to enumerate all threads in the group?
})
java.lang.Thread
(datafy
[this]
{:id (.getId this)
:name (.getName this)
:priority (.getPriority this)
:threadGroup (.getThreadGroup this)
:isInterrupted (.isInterrupted this)
:isAlive (.isAlive this)
:isDaemon (.isDaemon this)})
java.util.concurrent.ThreadPoolExecutor
(datafy [this] (bean this))
java.nio.HeapByteBuffer
(datafy
[this]
(let [arr (byte-buffer-prefix-array this 64)]
{:position (.position this)
:remaining (.remaining this)
:limit (.limit this)
:capacity (.capacity this)
:prefix-hex (when arr (hexify arr))
:prefix-utf8 (when arr (String. arr "UTF-8"))}))))
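;; Usage sketch (illustrative only): install the extensions, then datafy a
;; live object via clojure.datafy; the result depends on runtime state.
(comment
  (require '[clojure.datafy :as d])
  (datafy!)
  (d/datafy (Thread/currentThread)))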
| null | https://raw.githubusercontent.com/Datomic/dev.datafy/67a0923c3e0492503a44672b5bf74a9b35e0e46f/src/datomic/dev/datafy/java.clj | clojure | All rights reserved.
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
could navify '...' to enumerate all threads in the group? | Copyright ( c ) Cognitect , Inc.
distributed under the License is distributed on an " AS - IS " BASIS ,
(ns datomic.dev.datafy.java
(:require [clojure.core.protocols :as p]))
(set! *warn-on-reflection* true)
(defn hexify
"Convert byte array to hex string"
([^bytes bs]
(hexify bs 0 (alength bs)))
([^bytes bs pos len]
(let [hex [\0 \1 \2 \3 \4 \5 \6 \7 \8 \9 \A \B \C \D \E \F]
^chars buf (char-array (* 2 len))]
(loop [idx pos
out-idx 0
ct 0]
(if (< ct len)
(let [b (bit-and 0xff (aget bs idx))]
(aset-char buf out-idx (hex (bit-shift-right b 4)))
(aset-char buf (inc out-idx) (hex (bit-and b 0x0F)))
(recur (inc idx) (+ 2 out-idx) (inc ct)))
(String. buf))))))
(defn- byte-buffer-prefix-array
^bytes [^java.nio.ByteBuffer bb n]
(when (pos? (.remaining bb))
(let [cursor (.duplicate bb)
arr (byte-array (min (.remaining cursor) n))]
(.get cursor arr)
arr)))
(defn datafy!
"Datafies Java machinery. Currently mostly I/O."
[]
(extend-protocol p/Datafiable
java.net.ServerSocket
(datafy
[this]
{:channel (.getChannel this)
:inetAddress (.getInetAddress this)
:localPort (.getLocalPort this)
:localSocketAddress (.getLocalSocketAddress this)
:receiveBufferSize (.getReceiveBufferSize this)
:reuseAddress (.getReuseAddress this)
:soTimeout (.getSoTimeout this)
:isBound (.isBound this)
:isClosed (.isClosed this)})
java.lang.ThreadGroup
(datafy
[this]
{:activeCount (.activeCount this)
:maxPriority (.getMaxPriority this)
:name (.getName this)
:parent (.getParent this)
:isDaemon (.isDaemon this)
:isDestroyed (.isDestroyed this)
})
java.lang.Thread
(datafy
[this]
{:id (.getId this)
:name (.getName this)
:priority (.getPriority this)
:threadGroup (.getThreadGroup this)
:isInterrupted (.isInterrupted this)
:isAlive (.isAlive this)
:isDaemon (.isDaemon this)})
java.util.concurrent.ThreadPoolExecutor
(datafy [this] (bean this))
java.nio.HeapByteBuffer
(datafy
[this]
(let [arr (byte-buffer-prefix-array this 64)]
{:position (.position this)
:remaining (.remaining this)
:limit (.limit this)
:capacity (.capacity this)
:prefix-hex (when arr (hexify arr))
:prefix-utf8 (when arr (String. arr "UTF-8"))}))))
|
42aa9f2ed6e2a1686e72e63dcbc9d8ed10c9ab4cb92257dd7ca5b26a00a0488f | AdaCore/why3 | inlining.mli | (********************************************************************)
(* *)
(*  The Why3 Verification Platform   /   The Why3 Development Team       *)
(*  Copyright 2010-2022 --  Inria - CNRS - Paris-Saclay University       *)
(* *)
(* This software is distributed under the terms of the GNU Lesser *)
(*  General Public License version 2.1, with the special exception       *)
(* on linking described in file LICENSE. *)
(* *)
(********************************************************************)
(** Inline non-recursive definitions *)
val intro_attr : Ident.attribute
val meta : Theory.meta
(** {2 Generic inlining} *)
val t :
use_meta:bool ->
in_goal:bool ->
?only_top_in_goal:bool ->
notls:(for_counterexample:bool -> Term.lsymbol -> bool) ->
notdef:(Term.term -> bool) ->
Task.task Trans.trans
(** [t ~use_meta ~in_goal ~notls ~notdef] returns a transformation
that expands a symbol [ls] in the subsequent declarations unless [ls]
satisfies one of the following conditions:
- [ls] is defined via a (mutually) recursive definition;
- [ls] is an inductive predicate or an algebraic type constructor;
- [notls ls] returns [true];
- [notdef] returns [true] on the definition of [ls];
- [use_meta] is set and [ls] is tagged by "inline:no"
Notice that [use_meta], [notls], [notdef] restrict only which
symbols are inlined, not when.
If [in_goal] is set, only the top-most symbols in the goal are expanded.
*)
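(* Usage sketch (illustrative only; the predicate bodies below are assumptions,
   not part of this interface): a transformation that refuses to inline only
   the symbols flagged for counterexamples and inlines everything else.

     let inline_unless_ce : Task.task Trans.trans =
       t ~use_meta:true ~in_goal:false
         ~notls:(fun ~for_counterexample _ls -> for_counterexample)
         ~notdef:(fun _def -> false)
*)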
(** {2 Registered Transformation} *)
val all : Task.task Trans.trans
(** [all] corresponds to the transformation "inline_all" *)
val goal : Task.task Trans.trans
(** [goal] corresponds to the transformation "inline_goal" *)
val trivial : Task.task Trans.trans
(** [trivial] corresponds to the transformation "inline_trivial"
Inline only the trivial definition :
logic c : t = a
logic f(x : t,...) : t = g(y : t2,...) *)
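(* Illustrative example: with a definition such as [logic three : int = 3],
   "inline_trivial" replaces occurrences of [three] by [3]; definitions whose
   bodies are not a constant or a plain renaming are left untouched. *)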
(*
(** Functions to use in other transformations if inlining is needed *)
type env
val empty_env : env
val addfs : env -> Term.lsymbol -> Term.vsymbol list -> Term.term -> env
val addps : env -> Term.lsymbol -> Term.vsymbol list -> Term.term -> env
(** [addls env ls vs t] trigger the inlining of [ls] by the definition
[t] with the free variables [vs]. The variables of [vs] must have
the same type as the arguments of [ls] *)
val replacet : env -> Term.term -> Term.term
val replacep : env -> Term.term -> Term.term
*)
open Term
val t_replace_all : (vsymbol list * term) Mls.t -> term -> term
(* [t_replace_all env t] replaces in [t] all occurrences of function
applicatios given in [env] *)
| null | https://raw.githubusercontent.com/AdaCore/why3/4441127004d53cf2cb0f722fed4a930ccf040ee4/src/transform/inlining.mli | ocaml | ******************************************************************
This software is distributed under the terms of the GNU Lesser
on linking described in file LICENSE.
******************************************************************
* Inline non-recursive definitions
* {2 Registered Transformation}
* [all] corresponds to the transformation "inline_all"
* [goal] corresponds to the transformation "inline_goal"
* [trivial] corresponds to the transformation "inline_trivial"
Inline only the trivial definition :
logic c : t = a
logic f(x : t,...) : t = g(y : t2,...)
(** Functions to use in other transformations if inlining is needed
* [addls env ls vs t] trigger the inlining of [ls] by the definition
[t] with the free variables [vs]. The variables of [vs] must have
the same type as the arguments of [ls]
[t_replace_all env t] replaces in [t] all occurrences of function
applicatios given in [env] | The Why3 Verification Platform / The Why3 Development Team
Copyright 2010 - 2022 -- Inria - CNRS - Paris - Saclay University
General Public License version 2.1 , with the special exception
val intro_attr : Ident.attribute
val meta : Theory.meta
(** {2 Generic inlining} *)
val t :
use_meta:bool ->
in_goal:bool ->
?only_top_in_goal:bool ->
notls:(for_counterexample:bool -> Term.lsymbol -> bool) ->
notdef:(Term.term -> bool) ->
Task.task Trans.trans
(** [t ~use_meta ~in_goal ~notls ~notdef] returns a transformation
that expands a symbol [ls] in the subsequent declarations unless [ls]
satisfies one of the following conditions:
- [ls] is defined via a (mutually) recursive definition;
- [ls] is an inductive predicate or an algebraic type constructor;
- [notls ls] returns [true];
- [notdef] returns [true] on the definition of [ls];
- [use_meta] is set and [ls] is tagged by "inline:no"
Notice that [use_meta], [notls], [notdef] restrict only which
symbols are inlined, not when.
If [in_goal] is set, only the top-most symbols in the goal are expanded.
*)
val all : Task.task Trans.trans
val goal : Task.task Trans.trans
val trivial : Task.task Trans.trans
type env
val empty_env : env
val addfs : env -> Term.lsymbol -> Term.vsymbol list -> Term.term -> env
val addps : env -> Term.lsymbol -> Term.vsymbol list -> Term.term -> env
val replacet : env -> Term.term -> Term.term
val replacep : env -> Term.term -> Term.term
*)
open Term
val t_replace_all : (vsymbol list * term) Mls.t -> term -> term
|
aa76a0a774c34f277d60aa8b895e0bd8ca5f09a90840e5751b1d5f0ed426668e | SimulaVR/godot-haskell | Sky.hs | {-# LANGUAGE DerivingStrategies, GeneralizedNewtypeDeriving,
  TypeFamilies, TypeOperators, FlexibleContexts, DataKinds,
  MultiParamTypeClasses #-}
module Godot.Core.Sky
(Godot.Core.Sky._RADIANCE_SIZE_128,
Godot.Core.Sky._RADIANCE_SIZE_2048,
Godot.Core.Sky._RADIANCE_SIZE_512,
Godot.Core.Sky._RADIANCE_SIZE_1024,
Godot.Core.Sky._RADIANCE_SIZE_64, Godot.Core.Sky._RADIANCE_SIZE_32,
Godot.Core.Sky._RADIANCE_SIZE_256,
Godot.Core.Sky._RADIANCE_SIZE_MAX,
Godot.Core.Sky.get_radiance_size, Godot.Core.Sky.set_radiance_size)
where
import Data.Coerce
import Foreign.C
import Godot.Internal.Dispatch
import qualified Data.Vector as V
import Linear(V2(..),V3(..),M22)
import Data.Colour(withOpacity)
import Data.Colour.SRGB(sRGB)
import System.IO.Unsafe
import Godot.Gdnative.Internal
import Godot.Api.Types
import Godot.Core.Resource()
_RADIANCE_SIZE_128 :: Int
_RADIANCE_SIZE_128 = 2
_RADIANCE_SIZE_2048 :: Int
_RADIANCE_SIZE_2048 = 6
_RADIANCE_SIZE_512 :: Int
_RADIANCE_SIZE_512 = 4
_RADIANCE_SIZE_1024 :: Int
_RADIANCE_SIZE_1024 = 5
_RADIANCE_SIZE_64 :: Int
_RADIANCE_SIZE_64 = 1
_RADIANCE_SIZE_32 :: Int
_RADIANCE_SIZE_32 = 0
_RADIANCE_SIZE_256 :: Int
_RADIANCE_SIZE_256 = 3
_RADIANCE_SIZE_MAX :: Int
_RADIANCE_SIZE_MAX = 7
instance NodeProperty Sky "radiance_size" Int 'False where
nodeProperty
= (get_radiance_size, wrapDroppingSetter set_radiance_size,
Nothing)
{-# NOINLINE bindSky_get_radiance_size #-}
-- | The @Sky@'s radiance map size. The higher the radiance map size, the more detailed the lighting from the @Sky@ will be.
--   See @enum RadianceSize@ constants for values.
--   __Note:__ Some hardware will have trouble with higher radiance sizes, especially @RADIANCE_SIZE_512@ and above. Only use such high values on high-end hardware.
bindSky_get_radiance_size :: MethodBind
bindSky_get_radiance_size
= unsafePerformIO $
withCString "Sky" $
\ clsNamePtr ->
withCString "get_radiance_size" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
-- | The @Sky@'s radiance map size. The higher the radiance map size, the more detailed the lighting from the @Sky@ will be.
--   See @enum RadianceSize@ constants for values.
--   __Note:__ Some hardware will have trouble with higher radiance sizes, especially @RADIANCE_SIZE_512@ and above. Only use such high values on high-end hardware.
get_radiance_size :: (Sky :< cls, Object :< cls) => cls -> IO Int
get_radiance_size cls
= withVariantArray []
(\ (arrPtr, len) ->
godot_method_bind_call bindSky_get_radiance_size (upcast cls)
arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod Sky "get_radiance_size" '[] (IO Int) where
nodeMethod = Godot.Core.Sky.get_radiance_size
{-# NOINLINE bindSky_set_radiance_size #-}
-- | The @Sky@'s radiance map size. The higher the radiance map size, the more detailed the lighting from the @Sky@ will be.
--   See @enum RadianceSize@ constants for values.
--   __Note:__ Some hardware will have trouble with higher radiance sizes, especially @RADIANCE_SIZE_512@ and above. Only use such high values on high-end hardware.
bindSky_set_radiance_size :: MethodBind
bindSky_set_radiance_size
= unsafePerformIO $
withCString "Sky" $
\ clsNamePtr ->
withCString "set_radiance_size" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
-- | The @Sky@'s radiance map size. The higher the radiance map size, the more detailed the lighting from the @Sky@ will be.
--   See @enum RadianceSize@ constants for values.
--   __Note:__ Some hardware will have trouble with higher radiance sizes, especially @RADIANCE_SIZE_512@ and above. Only use such high values on high-end hardware.
set_radiance_size ::
(Sky :< cls, Object :< cls) => cls -> Int -> IO ()
set_radiance_size cls arg1
= withVariantArray [toVariant arg1]
(\ (arrPtr, len) ->
godot_method_bind_call bindSky_set_radiance_size (upcast cls)
arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod Sky "set_radiance_size" '[Int] (IO ()) where
nodeMethod = Godot.Core.Sky.set_radiance_size | null | https://raw.githubusercontent.com/SimulaVR/godot-haskell/e8f2c45f1b9cc2f0586ebdc9ec6002c8c2d384ae/src/Godot/Core/Sky.hs | haskell | | The @Sky@'s radiance map size. The higher the radiance map size, the more detailed the lighting from the @Sky@ will be.
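-- Usage sketch (illustrative only; 'sky' stands for a Sky resource obtained
-- elsewhere and is an assumption of this example, not part of this module):
--
-- > do sz <- get_radiance_size sky
-- >    when (sz /= _RADIANCE_SIZE_256) $ set_radiance_size sky _RADIANCE_SIZE_256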
| The @Sky@'s radiance map size. The higher the radiance map size, the more detailed the lighting from the @Sky@ will be.
| The @Sky@'s radiance map size. The higher the radiance map size, the more detailed the lighting from the @Sky@ will be.
| The @Sky@'s radiance map size. The higher the radiance map size, the more detailed the lighting from the @Sky@ will be. | # LANGUAGE DerivingStrategies , GeneralizedNewtypeDeriving ,
TypeFamilies , TypeOperators , FlexibleContexts , DataKinds ,
MultiParamTypeClasses #
TypeFamilies, TypeOperators, FlexibleContexts, DataKinds,
MultiParamTypeClasses #-}
module Godot.Core.Sky
(Godot.Core.Sky._RADIANCE_SIZE_128,
Godot.Core.Sky._RADIANCE_SIZE_2048,
Godot.Core.Sky._RADIANCE_SIZE_512,
Godot.Core.Sky._RADIANCE_SIZE_1024,
Godot.Core.Sky._RADIANCE_SIZE_64, Godot.Core.Sky._RADIANCE_SIZE_32,
Godot.Core.Sky._RADIANCE_SIZE_256,
Godot.Core.Sky._RADIANCE_SIZE_MAX,
Godot.Core.Sky.get_radiance_size, Godot.Core.Sky.set_radiance_size)
where
import Data.Coerce
import Foreign.C
import Godot.Internal.Dispatch
import qualified Data.Vector as V
import Linear(V2(..),V3(..),M22)
import Data.Colour(withOpacity)
import Data.Colour.SRGB(sRGB)
import System.IO.Unsafe
import Godot.Gdnative.Internal
import Godot.Api.Types
import Godot.Core.Resource()
_RADIANCE_SIZE_128 :: Int
_RADIANCE_SIZE_128 = 2
_RADIANCE_SIZE_2048 :: Int
_RADIANCE_SIZE_2048 = 6
_RADIANCE_SIZE_512 :: Int
_RADIANCE_SIZE_512 = 4
_RADIANCE_SIZE_1024 :: Int
_RADIANCE_SIZE_1024 = 5
_RADIANCE_SIZE_64 :: Int
_RADIANCE_SIZE_64 = 1
_RADIANCE_SIZE_32 :: Int
_RADIANCE_SIZE_32 = 0
_RADIANCE_SIZE_256 :: Int
_RADIANCE_SIZE_256 = 3
_RADIANCE_SIZE_MAX :: Int
_RADIANCE_SIZE_MAX = 7
instance NodeProperty Sky "radiance_size" Int 'False where
nodeProperty
= (get_radiance_size, wrapDroppingSetter set_radiance_size,
Nothing)
{-# NOINLINE bindSky_get_radiance_size #-}
--   See @enum RadianceSize@ constants for values.
--   __Note:__ Some hardware will have trouble with higher radiance sizes, especially @RADIANCE_SIZE_512@ and above. Only use such high values on high-end hardware.
bindSky_get_radiance_size :: MethodBind
bindSky_get_radiance_size
= unsafePerformIO $
withCString "Sky" $
\ clsNamePtr ->
withCString "get_radiance_size" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
--   See @enum RadianceSize@ constants for values.
--   __Note:__ Some hardware will have trouble with higher radiance sizes, especially @RADIANCE_SIZE_512@ and above. Only use such high values on high-end hardware.
get_radiance_size :: (Sky :< cls, Object :< cls) => cls -> IO Int
get_radiance_size cls
= withVariantArray []
(\ (arrPtr, len) ->
godot_method_bind_call bindSky_get_radiance_size (upcast cls)
arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod Sky "get_radiance_size" '[] (IO Int) where
nodeMethod = Godot.Core.Sky.get_radiance_size
{-# NOINLINE bindSky_set_radiance_size #-}
--   See @enum RadianceSize@ constants for values.
--   __Note:__ Some hardware will have trouble with higher radiance sizes, especially @RADIANCE_SIZE_512@ and above. Only use such high values on high-end hardware.
bindSky_set_radiance_size :: MethodBind
bindSky_set_radiance_size
= unsafePerformIO $
withCString "Sky" $
\ clsNamePtr ->
withCString "set_radiance_size" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
--   See @enum RadianceSize@ constants for values.
--   __Note:__ Some hardware will have trouble with higher radiance sizes, especially @RADIANCE_SIZE_512@ and above. Only use such high values on high-end hardware.
set_radiance_size ::
(Sky :< cls, Object :< cls) => cls -> Int -> IO ()
set_radiance_size cls arg1
= withVariantArray [toVariant arg1]
(\ (arrPtr, len) ->
godot_method_bind_call bindSky_set_radiance_size (upcast cls)
arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod Sky "set_radiance_size" '[Int] (IO ()) where
nodeMethod = Godot.Core.Sky.set_radiance_size |
3808c045047ac8103a34fb3671e685838e5ccfe20e49ff9cbc7658beb1844f51 | gedge-platform/gedge-platform | jose_jwa_concat_kdf.erl | -*- mode : erlang ; tab - width : 4 ; indent - tabs - mode : 1 ; st - rulers : [ 70 ] -*-
%% vim: ts=4 sw=4 ft=erlang noet
%%%-------------------------------------------------------------------
%%% @author
%%% @copyright 2014-2015
%%% @doc Concat KDF, as defined in Section 5.8.1 of NIST.800-56A
%%% See NIST.800-56A: -56Ar2
%%% @end
%%% Created : 24 Jul 2015
%%%-------------------------------------------------------------------
-module(jose_jwa_concat_kdf).
%% API
-export([kdf/3]).
-export([kdf/4]).
%%====================================================================
%% API functions
%%====================================================================
kdf(Hash, Z, OtherInfo) ->
HashFun = resolve_hash(Hash),
KeyDataLen = bit_size(HashFun(<<>>)),
kdf(HashFun, Z, OtherInfo, KeyDataLen).
kdf(Hash, Z, OtherInfo, KeyDataLen)
when is_function(Hash)
andalso is_binary(Z)
andalso is_binary(OtherInfo)
andalso is_integer(KeyDataLen) ->
HashLen = bit_size(Hash(<<>>)),
Reps = ceiling(KeyDataLen / HashLen),
case Reps of
1 ->
Concatenation = << 0, 0, 0, 1, Z/binary, OtherInfo/binary >>,
<< DerivedKey:KeyDataLen/bitstring, _/bitstring >> = Hash(Concatenation),
DerivedKey;
_ when Reps > 16#FFFFFFFF ->
erlang:error({badarg, [Hash, Z, OtherInfo, KeyDataLen]});
_ ->
derive_key(Hash, 1, Reps, KeyDataLen, << Z/binary, OtherInfo/binary >>, <<>>)
end;
kdf(Hash, Z, OtherInfo, KeyDataLen)
when is_tuple(Hash)
orelse is_atom(Hash) ->
kdf(resolve_hash(Hash), Z, OtherInfo, KeyDataLen);
kdf(Hash, Z, {AlgorithmID, PartyUInfo, PartyVInfo}, KeyDataLen) ->
kdf(Hash, Z, {AlgorithmID, PartyUInfo, PartyVInfo, <<>>}, KeyDataLen);
kdf(Hash, Z, {AlgorithmID, PartyUInfo, PartyVInfo, SuppPubInfo}, KeyDataLen) ->
kdf(Hash, Z, {AlgorithmID, PartyUInfo, PartyVInfo, SuppPubInfo, <<>>}, KeyDataLen);
kdf(Hash, Z, {AlgorithmID, PartyUInfo, PartyVInfo, SuppPubInfo, SuppPrivInfo}, KeyDataLen)
when is_binary(AlgorithmID)
andalso is_binary(PartyUInfo)
andalso is_binary(PartyVInfo)
andalso is_binary(SuppPubInfo)
andalso is_binary(SuppPrivInfo) ->
kdf(Hash, Z, <<
(byte_size(AlgorithmID)):1/unsigned-big-integer-unit:32, AlgorithmID/binary,
(byte_size(PartyUInfo)):1/unsigned-big-integer-unit:32, PartyUInfo/binary,
(byte_size(PartyVInfo)):1/unsigned-big-integer-unit:32, PartyVInfo/binary,
SuppPubInfo/binary,
SuppPrivInfo/binary
>>, KeyDataLen);
kdf(Hash, Z, {undefined, PartyUInfo, PartyVInfo, SuppPubInfo, SuppPrivInfo}, KeyDataLen) ->
kdf(Hash, Z, {<<>>, PartyUInfo, PartyVInfo, SuppPubInfo, SuppPrivInfo}, KeyDataLen);
kdf(Hash, Z, {AlgorithmID, undefined, PartyVInfo, SuppPubInfo, SuppPrivInfo}, KeyDataLen) ->
kdf(Hash, Z, {AlgorithmID, <<>>, PartyVInfo, SuppPubInfo, SuppPrivInfo}, KeyDataLen);
kdf(Hash, Z, {AlgorithmID, PartyUInfo, undefined, SuppPubInfo, SuppPrivInfo}, KeyDataLen) ->
kdf(Hash, Z, {AlgorithmID, PartyUInfo, <<>>, SuppPubInfo, SuppPrivInfo}, KeyDataLen);
kdf(Hash, Z, {AlgorithmID, PartyUInfo, PartyVInfo, undefined, SuppPrivInfo}, KeyDataLen) ->
kdf(Hash, Z, {AlgorithmID, PartyUInfo, PartyVInfo, <<>>, SuppPrivInfo}, KeyDataLen);
kdf(Hash, Z, {AlgorithmID, PartyUInfo, PartyVInfo, SuppPubInfo, undefined}, KeyDataLen) ->
kdf(Hash, Z, {AlgorithmID, PartyUInfo, PartyVInfo, SuppPubInfo, <<>>}, KeyDataLen).
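%% Usage sketch (illustrative only; every input below is a made-up value):
%%   Z = crypto:strong_rand_bytes(32),
%%   OtherInfo = {<<"A256GCM">>, <<"Alice">>, <<"Bob">>, <<0,0,1,0>>},
%%   DerivedKey = kdf(sha256, Z, OtherInfo, 256).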
%%%-------------------------------------------------------------------
%%% Internal functions
%%%-------------------------------------------------------------------
%% @private
ceiling(X) when X < 0 ->
trunc(X);
ceiling(X) ->
T = trunc(X),
case X - T == 0 of
false ->
T + 1;
true ->
T
end.
%% @private
derive_key(Hash, Reps, Reps, KeyDataLen, ZOtherInfo, DerivedKeyingMaterial) ->
Concatenation = << Reps:1/unsigned-big-integer-unit:32, ZOtherInfo/binary >>,
<< DerivedKey:KeyDataLen/bitstring, _/bitstring >> = << DerivedKeyingMaterial/binary, (Hash(Concatenation))/binary >>,
DerivedKey;
derive_key(Hash, Counter, Reps, KeyDataLen, ZOtherInfo, DerivedKeyingMaterial) ->
Concatenation = << Counter:1/unsigned-big-integer-unit:32, ZOtherInfo/binary >>,
derive_key(Hash, Counter + 1, Reps, KeyDataLen, ZOtherInfo, << DerivedKeyingMaterial/binary, (Hash(Concatenation))/binary >>).
%% @private
resolve_hash(HashFun) when is_function(HashFun) ->
HashFun;
resolve_hash(DigestType) when is_atom(DigestType) ->
fun(Data) ->
crypto:hash(DigestType, Data)
end;
resolve_hash({hmac, DigestType, Key}) when is_atom(DigestType) ->
fun(Data) ->
jose_crypto_compat:mac(hmac, DigestType, Key, Data)
end.
| null | https://raw.githubusercontent.com/gedge-platform/gedge-platform/97c1e87faf28ba2942a77196b6be0a952bff1c3e/gs-broker/broker-server/deps/jose/src/jwa/jose_jwa_concat_kdf.erl | erlang | vim: ts=4 sw=4 ft=erlang noet
-------------------------------------------------------------------
See NIST.800-56A: -56Ar2
@end
-------------------------------------------------------------------
API
====================================================================
API functions
====================================================================
-------------------------------------------------------------------
------------------------------------------------------------------- | -*- mode : erlang ; tab - width : 4 ; indent - tabs - mode : 1 ; st - rulers : [ 70 ] -*-
@author < >
2014 - 2015 ,
@doc , as defined in Section 5.8.1 of NIST.800 - 56A
Created : 24 Jul 2015 by < >
-module(jose_jwa_concat_kdf).
-export([kdf/3]).
-export([kdf/4]).
kdf(Hash, Z, OtherInfo) ->
HashFun = resolve_hash(Hash),
KeyDataLen = bit_size(HashFun(<<>>)),
kdf(HashFun, Z, OtherInfo, KeyDataLen).
kdf(Hash, Z, OtherInfo, KeyDataLen)
when is_function(Hash)
andalso is_binary(Z)
andalso is_binary(OtherInfo)
andalso is_integer(KeyDataLen) ->
HashLen = bit_size(Hash(<<>>)),
Reps = ceiling(KeyDataLen / HashLen),
case Reps of
1 ->
Concatenation = << 0, 0, 0, 1, Z/binary, OtherInfo/binary >>,
<< DerivedKey:KeyDataLen/bitstring, _/bitstring >> = Hash(Concatenation),
DerivedKey;
_ when Reps > 16#FFFFFFFF ->
erlang:error({badarg, [Hash, Z, OtherInfo, KeyDataLen]});
_ ->
derive_key(Hash, 1, Reps, KeyDataLen, << Z/binary, OtherInfo/binary >>, <<>>)
end;
kdf(Hash, Z, OtherInfo, KeyDataLen)
when is_tuple(Hash)
orelse is_atom(Hash) ->
kdf(resolve_hash(Hash), Z, OtherInfo, KeyDataLen);
kdf(Hash, Z, {AlgorithmID, PartyUInfo, PartyVInfo}, KeyDataLen) ->
kdf(Hash, Z, {AlgorithmID, PartyUInfo, PartyVInfo, <<>>}, KeyDataLen);
kdf(Hash, Z, {AlgorithmID, PartyUInfo, PartyVInfo, SuppPubInfo}, KeyDataLen) ->
kdf(Hash, Z, {AlgorithmID, PartyUInfo, PartyVInfo, SuppPubInfo, <<>>}, KeyDataLen);
kdf(Hash, Z, {AlgorithmID, PartyUInfo, PartyVInfo, SuppPubInfo, SuppPrivInfo}, KeyDataLen)
when is_binary(AlgorithmID)
andalso is_binary(PartyUInfo)
andalso is_binary(PartyVInfo)
andalso is_binary(SuppPubInfo)
andalso is_binary(SuppPrivInfo) ->
kdf(Hash, Z, <<
(byte_size(AlgorithmID)):1/unsigned-big-integer-unit:32, AlgorithmID/binary,
(byte_size(PartyUInfo)):1/unsigned-big-integer-unit:32, PartyUInfo/binary,
(byte_size(PartyVInfo)):1/unsigned-big-integer-unit:32, PartyVInfo/binary,
SuppPubInfo/binary,
SuppPrivInfo/binary
>>, KeyDataLen);
kdf(Hash, Z, {undefined, PartyUInfo, PartyVInfo, SuppPubInfo, SuppPrivInfo}, KeyDataLen) ->
kdf(Hash, Z, {<<>>, PartyUInfo, PartyVInfo, SuppPubInfo, SuppPrivInfo}, KeyDataLen);
kdf(Hash, Z, {AlgorithmID, undefined, PartyVInfo, SuppPubInfo, SuppPrivInfo}, KeyDataLen) ->
kdf(Hash, Z, {AlgorithmID, <<>>, PartyVInfo, SuppPubInfo, SuppPrivInfo}, KeyDataLen);
kdf(Hash, Z, {AlgorithmID, PartyUInfo, undefined, SuppPubInfo, SuppPrivInfo}, KeyDataLen) ->
kdf(Hash, Z, {AlgorithmID, PartyUInfo, <<>>, SuppPubInfo, SuppPrivInfo}, KeyDataLen);
kdf(Hash, Z, {AlgorithmID, PartyUInfo, PartyVInfo, undefined, SuppPrivInfo}, KeyDataLen) ->
kdf(Hash, Z, {AlgorithmID, PartyUInfo, PartyVInfo, <<>>, SuppPrivInfo}, KeyDataLen);
kdf(Hash, Z, {AlgorithmID, PartyUInfo, PartyVInfo, SuppPubInfo, undefined}, KeyDataLen) ->
kdf(Hash, Z, {AlgorithmID, PartyUInfo, PartyVInfo, SuppPubInfo, <<>>}, KeyDataLen).
%%% Internal functions
%% @private
ceiling(X) when X < 0 ->
trunc(X);
ceiling(X) ->
T = trunc(X),
case X - T == 0 of
false ->
T + 1;
true ->
T
end.
%% @private
derive_key(Hash, Reps, Reps, KeyDataLen, ZOtherInfo, DerivedKeyingMaterial) ->
Concatenation = << Reps:1/unsigned-big-integer-unit:32, ZOtherInfo/binary >>,
<< DerivedKey:KeyDataLen/bitstring, _/bitstring >> = << DerivedKeyingMaterial/binary, (Hash(Concatenation))/binary >>,
DerivedKey;
derive_key(Hash, Counter, Reps, KeyDataLen, ZOtherInfo, DerivedKeyingMaterial) ->
Concatenation = << Counter:1/unsigned-big-integer-unit:32, ZOtherInfo/binary >>,
derive_key(Hash, Counter + 1, Reps, KeyDataLen, ZOtherInfo, << DerivedKeyingMaterial/binary, (Hash(Concatenation))/binary >>).
%% @private
resolve_hash(HashFun) when is_function(HashFun) ->
HashFun;
resolve_hash(DigestType) when is_atom(DigestType) ->
fun(Data) ->
crypto:hash(DigestType, Data)
end;
resolve_hash({hmac, DigestType, Key}) when is_atom(DigestType) ->
fun(Data) ->
jose_crypto_compat:mac(hmac, DigestType, Key, Data)
end.
|
be66952295ab73f6006a6a1a5f55330d9355df2b238f76afb82c5cc798f23f89 | oden-lang/oden | InferConstraintsSpec.hs | module Oden.Infer.InferConstraintsSpec where
import Test.Hspec
import Oden.Core.Typed
import Oden.Core.Expr
import Oden.Core.Untyped
import Oden.Identifier
import Oden.Infer (inferExpr)
import Oden.Predefined
import Oden.Pretty ()
import Oden.Type.Polymorphic
import qualified Data.Set as Set
import Oden.Assertions
import Oden.Infer.Fixtures
spec :: Spec
spec = describe "inferExpr" $ do
it "infers type with constraints" $
let constraint = ProtocolConstraint missing testableProtocolName tvarA
methodType = TConstrained (Set.singleton constraint) (typeFn tvarA typeBool) in
inferExpr predefAndTestableProtocol (MethodReference missing (NamedMethodReference (Identifier "Testable") (Identifier "test")) Untyped)
`shouldSucceedWith`
(Forall predefined [tvarBinding tvA] (Set.singleton constraint) methodType,
MethodReference
missing
(Unresolved testableProtocolName testableMethodName constraint)
methodType)
it "infers multiple usages of method" $
inferExpr
predefAndTestableProtocol
(Tuple
missing
(Application
missing
(MethodReference missing (NamedMethodReference (Identifier "Testable") (Identifier "test")) Untyped)
(Literal missing (Int 1) Untyped)
Untyped)
(Application
missing
(MethodReference missing (NamedMethodReference (Identifier "Testable") (Identifier "test")) Untyped)
(Literal missing (Bool True) Untyped)
Untyped)
[]
Untyped)
`shouldSucceedWith`
let boolConstraint = ProtocolConstraint missing testableProtocolName typeBool
intConstraint = ProtocolConstraint missing testableProtocolName typeInt in
(Forall
predefined
[]
Set.empty
(TTuple missing typeBool typeBool []),
Tuple
missing
(Application
missing
(MethodReference
missing
(Unresolved testableProtocolName testableMethodName intConstraint)
(TConstrained
(Set.singleton intConstraint)
(TFn missing typeInt typeBool)))
(Literal missing (Int 1) typeInt)
typeBool)
(Application
missing
(MethodReference
missing
(Unresolved testableProtocolName testableMethodName boolConstraint)
(TConstrained
(Set.singleton boolConstraint)
(TFn missing typeBool typeBool)))
(Literal missing (Bool True) typeBool)
typeBool)
[]
(TTuple missing typeBool typeBool []))
| null | https://raw.githubusercontent.com/oden-lang/oden/10c99b59c8b77c4db51ade9a4d8f9573db7f4d14/test/Oden/Infer/InferConstraintsSpec.hs | haskell | module Oden.Infer.InferConstraintsSpec where
import Test.Hspec
import Oden.Core.Typed
import Oden.Core.Expr
import Oden.Core.Untyped
import Oden.Identifier
import Oden.Infer (inferExpr)
import Oden.Predefined
import Oden.Pretty ()
import Oden.Type.Polymorphic
import qualified Data.Set as Set
import Oden.Assertions
import Oden.Infer.Fixtures
spec :: Spec
spec = describe "inferExpr" $ do
it "infers type with constraints" $
let constraint = ProtocolConstraint missing testableProtocolName tvarA
methodType = TConstrained (Set.singleton constraint) (typeFn tvarA typeBool) in
inferExpr predefAndTestableProtocol (MethodReference missing (NamedMethodReference (Identifier "Testable") (Identifier "test")) Untyped)
`shouldSucceedWith`
(Forall predefined [tvarBinding tvA] (Set.singleton constraint) methodType,
MethodReference
missing
(Unresolved testableProtocolName testableMethodName constraint)
methodType)
it "infers multiple usages of method" $
inferExpr
predefAndTestableProtocol
(Tuple
missing
(Application
missing
(MethodReference missing (NamedMethodReference (Identifier "Testable") (Identifier "test")) Untyped)
(Literal missing (Int 1) Untyped)
Untyped)
(Application
missing
(MethodReference missing (NamedMethodReference (Identifier "Testable") (Identifier "test")) Untyped)
(Literal missing (Bool True) Untyped)
Untyped)
[]
Untyped)
`shouldSucceedWith`
let boolConstraint = ProtocolConstraint missing testableProtocolName typeBool
intConstraint = ProtocolConstraint missing testableProtocolName typeInt in
(Forall
predefined
[]
Set.empty
(TTuple missing typeBool typeBool []),
Tuple
missing
(Application
missing
(MethodReference
missing
(Unresolved testableProtocolName testableMethodName intConstraint)
(TConstrained
(Set.singleton intConstraint)
(TFn missing typeInt typeBool)))
(Literal missing (Int 1) typeInt)
typeBool)
(Application
missing
(MethodReference
missing
(Unresolved testableProtocolName testableMethodName boolConstraint)
(TConstrained
(Set.singleton boolConstraint)
(TFn missing typeBool typeBool)))
(Literal missing (Bool True) typeBool)
typeBool)
[]
(TTuple missing typeBool typeBool []))
|
|
53716143a28b9bc9b6b48003cff5891f9d7d2695a8fc449ee46cefba84684689 | patricoferris/ocaml-multicore-monorepo | adapt.ml | This file is part of Dream , released under the MIT license . See LICENSE.md
for details , or visit .
Copyright 2021
for details, or visit .
Copyright 2021 Anton Bachin *)
module Stream = Dream_pure.Stream
module Message = Dream_pure.Message
let address_to_string : Unix.sockaddr -> string = function
| ADDR_UNIX path -> path
| ADDR_INET (address, port) ->
Printf.sprintf "%s:%i" (Unix.string_of_inet_addr address) port
TODO Write a test simulating client exit during SSE ; this was killing the
server at some point .
server at some point. *)
let forward_body_general
(response : Message.response)
(_write_string : ?off:int -> ?len:int -> string -> unit)
(write_buffer : ?off:int -> ?len:int -> Stream.buffer -> unit)
http_flush
close =
let bytes_since_flush = ref 0 in
let rec send () =
Message.client_stream response
|> fun stream ->
Stream.read
stream
~data
~close
~flush
~ping
~pong
and data chunk off len _binary _fin =
write_buffer ~off ~len chunk;
bytes_since_flush := !bytes_since_flush + len;
if !bytes_since_flush >= 4096 then begin
bytes_since_flush := 0;
http_flush send
end
else
send ()
and flush () =
bytes_since_flush := 0;
http_flush send
and ping _buffer _offset _length =
send ()
and pong _buffer _offset _length =
send ()
in
send ()
let forward_body
(response : Message.response)
(body : Httpaf.Body.Writer.t) =
forward_body_general
response
(Httpaf.Body.Writer.write_string body)
(Httpaf.Body.Writer.write_bigstring body)
(Httpaf.Body.Writer.flush body)
(fun _code -> Httpaf.Body.Writer.close body)
let forward_body_h2
(response : Message.response)
(body : [ `write ] H2.Body.t) =
forward_body_general
response
(H2.Body.write_string body)
(H2.Body.write_bigstring body)
(H2.Body.flush body)
(fun _code -> H2.Body.close_writer body)
| null | https://raw.githubusercontent.com/patricoferris/ocaml-multicore-monorepo/22b441e6727bc303950b3b37c8fbc024c748fe55/duniverse/dream/src/http/adapt.ml | ocaml | This file is part of Dream , released under the MIT license . See LICENSE.md
for details , or visit .
Copyright 2021
for details, or visit .
Copyright 2021 Anton Bachin *)
module Stream = Dream_pure.Stream
module Message = Dream_pure.Message
let address_to_string : Unix.sockaddr -> string = function
| ADDR_UNIX path -> path
| ADDR_INET (address, port) ->
Printf.sprintf "%s:%i" (Unix.string_of_inet_addr address) port
TODO Write a test simulating client exit during SSE ; this was killing the
server at some point .
server at some point. *)
let forward_body_general
(response : Message.response)
(_write_string : ?off:int -> ?len:int -> string -> unit)
(write_buffer : ?off:int -> ?len:int -> Stream.buffer -> unit)
http_flush
close =
let bytes_since_flush = ref 0 in
let rec send () =
Message.client_stream response
|> fun stream ->
Stream.read
stream
~data
~close
~flush
~ping
~pong
and data chunk off len _binary _fin =
write_buffer ~off ~len chunk;
bytes_since_flush := !bytes_since_flush + len;
if !bytes_since_flush >= 4096 then begin
bytes_since_flush := 0;
http_flush send
end
else
send ()
and flush () =
bytes_since_flush := 0;
http_flush send
and ping _buffer _offset _length =
send ()
and pong _buffer _offset _length =
send ()
in
send ()
let forward_body
(response : Message.response)
(body : Httpaf.Body.Writer.t) =
forward_body_general
response
(Httpaf.Body.Writer.write_string body)
(Httpaf.Body.Writer.write_bigstring body)
(Httpaf.Body.Writer.flush body)
(fun _code -> Httpaf.Body.Writer.close body)
let forward_body_h2
(response : Message.response)
(body : [ `write ] H2.Body.t) =
forward_body_general
response
(H2.Body.write_string body)
(H2.Body.write_bigstring body)
(H2.Body.flush body)
(fun _code -> H2.Body.close_writer body)
|
|
7699ca1e1789560fd4442f80ea95ae0503f94b21c2a392e5f3f06ee9376e3c46 | linyinfeng/myml | Spec.hs | module Myml.Syntax.Spec
( tests,
)
where
import qualified Data.Map as Map
import qualified Data.Set as Set
import Myml.Syntax
import Myml.Test.Helper
import Test.Tasty
import Test.Tasty.HUnit
tests :: TestTree
tests = testGroup "Myml.Syntax.Spec" [unitTests]
unitTests :: TestTree
unitTests = testGroup "Unit tests" [fvTests, isValueTests]
fvTests :: TestTree
fvTests = testGroup "freeVariable tests" [fvTermTests, fvTypeTests]
fvTermTests :: TestTree
fvTermTests =
testGroup
"fvTerm tests"
[ testCase "freeVariable simple variable" $
fvTerm (pTerm "x")
@?= Set.fromList ["x"],
testCase "freeVariable bind 1" $
fvTerm (pTerm "\x3bb x . x x")
@?= Set.empty,
testCase "fvTerm bind 2" $
fvTerm (pTerm "\x3bb x . x y")
@?= Set.fromList
["y"],
testCase "fvTerm bind 3" $
fvTerm (pTerm "\x3bb x . x y (\x3bb y . x z)")
@?= Set.fromList ["y", "z"],
testCase "fvTerm let 1" $
fvTerm (pTerm "let x = y in z")
@?= Set.fromList
["y", "z"],
testCase "fvTerm let 2" $
fvTerm (pTerm "let x = x in z")
@?= Set.fromList
["x", "z"],
testCase "fvTerm record" $
fvTerm (pTerm "{ x = x, y = y }")
@?= Set.fromList ["x", "y"],
testCase "fvTerm record extend" $
fvTerm (pTerm "{ x = x, y = y } extend { z = z }")
@?= Set.fromList ["x", "y", "z"],
testCase "fvTerm record access" $
fvTerm (pTerm "{ x = x, y = y }.x")
@?= Set.fromList ["x", "y"],
testCase "fvTerm match" $
fvTerm (pTerm "[`l1 x = x y, `l2 y = z y]")
@?= Set.fromList ["y", "z"],
testCase "fvTerm match extend" $
fvTerm (pTerm "[`l1 x = x y] extend [`l2 y = z y]")
@?= Set.fromList ["y", "z"],
testCase "fvTerm variant" $ fvTerm (pTerm "`x x") @?= Set.fromList ["x"],
testCase "fvTerm ref" $
fvTerm (pTerm "ref (x y)")
@?= Set.fromList
["x", "y"],
testCase "fvTerm deref" $
fvTerm (pTerm "!(x y)")
@?= Set.fromList
["x", "y"],
testCase "fvTerm assign" $
fvTerm (pTerm "x := y")
@?= Set.fromList
["x", "y"],
testCase "fvTerm location" $ fvTerm (TmLoc 0) @?= Set.empty,
testCase "fvTerm unit" $ fvTerm (pTerm "unit") @?= Set.empty,
testCase "fvTerm seq" $ fvTerm (pTerm "x; y") @?= Set.fromList ["x", "y"],
testCase "fvTerm true and false" $ fvTerm (pTerm "true false") @?= Set.empty,
testCase "fvTerm if" $
fvTerm (pTerm "if x then y else z")
@?= Set.fromList
["x", "y", "z"],
testCase "fvTerm 0" $ fvTerm (pTerm "0") @?= Set.empty
]
fvTypeTests :: TestTree
fvTypeTests =
testGroup
"freeVariable type tests"
[ testCase "fvType variable" $
fvType (pType "X")
@?= Right
(Map.fromList [("X", KProper)]),
testCase "fvType arrow" $
fvType (pType "X -> Y -> X")
@?= Right
(Map.fromList [("X", KProper), ("Y", KProper)]),
testCase "fvType record" $
fvType (pType "{ l1 : P Unit, l2 : Absent, l3 : Present X, R }")
@?= Right
(Map.fromList [("P", KPresenceWithType), ("X", KProper), ("R", KRow)]),
testCase "fvType record" $
fvType (pType "[ `l1 : P Unit, `l2 : Absent, `l3 : Present X, R ]")
@?= Right
(Map.fromList [("P", KPresenceWithType), ("X", KProper), ("R", KRow)]),
testCase "fvType mu" $
fvType (pType "\x3bc X . (X -> T)")
@?= Right
(Map.fromList [("T", KProper)]),
testCase "fvType Unit" $ fvType (pType "Unit") @?= Right Map.empty,
testCase "fvType Integer" $ fvType (pType "Integer") @?= Right Map.empty
]
isValueTests :: TestTree
isValueTests =
testGroup
"isValue tests"
[ testCase "isValue abstraction" $ isValue (pTerm "\x3bb x . x") @?= True,
testCase "isValue application" $ isValue (pTerm "x x") @?= False,
testCase "isValue variable" $ isValue (pTerm "x") @?= False,
testCase "isValue let" $ isValue (pTerm "let x = unit in x") @?= False,
testCase "isValue record 1" $ isValue (pTerm "{ l = x }") @?= False,
testCase "isValue record 2" $
isValue (pTerm "{ l1 = x, l2 = unit }")
@?= False,
testCase "isValue record 3" $
isValue (pTerm "{ l1 = unit, l2 = unit }")
@?= True,
testCase "isValue record extend" $
isValue (pTerm "{ l1 = x } with { l2 = y }")
@?= False,
testCase "isValue record access" $ isValue (pTerm "x.x") @?= False,
testCase "isValue match" $ isValue (pTerm "[`l x = x]") @?= True,
testCase "isValue match extend " $
isValue (pTerm "[`l1 x = x] extend [`l2 x = x]")
@?= True,
testCase "isValue variant 1" $ isValue (pTerm "`l1 x") @?= False,
testCase "isValue variant 2" $ isValue (pTerm "`l1 (\x3bb x . x)") @?= True,
testCase "isValue ref" $ isValue (pTerm "ref unit") @?= False,
testCase "isValue deref" $ isValue (pTerm "!unit") @?= False,
testCase "isValue assign" $ isValue (pTerm "unit := unit") @?= False,
testCase "isValue location" $ isValue (TmLoc 0) @?= True,
testCase "isValue unit" $ isValue (pTerm "unit") @?= True,
testCase "isValue seq" $ isValue (pTerm "unit; unit") @?= False,
testCase "isValue true" $ isValue (pTerm "true") @?= True,
testCase "isValue false" $ isValue (pTerm "false") @?= True,
testCase "isValue if" $
isValue (pTerm "if unit then unit else unit")
@?= False,
testCase "isValue zero" $ isValue (pTerm "0") @?= True,
testCase "isValue succ" $ isValue (pTerm "succ zero") @?= False
]
| null | https://raw.githubusercontent.com/linyinfeng/myml/c90446431caeebd4b67f9b6a7a172a70b92f138f/tests/Myml/Syntax/Spec.hs | haskell | module Myml.Syntax.Spec
( tests,
)
where
import qualified Data.Map as Map
import qualified Data.Set as Set
import Myml.Syntax
import Myml.Test.Helper
import Test.Tasty
import Test.Tasty.HUnit
tests :: TestTree
tests = testGroup "Myml.Syntax.Spec" [unitTests]
unitTests :: TestTree
unitTests = testGroup "Unit tests" [fvTests, isValueTests]
fvTests :: TestTree
fvTests = testGroup "freeVariable tests" [fvTermTests, fvTypeTests]
fvTermTests :: TestTree
fvTermTests =
testGroup
"fvTerm tests"
[ testCase "freeVariable simple variable" $
fvTerm (pTerm "x")
@?= Set.fromList ["x"],
testCase "freeVariable bind 1" $
fvTerm (pTerm "\x3bb x . x x")
@?= Set.empty,
testCase "fvTerm bind 2" $
fvTerm (pTerm "\x3bb x . x y")
@?= Set.fromList
["y"],
testCase "fvTerm bind 3" $
fvTerm (pTerm "\x3bb x . x y (\x3bb y . x z)")
@?= Set.fromList ["y", "z"],
testCase "fvTerm let 1" $
fvTerm (pTerm "let x = y in z")
@?= Set.fromList
["y", "z"],
testCase "fvTerm let 2" $
fvTerm (pTerm "let x = x in z")
@?= Set.fromList
["x", "z"],
testCase "fvTerm record" $
fvTerm (pTerm "{ x = x, y = y }")
@?= Set.fromList ["x", "y"],
testCase "fvTerm record extend" $
fvTerm (pTerm "{ x = x, y = y } extend { z = z }")
@?= Set.fromList ["x", "y", "z"],
testCase "fvTerm record access" $
fvTerm (pTerm "{ x = x, y = y }.x")
@?= Set.fromList ["x", "y"],
testCase "fvTerm match" $
fvTerm (pTerm "[`l1 x = x y, `l2 y = z y]")
@?= Set.fromList ["y", "z"],
testCase "fvTerm match extend" $
fvTerm (pTerm "[`l1 x = x y] extend [`l2 y = z y]")
@?= Set.fromList ["y", "z"],
testCase "fvTerm variant" $ fvTerm (pTerm "`x x") @?= Set.fromList ["x"],
testCase "fvTerm ref" $
fvTerm (pTerm "ref (x y)")
@?= Set.fromList
["x", "y"],
testCase "fvTerm deref" $
fvTerm (pTerm "!(x y)")
@?= Set.fromList
["x", "y"],
testCase "fvTerm assign" $
fvTerm (pTerm "x := y")
@?= Set.fromList
["x", "y"],
testCase "fvTerm location" $ fvTerm (TmLoc 0) @?= Set.empty,
testCase "fvTerm unit" $ fvTerm (pTerm "unit") @?= Set.empty,
testCase "fvTerm seq" $ fvTerm (pTerm "x; y") @?= Set.fromList ["x", "y"],
testCase "fvTerm true and false" $ fvTerm (pTerm "true false") @?= Set.empty,
testCase "fvTerm if" $
fvTerm (pTerm "if x then y else z")
@?= Set.fromList
["x", "y", "z"],
testCase "fvTerm 0" $ fvTerm (pTerm "0") @?= Set.empty
]
fvTypeTests :: TestTree
fvTypeTests =
testGroup
"freeVariable type tests"
[ testCase "fvType variable" $
fvType (pType "X")
@?= Right
(Map.fromList [("X", KProper)]),
testCase "fvType arrow" $
fvType (pType "X -> Y -> X")
@?= Right
(Map.fromList [("X", KProper), ("Y", KProper)]),
testCase "fvType record" $
fvType (pType "{ l1 : P Unit, l2 : Absent, l3 : Present X, R }")
@?= Right
(Map.fromList [("P", KPresenceWithType), ("X", KProper), ("R", KRow)]),
testCase "fvType record" $
fvType (pType "[ `l1 : P Unit, `l2 : Absent, `l3 : Present X, R ]")
@?= Right
(Map.fromList [("P", KPresenceWithType), ("X", KProper), ("R", KRow)]),
testCase "fvType mu" $
fvType (pType "\x3bc X . (X -> T)")
@?= Right
(Map.fromList [("T", KProper)]),
testCase "fvType Unit" $ fvType (pType "Unit") @?= Right Map.empty,
testCase "fvType Integer" $ fvType (pType "Integer") @?= Right Map.empty
]
isValueTests :: TestTree
isValueTests =
testGroup
"isValue tests"
[ testCase "isValue abstraction" $ isValue (pTerm "\x3bb x . x") @?= True,
testCase "isValue application" $ isValue (pTerm "x x") @?= False,
testCase "isValue variable" $ isValue (pTerm "x") @?= False,
testCase "isValue let" $ isValue (pTerm "let x = unit in x") @?= False,
testCase "isValue record 1" $ isValue (pTerm "{ l = x }") @?= False,
testCase "isValue record 2" $
isValue (pTerm "{ l1 = x, l2 = unit }")
@?= False,
testCase "isValue record 3" $
isValue (pTerm "{ l1 = unit, l2 = unit }")
@?= True,
testCase "isValue record extend" $
isValue (pTerm "{ l1 = x } with { l2 = y }")
@?= False,
testCase "isValue record access" $ isValue (pTerm "x.x") @?= False,
testCase "isValue match" $ isValue (pTerm "[`l x = x]") @?= True,
testCase "isValue match extend " $
isValue (pTerm "[`l1 x = x] extend [`l2 x = x]")
@?= True,
testCase "isValue variant 1" $ isValue (pTerm "`l1 x") @?= False,
testCase "isValue variant 2" $ isValue (pTerm "`l1 (\x3bb x . x)") @?= True,
testCase "isValue ref" $ isValue (pTerm "ref unit") @?= False,
testCase "isValue deref" $ isValue (pTerm "!unit") @?= False,
testCase "isValue assign" $ isValue (pTerm "unit := unit") @?= False,
testCase "isValue location" $ isValue (TmLoc 0) @?= True,
testCase "isValue unit" $ isValue (pTerm "unit") @?= True,
testCase "isValue seq" $ isValue (pTerm "unit; unit") @?= False,
testCase "isValue true" $ isValue (pTerm "true") @?= True,
testCase "isValue false" $ isValue (pTerm "false") @?= True,
testCase "isValue if" $
isValue (pTerm "if unit then unit else unit")
@?= False,
testCase "isValue zero" $ isValue (pTerm "0") @?= True,
testCase "isValue succ" $ isValue (pTerm "succ zero") @?= False
]
|
|
44b8069ad6cc54cd55dc065e6e198e52ed5288be44800149d16f038d6d98a172 | klartext/any-dl | main.mli | module E = Evaluate
exception AutoTry_success
exception No_parser_found_for_this_url
exception Unknown_parser
val parse_parser_definitions_from_files :
string list -> Parsetreetypes.lang_t list
val parsername_lookup_by_url : string -> (string * string) list -> string
val get_parserdef :
string ->
(string * string) list -> (string, 'a) Hashtbl.t -> string option -> 'a
val invoke_parser_on_url :
string ->
(string * string) list ->
(string, Parsetreetypes.parserdef_t) Hashtbl.t ->
string option -> Parsetreetypes.macrodef_t list -> unit
val main : unit -> unit
| null | https://raw.githubusercontent.com/klartext/any-dl/53a962b51c82dde04d11e0685920db60cd65c458/main.mli | ocaml | module E = Evaluate
exception AutoTry_success
exception No_parser_found_for_this_url
exception Unknown_parser
val parse_parser_definitions_from_files :
string list -> Parsetreetypes.lang_t list
val parsername_lookup_by_url : string -> (string * string) list -> string
val get_parserdef :
string ->
(string * string) list -> (string, 'a) Hashtbl.t -> string option -> 'a
val invoke_parser_on_url :
string ->
(string * string) list ->
(string, Parsetreetypes.parserdef_t) Hashtbl.t ->
string option -> Parsetreetypes.macrodef_t list -> unit
val main : unit -> unit
|
|
5c964457d31316162477212a63988e6c3d2903cea4466c9acf1c15cfc7f03531 | haskell-repa/repa | Base.hs |
module Data.Repa.Flow.Simple.Base
( Source, Sink
, finalize_i
, finalize_o
, wrapI_i
, wrapI_o)
where
import Data.Repa.Flow.States
import qualified Data.Repa.Flow.Generic as G
#include "repa-flow.h"
-- | Source consisting of a single stream.
type Source m e = G.Sources () m e
-- | Sink consisting of a single stream.
type Sink m e = G.Sinks () m e
-- Finalizers -----------------------------------------------------------------
-- | Attach a finalizer to a source.
--
-- The finalizer will be called the first time a consumer of that stream
-- tries to pull an element when no more are available.
--
-- The provided finalizer will be run after any finalizers already
-- attached to the source.
--
finalize_i
:: States () m
=> m ()
-> Source m a -> m (Source m a)
finalize_i f s0 = G.finalize_i (\_ -> f) s0
{-# INLINE finalize_i #-}
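-- Usage sketch (editor's addition, not part of the original docs); 'h' and
-- 'openSource' below are assumed stand-ins for whatever produced the source
-- and for the cleanup action:
--
-- > src  <- openSource h
-- > src' <- finalize_i (hClose h) src
-- > -- pulling src' past its last element now also runs 'hClose h'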
-- | Attach a finalizer to a sink.
--
-- The finalizer will be called the first time the stream is ejected.
--
-- The provided finalizer will be run after any finalizers already
-- attached to the sink.
--
finalize_o
:: States () m
=> m ()
-> Sink m a -> m (Sink m a)
finalize_o f s0 = G.finalize_o (\_ -> f) s0
{-# INLINE finalize_o #-}
-- Wrapping -------------------------------------------------------------------
wrapI_i :: G.Sources Int m e -> Maybe (Source m e)
wrapI_i (G.Sources n pullX)
| n /= 1 = Nothing
| otherwise
= let pullX' _ eat eject
= pullX 0 eat eject
{-# INLINE pullX' #-}
in Just $ G.Sources () pullX'
{-# INLINE_FLOW wrapI_i #-}
wrapI_o :: G.Sinks Int m e -> Maybe (Sink m e)
wrapI_o (G.Sinks n eatX ejectX)
| n /= 1 = Nothing
| otherwise
= let eatX' _ x = eatX 0 x
ejectX' _ = ejectX 0
in Just $ G.Sinks () eatX' ejectX'
{-# INLINE_FLOW wrapI_o #-}
| null | https://raw.githubusercontent.com/haskell-repa/repa/c867025e99fd008f094a5b18ce4dabd29bed00ba/repa-flow/Data/Repa/Flow/Simple/Base.hs | haskell | | Source consisting of a single stream.
| Sink consisting of a single stream.
Finalizers -----------------------------------------------------------------
| Attach a finalizer to a source.
tries to pull an element when no more are available.
The provided finalizer will be run after any finalizers already
attached to the source.
| Attach a finalizer to a sink.
The provided finalizer will be run after any finalizers already
attached to the sink.
Wrapping -------------------------------------------------------------------
# INLINE pullX' # |
module Data.Repa.Flow.Simple.Base
( Source, Sink
, finalize_i
, finalize_o
, wrapI_i
, wrapI_o)
where
import Data.Repa.Flow.States
import qualified Data.Repa.Flow.Generic as G
#include "repa-flow.h"
type Source m e = G.Sources () m e
type Sink m e = G.Sinks () m e
-- The finalizer will be called the first time a consumer of that stream
finalize_i
:: States () m
=> m ()
-> Source m a -> m (Source m a)
finalize_i f s0 = G.finalize_i (\_ -> f) s0
{-# INLINE finalize_i #-}
-- The finalizer will be called the first time the stream is ejected.
finalize_o
:: States () m
=> m ()
-> Sink m a -> m (Sink m a)
finalize_o f s0 = G.finalize_o (\_ -> f) s0
{-# INLINE finalize_o #-}
wrapI_i :: G.Sources Int m e -> Maybe (Source m e)
wrapI_i (G.Sources n pullX)
| n /= 1 = Nothing
| otherwise
= let pullX' _ eat eject
= pullX 0 eat eject
in Just $ G.Sources () pullX'
{-# INLINE_FLOW wrapI_i #-}
wrapI_o :: G.Sinks Int m e -> Maybe (Sink m e)
wrapI_o (G.Sinks n eatX ejectX)
| n /= 1 = Nothing
| otherwise
= let eatX' _ x = eatX 0 x
ejectX' _ = ejectX 0
in Just $ G.Sinks () eatX' ejectX'
{-# INLINE_FLOW wrapI_o #-}
|
f9e4a63d00744de417bfc327cb6c69c811dcde2a313f275cdebf989cbb55e75a | athensresearch/athens | migrations.cljc | (ns athens.common.migrations
" Migrations should be interruptible and resumable, so that crashes and mistakes
will not leave the DB in a bad state that can be recovered from.
If a migration fails, it should throw an error.
A good way to make something interruptible is to ensure it's idempotent."
(:require
[athens.common.logging :as log]))
(defn run-migration!
[conn set-version! [migration-version migration-f]]
(log/debug "Running migration version" migration-version)
(migration-f conn)
(set-version! conn migration-version)
(log/debug "Finished migration version" migration-version)
nil)
(defn- migrate-bootstrap!
"Similar to migrate!, but for the migrator itself.
Doesn't keep a separate version table because that's a recursive problem,
and instead always runs all the migrations.
They should be idempotent, cheap, and don't log anything, so it's ok to always do this."
[conn bootstrap-migrations]
(run! (fn [[_ f]] (f conn)) bootstrap-migrations))
(defn migrate!
"Migrate conn to latest (or up-to).
Interrupted migrations should resume gracefully next time migrate! runs. "
[conn migrations bootstrap-migrations version set-version! & {:keys [up-to] :or {up-to ##Inf}}]
(migrate-bootstrap! conn bootstrap-migrations)
(let [current-v (version conn)
v-filter (fn [[v]] (and (< current-v v) (<= v up-to)))
migrations (filter v-filter migrations)]
(when (seq migrations)
(log/debug "Running" (count migrations) "migrations")
(run! (partial run-migration! conn set-version!) migrations)
(log/debug "Ledger migrated to version" (-> migrations last first)))))
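;; Usage sketch (editor's addition, not part of this namespace): the conn and the
;; migration fns below are assumed. Each migration is a [version fn] pair, run in
;; order for versions above (version conn) and at most :up-to.
;; (migrate! conn
;;           [[1 create-pages-table!]
;;            [2 add-blocks-index!]]
;;           bootstrap-migrations
;;           version
;;           set-version!
;;           :up-to 2)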
| null | https://raw.githubusercontent.com/athensresearch/athens/7434452efc583ec4163894e0e4246558ac5cba41/src/cljc/athens/common/migrations.cljc | clojure | (ns athens.common.migrations
" Migrations should be interruptible and resumable, so that crashes and mistakes
will not leave the DB in a bad state that can be recovered from.
If a migration fails, it should throw an error.
A good way to make something interruptible is to ensure it's idempotent."
(:require
[athens.common.logging :as log]))
(defn run-migration!
[conn set-version! [migration-version migration-f]]
(log/debug "Running migration version" migration-version)
(migration-f conn)
(set-version! conn migration-version)
(log/debug "Finished migration version" migration-version)
nil)
(defn- migrate-bootstrap!
"Similar to migrate!, but for the migrator itself.
Doesn't keep a separate version table because that's a recursive problem,
and instead always runs all the migrations.
They should be idempotent, cheap, and don't log anything, so it's ok to always do this."
[conn bootstrap-migrations]
(run! (fn [[_ f]] (f conn)) bootstrap-migrations))
(defn migrate!
"Migrate conn to latest (or up-to).
Interrupted migrations should resume gracefully next time migrate! runs. "
[conn migrations bootstrap-migrations version set-version! & {:keys [up-to] :or {up-to ##Inf}}]
(migrate-bootstrap! conn bootstrap-migrations)
(let [current-v (version conn)
v-filter (fn [[v]] (and (< current-v v) (<= v up-to)))
migrations (filter v-filter migrations)]
(when (seq migrations)
(log/debug "Running" (count migrations) "migrations")
(run! (partial run-migration! conn set-version!) migrations)
(log/debug "Ledger migrated to version" (-> migrations last first)))))
|
|
88eb060805b22040fdcb39fbf295b8ee71f1216f206066e54919030f95ee8a63 | mfoemmel/erlang-otp | wxEvtHandler.erl | %%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 2008-2009. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at /.
%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
%%
%% %CopyrightEnd%
%% This file is generated DO NOT EDIT
%% This module is actually handwritten see ../api_gen/wx_extra/wxEvtHandler.erl
%%
%% @doc The Event handler.
%%
%% To get events from wxwidgets objects you subscribe to them by
%% calling connect/[2-3]. Events are sent as messages, if no callback
%% was supplied. These messages will be {@link wx(). #wx{}} where
%% EventRecord is a record that depends on the {@link
%% wxEventType(). event type}. The records are defined in:
%% wx/include/wx.hrl.
%%
%% If a callback was supplied to connect, the callback will be invoked
%% (in another process) to handle the event. The callback should be of
%% arity 2. fun(EventRecord::wx(), ()).
%%
%% Beware that the callback will be executed in a new process each time.
%%
%% <a href="">
%% The original documentation</a>.
%%
%% @headerfile "../../include/wx.hrl"
%%
%%@type wxEvtHandler(). An object reference
-module(wxEvtHandler).
-include("wxe.hrl").
-include("../include/wx.hrl").
%% API
-export([connect/2, connect/3, disconnect/1, disconnect/2, disconnect/3]).
%% internal exports
-export([connect_impl/3, disconnect_impl/2, disconnect_impl/3,
new_evt_listener/0, destroy_evt_listener/1,
get_callback/1, replace_fun_with_id/2]).
-record(evh, {et=null,id=?wxID_ANY,lastId=?wxID_ANY,skip=undefined,userdata=[],cb=0}).
%% @spec (This::wxEvtHandler(), EventType::wxEventType()) -> ok
%% @doc Equivalent to {@link connect/3. connect(This, EventType, [])}
connect(This, EventType) ->
connect(This, EventType, []).
%% @spec (This::wxEvtHandler(), EventType::wxEventType(), [Options]) -> ok
%% @doc This function subscribes the process to events of EventType,
%% in the range id, lastId. The events will be received as messages
%% if no callback is supplied.
%%
%% Options:
%% {id, integer()}, The identifier (or first of the identifier range) to be
%% associated with this event handler.
%% Default ?wxID_ANY
%% {lastId, integer()}, The second part of the identifier range.
%% If used 'id' must be set as the starting identifier range.
%% Default ?wxID_ANY
%% {skip, boolean()}, If skip is true further event_handlers will be called.
%% This is not used if the 'callback' option is used.
%% Default false.
%% {callback, function()} Use a callback fun(EventRecord::wx(), ())
%% to process the event. Default not specified i.e. a message will
%% be delivered to the process calling this function.
%% {userData, term()} An erlang term that will be sent with the event. Default: [].
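%%
%% Usage sketch (editor's addition, not part of the generated wx documentation).
%% Frame below is an assumed #wx_ref{} to any wxEvtHandler subclass (e.g. a wxFrame):
%%
%% %% receive close_window events as #wx{} messages in the calling process:
%% wxEvtHandler:connect(Frame, close_window),
%% %% or handle button clicks in a callback fun run in another process:
%% wxEvtHandler:connect(Frame, command_button_clicked,
%%                      [{callback, fun(#wx{event=Ev}, _Obj) -> io:format("event: ~p~n", [Ev]) end}]).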
connect(This=#wx_ref{type=ThisT}, EventType, Options) ->
EvH = parse_opts(Options, #evh{et=EventType}),
?CLASS(ThisT,wxEvtHandler),
case wxe_util:connect_cb(This, EvH) of
ok -> ok;
{badarg, event_type} ->
erlang:error({badarg,EventType})
end.
parse_opts([{callback,Fun}|R], Opts) when is_function(Fun) ->
%% Check Fun Arity?
parse_opts(R, Opts#evh{cb=Fun});
parse_opts([callback|R], Opts) ->
parse_opts(R, Opts#evh{cb=1});
parse_opts([{userData, UserData}|R],Opts) ->
parse_opts(R, Opts#evh{userdata=UserData});
parse_opts([{skip, Skip}|R],Opts) when is_boolean(Skip) ->
parse_opts(R, Opts#evh{skip=Skip});
parse_opts([{id, Id}|R],Opts) when is_integer(Id) ->
parse_opts(R, Opts#evh{id=Id});
parse_opts([{lastId, Id}|R],Opts) when is_integer(Id) ->
parse_opts(R, Opts#evh{lastId=Id});
parse_opts([_BadArg|R], Opts) ->
parse_opts(R, Opts);
parse_opts([], Opts = #evh{id=Id,lastId=Lid,skip=Skip, cb=CB}) ->
if
Skip =/= undefined andalso CB =/= 0 ->
erlang:error({badarg, {combined, skip, callback}});
Lid =/= ?wxID_ANY andalso Id =:= ?wxID_ANY ->
erlang:error({badarg, no_start_identifier_range});
Skip =:= undefined -> %% Default
Opts#evh{skip=false};
true ->
Opts
end.
%% @spec (This::wxEvtHandler()) -> true | false
%% @doc Equivalent to {@link disconnect/3. disconnect(This, null, [])}
%% Can also have an optional callback Fun() as an additional last argument.
disconnect(This=#wx_ref{type=ThisT,ref=_ThisRef}) ->
?CLASS(ThisT,wxEvtHandler),
disconnect(This, null, []).
%% @spec (This::wxEvtHandler(), EventType::wxEventType()) -> true | false
%% @doc Equivalent to {@link disconnect/3. disconnect(This, EventType, [])}
disconnect(This=#wx_ref{type=ThisT,ref=_ThisRef}, EventType) when is_atom(EventType) ->
?CLASS(ThisT,wxEvtHandler),
disconnect(This, EventType, []).
%% @spec (This::wxEvtHandler(), EventType::wxEventType(), Opts) -> true | false
%% @doc See <a href="#wxevthandlerdisconnect">external documentation</a>
%% This function unsubscribes the process or callback fun from the event handler.
%% EventType may be the atom 'null' to match any eventtype.
%% Notice that the options skip and userdata are not used to match the eventhandler.
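%%
%% Usage sketch (editor's addition; Frame is the same assumed handler as in the
%% connect example above):
%% wxEvtHandler:disconnect(Frame, command_button_clicked),
%% wxEvtHandler:disconnect(Frame). %% 'null' event type: drop any remaining subscription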
disconnect(This=#wx_ref{type=ThisT,ref=_ThisRef}, EventType, Opts) ->
?CLASS(ThisT,wxEvtHandler),
EvH = parse_opts(Opts, #evh{et=EventType}),
case wxe_util:disconnect_cb(This, EvH) of
{badarg, event_type} ->
erlang:error({badarg,EventType});
Bool ->
Bool
end.
%% @hidden
connect_impl(#wx_ref{type=wxeEvtListener,ref=EvtList},
#wx_ref{type=ThisT,ref=ThisRef},
#evh{id=Winid, lastId=LastId, et=EventType,
skip=Skip, userdata=Userdata, cb=FunID})
when is_integer(FunID)->
EventTypeBin = list_to_binary([atom_to_list(EventType)|[0]]),
ThisTypeBin = list_to_binary([atom_to_list(ThisT)|[0]]),
UD = if Userdata =:= [] -> 0;
true ->
wxe_util:send_bin(term_to_binary(Userdata)),
1
end,
wxe_util:call(100, <<EvtList:32/?UI,ThisRef:32/?UI,
Winid:32/?UI,LastId:32/?UI,
(wxe_util:from_bool(Skip)):32/?UI,
UD:32/?UI,
FunID:32/?UI,
(size(EventTypeBin)):32/?UI,
(size(ThisTypeBin)):32/?UI,
%% Note no alignment
EventTypeBin/binary,ThisTypeBin/binary>>).
%% @hidden
disconnect_impl(Listener, Object) ->
disconnect_impl(Listener, Object, #evh{}).
%% @hidden
disconnect_impl(#wx_ref{type=wxeEvtListener,ref=EvtList},
#wx_ref{type=_ThisT,ref=ThisRef},
#evh{id=Winid, lastId=LastId, et=EventType}) ->
EventTypeBin = list_to_binary([atom_to_list(EventType)|[0]]),
wxe_util:call(101, <<EvtList:32/?UI,
ThisRef:32/?UI,Winid:32/?UI,LastId:32/?UI,
(size(EventTypeBin)):32/?UI,
%% Note no alignment
EventTypeBin/binary>>).
%% @hidden
new_evt_listener() ->
wxe_util:call(98, <<>>).
%% @hidden
destroy_evt_listener(#wx_ref{type=wxeEvtListener,ref=EvtList}) ->
wxe_util:call(99, <<EvtList:32/?UI>>).
%% @hidden
get_callback(#evh{cb=Callback}) ->
Callback.
%% @hidden
replace_fun_with_id(Evh, Id) ->
Evh#evh{cb=Id}.
| null | https://raw.githubusercontent.com/mfoemmel/erlang-otp/9c6fdd21e4e6573ca6f567053ff3ac454d742bc2/lib/wx/src/gen/wxEvtHandler.erl | erlang |
%CopyrightBegin%
compliance with the License. You should have received a copy of the
Erlang Public License along with this software. If not, it can be
retrieved online at /.
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
%CopyrightEnd%
This file is generated DO NOT EDIT
@doc The Event handler.
To get events from wxwidgets objects you subscribe to them by
calling connect/[2-3]. Events are sent as messages, if no callback
wxEventType(). event type}. The records are defined in:
wx/include/wx.hrl.
If a callback was supplied to connect, the callback will be invoked
(in another process) to handle the event. The callback should be of
Beware that the callback will be executed in a new process each time.
<a href="">
The original documentation</a>.
@type wxEvtHandler(). An object reference
API
internal exports
in the range id, lastId. The events will be received as messages
if no callback is supplied.
Options:
associated with this event handler.
Default ?wxID_ANY
If used 'id' must be set as the starting identifier range.
Default ?wxID_ANY
{skip, boolean()}, If skip is true further event_handlers will be called.
This is not used if the 'callback' option is used.
Default false.
to process the event. Default not specfied i.e. a message will
be delivered to the process calling this function.
{userData, term()} An erlang term that will be sent with the event. Default: [].
Check Fun Arity?
Default
@spec (This::wxEvtHandler()) -> true | false
@doc Equivalent to {@link disconnect/3. disconnect(This, null, [])}
Can also have an optional callback Fun() as an additional last argument.
@doc See <a href="#wxevthandlerdisconnect">external documentation</a>
This function unsubscribes the process or callback fun from the event handler.
@hidden
Note no alignment
@hidden
@hidden
Note no alignment
@hidden
@hidden
@hidden
@hidden | %% Copyright Ericsson AB 2008-2009. All Rights Reserved.
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% Software distributed under the License is distributed on an "AS IS"
%% This module is actually handwritten see ../api_gen/wx_extra/wxEvtHandler.erl
%% was supplied. These messages will be {@link wx(). #wx{}} where
%% EventRecord is a record that depends on the {@link
%% arity 2. fun(EventRecord::wx(), ()).
%% @headerfile "../../include/wx.hrl"
-module(wxEvtHandler).
-include("wxe.hrl").
-include("../include/wx.hrl").
-export([connect/2, connect/3, disconnect/1, disconnect/2, disconnect/3]).
-export([connect_impl/3, disconnect_impl/2, disconnect_impl/3,
new_evt_listener/0, destroy_evt_listener/1,
get_callback/1, replace_fun_with_id/2]).
-record(evh, {et=null,id=?wxID_ANY,lastId=?wxID_ANY,skip=undefined,userdata=[],cb=0}).
%% @spec (This::wxEvtHandler(), EventType::wxEventType()) -> ok
%% @doc Equivalent to {@link connect/3. connect(This, EventType, [])}
connect(This, EventType) ->
connect(This, EventType, []).
%% @spec (This::wxEvtHandler(), EventType::wxEventType(), [Options]) -> ok
%% @doc This function subscribes the process to events of EventType,
%% {id, integer()}, The identifier (or first of the identifier range) to be
%% {lastId, integer()}, The second part of the identifier range.
%% {callback, function()} Use a callback fun(EventRecord::wx(), ())
connect(This=#wx_ref{type=ThisT}, EventType, Options) ->
EvH = parse_opts(Options, #evh{et=EventType}),
?CLASS(ThisT,wxEvtHandler),
case wxe_util:connect_cb(This, EvH) of
ok -> ok;
{badarg, event_type} ->
erlang:error({badarg,EventType})
end.
parse_opts([{callback,Fun}|R], Opts) when is_function(Fun) ->
parse_opts(R, Opts#evh{cb=Fun});
parse_opts([callback|R], Opts) ->
parse_opts(R, Opts#evh{cb=1});
parse_opts([{userData, UserData}|R],Opts) ->
parse_opts(R, Opts#evh{userdata=UserData});
parse_opts([{skip, Skip}|R],Opts) when is_boolean(Skip) ->
parse_opts(R, Opts#evh{skip=Skip});
parse_opts([{id, Id}|R],Opts) when is_integer(Id) ->
parse_opts(R, Opts#evh{id=Id});
parse_opts([{lastId, Id}|R],Opts) when is_integer(Id) ->
parse_opts(R, Opts#evh{lastId=Id});
parse_opts([_BadArg|R], Opts) ->
parse_opts(R, Opts);
parse_opts([], Opts = #evh{id=Id,lastId=Lid,skip=Skip, cb=CB}) ->
if
Skip =/= undefined andalso CB =/= 0 ->
erlang:error({badarg, {combined, skip, callback}});
Lid =/= ?wxID_ANY andalso Id =:= ?wxID_ANY ->
erlang:error({badarg, no_start_identifier_range});
Opts#evh{skip=false};
true ->
Opts
end.
disconnect(This=#wx_ref{type=ThisT,ref=_ThisRef}) ->
?CLASS(ThisT,wxEvtHandler),
disconnect(This, null, []).
%% @spec (This::wxEvtHandler(), EventType::wxEventType()) -> true | false
%% @doc Equivalent to {@link disconnect/3. disconnect(This, EventType, [])}
disconnect(This=#wx_ref{type=ThisT,ref=_ThisRef}, EventType) when is_atom(EventType) ->
?CLASS(ThisT,wxEvtHandler),
disconnect(This, EventType, []).
%% @spec (This::wxEvtHandler(), EventType::wxEventType(), Opts) -> true | false
%% EventType may be the atom 'null' to match any eventtype.
%% Notice that the options skip and userdata are not used to match the eventhandler.
disconnect(This=#wx_ref{type=ThisT,ref=_ThisRef}, EventType, Opts) ->
?CLASS(ThisT,wxEvtHandler),
EvH = parse_opts(Opts, #evh{et=EventType}),
case wxe_util:disconnect_cb(This, EvH) of
{badarg, event_type} ->
erlang:error({badarg,EventType});
Bool ->
Bool
end.
connect_impl(#wx_ref{type=wxeEvtListener,ref=EvtList},
#wx_ref{type=ThisT,ref=ThisRef},
#evh{id=Winid, lastId=LastId, et=EventType,
skip=Skip, userdata=Userdata, cb=FunID})
when is_integer(FunID)->
EventTypeBin = list_to_binary([atom_to_list(EventType)|[0]]),
ThisTypeBin = list_to_binary([atom_to_list(ThisT)|[0]]),
UD = if Userdata =:= [] -> 0;
true ->
wxe_util:send_bin(term_to_binary(Userdata)),
1
end,
wxe_util:call(100, <<EvtList:32/?UI,ThisRef:32/?UI,
Winid:32/?UI,LastId:32/?UI,
(wxe_util:from_bool(Skip)):32/?UI,
UD:32/?UI,
FunID:32/?UI,
(size(EventTypeBin)):32/?UI,
(size(ThisTypeBin)):32/?UI,
EventTypeBin/binary,ThisTypeBin/binary>>).
disconnect_impl(Listener, Object) ->
disconnect_impl(Listener, Object, #evh{}).
disconnect_impl(#wx_ref{type=wxeEvtListener,ref=EvtList},
#wx_ref{type=_ThisT,ref=ThisRef},
#evh{id=Winid, lastId=LastId, et=EventType}) ->
EventTypeBin = list_to_binary([atom_to_list(EventType)|[0]]),
wxe_util:call(101, <<EvtList:32/?UI,
ThisRef:32/?UI,Winid:32/?UI,LastId:32/?UI,
(size(EventTypeBin)):32/?UI,
EventTypeBin/binary>>).
new_evt_listener() ->
wxe_util:call(98, <<>>).
destroy_evt_listener(#wx_ref{type=wxeEvtListener,ref=EvtList}) ->
wxe_util:call(99, <<EvtList:32/?UI>>).
get_callback(#evh{cb=Callback}) ->
Callback.
replace_fun_with_id(Evh, Id) ->
Evh#evh{cb=Id}.
|
f1d16defef774b0d9f732545e6e0343f01fcd5dffdef4bd69db8df47c55394fa | TheAlgorithms/Haskell | SelectionSortSpec.hs | # LANGUAGE ScopedTypeVariables #
module SortSpecs.SelectionSortSpec where
import Test.Hspec
import Test.QuickCheck
import Sorts.SelectionSort
spec :: Spec
spec = do
describe "selectionSort" $ do
it "returns empty list when sorting empty list" $ property $
selectionSort [] == ([] :: [Int])
it "returns same list if input was already sorted" $ property $
\(x :: [Int]) -> selectionSort x == (selectionSort . selectionSort $ x)
it "returns list with smallest element at 0" $ property $
forAll (listOf1 arbitrary) $
\(x :: [Int]) -> let sortedList = selectionSort x
in head sortedList == minimum sortedList
it "returns list with largest element at the end" $ property $
forAll (listOf1 arbitrary) $
\(x :: [Int]) -> let sortedList = selectionSort x
in last sortedList == maximum sortedList
it "handle simple sorting of static value" $
let (unsortedList :: [Int]) = [4, 2, 1, 7, 3]
(sortedList :: [Int]) = [1, 2, 3, 4, 7]
in selectionSort unsortedList == sortedList
| null | https://raw.githubusercontent.com/TheAlgorithms/Haskell/9dcabef99fb8995a760ff25a9e0d659114c0b9d3/specs/SortSpecs/SelectionSortSpec.hs | haskell | # LANGUAGE ScopedTypeVariables #
module SortSpecs.SelectionSortSpec where
import Test.Hspec
import Test.QuickCheck
import Sorts.SelectionSort
spec :: Spec
spec = do
describe "selectionSort" $ do
it "returns empty list when sorting empty list" $ property $
selectionSort [] == ([] :: [Int])
it "returns same list if input was already sorted" $ property $
\(x :: [Int]) -> selectionSort x == (selectionSort . selectionSort $ x)
it "returns list with smallest element at 0" $ property $
forAll (listOf1 arbitrary) $
\(x :: [Int]) -> let sortedList = selectionSort x
in head sortedList == minimum sortedList
it "returns list with largest element at the end" $ property $
forAll (listOf1 arbitrary) $
\(x :: [Int]) -> let sortedList = selectionSort x
in last sortedList == maximum sortedList
it "handle simple sorting of static value" $
let (unsortedList :: [Int]) = [4, 2, 1, 7, 3]
(sortedList :: [Int]) = [1, 2, 3, 4, 7]
in selectionSort unsortedList == sortedList
|
|
2dc21d61c87d2426a4b95d1cd0af02fd77cb1b90be46fb7e1fa665e555bb220a | LesBoloss-es/ppx_deriving_madcast | function.ml | let () =
let f = [%madcast: string -> int] in
let g = [%madcast: (string -> int) -> (int -> string)] in
assert ((g f) 2 = "2")
| null | https://raw.githubusercontent.com/LesBoloss-es/ppx_deriving_madcast/2d44cbf21f79cd7aeacb077c09672ee823e5ba3f/test/positive/function.ml | ocaml | let () =
let f = [%madcast: string -> int] in
let g = [%madcast: (string -> int) -> (int -> string)] in
assert ((g f) 2 = "2")
|
|
4bb34740195b5389ab5b24445a5558eb962c23942a456e3ed7688b32ccd8140c | jaredly/belt | belt_HashMap.ml | module N = Belt_internalBuckets
module C = Belt_internalBucketsType
module A = Belt_Array
type ('a,'id) eq = ('a,'id) Belt_Id.eq
type ('a,'id) hash = ('a,'id) Belt_Id.hash
type ('a,'id) id = ('a,'id) Belt_Id.hashable
type ('a,'b,'id) t = (('a,'id) hash,('a,'id) eq,'a,'b) N.t
let clear = C.clear
let size = C.size
let forEach = N.forEach
let forEachU = N.forEachU
let reduce = N.reduce
let reduceU = N.reduceU
let logStats = N.logStats
let keepMapInPlaceU = N.keepMapInPlaceU
let keepMapInPlace = N.keepMapInPlace
let toArray = N.toArray
let copy = N.copy
let keysToArray = N.keysToArray
let valuesToArray = N.valuesToArray
let getBucketHistogram = N.getBucketHistogram
let isEmpty = C.isEmpty
let rec copyBucketReHash ~hash ~h_buckets ~ndata_tail old_bucket =
match C.toOpt old_bucket with
| None -> ()
| Some cell ->
let nidx = (hash (N.key cell)) land ((A.length h_buckets) - 1) in
let v = C.return cell in
((match C.toOpt (A.getUnsafe ndata_tail nidx) with
| None -> A.setUnsafe h_buckets nidx v
| Some tail -> N.nextSet tail v);
A.setUnsafe ndata_tail nidx v;
copyBucketReHash ~hash ~h_buckets ~ndata_tail (N.next cell))
let resize ~hash h =
let odata = C.buckets h in
let osize = A.length odata in
let nsize = osize * 2 in
if nsize >= osize
then
let h_buckets = A.makeUninitialized nsize in
let ndata_tail = A.makeUninitialized nsize in
(C.bucketsSet h h_buckets;
for i = 0 to osize - 1 do
copyBucketReHash ~hash ~h_buckets ~ndata_tail (A.getUnsafe odata i)
done;
for i = 0 to nsize - 1 do
(match C.toOpt (A.getUnsafe ndata_tail i) with
| None -> ()
| Some tail -> N.nextSet tail C.emptyOpt)
done)
let rec replaceInBucket ~eq key info cell =
if eq (N.key cell) key
then (N.valueSet cell info; false)
else
(match C.toOpt (N.next cell) with
| None -> true
| Some cell -> replaceInBucket ~eq key info cell)
let set0 h key value ~eq ~hash =
let h_buckets = C.buckets h in
let buckets_len = A.length h_buckets in
let i = (hash key) land (buckets_len - 1) in
let l = A.getUnsafe h_buckets i in
(match C.toOpt l with
| None ->
(A.setUnsafe h_buckets i
(C.return (N.bucket ~key ~value ~next:C.emptyOpt));
C.sizeSet h ((C.size h) + 1))
| Some bucket ->
if replaceInBucket ~eq key value bucket
then
(A.setUnsafe h_buckets i (C.return (N.bucket ~key ~value ~next:l));
C.sizeSet h ((C.size h) + 1)));
if (C.size h) > (buckets_len lsl 1) then resize ~hash h
let set h key value =
set0 h key value ~eq:(Belt_Id.getEqInternal (C.eq h))
~hash:(Belt_Id.getHashInternal (C.hash h))
let rec removeInBucket h h_buckets i key prec bucket ~eq =
match C.toOpt bucket with
| None -> ()
| Some cell ->
let cell_next = N.next cell in
if eq (N.key cell) key
then (N.nextSet prec cell_next; C.sizeSet h ((C.size h) - 1))
else removeInBucket ~eq h h_buckets i key cell cell_next
let remove h key =
let h_buckets = C.buckets h in
let i =
((Belt_Id.getHashInternal (C.hash h)) key) land
((A.length h_buckets) - 1) in
let bucket = A.getUnsafe h_buckets i in
match C.toOpt bucket with
| None -> ()
| Some cell ->
let eq = Belt_Id.getEqInternal (C.eq h) in
if eq (N.key cell) key
then
(A.setUnsafe h_buckets i (N.next cell); C.sizeSet h ((C.size h) - 1))
else removeInBucket ~eq h h_buckets i key cell (N.next cell)
let rec getAux ~eq key buckets =
match C.toOpt buckets with
| None -> None
| Some cell ->
if eq key (N.key cell)
then Some (N.value cell)
else getAux ~eq key (N.next cell)
let get h key =
let h_buckets = C.buckets h in
let nid =
((Belt_Id.getHashInternal (C.hash h)) key) land
((A.length h_buckets) - 1) in
match C.toOpt @@ (A.getUnsafe h_buckets nid) with
| None -> None
| Some cell1 ->
let eq = Belt_Id.getEqInternal (C.eq h) in
if eq key (N.key cell1)
then Some (N.value cell1)
else
(match C.toOpt (N.next cell1) with
| None -> None
| Some cell2 ->
if eq key (N.key cell2)
then Some (N.value cell2)
else
(match C.toOpt (N.next cell2) with
| None -> None
| Some cell3 ->
if eq key (N.key cell3)
then Some (N.value cell3)
else getAux ~eq key (N.next cell3)))
let rec memInBucket key cell ~eq =
(eq (N.key cell) key) ||
(match C.toOpt (N.next cell) with
| None -> false
| Some nextCell -> memInBucket ~eq key nextCell)
let has h key =
let h_buckets = C.buckets h in
let nid =
((Belt_Id.getHashInternal (C.hash h)) key) land
((A.length h_buckets) - 1) in
let bucket = A.getUnsafe h_buckets nid in
match C.toOpt bucket with
| None -> false
| Some bucket ->
memInBucket ~eq:(Belt_Id.getEqInternal (C.eq h)) key bucket
let make (type key) (type identity) ~hintSize ~id:(id : (key,identity) id)
= let module M = (val id) in C.make ~hash:M.hash ~eq:M.eq ~hintSize
let fromArray (type a) (type identity) arr ~id:(id : (a,identity) id) =
let module M = (val id) in
let (hash,eq) = (M.hash, M.eq) in
let len = A.length arr in
let v = C.make ~hash ~eq ~hintSize:len in
let (eq,hash) =
((Belt_Id.getEqInternal eq), (Belt_Id.getHashInternal hash)) in
for i = 0 to len - 1 do
(let (key,value) = A.getUnsafe arr i in set0 ~eq ~hash v key value)
done;
v
let mergeMany h arr =
let (hash,eq) =
((Belt_Id.getHashInternal (C.hash h)), (Belt_Id.getEqInternal (C.eq h))) in
let len = A.length arr in
for i = 0 to len - 1 do
let (key,value) = A.getUnsafe arr i in set0 h ~eq ~hash key value
done
module Int = Belt_HashMapInt
module String = Belt_HashMapString | null | https://raw.githubusercontent.com/jaredly/belt/4d07f859403fdbd3fbfc5a9547c6066d657a2131/belt/belt_HashMap.ml | ocaml | module N = Belt_internalBuckets
module C = Belt_internalBucketsType
module A = Belt_Array
type ('a,'id) eq = ('a,'id) Belt_Id.eq
type ('a,'id) hash = ('a,'id) Belt_Id.hash
type ('a,'id) id = ('a,'id) Belt_Id.hashable
type ('a,'b,'id) t = (('a,'id) hash,('a,'id) eq,'a,'b) N.t
let clear = C.clear
let size = C.size
let forEach = N.forEach
let forEachU = N.forEachU
let reduce = N.reduce
let reduceU = N.reduceU
let logStats = N.logStats
let keepMapInPlaceU = N.keepMapInPlaceU
let keepMapInPlace = N.keepMapInPlace
let toArray = N.toArray
let copy = N.copy
let keysToArray = N.keysToArray
let valuesToArray = N.valuesToArray
let getBucketHistogram = N.getBucketHistogram
let isEmpty = C.isEmpty
let rec copyBucketReHash ~hash ~h_buckets ~ndata_tail old_bucket =
match C.toOpt old_bucket with
| None -> ()
| Some cell ->
let nidx = (hash (N.key cell)) land ((A.length h_buckets) - 1) in
let v = C.return cell in
((match C.toOpt (A.getUnsafe ndata_tail nidx) with
| None -> A.setUnsafe h_buckets nidx v
| Some tail -> N.nextSet tail v);
A.setUnsafe ndata_tail nidx v;
copyBucketReHash ~hash ~h_buckets ~ndata_tail (N.next cell))
let resize ~hash h =
let odata = C.buckets h in
let osize = A.length odata in
let nsize = osize * 2 in
if nsize >= osize
then
let h_buckets = A.makeUninitialized nsize in
let ndata_tail = A.makeUninitialized nsize in
(C.bucketsSet h h_buckets;
for i = 0 to osize - 1 do
copyBucketReHash ~hash ~h_buckets ~ndata_tail (A.getUnsafe odata i)
done;
for i = 0 to nsize - 1 do
(match C.toOpt (A.getUnsafe ndata_tail i) with
| None -> ()
| Some tail -> N.nextSet tail C.emptyOpt)
done)
let rec replaceInBucket ~eq key info cell =
if eq (N.key cell) key
then (N.valueSet cell info; false)
else
(match C.toOpt (N.next cell) with
| None -> true
| Some cell -> replaceInBucket ~eq key info cell)
let set0 h key value ~eq ~hash =
let h_buckets = C.buckets h in
let buckets_len = A.length h_buckets in
let i = (hash key) land (buckets_len - 1) in
let l = A.getUnsafe h_buckets i in
(match C.toOpt l with
| None ->
(A.setUnsafe h_buckets i
(C.return (N.bucket ~key ~value ~next:C.emptyOpt));
C.sizeSet h ((C.size h) + 1))
| Some bucket ->
if replaceInBucket ~eq key value bucket
then
(A.setUnsafe h_buckets i (C.return (N.bucket ~key ~value ~next:l));
C.sizeSet h ((C.size h) + 1)));
if (C.size h) > (buckets_len lsl 1) then resize ~hash h
let set h key value =
set0 h key value ~eq:(Belt_Id.getEqInternal (C.eq h))
~hash:(Belt_Id.getHashInternal (C.hash h))
let rec removeInBucket h h_buckets i key prec bucket ~eq =
match C.toOpt bucket with
| None -> ()
| Some cell ->
let cell_next = N.next cell in
if eq (N.key cell) key
then (N.nextSet prec cell_next; C.sizeSet h ((C.size h) - 1))
else removeInBucket ~eq h h_buckets i key cell cell_next
let remove h key =
let h_buckets = C.buckets h in
let i =
((Belt_Id.getHashInternal (C.hash h)) key) land
((A.length h_buckets) - 1) in
let bucket = A.getUnsafe h_buckets i in
match C.toOpt bucket with
| None -> ()
| Some cell ->
let eq = Belt_Id.getEqInternal (C.eq h) in
if eq (N.key cell) key
then
(A.setUnsafe h_buckets i (N.next cell); C.sizeSet h ((C.size h) - 1))
else removeInBucket ~eq h h_buckets i key cell (N.next cell)
let rec getAux ~eq key buckets =
match C.toOpt buckets with
| None -> None
| Some cell ->
if eq key (N.key cell)
then Some (N.value cell)
else getAux ~eq key (N.next cell)
let get h key =
let h_buckets = C.buckets h in
let nid =
((Belt_Id.getHashInternal (C.hash h)) key) land
((A.length h_buckets) - 1) in
match C.toOpt @@ (A.getUnsafe h_buckets nid) with
| None -> None
| Some cell1 ->
let eq = Belt_Id.getEqInternal (C.eq h) in
if eq key (N.key cell1)
then Some (N.value cell1)
else
(match C.toOpt (N.next cell1) with
| None -> None
| Some cell2 ->
if eq key (N.key cell2)
then Some (N.value cell2)
else
(match C.toOpt (N.next cell2) with
| None -> None
| Some cell3 ->
if eq key (N.key cell3)
then Some (N.value cell3)
else getAux ~eq key (N.next cell3)))
let rec memInBucket key cell ~eq =
(eq (N.key cell) key) ||
(match C.toOpt (N.next cell) with
| None -> false
| Some nextCell -> memInBucket ~eq key nextCell)
let has h key =
let h_buckets = C.buckets h in
let nid =
((Belt_Id.getHashInternal (C.hash h)) key) land
((A.length h_buckets) - 1) in
let bucket = A.getUnsafe h_buckets nid in
match C.toOpt bucket with
| None -> false
| Some bucket ->
memInBucket ~eq:(Belt_Id.getEqInternal (C.eq h)) key bucket
let make (type key) (type identity) ~hintSize ~id:(id : (key,identity) id)
= let module M = (val id) in C.make ~hash:M.hash ~eq:M.eq ~hintSize
let fromArray (type a) (type identity) arr ~id:(id : (a,identity) id) =
let module M = (val id) in
let (hash,eq) = (M.hash, M.eq) in
let len = A.length arr in
let v = C.make ~hash ~eq ~hintSize:len in
let (eq,hash) =
((Belt_Id.getEqInternal eq), (Belt_Id.getHashInternal hash)) in
for i = 0 to len - 1 do
(let (key,value) = A.getUnsafe arr i in set0 ~eq ~hash v key value)
done;
v
let mergeMany h arr =
let (hash,eq) =
((Belt_Id.getHashInternal (C.hash h)), (Belt_Id.getEqInternal (C.eq h))) in
let len = A.length arr in
for i = 0 to len - 1 do
let (key,value) = A.getUnsafe arr i in set0 h ~eq ~hash key value
done
module Int = Belt_HashMapInt
module String = Belt_HashMapString |
|
c5ab2e20c093430b0d31573775921ea985a58e9593d5d46fda21bfa838a1cb02 | eglaysher/rldev | encoding.ml |
RLdev: character encoding utility functions
Copyright (C) 2006 Haeleth
This program is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation; either version 2 of the License, or (at your option) any later
version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
details.
You should have received a copy of the GNU General Public License along with
this program; if not, write to the Free Software Foundation, Inc., 59 Temple
Place - Suite 330, Boston, MA 02111-1307, USA.
*)
type unicode_mapping =
{ mutable db_to_uni : int array array;
mutable uni_to_db : int IMap.t; }
type generic_mapping =
{ mutable encode : int IMap.t;
mutable decode : int IMap.t; }
let enc_type =
function
| "SHIFTJIS" | "SHIFT_JIS" | "SHIFT-JIS" | "SJS" | "SJIS" | "CP932" -> `Sjs
| "EUC-JP" | "EUC" | "EUC_JP" -> `Euc
| "UTF8" | "UTF-8" -> `Utf8
| _ -> `Other
let sjs_to_euc s =
let b = Buffer.create 0 in
let rec getc idx =
if idx = String.length s then
Buffer.contents b
else
let c = s.[idx] in
let c1 =
match c with
| '\x00'..'\x7f' -> Buffer.add_char b c; -1
| '\xa0'..'\xdf' -> Printf.bprintf b "\x8e%c" c; -1
| '\x81'..'\x9f' -> int_of_char c - 0x71
| '\xe0'..'\xef'
| '\xf0'..'\xfc' -> int_of_char c - 0xb1
| _ -> Optpp.sysError "invalid character in input"
in
if c1 = -1 then
getc (idx + 1)
else
let c1 = (c1 lsl 1) + 1 in
if idx + 1 = String.length s then Optpp.sysError "invalid character in input";
let c2 = int_of_char s.[idx + 1] in
let c2 = if c2 > 0x7f then c2 - 1 else c2 in
let c1', c2' =
if c2 >= 0x9e
then (c1 + 1) lor 0x80, (c2 - 0x7d) lor 0x80
else c1 lor 0x80, (c2 - 0x1f) lor 0x80
in
Buffer.add_char b (char_of_int c1');
Buffer.add_char b (char_of_int c2');
getc (idx + 2)
in
getc 0 | null | https://raw.githubusercontent.com/eglaysher/rldev/e59103b165e1c20bd940942405b2eee767933c96/src/common/encoding.ml | ocaml |
RLdev : character encoding utility functions
Copyright ( C ) 2006 Haeleth
This program is free software ; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation ; either version 2 of the License , or ( at your option ) any later
version .
This program is distributed in the hope that it will be useful , but WITHOUT
ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE . See the GNU General Public License for more
details .
You should have received a copy of the GNU General Public License along with
this program ; if not , write to the Free Software Foundation , Inc. , 59 Temple
Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
RLdev: character encoding utility functions
Copyright (C) 2006 Haeleth
This program is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation; either version 2 of the License, or (at your option) any later
version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
details.
You should have received a copy of the GNU General Public License along with
this program; if not, write to the Free Software Foundation, Inc., 59 Temple
Place - Suite 330, Boston, MA 02111-1307, USA.
*)
type unicode_mapping =
{ mutable db_to_uni : int array array;
mutable uni_to_db : int IMap.t; }
type generic_mapping =
{ mutable encode : int IMap.t;
mutable decode : int IMap.t; }
let enc_type =
function
| "SHIFTJIS" | "SHIFT_JIS" | "SHIFT-JIS" | "SJS" | "SJIS" | "CP932" -> `Sjs
| "EUC-JP" | "EUC" | "EUC_JP" -> `Euc
| "UTF8" | "UTF-8" -> `Utf8
| _ -> `Other
let sjs_to_euc s =
let b = Buffer.create 0 in
let rec getc idx =
if idx = String.length s then
Buffer.contents b
else
let c = s.[idx] in
let c1 =
match c with
| '\x00'..'\x7f' -> Buffer.add_char b c; -1
| '\xa0'..'\xdf' -> Printf.bprintf b "\x8e%c" c; -1
| '\x81'..'\x9f' -> int_of_char c - 0x71
| '\xe0'..'\xef'
| '\xf0'..'\xfc' -> int_of_char c - 0xb1
| _ -> Optpp.sysError "invalid character in input"
in
if c1 = -1 then
getc (idx + 1)
else
let c1 = (c1 lsl 1) + 1 in
if idx + 1 = String.length s then Optpp.sysError "invalid character in input";
let c2 = int_of_char s.[idx + 1] in
let c2 = if c2 > 0x7f then c2 - 1 else c2 in
let c1', c2' =
if c2 >= 0x9e
then (c1 + 1) lor 0x80, (c2 - 0x7d) lor 0x80
else c1 lor 0x80, (c2 - 0x1f) lor 0x80
in
Buffer.add_char b (char_of_int c1');
Buffer.add_char b (char_of_int c2');
getc (idx + 2)
in
getc 0 |
|
a0bdbf64ab39c6a4d68ad7049969384c2ef365cc944de2b8e6a7e2ad5c513e1f | MLstate/opalang | qmlAstUtils.ml |
Copyright © 2011, 2012 MLstate
This file is part of Opa.
Opa is free software: you can redistribute it and/or modify it under the
terms of the GNU Affero General Public License, version 3, as published by
the Free Software Foundation.
Opa is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for
more details.
You should have received a copy of the GNU Affero General Public License
along with Opa. If not, see </>.
*)
(* refactoring in progress *)
(* depends *)
module List = BaseList
module String = BaseString
(* alias *)
module TypeIdent = QmlAst.TypeIdent
(* shorthands *)
module Q = QmlAst
(* -- *)
let rec traverse_coerce e = match e with
| Q.Coerce (_, e, _) -> traverse_coerce e
| _ -> e
let map_exprident code f =
let f' x = match x with
| Q.Ident (label, y) ->
let fy = f y in
if y == fy then x else Q.Ident (label, fy)
| _ -> x
in QmlAstWalk.CodeExpr.map (QmlAstWalk.Expr.map_up f') code
let rec get_deeper_expr ?(except=fun _ -> false) e =
if except e then e
else
match e with
(* special forms to document !!! *)
| Q.LetIn (_, [id, e1], Q.Ident (_, id'))
| Q.LetRecIn (_, [id, e1], Q.Ident (_, id')) when Ident.equal id id' ->
get_deeper_expr ~except e1
(* forms with a unique inner expr *)
| Q.LetIn (_, _, e) | Q.LetRecIn (_, _, e)
| Q.Lambda (_, _, e) | Q.Coerce (_, e, _)
| Q.Match (_, _, [_, e])
-> get_deeper_expr ~except e
| Q.Dot _ | Q.Path _
| Q.Bypass _
| Q.Ident _ | Q.Const _
| Q.Record _ | Q.ExtendRecord _
| Q.Apply _ | Q.Directive _
| Q.Match _ -> e
let substitute old_expr new_expr e =
let old_annot = Q.QAnnot.expr old_expr in
let aux tra e =
if Annot.equal (Q.QAnnot.expr e) old_annot
then new_expr
else tra e
in
QmlAstWalk.Expr.traverse_map aux e
let collect_annot old_annot e =
let coll tra acc e =
if Annot.equal (Q.QAnnot.expr e) old_annot
then e::acc
else tra acc e
in
QmlAstWalk.Expr.traverse_fold coll [] e
let collect old_expr e =
let old_annot = Q.QAnnot.expr old_expr in
collect_annot old_annot e
type stop_expansiveness =
[ `nonexpansive
| `unsafe_cast
| `fail
| `todo
]
type ('a,'b,'c) strictly_non_expansive =
[ `expand of 'a
| `doctype of 'b
| `sliced_expr
| `warncoerce
| `extendwith
| `specialize of 'c
| `may_cps
| `worker
| `workable
| Q.opavalue_directive
| `async
| `deprecated
]
type non_expansive = [
| `module_
| `recval
| Q.slicer_directive
| Q.closure_instrumentation_directive
]
let is_expansive =
QmlAstWalk.Expr.traverse_exists
(fun tra -> function
| Q.Const _
| Q.Ident _
| Q.Lambda _
| Q.Bypass _ -> false
| Q.Directive (_, `llarray, [], _) ->
false (* the empty array is the only one that is not expansive
* because it is not mutable *)
| Q.Directive (_, #stop_expansiveness, _, _) ->
false
| Q.Directive (_, (#strictly_non_expansive | #non_expansive), _exprs, _) as d
-> tra d
| Q.Directive _ -> true
| Q.Apply _ -> true
| e -> tra e)
let is_expansive_strict =
QmlAstWalk.Expr.traverse_exists
(fun tra -> function
| Q.Const _
| Q.Ident _
| Q.Lambda _
| Q.Bypass _ -> false
| Q.Apply _
| Q.Record _ -> true
| Q.Directive (_, #strictly_non_expansive, _exprs, _) as d
-> tra d
| Q.Directive _ -> true
| e -> tra e
)
(* only elements taking part in the expression type counts *)
let expansive_nodes_related_to_type ?(strict=false) =
QmlAstWalk.Expr.traverse_fold
(fun tra acc -> function
| Q.Const _
| Q.Ident _
| Q.Lambda _
| Q.Bypass _ -> acc
| Q.Directive (_, `llarray, [], _) when not(strict) ->
acc (* the empty array is the only one that is not expansive
* because it is not mutable *)
| Q.Directive (_, #stop_expansiveness, _, _) when not(strict) ->
acc
| Q.Directive (_, #non_expansive, _exprs, _) as d when not(strict) ->
tra acc d
| Q.Directive (_, #strictly_non_expansive, _exprs, _) as d
-> tra acc d
| (Q.Directive(a, _, _, _)
| Q.Apply(a, _, _)) as e ->
tra (a::acc) e
| e -> tra acc e) []
let is_expansive_with_options = function
| `disabled -> (fun _ -> false)
| `normal -> is_expansive
| `strict -> is_expansive_strict
let expansive_nodes_related_to_type_with_options = function
| `disabled -> (fun _ -> [])
| `normal -> expansive_nodes_related_to_type ~strict:false
| `strict -> expansive_nodes_related_to_type ~strict:true
module App =
struct
type 'a util = Q.expr -> Q.expr list -> 'a
let to_list ?(strict=true) e =
match e with
| Q.Apply (_, f, args) -> f::args
| _ ->
if strict then invalid_arg "QmlAstUtils.App.to_list"
else [e]
let from_list l =
match l with
| f::args -> QmlAstCons.UntypedExpr.apply f args
| _ -> invalid_arg "QmlAstUtils.App.from_list"
let nary_args_number _f args = List.length args
let curryfied_args_number f _x =
let rec aux cpt e =
match e with
(* | Directive (#structural_ignored_directive, ...) *)
| Q.Apply (_, f, args) -> aux (cpt + List.length args) f
| _ -> cpt
in
aux 1 f
end
module ExprIdent =
struct
let string = function
| Q.Ident (_, n) -> Ident.to_uniq_string n
| _ -> assert false
let change_ident id expr =
match expr with
| Q.Ident (label, _) -> Q.Ident (label, id)
| _ -> invalid_arg "QmlAstUtils.Ident.change_ident"
let substitute ident_map expr =
let aux expr =
match expr with
| Q.Ident (_, i) -> (
match IdentMap.find_opt i ident_map with
| Some e -> e ()
| None -> expr
)
| _ -> expr
in
QmlAstWalk.Expr.map_up aux expr
end
module Lambda =
struct
type 'a util = Ident.t list -> Q.expr -> 'a
let nary_arity params _body = List.length params
let curryfied_arity params body =
let rec aux cpt e =
match e with
(* | Directive (#structural_ignored_directive, ...) -> aux cpt expr *)
| Q.Coerce (_, e, _) -> aux cpt e
| Q.Lambda (_, params, body) -> aux (cpt + List.length params) body
| _ -> cpt
in aux (List.length params) body
(* deprecated *)
let count e =
match e with
| Q.Lambda (_, params, body) -> curryfied_arity params body
| _ -> 0
(* ************************************************************************ *)
(** {b Visibility}: Exported outside this module. *)
(* ************************************************************************ *)
let eta_expand_ast arity e =
(* Use as position for generated pieces of code, the position of the
currently processed expression. *)
let pos = Q.Pos.expr e in
let idents =
List.init
arity (fun i -> Ident.next (Printf.sprintf "eta_%d_%d" i arity)) in
let exps =
List.map
(fun i ->
let label = Annot.next_label pos in
QmlAstCons.UntypedExprWithLabel.ident ~label i)
idents in
let label_lambda = Annot.next_label pos in
let label_apply = Annot.next_label pos in
QmlAstCons.UntypedExprWithLabel.lambda
~label: label_lambda idents
(QmlAstCons.UntypedExprWithLabel.apply ~label: label_apply e exps)
end
module Coerce =
struct
let uncoerce e =
let rec aux e acc =
match e with
| Q.Coerce (_, e, ty)-> aux e ((Q.Label.expr e, ty)::acc)
| _ -> e, acc
in aux e []
let recoerce e lanty =
List.foldl (fun (label, ty) e -> QmlAstCons.UntypedExprWithLabel.coerce ~label e ty) lanty e
let rm_coerces e = fst (uncoerce e)
end
module FreeVars =
struct
let pat_fold f pat acc0 =
let aux acc pat = match pat with
| Q.PatVar (label, i) | Q.PatAs (label, _, i) ->
f acc (Annot.annot label) i
| _ -> acc
in
QmlAstWalk.Pattern.fold_down aux acc0 pat
let pat pat = pat_fold (fun acc _ i -> IdentSet.add i acc) pat IdentSet.empty
let expr_fold f expr acc0 =
QmlAstWalk.Expr.fold_with_exprmap
(fun bound acc e -> match e with
| Q.Ident (label, i) when IdentMap.find_opt i bound = None ->
f acc (Annot.annot label) i
| _ -> acc)
acc0 expr
let expr pat = expr_fold (fun acc _ i -> IdentSet.add i acc) pat IdentSet.empty
end
module Const =
struct
let limits byte =
Big_int.minus_big_int (Big_int.power_int_positive_int 2 byte),
Big_int.pred_big_int (Big_int.power_int_positive_int 2 byte)
(* -2^53, 2^53 - 1 *)
let int_js_limits = limits 53
(* -2^62, 2^62 - 1 *)
let int_ml_limits =
#<Ifstatic:OCAML_WORD_SIZE 64>
limits 62
#<Else>
limits 30
#<End>
let int_limits = ref int_js_limits
let compare a b =
match a, b with
| Q.Int a, Q.Int b -> Big_int.compare_big_int a b
| Q.Float a, Q.Float b -> Pervasives.compare a b
| Q.String a, Q.String b -> String.compare a b
| _ -> Pervasives.compare a b
let equal a b = compare a b = 0
let check_int i =
let min, max = !int_limits in
Big_int.le_big_int i max && Big_int.ge_big_int i min
let min_int () =
fst !int_limits
let max_int () =
snd !int_limits
let set_limits = function
| `js -> int_limits := int_js_limits
| `ml -> int_limits := int_ml_limits
end
module Record =
struct
type 'a util = (string * Q.expr) list -> 'a
let uncons_tuple fields =
let mapi i (f, e) =
let field = QmlAstCons.Tuple.field (succ i) in
if String.compare f field <> 0
then raise Not_found
else e
in
try Some (List.mapi mapi fields)
with
| Not_found -> None
let uncons_qml_tuple fields =
let (@=) s s' = String.compare s s' = 0 in
let s_fst = QmlAstCons.Tuple.qml_fst in
let s_snd = QmlAstCons.Tuple.qml_snd in
let rec aux ?(fail=true) acc fields =
match fields with
| [ ( ss_fst, fst ) ; ( ss_snd, Q.Record (_, fields)) ]
when s_fst @= ss_fst && s_snd @= ss_snd
-> aux ~fail:false (fst::acc) fields
| [ ( ss_fst, fst ) ; ( ss_snd, snd ) ]
when s_fst @= ss_fst && s_snd @= ss_snd
-> List.rev (snd::fst::acc)
| _ ->
if fail then raise Not_found
else
List.rev ((QmlAstCons.UntypedExpr.record fields)::acc)
in
try
Some (aux [] fields)
with Not_found -> None
let uncons fields_exprs_list = List.split fields_exprs_list
let cons fields exprs =
QmlAstCons.UntypedExpr.record (List.combine fields exprs)
end
module Tuple =
struct
let uncons e =
match (traverse_coerce e) with
| Q.Record (_, fields) -> Record.uncons_tuple fields
| _ -> None
let uncons_typeident typeident =
match String.split_char '_' (QmlAst.TypeIdent.to_string typeident) with
| "tuple", r -> Base.int_of_string_opt r
| _ -> None
let uncons_qml_tuple e =
match (traverse_coerce e) with
| Q.Record (_, fields) -> Record.uncons_qml_tuple fields
| _ -> None
end
module Pat = QmlAstWatch.Pat
module Match =
struct
type 'a util = Q.expr -> (Q.pat * Q.expr) list -> 'a
let uncons_ifthenelse = QmlAstWatch.uncons_ifthenelse
let uncons if_ pats_exprs =
let pats, expr = List.split pats_exprs in
(if_, pats, expr)
let cons if_ pats exprs =
let p = List.combine pats exprs in
QmlAstCons.UntypedExpr.match_ if_ p
end
module LetIn =
struct
type 'a util = (Q.ident * Q.expr) list -> Q.expr -> 'a
let rev_uncons (l : (Q.ident * Q.expr) list) e =
let rec aux acc e =
match e with
| Q.LetIn (_, l, e) -> aux (l::acc) e
| _ -> acc,e
in aux [l] e
let uncons (l : (Q.ident * Q.expr) list) e =
let rev_u,e = rev_uncons l e in
List.rev rev_u, e
let cons l e =
List.fold_right
(fun l e -> QmlAstCons.UntypedExpr.letin l e) l e
end
module LetRecIn =
struct
type 'a util = (Q.ident * Q.expr) list -> Q.expr -> 'a
let rev_uncons (l : (Q.ident * Q.expr) list) e =
let rec aux acc e =
match e with
| Q.LetRecIn (_, l, e) -> aux (l::acc) e
| _ -> acc,e
in aux [l] e
let uncons (l : (Q.ident * Q.expr) list) e =
let rev_u,e = rev_uncons l e in
List.rev rev_u, e
let cons l e =
List.fold_right
(fun l e -> QmlAstCons.UntypedExpr.letrecin l e) l e
end
module Code =
struct
let insert ~deps ~insert code =
let last = function
| Q.NewVal (_, bindings)
| Q.NewValRec (_, bindings) ->
List.exists (fun (i, _) -> IdentSet.mem i deps) bindings
| _ -> false
in
let rec aux acc = function
| [] ->
insert @ acc
| code_elt :: tl ->
if last code_elt
then
List.rev_append tl (code_elt ::(insert @ acc))
else
aux (code_elt::acc) tl
in
aux [] (List.rev code)
end
| null | https://raw.githubusercontent.com/MLstate/opalang/424b369160ce693406cece6ac033d75d85f5df4f/compiler/libqmlcompil/qmlAstUtils.ml | ocaml | refactoring in progress
depends
alias
shorthands
--
special forms to document !!!
forms with a unique inner expr
the empty array is the only one that is not expansive
* because it is not mutable
only elements taking part in the expression type counts
the empty array is the only one that is not expansive
* because it is not mutable
| Directive (#structural_ignored_directive, ...)
| Directive (#structural_ignored_directive, ...) -> aux cpt expr
deprecated
************************************************************************
* {b Visibility}: Exported outside this module.
************************************************************************
Use as position for of generated pieces of code, the position of the
currently processed expression. |
Copyright © 2011 , 2012 MLstate
This file is part of .
is free software : you can redistribute it and/or modify it under the
terms of the GNU Affero General Public License , version 3 , as published by
the Free Software Foundation .
is distributed in the hope that it will be useful , but WITHOUT ANY
WARRANTY ; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE . See the GNU Affero General Public License for
more details .
You should have received a copy of the GNU Affero General Public License
along with . If not , see < / > .
Copyright © 2011, 2012 MLstate
This file is part of Opa.
Opa is free software: you can redistribute it and/or modify it under the
terms of the GNU Affero General Public License, version 3, as published by
the Free Software Foundation.
Opa is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for
more details.
You should have received a copy of the GNU Affero General Public License
along with Opa. If not, see </>.
*)
module List = BaseList
module String = BaseString
module TypeIdent = QmlAst.TypeIdent
module Q = QmlAst
let rec traverse_coerce e = match e with
| Q.Coerce (_, e, _) -> traverse_coerce e
| _ -> e
let map_exprident code f =
let f' x = match x with
| Q.Ident (label, y) ->
let fy = f y in
if y == fy then x else Q.Ident (label, fy)
| _ -> x
in QmlAstWalk.CodeExpr.map (QmlAstWalk.Expr.map_up f') code
let rec get_deeper_expr ?(except=fun _ -> false) e =
if except e then e
else
match e with
| Q.LetIn (_, [id, e1], Q.Ident (_, id'))
| Q.LetRecIn (_, [id, e1], Q.Ident (_, id')) when Ident.equal id id' ->
get_deeper_expr ~except e1
| Q.LetIn (_, _, e) | Q.LetRecIn (_, _, e)
| Q.Lambda (_, _, e) | Q.Coerce (_, e, _)
| Q.Match (_, _, [_, e])
-> get_deeper_expr ~except e
| Q.Dot _ | Q.Path _
| Q.Bypass _
| Q.Ident _ | Q.Const _
| Q.Record _ | Q.ExtendRecord _
| Q.Apply _ | Q.Directive _
| Q.Match _ -> e
let substitute old_expr new_expr e =
let old_annot = Q.QAnnot.expr old_expr in
let aux tra e =
if Annot.equal (Q.QAnnot.expr e) old_annot
then new_expr
else tra e
in
QmlAstWalk.Expr.traverse_map aux e
let collect_annot old_annot e =
let coll tra acc e =
if Annot.equal (Q.QAnnot.expr e) old_annot
then e::acc
else tra acc e
in
QmlAstWalk.Expr.traverse_fold coll [] e
let collect old_expr e =
let old_annot = Q.QAnnot.expr old_expr in
collect_annot old_annot e
type stop_expansiveness =
[ `nonexpansive
| `unsafe_cast
| `fail
| `todo
]
type ('a,'b,'c) strictly_non_expansive =
[ `expand of 'a
| `doctype of 'b
| `sliced_expr
| `warncoerce
| `extendwith
| `specialize of 'c
| `may_cps
| `worker
| `workable
| Q.opavalue_directive
| `async
| `deprecated
]
type non_expansive = [
| `module_
| `recval
| Q.slicer_directive
| Q.closure_instrumentation_directive
]
let is_expansive =
QmlAstWalk.Expr.traverse_exists
(fun tra -> function
| Q.Const _
| Q.Ident _
| Q.Lambda _
| Q.Bypass _ -> false
| Q.Directive (_, `llarray, [], _) ->
| Q.Directive (_, #stop_expansiveness, _, _) ->
false
| Q.Directive (_, (#strictly_non_expansive | #non_expansive), _exprs, _) as d
-> tra d
| Q.Directive _ -> true
| Q.Apply _ -> true
| e -> tra e)
let is_expansive_strict =
QmlAstWalk.Expr.traverse_exists
(fun tra -> function
| Q.Const _
| Q.Ident _
| Q.Lambda _
| Q.Bypass _ -> false
| Q.Apply _
| Q.Record _ -> true
| Q.Directive (_, #strictly_non_expansive, _exprs, _) as d
-> tra d
| Q.Directive _ -> true
| e -> tra e
)
let expansive_nodes_related_to_type ?(strict=false) =
QmlAstWalk.Expr.traverse_fold
(fun tra acc -> function
| Q.Const _
| Q.Ident _
| Q.Lambda _
| Q.Bypass _ -> acc
| Q.Directive (_, `llarray, [], _) when not(strict) -> acc
| Q.Directive (_, #stop_expansiveness, _, _) when not(strict) ->
acc
| Q.Directive (_, #non_expansive, _exprs, _) as d when not(strict) ->
tra acc d
| Q.Directive (_, #strictly_non_expansive, _exprs, _) as d
-> tra acc d
| (Q.Directive(a, _, _, _)
| Q.Apply(a, _, _)) as e ->
tra (a::acc) e
| e -> tra acc e) []
let is_expansive_with_options = function
| `disabled -> (fun _ -> false)
| `normal -> is_expansive
| `strict -> is_expansive_strict
let expansive_nodes_related_to_type_with_options = function
| `disabled -> (fun _ -> [])
| `normal -> expansive_nodes_related_to_type ~strict:false
| `strict -> expansive_nodes_related_to_type ~strict:true
module App =
struct
type 'a util = Q.expr -> Q.expr list -> 'a
let to_list ?(strict=true) e =
match e with
| Q.Apply (_, f, args) -> f::args
| _ ->
if strict then invalid_arg "QmlAstUtils.App.to_list"
else [e]
let from_list l =
match l with
| f::args -> QmlAstCons.UntypedExpr.apply f args
| _ -> invalid_arg "QmlAstUtils.App.from_list"
let nary_args_number _f args = List.length args
let curryfied_args_number f _x =
let rec aux cpt e =
match e with
| Q.Apply (_, f, args) -> aux (cpt + List.length args) f
| _ -> cpt
in
aux 1 f
end
module ExprIdent =
struct
let string = function
| Q.Ident (_, n) -> Ident.to_uniq_string n
| _ -> assert false
let change_ident id expr =
match expr with
| Q.Ident (label, _) -> Q.Ident (label, id)
| _ -> invalid_arg "QmlAstUtils.Ident.change_ident"
let substitute ident_map expr =
let aux expr =
match expr with
| Q.Ident (_, i) -> (
match IdentMap.find_opt i ident_map with
| Some e -> e ()
| None -> expr
)
| _ -> expr
in
QmlAstWalk.Expr.map_up aux expr
end
module Lambda =
struct
type 'a util = Ident.t list -> Q.expr -> 'a
let nary_arity params _body = List.length params
let curryfied_arity params body =
let rec aux cpt e =
match e with
| Q.Coerce (_, e, _) -> aux cpt e
| Q.Lambda (_, params, body) -> aux (cpt + List.length params) body
| _ -> cpt
in aux (List.length params) body
let count e =
match e with
| Q.Lambda (_, params, body) -> curryfied_arity params body
| _ -> 0
let eta_expand_ast arity e =
let pos = Q.Pos.expr e in
let idents =
List.init
arity (fun i -> Ident.next (Printf.sprintf "eta_%d_%d" i arity)) in
let exps =
List.map
(fun i ->
let label = Annot.next_label pos in
QmlAstCons.UntypedExprWithLabel.ident ~label i)
idents in
let label_lambda = Annot.next_label pos in
let label_apply = Annot.next_label pos in
QmlAstCons.UntypedExprWithLabel.lambda
~label: label_lambda idents
(QmlAstCons.UntypedExprWithLabel.apply ~label: label_apply e exps)
end
module Coerce =
struct
let uncoerce e =
let rec aux e acc =
match e with
| Q.Coerce (_, e, ty)-> aux e ((Q.Label.expr e, ty)::acc)
| _ -> e, acc
in aux e []
let recoerce e lanty =
List.foldl (fun (label, ty) e -> QmlAstCons.UntypedExprWithLabel.coerce ~label e ty) lanty e
let rm_coerces e = fst (uncoerce e)
end
module FreeVars =
struct
let pat_fold f pat acc0 =
let aux acc pat = match pat with
| Q.PatVar (label, i) | Q.PatAs (label, _, i) ->
f acc (Annot.annot label) i
| _ -> acc
in
QmlAstWalk.Pattern.fold_down aux acc0 pat
let pat pat = pat_fold (fun acc _ i -> IdentSet.add i acc) pat IdentSet.empty
let expr_fold f expr acc0 =
QmlAstWalk.Expr.fold_with_exprmap
(fun bound acc e -> match e with
| Q.Ident (label, i) when IdentMap.find_opt i bound = None ->
f acc (Annot.annot label) i
| _ -> acc)
acc0 expr
let expr pat = expr_fold (fun acc _ i -> IdentSet.add i acc) pat IdentSet.empty
end
module Const =
struct
let limits byte =
Big_int.minus_big_int (Big_int.power_int_positive_int 2 byte),
Big_int.pred_big_int (Big_int.power_int_positive_int 2 byte)
(* -2^53, 2^53 - 1 *)
let int_js_limits = limits 53
(* -2^62, 2^62 - 1 *)
let int_ml_limits =
#<Ifstatic:OCAML_WORD_SIZE 64>
limits 62
#<Else>
limits 30
#<End>
let int_limits = ref int_js_limits
let compare a b =
match a, b with
| Q.Int a, Q.Int b -> Big_int.compare_big_int a b
| Q.Float a, Q.Float b -> Pervasives.compare a b
| Q.String a, Q.String b -> String.compare a b
| _ -> Pervasives.compare a b
let equal a b = compare a b = 0
let check_int i =
let min, max = !int_limits in
Big_int.le_big_int i max && Big_int.ge_big_int i min
let min_int () =
fst !int_limits
let max_int () =
snd !int_limits
let set_limits = function
| `js -> int_limits := int_js_limits
| `ml -> int_limits := int_ml_limits
end
module Record =
struct
type 'a util = (string * Q.expr) list -> 'a
let uncons_tuple fields =
let mapi i (f, e) =
let field = QmlAstCons.Tuple.field (succ i) in
if String.compare f field <> 0
then raise Not_found
else e
in
try Some (List.mapi mapi fields)
with
| Not_found -> None
let uncons_qml_tuple fields =
let (@=) s s' = String.compare s s' = 0 in
let s_fst = QmlAstCons.Tuple.qml_fst in
let s_snd = QmlAstCons.Tuple.qml_snd in
let rec aux ?(fail=true) acc fields =
match fields with
| [ ( ss_fst, fst ) ; ( ss_snd, Q.Record (_, fields)) ]
when s_fst @= ss_fst && s_snd @= ss_snd
-> aux ~fail:false (fst::acc) fields
| [ ( ss_fst, fst ) ; ( ss_snd, snd ) ]
when s_fst @= ss_fst && s_snd @= ss_snd
-> List.rev (snd::fst::acc)
| _ ->
if fail then raise Not_found
else
List.rev ((QmlAstCons.UntypedExpr.record fields)::acc)
in
try
Some (aux [] fields)
with Not_found -> None
let uncons fields_exprs_list = List.split fields_exprs_list
let cons fields exprs =
QmlAstCons.UntypedExpr.record (List.combine fields exprs)
end
module Tuple =
struct
let uncons e =
match (traverse_coerce e) with
| Q.Record (_, fields) -> Record.uncons_tuple fields
| _ -> None
let uncons_typeident typeident =
match String.split_char '_' (QmlAst.TypeIdent.to_string typeident) with
| "tuple", r -> Base.int_of_string_opt r
| _ -> None
let uncons_qml_tuple e =
match (traverse_coerce e) with
| Q.Record (_, fields) -> Record.uncons_qml_tuple fields
| _ -> None
end
module Pat = QmlAstWatch.Pat
module Match =
struct
type 'a util = Q.expr -> (Q.pat * Q.expr) list -> 'a
let uncons_ifthenelse = QmlAstWatch.uncons_ifthenelse
let uncons if_ pats_exprs =
let pats, expr = List.split pats_exprs in
(if_, pats, expr)
let cons if_ pats exprs =
let p = List.combine pats exprs in
QmlAstCons.UntypedExpr.match_ if_ p
end
module LetIn =
struct
type 'a util = (Q.ident * Q.expr) list -> Q.expr -> 'a
let rev_uncons (l : (Q.ident * Q.expr) list) e =
let rec aux acc e =
match e with
| Q.LetIn (_, l, e) -> aux (l::acc) e
| _ -> acc,e
in aux [l] e
let uncons (l : (Q.ident * Q.expr) list) e =
let rev_u,e = rev_uncons l e in
List.rev rev_u, e
let cons l e =
List.fold_right
(fun l e -> QmlAstCons.UntypedExpr.letin l e) l e
end
module LetRecIn =
struct
type 'a util = (Q.ident * Q.expr) list -> Q.expr -> 'a
let rev_uncons (l : (Q.ident * Q.expr) list) e =
let rec aux acc e =
match e with
| Q.LetRecIn (_, l, e) -> aux (l::acc) e
| _ -> acc,e
in aux [l] e
let uncons (l : (Q.ident * Q.expr) list) e =
let rev_u,e = rev_uncons l e in
List.rev rev_u, e
let cons l e =
List.fold_right
(fun l e -> QmlAstCons.UntypedExpr.letrecin l e) l e
end
module Code =
struct
let insert ~deps ~insert code =
let last = function
| Q.NewVal (_, bindings)
| Q.NewValRec (_, bindings) ->
List.exists (fun (i, _) -> IdentSet.mem i deps) bindings
| _ -> false
in
let rec aux acc = function
| [] ->
insert @ acc
| code_elt :: tl ->
if last code_elt
then
List.rev_append tl (code_elt ::(insert @ acc))
else
aux (code_elt::acc) tl
in
aux [] (List.rev code)
end
|
b64cf0c37fc76b3dd0bf956e8fb36737976e43cd6abb1c5e4cde686085fc52d4 | CarlosMChica/HaskellBook | givenATypeWriteThefunction.hs | module GivenATypeWriteTheFunction where
i :: a -> a
i x = x
c :: a -> b -> a
c x y = x
c'' :: b -> a -> b
c'' x y = x
c' :: a -> b -> b
c' x y = y
r :: [a] -> [a]
r = tail
-- solution 2: r (x : xs) = [x]
-- solution 3: r xs = reverse xs
co :: (b -> c) -> (a -> b) -> a -> c
co f g = f . g
a :: (a -> c) -> a -> a
a _ x = x
a' :: (a -> b) -> a -> b
a' f = f
| null | https://raw.githubusercontent.com/CarlosMChica/HaskellBook/86f82cf36cd00003b1a1aebf264e4b5d606ddfad/chapter5/givenATypeWriteThefunction.hs | haskell | module GivenATypeWriteTheFunction where
i :: a -> a
i x = x
c :: a -> b -> a
c x y = x
c'' :: b -> a -> b
c'' x y = x
c' :: a -> b -> b
c' x y = y
r :: [a] -> [a]
r = tail
-- solution 2: r (x : xs) = [x]
-- solution 3: r xs = reverse xs
co :: (b -> c) -> (a -> b) -> a -> c
co f g = f . g
a :: (a -> c) -> a -> a
a _ x = x
a' :: (a -> b) -> a -> b
a' f = f
|
|
dce9d026b3466eae9d274ad138b1a423e33a38a907feb318cc537a9a75d52001 | NorfairKing/validity | HashableSpec.hs | # LANGUAGE DeriveGeneric #
# LANGUAGE TypeApplications #
| Standard ' Spec 's for ' Hashable ' instances .
--
-- You will need @TypeApplications@ to use these.
module Test.Validity.HashableSpec where
import Data.GenValidity
import Data.Hashable
import GHC.Generics
import Test.Hspec
import Test.Validity.Hashable
import Test.Validity.Utils
spec :: Spec
spec = do
hashableSpec @Rational
hashableSpec @Double
hashableSpec @Int
hashableSpecOnArbitrary @Int
hashableSpec @HashableValid
failsBecause "Two equal elements aren't hashed to the same value!" $
hashableSpec @HashableInvalid
newtype HashableValid
= HashableValid Int
deriving (Show, Generic)
Number used in the definition of
hT = 7
instance Eq HashableValid where
(==) (HashableValid x) (HashableValid y) = (x `mod` hT) == (y `mod` hT)
instance Hashable HashableValid where
hashWithSalt n (HashableValid a) = (int ^ expo) `mod` hT
where
int = 1 + (a `mod` hT)
expo = 1 + (n `mod` hT)
instance Validity HashableValid
instance GenValid HashableValid
newtype HashableInvalid
= HashableInvalid Int
deriving (Show, Generic)
Numbers used in the definition of
hF = 8
hM :: Int
hM = 3
instance Eq HashableInvalid where
(==) (HashableInvalid x) (HashableInvalid y) = (x `mod` hF) == (y `mod` hF)
instance Hashable HashableInvalid where
hashWithSalt n (HashableInvalid a) = (int ^ expo) `mod` hM
where
int = 1 + (a `mod` hM)
expo = 1 + (n `mod` hM)
instance Validity HashableInvalid
instance GenValid HashableInvalid
| null | https://raw.githubusercontent.com/NorfairKing/validity/35bc8d45b27e6c21429e4b681b16e46ccd541b3b/genvalidity-hspec-hashable/test/Test/Validity/HashableSpec.hs | haskell |
You will need @TypeApplications@ to use these. | # LANGUAGE DeriveGeneric #
# LANGUAGE TypeApplications #
| Standard ' Spec 's for ' Hashable ' instances .
module Test.Validity.HashableSpec where
import Data.GenValidity
import Data.Hashable
import GHC.Generics
import Test.Hspec
import Test.Validity.Hashable
import Test.Validity.Utils
spec :: Spec
spec = do
hashableSpec @Rational
hashableSpec @Double
hashableSpec @Int
hashableSpecOnArbitrary @Int
hashableSpec @HashableValid
failsBecause "Two equal elements aren't hashed to the same value!" $
hashableSpec @HashableInvalid
newtype HashableValid
= HashableValid Int
deriving (Show, Generic)
Number used in the definition of
hT = 7
instance Eq HashableValid where
(==) (HashableValid x) (HashableValid y) = (x `mod` hT) == (y `mod` hT)
instance Hashable HashableValid where
hashWithSalt n (HashableValid a) = (int ^ expo) `mod` hT
where
int = 1 + (a `mod` hT)
expo = 1 + (n `mod` hT)
instance Validity HashableValid
instance GenValid HashableValid
newtype HashableInvalid
= HashableInvalid Int
deriving (Show, Generic)
Numbers used in the definition of
hF = 8
hM :: Int
hM = 3
instance Eq HashableInvalid where
(==) (HashableInvalid x) (HashableInvalid y) = (x `mod` hF) == (y `mod` hF)
instance Hashable HashableInvalid where
hashWithSalt n (HashableInvalid a) = (int ^ expo) `mod` hM
where
int = 1 + (a `mod` hM)
expo = 1 + (n `mod` hM)
instance Validity HashableInvalid
instance GenValid HashableInvalid
|
74632be2fef0fecdbcc50c16537cf2818341123e6dceef3ee2496d5bef16371d | LexiFi/menhir | CompletedNat.mli | (******************************************************************************)
(* *)
(* *)
, Paris
, PPS , Université Paris Diderot
(* *)
. All rights reserved . This file is distributed under the
terms of the GNU General Public License version 2 , as described in the
(* file LICENSE. *)
(* *)
(******************************************************************************)
(* The natural numbers, completed with [Infinity], and ordered towards
zero (i.e. [Infinity] is [bottom], [Finite 0] is [top]). *)
type t =
| Finite of int
| Infinity
include Fix.PROPERTY with type property = t
val epsilon: t
val singleton: 'a -> t
val min: t -> t -> t
val add: t -> t -> t
val min_lazy: t -> (unit -> t) -> t
val add_lazy: t -> (unit -> t) -> t
val print: t -> string
| null | https://raw.githubusercontent.com/LexiFi/menhir/794e64e7997d4d3f91d36dd49aaecc942ea858b7/attic/src/CompletedNat.mli | ocaml | ****************************************************************************
file LICENSE.
****************************************************************************
The natural numbers, completed with [Infinity], and ordered towards
zero (i.e. [Infinity] is [bottom], [Finite 0] is [top]). |
, Paris
, PPS , Université Paris Diderot
. All rights reserved . This file is distributed under the
terms of the GNU General Public License version 2 , as described in the
type t =
| Finite of int
| Infinity
include Fix.PROPERTY with type property = t
val epsilon: t
val singleton: 'a -> t
val min: t -> t -> t
val add: t -> t -> t
val min_lazy: t -> (unit -> t) -> t
val add_lazy: t -> (unit -> t) -> t
val print: t -> string
|
36b69b87458ea62231fc308db402f362c5187be8dd73fcaba798b139578bad8f | archaelus/erms | esvc_voucher_export.erl | %%%-------------------------------------------------------------------
@copyright Catalyst IT Ltd ( )
%%%
@author < >
%% @version {@vsn}, {@date} {@time}
@doc Voucher CVS file export code
%% @end
%%%-------------------------------------------------------------------
-module(esvc_voucher_export).
-include_lib("eunit/include/eunit.hrl").
-include_lib("logging.hrl").
-include_lib("mnesia_model.hrl").
-include_lib("esvc_voucher.hrl").
%% API
-export([cvs_report/2]).
%%====================================================================
%% API
%%====================================================================
cvs_report(Filename , Date ) - > [ ListOfCampaigns ]
%% @doc Reads a Campaign's counter statistics for the specified
Date and destructively puts them in CVS format in the file Filename .
%% @end
cvs_report(Filename,Date) ->
{ok, File} = file:open(Filename, write),
Campaigns = esvc_voucher_db:list_campaign_counters(Date),
lists:foreach(fun (Camp) ->
Rec=Camp#counter.key,
[Trig|_]=Rec#campaign_stat.trigger,
{Year,Month,Day}=Rec#campaign_stat.date,
io:format(File,"~p-~p-~p, ~p, ~p, ~p~n",
[Year,Month,Day,Rec#campaign_stat.name,Trig,Camp#counter.count])
end,
Campaigns),
file:close(File).
%%====================================================================
Internal functions
%%====================================================================
%%====================================================================
EUnit tests
%%====================================================================
| null | https://raw.githubusercontent.com/archaelus/erms/5dbe5e79516a16e461e7a2a345dd80fbf92ef6fa/src/esvc_voucher_export.erl | erlang | -------------------------------------------------------------------
@version {@vsn}, {@date} {@time}
@end
-------------------------------------------------------------------
API
====================================================================
API
====================================================================
@doc Reads a Campaign's counter statistics for the specified
@end
====================================================================
====================================================================
====================================================================
==================================================================== | @copyright Catalyst IT Ltd ( )
@author < >
@doc Voucher CVS file export code
-module(esvc_voucher_export).
-include_lib("eunit/include/eunit.hrl").
-include_lib("logging.hrl").
-include_lib("mnesia_model.hrl").
-include_lib("esvc_voucher.hrl").
-export([cvs_report/2]).
cvs_report(Filename , Date ) - > [ ListOfCampaigns ]
Date and destructively puts them in CVS format in the file Filename .
cvs_report(Filename,Date) ->
{ok, File} = file:open(Filename, write),
Campaigns = esvc_voucher_db:list_campaign_counters(Date),
lists:foreach(fun (Camp) ->
Rec=Camp#counter.key,
[Trig|_]=Rec#campaign_stat.trigger,
{Year,Month,Day}=Rec#campaign_stat.date,
io:format(File,"~p-~p-~p, ~p, ~p, ~p~n",
[Year,Month,Day,Rec#campaign_stat.name,Trig,Camp#counter.count])
end,
Campaigns),
file:close(File).
Internal functions
EUnit tests
|
35aaefab84abeaf28804850f1a3cf63bd4c04d2b9712c718ae02b3be2f28624d | mtgred/netrunner | right_pane.cljs | (ns nr.gameboard.right-pane
(:require [cljs.core.async :refer [put!]]
[nr.appstate :refer [app-state]]
[nr.gameboard.card-preview :refer [zoom-channel]]
[nr.gameboard.log :refer [log-pane]]
[nr.gameboard.replay :refer [notes-pane notes-shared-pane]]
[nr.gameboard.state :refer [game-state]]
[nr.gameboard.settings :refer [settings-pane]]
[nr.translations :refer [tr]]
[reagent.core :as r]))
(defonce loaded-tabs (r/atom {}))
(defonce available-tabs
{:log
{:hiccup [log-pane]
:label (tr [:log.game-log "Game Log"])}
:notes
{:hiccup [notes-pane]
:label (tr [:log.annotating "Annotating"])}
:notes-shared
{:hiccup [notes-shared-pane]
:label (tr [:log.shared "Shared Annotations"])}
:settings
{:hiccup [settings-pane]
:label (tr [:log.settings "Settings"])}})
(defn- resize-card-zoom
"Resizes the card zoom based on the values in the app-state"
[]
(let [width (get-in @app-state [:options :log-width])
top (get-in @app-state [:options :log-top])
max-card-width (- width 5)
max-card-height (- top 10)
card-ratio (/ 418 300)]
(if (> (/ max-card-height max-card-width) card-ratio)
(-> ".card-zoom" js/$
(.css "width" max-card-width)
(.css "height" (int (* max-card-width card-ratio))))
(-> ".card-zoom" js/$
(.css "width" (int (/ max-card-height card-ratio)))
(.css "height" max-card-height)))
(-> ".right-pane" js/$ (.css "width" width))
(-> ".content-pane" js/$
(.css "left" 0)
(.css "top" top)
(.css "height" "auto")
(.css "width" width))))
(defn- pane-resize [event ui]
"Resize the card zoom to fit the available space"
(let [width (.. ui -size -width)
top (.. ui -position -top)]
(swap! app-state assoc-in [:options :log-width] width) ;;XXX: rename
(swap! app-state assoc-in [:options :log-top] top)
(.setItem js/localStorage "log-width" width)
(.setItem js/localStorage "log-top" top)
(resize-card-zoom)))
(defn- pane-start-resize [event ui]
"Display a zoomed card when resizing so the user can visualize how the
resulting zoom will look."
(when-let [card (get-in @game-state [:runner :identity])]
(put! zoom-channel card)))
(defn- pane-stop-resize [event ui]
(put! zoom-channel false))
(defn- tab-selector [selected-tab]
(fn []
[:div.panel.panel-top.blue-shade.selector
(doall (for [[tab {:keys [label]}] (seq @loaded-tabs)]
[:a {:key tab
:on-click #(reset! selected-tab tab)} label]))]))
(defn load-tab [tab]
(let [{:keys [hiccup label]}
(get available-tabs tab
{:hiccup [:div.error "This should not happen"]
:label "???"})]
(swap! loaded-tabs assoc tab {:hiccup hiccup :label label})))
(defn unload-tab [tab]
(swap! loaded-tabs dissoc tab))
(defn clear-tabs []
(reset! loaded-tabs {}))
(defn content-pane [& tabs]
(let [selected-tab (r/atom nil)]
(clear-tabs)
(doseq [tab tabs]
(load-tab tab))
(reset! selected-tab (first tabs))
(r/create-class
{:display-name "content-pane"
:component-did-mount
(fn [this]
(-> ".content-pane" js/$ (.resizable #js {:handles "w, n, nw"
:resize pane-resize
:start pane-start-resize
:stop pane-stop-resize}))
(resize-card-zoom))
:reagent-render
(fn []
[:div.content-pane
[tab-selector selected-tab]
[:div.panel.blue-shade.panel-bottom.content
(get-in @loaded-tabs [@selected-tab :hiccup] "nothing here")]])})))
| null | https://raw.githubusercontent.com/mtgred/netrunner/42c5ecc6a7c69c173822c0e9ef1a8d09b02b5bdc/src/cljs/nr/gameboard/right_pane.cljs | clojure | XXX: rename | (ns nr.gameboard.right-pane
(:require [cljs.core.async :refer [put!]]
[nr.appstate :refer [app-state]]
[nr.gameboard.card-preview :refer [zoom-channel]]
[nr.gameboard.log :refer [log-pane]]
[nr.gameboard.replay :refer [notes-pane notes-shared-pane]]
[nr.gameboard.state :refer [game-state]]
[nr.gameboard.settings :refer [settings-pane]]
[nr.translations :refer [tr]]
[reagent.core :as r]))
(defonce loaded-tabs (r/atom {}))
(defonce available-tabs
{:log
{:hiccup [log-pane]
:label (tr [:log.game-log "Game Log"])}
:notes
{:hiccup [notes-pane]
:label (tr [:log.annotating "Annotating"])}
:notes-shared
{:hiccup [notes-shared-pane]
:label (tr [:log.shared "Shared Annotations"])}
:settings
{:hiccup [settings-pane]
:label (tr [:log.settings "Settings"])}})
(defn- resize-card-zoom
"Resizes the card zoom based on the values in the app-state"
[]
(let [width (get-in @app-state [:options :log-width])
top (get-in @app-state [:options :log-top])
max-card-width (- width 5)
max-card-height (- top 10)
card-ratio (/ 418 300)]
(if (> (/ max-card-height max-card-width) card-ratio)
(-> ".card-zoom" js/$
(.css "width" max-card-width)
(.css "height" (int (* max-card-width card-ratio))))
(-> ".card-zoom" js/$
(.css "width" (int (/ max-card-height card-ratio)))
(.css "height" max-card-height)))
(-> ".right-pane" js/$ (.css "width" width))
(-> ".content-pane" js/$
(.css "left" 0)
(.css "top" top)
(.css "height" "auto")
(.css "width" width))))
(defn- pane-resize [event ui]
"Resize the card zoom to fit the available space"
(let [width (.. ui -size -width)
top (.. ui -position -top)]
(swap! app-state assoc-in [:options :log-top] top)
(.setItem js/localStorage "log-width" width)
(.setItem js/localStorage "log-top" top)
(resize-card-zoom)))
(defn- pane-start-resize [event ui]
"Display a zoomed card when resizing so the user can visualize how the
resulting zoom will look."
(when-let [card (get-in @game-state [:runner :identity])]
(put! zoom-channel card)))
(defn- pane-stop-resize [event ui]
(put! zoom-channel false))
(defn- tab-selector [selected-tab]
(fn []
[:div.panel.panel-top.blue-shade.selector
(doall (for [[tab {:keys [label]}] (seq @loaded-tabs)]
[:a {:key tab
:on-click #(reset! selected-tab tab)} label]))]))
(defn load-tab [tab]
(let [{:keys [hiccup label]}
(get available-tabs tab
{:hiccup [:div.error "This should not happen"]
:label "???"})]
(swap! loaded-tabs assoc tab {:hiccup hiccup :label label})))
(defn unload-tab [tab]
(swap! loaded-tabs dissoc tab))
(defn clear-tabs []
(reset! loaded-tabs {}))
(defn content-pane [& tabs]
(let [selected-tab (r/atom nil)]
(clear-tabs)
(doseq [tab tabs]
(load-tab tab))
(reset! selected-tab (first tabs))
(r/create-class
{:display-name "content-pane"
:component-did-mount
(fn [this]
(-> ".content-pane" js/$ (.resizable #js {:handles "w, n, nw"
:resize pane-resize
:start pane-start-resize
:stop pane-stop-resize}))
(resize-card-zoom))
:reagent-render
(fn []
[:div.content-pane
[tab-selector selected-tab]
[:div.panel.blue-shade.panel-bottom.content
(get-in @loaded-tabs [@selected-tab :hiccup] "nothing here")]])})))
|
7440c5483d61543e97c5ded13c1919cba30b1a246a9d59f35793d6aded107e05 | janestreet/async_smtp | envelope.mli | open! Core
open Email_message
Two envelopes are equal when they produce the same SMTP output . In
particular , ids are ignored for comparison . Same is true for hashing .
particular, ids are ignored for comparison. Same is true for hashing. *)
type t [@@deriving sexp_of]
type envelope = t [@@deriving sexp_of]
include Comparable.S_plain with type t := t
include Hashable.S_plain with type t := t
include Envelope_container.With_headers with type t := t
include Envelope_container.With_info with type t := t
val create : (email:Email.t -> unit -> t) Envelope_info.create
val create' : info:Envelope_info.t -> email:Email.t -> t
val info : t -> Envelope_info.t
val email : t -> Email.t
val set : (?email:Email.t -> t -> unit -> t) Envelope_info.set
val set' : t -> ?info:Envelope_info.t -> ?email:Email.t -> unit -> t
(* Extracts sender and recipients from the headers. *)
val of_email : Email.t -> t Or_error.t
val modify_email : t -> f:(Email.t -> Email.t) -> t
val of_bodiless : Envelope_bodiless.t -> Email.Raw_content.t -> t
val split_bodiless : t -> Envelope_bodiless.t * Email.Raw_content.t
val with_bodiless : t -> (Envelope_bodiless.t -> Envelope_bodiless.t) -> t
module Stable : sig
module V1 : sig
type t [@@deriving bin_io, sexp]
end
module V2 : sig
include Stable_without_comparator with type t = t
val of_v1 : V1.t -> t
end
end
| null | https://raw.githubusercontent.com/janestreet/async_smtp/72c538d76f5c7453bbc89af44d93931cd499a912/types/envelope.mli | ocaml | Extracts sender and recipients from the headers. | open! Core
open Email_message
Two envelopes are equal when they produce the same SMTP output . In
particular , ids are ignored for comparison . Same is true for hashing .
particular, ids are ignored for comparison. Same is true for hashing. *)
type t [@@deriving sexp_of]
type envelope = t [@@deriving sexp_of]
include Comparable.S_plain with type t := t
include Hashable.S_plain with type t := t
include Envelope_container.With_headers with type t := t
include Envelope_container.With_info with type t := t
val create : (email:Email.t -> unit -> t) Envelope_info.create
val create' : info:Envelope_info.t -> email:Email.t -> t
val info : t -> Envelope_info.t
val email : t -> Email.t
val set : (?email:Email.t -> t -> unit -> t) Envelope_info.set
val set' : t -> ?info:Envelope_info.t -> ?email:Email.t -> unit -> t
val of_email : Email.t -> t Or_error.t
val modify_email : t -> f:(Email.t -> Email.t) -> t
val of_bodiless : Envelope_bodiless.t -> Email.Raw_content.t -> t
val split_bodiless : t -> Envelope_bodiless.t * Email.Raw_content.t
val with_bodiless : t -> (Envelope_bodiless.t -> Envelope_bodiless.t) -> t
module Stable : sig
module V1 : sig
type t [@@deriving bin_io, sexp]
end
module V2 : sig
include Stable_without_comparator with type t = t
val of_v1 : V1.t -> t
end
end
|
8f7974c1721957fb98dd319974b81860e0927ae10461f1680a99515d7264a88f | returntocorp/semgrep | test_parsing_cpp.ml | open Common
module PI = Parse_info
module PS = Parsing_stat
module Flag = Flag_parsing
module Flag_cpp = Flag_parsing_cpp
(*****************************************************************************)
(* Subsystem testing *)
(*****************************************************************************)
let test_tokens_cpp file =
Flag.verbose_lexing := true;
Flag.verbose_parsing := true;
let toks = Parse_cpp.tokens (Parsing_helpers.file file) in
toks |> List.iter (fun x -> pr2_gen x);
()
let test_parse_cpp ?lang xs =
let fullxs, _skipped_paths =
Lib_parsing_cpp.find_source_files_of_dir_or_files xs
|> Skip_code.filter_files_if_skip_list ~root:xs
in
Parse_cpp.init_defs !Flag_cpp.macros_h;
let stat_list = ref [] in
let newscore = Common2.empty_score () in
fullxs
|> Console.progress (fun k ->
List.iter (fun file ->
k ();
let stat =
try
Common.save_excursion Flag.error_recovery true (fun () ->
Common.save_excursion Flag.exn_when_lexical_error false
(fun () ->
let res =
match lang with
| None -> Parse_cpp.parse file
| Some lang -> Parse_cpp.parse_with_lang ~lang file
in
res.Parsing_result.stat))
with
| exn ->
(* TODO: be more strict, List.hd failure, Stack overflow *)
pr2 (spf "PB on %s, exn = %s" file (Common.exn_to_s exn));
Parsing_stat.bad_stat file
in
Common.push stat stat_list;
let s = spf "bad = %d" stat.PS.error_line_count in
if stat.PS.error_line_count =|= 0 then
Hashtbl.add newscore file Common2.Ok
else Hashtbl.add newscore file (Common2.Pb s)));
Parsing_stat.print_recurring_problematic_tokens !stat_list;
Parsing_stat.print_parsing_stat_list !stat_list;
Parsing_stat.print_regression_information ~ext:"cpp" xs newscore;
TODO : restore layer generation for errors
( match xs with
| [ dirname ] when
let layer_file = " /tmp / layer_parse_errors_red_green.json " in
pr2 ( spf " generating parse error layer in % s " layer_file ) ;
let layer =
Layer_parse_errors.gen_red_green_layer ~root : ! stat_list
in
Layer_code.save_layer layer layer_file ;
let layer_file = " /tmp / layer_parse_errors_heatmap.json " in
pr2 ( spf " generating parse error layer in % s " layer_file ) ;
let layer =
Layer_parse_errors.gen_heatmap_layer ~root : ! stat_list
in
Layer_code.save_layer layer layer_file
| _ - > ( ) ) ;
(match xs with
| [ dirname ] when Common2.is_directory dirname ->
let layer_file = "/tmp/layer_parse_errors_red_green.json" in
pr2 (spf "generating parse error layer in %s" layer_file);
let layer =
Layer_parse_errors.gen_red_green_layer ~root:dirname !stat_list
in
Layer_code.save_layer layer layer_file;
let layer_file = "/tmp/layer_parse_errors_heatmap.json" in
pr2 (spf "generating parse error layer in %s" layer_file);
let layer =
Layer_parse_errors.gen_heatmap_layer ~root:dirname !stat_list
in
Layer_code.save_layer layer layer_file
| _ -> ());
*)
()
let test_dump_cpp file =
Parse_cpp.init_defs !Flag_cpp.macros_h;
let ast = Parse_cpp.parse_program file in
let s = Ast_cpp.show_program ast in
pr s
let test_dump_cpp_full file =
Parse_cpp.init_defs !Flag_cpp.macros_h;
let ast = Parse_cpp.parse_program file in
let toks = Parse_cpp.tokens (Parsing_helpers.file file) in
let _precision =
{ Meta_parse_info.full_info = true; type_info = false; token_info = true }
in
TODO ~precision
pr s;
toks
|> List.iter (fun tok ->
match tok with
| Parser_cpp.TComment ii ->
let v = Meta_parse_info.vof_info_adjustable_precision ii in
let s = OCaml.string_of_v v in
pr s
| _ -> ());
()
let test_dump_cpp_view file =
Parse_cpp.init_defs !Flag_cpp.macros_h;
let toks_orig = Parse_cpp.tokens (Parsing_helpers.file file) in
let toks =
toks_orig
|> Common.exclude (fun x ->
Token_helpers_cpp.is_comment x || Token_helpers_cpp.is_eof x)
in
let extended = toks |> List.map Token_views_cpp.mk_token_extended in
Parsing_hacks_cpp.find_template_inf_sup extended;
let multi = Token_views_cpp.mk_multi extended in
Token_views_context.set_context_tag_multi multi;
let v = Token_views_cpp.vof_multi_grouped_list multi in
let s = OCaml.string_of_v v in
pr s
let test_parse_cpp_fuzzy xs =
let fullxs, _skipped_paths =
Lib_parsing_cpp.find_source_files_of_dir_or_files xs
|> Skip_code.filter_files_if_skip_list ~root:xs
in
fullxs
|> Console.progress (fun k ->
List.iter (fun file ->
k ();
Common.save_excursion Flag_parsing_cpp.strict_lexer true (fun () ->
try
let _fuzzy = Parse_cpp.parse_fuzzy file in
()
with
| exn ->
pr2
(spf "PB with: %s, exn = %s" file (Common.exn_to_s exn)))))
let test_dump_cpp_fuzzy file =
let fuzzy , _ toks = Parse_cpp.parse_fuzzy file in
let v = Meta_ast_fuzzy.vof_trees fuzzy in
let s = OCaml.string_of_v v in
pr2 s
let test_dump_cpp_fuzzy file =
let fuzzy, _toks = Parse_cpp.parse_fuzzy file in
let v = Meta_ast_fuzzy.vof_trees fuzzy in
let s = OCaml.string_of_v v in
pr2 s
*)
let test_parse_cpp_dyp xs =
let fullxs = Lib_parsing_cpp.find_source_files_of_dir_or_files xs
| > Skip_code.filter_files_if_skip_list ~root : xs
in
fullxs | > Console.progress ( fun k - > List.iter ( fun file - >
k ( ) ;
true ( fun ( ) - >
try (
let _ cst = Parse_cpp.parse_with_dypgen file in
( )
)
with exn - >
pr2 ( spf " PB with : % s , exn = % s " file ( Common.exn_to_s exn ) ) ;
( )
)
) )
let test_dump_cpp_dyp file =
let ast = Parse_cpp.parse_with_dypgen file in
let s = Cst_cpp.show_program ast in
pr s
let test_parse_cpp_dyp xs =
let fullxs = Lib_parsing_cpp.find_source_files_of_dir_or_files xs
|> Skip_code.filter_files_if_skip_list ~root:xs
in
fullxs |> Console.progress (fun k -> List.iter (fun file ->
k ();
Common.save_excursion Flag_parsing_cpp.strict_lexer true (fun () ->
try (
let _cst = Parse_cpp.parse_with_dypgen file in
()
)
with exn ->
pr2 (spf "PB with: %s, exn = %s" file (Common.exn_to_s exn));
()
)
))
let test_dump_cpp_dyp file =
let ast = Parse_cpp.parse_with_dypgen file in
let s = Cst_cpp.show_program ast in
pr s
*)
(*****************************************************************************)
(* Main entry for Arg *)
(*****************************************************************************)
let actions () =
[
("-tokens_cpp", " <file>", Arg_helpers.mk_action_1_arg test_tokens_cpp);
( "-parse_cpp",
" <file or dir>",
Arg_helpers.mk_action_n_arg test_parse_cpp );
( "-parse_cpp_c",
" <file or dir>",
Arg_helpers.mk_action_n_arg (test_parse_cpp ~lang:Flag_cpp.C) );
( "-parse_cpp_cplusplus",
" <file or dir>",
Arg_helpers.mk_action_n_arg (test_parse_cpp ~lang:Flag_cpp.Cplusplus) );
(*
"-parse_cpp_dyp", " <file or dir>",
Arg_helpers.mk_action_n_arg (test_parse_cpp_dyp);
*)
("-dump_cpp", " <file>", Arg_helpers.mk_action_1_arg test_dump_cpp);
( "-dump_cpp_full",
" <file>",
Arg_helpers.mk_action_1_arg test_dump_cpp_full );
( "-dump_cpp_view",
" <file>",
Arg_helpers.mk_action_1_arg test_dump_cpp_view );
" -dump_cpp_dyp " , " < file > " ,
Common.mk_action_1_arg test_dump_cpp_dyp ;
"-dump_cpp_dyp", " <file>",
Common.mk_action_1_arg test_dump_cpp_dyp;
*)
( "-parse_cpp_fuzzy",
" <files or dirs>",
Arg_helpers.mk_action_n_arg test_parse_cpp_fuzzy )
( " -dump_cpp_fuzzy " , " < file > " , Arg_helpers.mk_action_1_arg test_dump_cpp_fuzzy ) ;
("-dump_cpp_fuzzy", " <file>", Arg_helpers.mk_action_1_arg test_dump_cpp_fuzzy);
*);
]
| null | https://raw.githubusercontent.com/returntocorp/semgrep/ecfb452fde8fab7afffa5d2cdfc5ef7c1da91ad9/languages/cpp/menhir/test_parsing_cpp.ml | ocaml | ***************************************************************************
Subsystem testing
***************************************************************************
TODO: be more strict, List.hd failure, Stack overflow
***************************************************************************
Main entry for Arg
***************************************************************************
"-parse_cpp_dyp", " <file or dir>",
Arg_helpers.mk_action_n_arg (test_parse_cpp_dyp);
| open Common
module PI = Parse_info
module PS = Parsing_stat
module Flag = Flag_parsing
module Flag_cpp = Flag_parsing_cpp
let test_tokens_cpp file =
Flag.verbose_lexing := true;
Flag.verbose_parsing := true;
let toks = Parse_cpp.tokens (Parsing_helpers.file file) in
toks |> List.iter (fun x -> pr2_gen x);
()
let test_parse_cpp ?lang xs =
let fullxs, _skipped_paths =
Lib_parsing_cpp.find_source_files_of_dir_or_files xs
|> Skip_code.filter_files_if_skip_list ~root:xs
in
Parse_cpp.init_defs !Flag_cpp.macros_h;
let stat_list = ref [] in
let newscore = Common2.empty_score () in
fullxs
|> Console.progress (fun k ->
List.iter (fun file ->
k ();
let stat =
try
Common.save_excursion Flag.error_recovery true (fun () ->
Common.save_excursion Flag.exn_when_lexical_error false
(fun () ->
let res =
match lang with
| None -> Parse_cpp.parse file
| Some lang -> Parse_cpp.parse_with_lang ~lang file
in
res.Parsing_result.stat))
with
| exn ->
pr2 (spf "PB on %s, exn = %s" file (Common.exn_to_s exn));
Parsing_stat.bad_stat file
in
Common.push stat stat_list;
let s = spf "bad = %d" stat.PS.error_line_count in
if stat.PS.error_line_count =|= 0 then
Hashtbl.add newscore file Common2.Ok
else Hashtbl.add newscore file (Common2.Pb s)));
Parsing_stat.print_recurring_problematic_tokens !stat_list;
Parsing_stat.print_parsing_stat_list !stat_list;
Parsing_stat.print_regression_information ~ext:"cpp" xs newscore;
TODO : restore layer generation for errors
( match xs with
| [ dirname ] when
let layer_file = " /tmp / layer_parse_errors_red_green.json " in
pr2 ( spf " generating parse error layer in % s " layer_file ) ;
let layer =
Layer_parse_errors.gen_red_green_layer ~root : ! stat_list
in
Layer_code.save_layer layer layer_file ;
let layer_file = " /tmp / layer_parse_errors_heatmap.json " in
pr2 ( spf " generating parse error layer in % s " layer_file ) ;
let layer =
Layer_parse_errors.gen_heatmap_layer ~root : ! stat_list
in
Layer_code.save_layer layer layer_file
| _ - > ( ) ) ;
(match xs with
| [ dirname ] when Common2.is_directory dirname ->
let layer_file = "/tmp/layer_parse_errors_red_green.json" in
pr2 (spf "generating parse error layer in %s" layer_file);
let layer =
Layer_parse_errors.gen_red_green_layer ~root:dirname !stat_list
in
Layer_code.save_layer layer layer_file;
let layer_file = "/tmp/layer_parse_errors_heatmap.json" in
pr2 (spf "generating parse error layer in %s" layer_file);
let layer =
Layer_parse_errors.gen_heatmap_layer ~root:dirname !stat_list
in
Layer_code.save_layer layer layer_file
| _ -> ());
*)
()
let test_dump_cpp file =
Parse_cpp.init_defs !Flag_cpp.macros_h;
let ast = Parse_cpp.parse_program file in
let s = Ast_cpp.show_program ast in
pr s
let test_dump_cpp_full file =
Parse_cpp.init_defs !Flag_cpp.macros_h;
let ast = Parse_cpp.parse_program file in
let toks = Parse_cpp.tokens (Parsing_helpers.file file) in
let _precision =
{ Meta_parse_info.full_info = true; type_info = false; token_info = true }
in
TODO ~precision
pr s;
toks
|> List.iter (fun tok ->
match tok with
| Parser_cpp.TComment ii ->
let v = Meta_parse_info.vof_info_adjustable_precision ii in
let s = OCaml.string_of_v v in
pr s
| _ -> ());
()
let test_dump_cpp_view file =
Parse_cpp.init_defs !Flag_cpp.macros_h;
let toks_orig = Parse_cpp.tokens (Parsing_helpers.file file) in
let toks =
toks_orig
|> Common.exclude (fun x ->
Token_helpers_cpp.is_comment x || Token_helpers_cpp.is_eof x)
in
let extended = toks |> List.map Token_views_cpp.mk_token_extended in
Parsing_hacks_cpp.find_template_inf_sup extended;
let multi = Token_views_cpp.mk_multi extended in
Token_views_context.set_context_tag_multi multi;
let v = Token_views_cpp.vof_multi_grouped_list multi in
let s = OCaml.string_of_v v in
pr s
let test_parse_cpp_fuzzy xs =
let fullxs, _skipped_paths =
Lib_parsing_cpp.find_source_files_of_dir_or_files xs
|> Skip_code.filter_files_if_skip_list ~root:xs
in
fullxs
|> Console.progress (fun k ->
List.iter (fun file ->
k ();
Common.save_excursion Flag_parsing_cpp.strict_lexer true (fun () ->
try
let _fuzzy = Parse_cpp.parse_fuzzy file in
()
with
| exn ->
pr2
(spf "PB with: %s, exn = %s" file (Common.exn_to_s exn)))))
let test_dump_cpp_fuzzy file =
let fuzzy , _ toks = Parse_cpp.parse_fuzzy file in
let v = Meta_ast_fuzzy.vof_trees fuzzy in
let s = OCaml.string_of_v v in
pr2 s
let test_dump_cpp_fuzzy file =
let fuzzy, _toks = Parse_cpp.parse_fuzzy file in
let v = Meta_ast_fuzzy.vof_trees fuzzy in
let s = OCaml.string_of_v v in
pr2 s
*)
let test_parse_cpp_dyp xs =
let fullxs = Lib_parsing_cpp.find_source_files_of_dir_or_files xs
| > Skip_code.filter_files_if_skip_list ~root : xs
in
fullxs | > Console.progress ( fun k - > List.iter ( fun file - >
k ( ) ;
true ( fun ( ) - >
try (
let _ cst = Parse_cpp.parse_with_dypgen file in
( )
)
with exn - >
pr2 ( spf " PB with : % s , exn = % s " file ( Common.exn_to_s exn ) ) ;
( )
)
) )
let test_dump_cpp_dyp file =
let ast = Parse_cpp.parse_with_dypgen file in
let s = Cst_cpp.show_program ast in
pr s
let test_parse_cpp_dyp xs =
let fullxs = Lib_parsing_cpp.find_source_files_of_dir_or_files xs
|> Skip_code.filter_files_if_skip_list ~root:xs
in
fullxs |> Console.progress (fun k -> List.iter (fun file ->
k ();
Common.save_excursion Flag_parsing_cpp.strict_lexer true (fun () ->
try (
let _cst = Parse_cpp.parse_with_dypgen file in
()
)
with exn ->
pr2 (spf "PB with: %s, exn = %s" file (Common.exn_to_s exn));
()
)
))
let test_dump_cpp_dyp file =
let ast = Parse_cpp.parse_with_dypgen file in
let s = Cst_cpp.show_program ast in
pr s
*)
let actions () =
[
("-tokens_cpp", " <file>", Arg_helpers.mk_action_1_arg test_tokens_cpp);
( "-parse_cpp",
" <file or dir>",
Arg_helpers.mk_action_n_arg test_parse_cpp );
( "-parse_cpp_c",
" <file or dir>",
Arg_helpers.mk_action_n_arg (test_parse_cpp ~lang:Flag_cpp.C) );
( "-parse_cpp_cplusplus",
" <file or dir>",
Arg_helpers.mk_action_n_arg (test_parse_cpp ~lang:Flag_cpp.Cplusplus) );
("-dump_cpp", " <file>", Arg_helpers.mk_action_1_arg test_dump_cpp);
( "-dump_cpp_full",
" <file>",
Arg_helpers.mk_action_1_arg test_dump_cpp_full );
( "-dump_cpp_view",
" <file>",
Arg_helpers.mk_action_1_arg test_dump_cpp_view );
" -dump_cpp_dyp " , " < file > " ,
Common.mk_action_1_arg test_dump_cpp_dyp ;
"-dump_cpp_dyp", " <file>",
Common.mk_action_1_arg test_dump_cpp_dyp;
*)
( "-parse_cpp_fuzzy",
" <files or dirs>",
Arg_helpers.mk_action_n_arg test_parse_cpp_fuzzy )
( " -dump_cpp_fuzzy " , " < file > " , Arg_helpers.mk_action_1_arg test_dump_cpp_fuzzy ) ;
("-dump_cpp_fuzzy", " <file>", Arg_helpers.mk_action_1_arg test_dump_cpp_fuzzy);
*);
]
|
1800184ae6255076beb530026b1e9a277db52c5daaf30694d5d52e558c51735f | patricoferris/ocaml-multicore-monorepo | zzz.ml | module Key = struct
type t = Optint.Int63.t
let compare = Optint.Int63.compare
end
module Job = struct
type t = {
time : float;
thread : unit Suspended.t;
}
let compare a b = Float.compare a.time b.time
end
module Q = Psq.Make(Key)(Job)
type t = {
mutable sleep_queue: Q.t;
mutable next_id : Optint.Int63.t;
}
let create () = { sleep_queue = Q.empty; next_id = Optint.Int63.zero }
let add t time thread =
let id = t.next_id in
t.next_id <- Optint.Int63.succ t.next_id;
let sleeper = { Job.time; thread } in
t.sleep_queue <- Q.add id sleeper t.sleep_queue;
id
let remove t id =
t.sleep_queue <- Q.remove id t.sleep_queue
let pop t ~now =
match Q.min t.sleep_queue with
| Some (_, { Job.time; thread }) when time <= now ->
if Eio.Private.Fiber_context.clear_cancel_fn thread.fiber then (
t.sleep_queue <- Option.get (Q.rest t.sleep_queue);
`Due thread
) else (
(* This shouldn't happen, since any cancellation will happen in the same domain as the [pop]. *)
assert false
)
| Some (_, { Job.time; _ }) -> `Wait_until time
| None -> `Nothing
| null | https://raw.githubusercontent.com/patricoferris/ocaml-multicore-monorepo/624b3293ee41e83736fe7ac3a79f810c2b70f68b/duniverse/eio/lib_eio/utils/zzz.ml | ocaml | This shouldn't happen, since any cancellation will happen in the same domain as the [pop]. | module Key = struct
type t = Optint.Int63.t
let compare = Optint.Int63.compare
end
module Job = struct
type t = {
time : float;
thread : unit Suspended.t;
}
let compare a b = Float.compare a.time b.time
end
module Q = Psq.Make(Key)(Job)
type t = {
mutable sleep_queue: Q.t;
mutable next_id : Optint.Int63.t;
}
let create () = { sleep_queue = Q.empty; next_id = Optint.Int63.zero }
let add t time thread =
let id = t.next_id in
t.next_id <- Optint.Int63.succ t.next_id;
let sleeper = { Job.time; thread } in
t.sleep_queue <- Q.add id sleeper t.sleep_queue;
id
let remove t id =
t.sleep_queue <- Q.remove id t.sleep_queue
let pop t ~now =
match Q.min t.sleep_queue with
| Some (_, { Job.time; thread }) when time <= now ->
if Eio.Private.Fiber_context.clear_cancel_fn thread.fiber then (
t.sleep_queue <- Option.get (Q.rest t.sleep_queue);
`Due thread
) else (
assert false
)
| Some (_, { Job.time; _ }) -> `Wait_until time
| None -> `Nothing
|
2433081036addea6d24a48e7c280844656344420a8e25cdff9e6c497d8ef06e7 | informatimago/lisp | source-test.lisp | -*- mode : lisp;coding : utf-8 -*-
;;;;**************************************************************************
FILE : source-test.lisp
;;;;LANGUAGE: Common-Lisp
;;;;SYSTEM: Common-Lisp
USER - INTERFACE :
;;;;DESCRIPTION
;;;;
Test source.lisp
;;;;
< PJB > < >
MODIFICATIONS
2015 - 02 - 23 < PJB > Created .
;;;;LEGAL
AGPL3
;;;;
Copyright 2015 - 2016
;;;;
;;;; This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation , either version 3 of the License , or
;;;; (at your option) any later version.
;;;;
;;;; This program is distributed in the hope that it will be useful,
;;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details .
;;;;
You should have received a copy of the GNU Affero General Public License
;;;; along with this program. If not, see </>.
;;;;**************************************************************************
(eval-when (:compile-toplevel :load-toplevel :execute)
(setf *readtable* (copy-readtable nil)))
(defpackage "COM.INFORMATIMAGO.TOOLS.SOURCE.TEST"
(:use "COMMON-LISP"
"COM.INFORMATIMAGO.COMMON-LISP.CESARUM.SIMPLE-TEST"
"COM.INFORMATIMAGO.TOOLS.SOURCE")
(:export "TEST/ALL"))
(in-package "COM.INFORMATIMAGO.TOOLS.SOURCE.TEST")
(define-test test/all ()
:success)
;;;; THE END ;;;;
| null | https://raw.githubusercontent.com/informatimago/lisp/571af24c06ba466e01b4c9483f8bb7690bc46d03/tools/source-test.lisp | lisp | coding : utf-8 -*-
**************************************************************************
LANGUAGE: Common-Lisp
SYSTEM: Common-Lisp
DESCRIPTION
LEGAL
This program is free software: you can redistribute it and/or modify
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
along with this program. If not, see </>.
**************************************************************************
THE END ;;;; | FILE : source-test.lisp
USER - INTERFACE :
Test source.lisp
< PJB > < >
MODIFICATIONS
2015 - 02 - 23 < PJB > Created .
AGPL3
Copyright 2015 - 2016
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation , either version 3 of the License , or
GNU Affero General Public License for more details .
You should have received a copy of the GNU Affero General Public License
(eval-when (:compile-toplevel :load-toplevel :execute)
(setf *readtable* (copy-readtable nil)))
(defpackage "COM.INFORMATIMAGO.TOOLS.SOURCE.TEST"
(:use "COMMON-LISP"
"COM.INFORMATIMAGO.COMMON-LISP.CESARUM.SIMPLE-TEST"
"COM.INFORMATIMAGO.TOOLS.SOURCE")
(:export "TEST/ALL"))
(in-package "COM.INFORMATIMAGO.TOOLS.SOURCE.TEST")
(define-test test/all ()
:success)
|
fe5ad7b4ad59aece87ad916bde5e694d8a41d026e081975aaae58b0b4a599e67 | rwilcox/my-learnings-docs | learning_helm.md.rkt | #lang scribble/text
@(require "scribble-utils.rkt")
---
path: /learnings/helm
title: Learning Helm
---
# Table Of Contents
<!-- toc -->
# Intro / Why
Automate Version handling, rollback, installation
Templatize k8s resources, search and reuse templates
## components
* helm client (CLI)
* charts — application configuration definitions
* repositories — where charts are stored
* release — chart instances loaded into k8s
### helm client / CLI interesting facts
Can be extended with plugins
## misc
@quote-highlight[#:title "Learning Helm"
#:author "N/A"
#:page-number 0]{Helm contains a template function that enables you to look up resources in the Kubernetes cluster. The lookup template function is able to return either an individual object or a list of objects}
@quote-highlight[#:title "Learning Helm"
#:author "N/A"
#:page-number 0]{Helm charts can be cryptographically signed and verified}
## v2 Vs v3
### Differences in required supporting infrastructure
V2: Helm -> Tiller pod -> k8s cluster
V3: helm -> k8s cluster via role based access controls
### User Facing Differences
#### Chart name <<Helm_Name_Differences_In_V2_V3>>
In Helm 2: unless you provided a `--name` parameter, Helm created adjective-noun names for releases.
In Helm 3 this now uses the name of the chart, or what you override with `--name-template` _OR_ `--generate-name`
@quote-highlight[#:title "Learning Helm"
#:author "N/A"
#:page-number 0]{In Helm 2, "friendly names" were generated using adjectives and animal names. That was removed in Helm 3 due to complaints that release names were unprofessional.}
@quote-highlight[#:title "Learning Helm"
#:author "N/A"
#:page-number 0]{In Helm 3, naming has been changed. Now instance names are scoped to Kubernetes namespaces. We could install two instances named mysite as long as they each lived in a different namespace.}
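For illustration, a hedged sketch of the two install styles on the Helm 3 CLI (the release name, repo, and chart here are placeholders, not from the book):

```shell
# name the release yourself (Helm 3 default expectation)
helm install mysite stable/drupal

# or let Helm derive a name from the chart name
helm install stable/drupal --generate-name
```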
# Helm chart storage (different types of repositories)
## notes
@quote-highlight[#:title "Learning Helm"
#:author "N/A"
#:page-number 0]{Chart repositories do, however, present a few key challenges:
* They have no concept of namespaces; all charts for a repo are listed in a single index
* They have no fine-grained access control; you either have access to all charts in the repo or none of them
* Chart packages with different names but the exact same raw contents are stored twice
* repository index can become extremely large, causing Helm to consume a lot of memory}
## using repositories from the CLI
Helm provides search and repo add commands for selecting different repos, searching them and getting a specific helm chart.
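A hedged sketch of those commands (the repo alias, URL, and chart names are placeholders):

```shell
helm repo add myrepo https://example.com/charts   # register a chart repository under an alias
helm repo update                                  # refresh the cached index
helm search repo nginx                            # search the repos you have added
helm pull myrepo/some-chart --untar               # download (but do not install) a chart for inspection
```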
## public access
Helm publishes a public one [Helm official stable charts](/).
You could use `helm fetch` to download the public charts, inspect them, and install from your file system.
## A static site
[perhaps hosted via GitHub pages](-to-host-helm-chart-repository-on-github-b76c854e1462)
Just configure your helm CLI to have a repository that points to (the site)
You just need an index.yaml file! `helm repo index .` generates this!
You can also use the raw.github URL to the repository, and add that as a Helm repo with a GitHub username and password
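A rough sketch of publishing and consuming such a static repo (URLs and credentials are placeholders):

```shell
# from the directory holding your packaged .tgz charts
helm repo index .        # generates index.yaml for everything in this folder

# consumers then add the site like any other chart repository
helm repo add my-gh-pages https://<your-user>.github.io/<your-repo>/ \
  --username <github-user> --password <github-token>
```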
@quote-highlight[#:title "Learning Helm"
#:author "N/A"
#:page-number 0]{Chart Releaser, or cr, is a command-line tool that leverages GitHub releases for hosting chart packages. It has the ability to detect charts in a Git repo, package them, and upload each of them as artifacts to GitHub releases named after the unique chart version.}
## nexus
OCI compatible ?
### ECR
OCI compatible ?
### [chart museum]()
Installation options:
* Download a chart museum binary
* docker image
* Helm chart
Can point storage to S3, GCP, Azure Blob storage, local file system, etc.
OCI compatible ?
### Screw it, a folder on your local machine
(great for writing charts, then seeing how it applies with an actual service)
set your Chart.yaml's dependencies `repository` field to a `file://` path, e.g. `file://../development-charts-or-whatever/my-specific-chart-folder-yes-you-need-this/`
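For example, a sketch of the Chart.yaml dependency entry (names are placeholders; this assumes the chart folder sits next to the chart you are developing):

```yaml
# Chart.yaml of the chart under development
apiVersion: v2
name: my-app
version: 0.1.0
dependencies:
  - name: my-specific-chart
    version: 0.1.0
    repository: "file://../development-charts-or-whatever/my-specific-chart-folder-yes-you-need-this/"
```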
# Deployments
Can see these via `helm ls`.
When a Helm chart is installed it becomes a release (this is a Helm standard object type)
## Attributes
* **Revisions**: number of times you’ve deployed the service to this cluster (this is NOT the artifact version number AND is reset say with a new cluster)
* **name**: for more info see Helm_Name_Differences_In_V2_V3
## environmental variables for a deployment
Vs changing these one by one in k8s pods
## Reverting a deploy
`helm rollback $artifactName $revision`
## Removing a microservice completely from the cluster
`helm delete --purge $name`
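A small sketch of the revert and removal flow (release name and revision are made up); `helm history` is handy for finding which revision to roll back to:

```shell
helm history my-release          # list the revisions Helm knows about
helm rollback my-release 3       # go back to revision 3
helm delete --purge my-release   # Helm 2 style removal; in Helm 3 this is `helm uninstall my-release`
```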
# Hooks
possibilities:
* pre-install
* post-install
* pre-delete
* post-delete
* pre-upgrade
* post-upgrade
* pre-rollback
* post-rollback
just a yaml file with

```yaml
metadata:
  annotations:
    "helm.sh/hook": "pre-install"
```
Hooks can be part of deployments; the same hook lifecycle also applies to Kubernetes Jobs (See Kubernetes_Jobs).
You can _also_ associate multiple jobs with one hook! Just set the `hook-weight` annotation to different values to control which runs first (see the sketch below).
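Putting those pieces together, a hedged sketch of a pre-install hook Job (the image and command are placeholders):

```yaml
apiVersion: batch/v1
kind: Job
metadata:
  name: "{{ .Release.Name }}-pre-install-job"
  annotations:
    "helm.sh/hook": "pre-install,pre-upgrade"
    "helm.sh/hook-weight": "0"                   # lower weights run first
    "helm.sh/hook-delete-policy": hook-succeeded
spec:
  template:
    spec:
      restartPolicy: Never
      containers:
        - name: setup
          image: busybox
          command: ["sh", "-c", "echo running pre-install work"]
```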
## See also
* K8s_Init_Containers
*
# Templates
can run values through various operations, like quote and upper.
{{ quote .Values.some.value.here }}
[List of built in functions](/)
Can for example even look up attributes from the running k8s cluster!
Uses template functions from [Sprig template library](/)
Pipe character to send values into another function
Can use `with` to drill into a nested values object without navigating the object graph every time in a certain scope (Pascal has a similar syntax feature)
Variables are assigned by Pascal / Smalltalk assignment syntax
{{- $var := "foo" -}}
{{- trims whitespace (including the newline) before the tag
-}} trims whitespace (including the newline) after the tag
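A small sketch tying those pieces together (value paths and names are made up):

```
{{- $env := .Values.environment | default "dev" | upper -}}
environment: {{ $env | quote }}
{{- with .Values.service }}
servicePort: {{ .port }}  {{/* inside `with`, dot refers to .Values.service */}}
{{- end }}
```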
## falsiness in template language
Falsy values:
* Boolean false
* numeric zero
* empty string
* nil
* empty collection
## Container Types in template language
### Dealing with arrays with dictionaries inside them
If you have a values.yaml object looking like this:
```yaml
myArrayOfDictionaries:
- nameOrWhateverTheValueIs: foobar
- nameOrWhateverTheValueIs: second item in the array
```
the following idiom is your friend
```
{{- with (first .Values.myArrayOfDictionaries) }}
{{ .nameOrWhateverTheValueIs }}
{{- end }}
```
You could also do `{{- with ( index .Values.myArrayOfDictionaries 3 ) }}` to get the fourth item in the array
## Object Traversal In Template language
In deeply or optionally nested objects you may get a lot of `nil pointer evaluating interface {}.someField` messages. See [Helm issues about traversing deeply nested objects]()
The [empty](/#empty) function, for example, will error if something on the object path is nil. It may also error in _very_ odd places (I would have thought .Values.globals exists by default, but nope(?)).
Two ways to handle this:
`{{ empty (.Values.myDictionary | default dict).myField }}` <-- this will correctly not error and return empty for `myField` if the traversal fails.
`dig "myDictionary" "myField" "some-default" .Values` ( [documentation]() ). **BUT** `dig` only works on dictionary objects, it will not work on arbitrary objects that use the dot accessor for field access (aka: arbitrary objects). Note `dig` expects a default value argument just before the dictionary.
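A sketch of both approaches (dictionary and field names are made up; the default values shown are placeholders):

```
{{/* guard each step with `default dict` so a missing level does not blow up */}}
{{- $field := ((.Values.myDictionary | default dict).myField) | default "fallback" }}

{{/* or let dig walk the path, returning the supplied default when a key is missing */}}
{{- $field2 := dig "myDictionary" "myField" "fallback" .Values }}
```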
## template includes
_filename.tpl — traditionally starts with underscore
**but** using built-in objects in these templates might not work like you expect! You need to pass the root context at the template call site
### How you create a block you're going to include
{{- define "template_name" }}
foobar: baz
{{- end }}
### How you call it: with the template tag
{{- template "template_name" .}}
(. can also be $)
`template` is a relatively literal include mechanism - you must make sure you do the whitespace alignment properly across the two files
### How you can call it: with the include tag
{{ include "template_name" . | indent 4 }}
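A minimal sketch of both halves (template and label names are placeholders); `include` piped through `indent`/`nindent` is usually preferred because the caller controls the indentation:

```
{{/* templates/_helpers.tpl */}}
{{- define "mychart.labels" }}
app.kubernetes.io/name: {{ .Chart.Name }}
app.kubernetes.io/instance: {{ .Release.Name }}
{{- end }}

{{/* in a manifest template */}}
metadata:
  labels:
    {{- include "mychart.labels" . | nindent 4 }}
```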
## Values / The Template Nature
can specify in three locations (precedence):
* parent chart
* values.yaml
* --set parameters
[source](-1-0.helm.sh/docs/chart_template_guide/values_files/)
@quote-highlight[#:title "Learning Helm"
#:author "N/A"
#:page-number 0]{In other words, --set values override settings from passed-in values files, which in turn override anything in the chart’s default values.yaml file.}
## See also:
* Helm_Development_Checking_Your_Created_Chart
*
## Using Helm as a preprocessor for something else
@quote-highlight[#:title "Learning Helm"
#:author "N/A"
#:page-number 0]{Sometimes you want to intercept the YAML, modify it with your own tool, and then load it into Kubernetes. Helm provides a way to execute this external tool without having to resort to using helm template. The flag --post-renderer on the install, upgrade, rollback, and template will cause Helm to send the YAML data to the command, and then read the results back into Helm. This is a great way to work with tools like Kustomize.}
## Looking up very dynamic values from k8s
@quote-highlight[#:title "Learning Helm"
#:author "N/A"
#:page-number 0]{Helm contains a template function that enables you to look up resources in the Kubernetes cluster. The lookup template function is able to return either an individual object or a list of objects}
# CLI bits
## Passing complex objects through set parameter
an array where each element is a dictionary
`--set 'mything.globals.myArrayOfDictionaries[0].myField=myValues' --set 'mything.globals.myArrayOfDictionaries[1].myField=myValueForArrayItemTwo' `
[source of some of this documentation](-passing-array-values-through-set)
Alternative: maybe just [put the extra values in a separate file and include them](#issuecomment-431447235)
This would look like:
```yaml
mything:
globals:
myArrayOfDictionaries:
- myField: myValue
```
and call `helm template` with the `-f` option specifying the file name
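For example, a sketch of the calls (file and chart names are placeholders):

```shell
helm template my-release ./my-chart -f extra-values.yaml
# at install time --set can still override what the file provides
helm install my-release ./my-chart -f extra-values.yaml \
  --set 'mything.globals.myArrayOfDictionaries[0].myField=overridden'
```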
# Release
This is a built-in object you can refer to in the Go templates!
# Developing
## making a new chart
`helm create $name`
creates the skeleton of what you need
### interesting files
* values.schema.json <-- OPTIONAL schema for values in values.yaml file!!!
* crds <-- custom k8s resources
* templates <-- templates + values = k8s resources
*
@quote-highlight[#:title "Learning Helm"
#:author "N/A"
#:page-number 0]{Helm provides the optional ability for each chart to provide its own schema for its values using JSON Schema. JSON Schema provides a vocabulary to describe JSON files. YAML is a superset of JSON, and you can transform content between the two file formats. This makes it possible to use a JSON Schema to validate the content of a YAML file.}
[See excellent blog post on this](-tricks-input-validation-with-values-schema-json/)
@quote-highlight[#:title "Learning Helm"
#:author "N/A"
#:page-number 0]{When you run the commands helm install, helm upgrade, helm lint, and helm template, Helm will validate the values against what it finds in the values.schema.json file.}
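A hedged sketch of a small values.schema.json (the property names are made up, not from the book):

```json
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "type": "object",
  "required": ["image"],
  "properties": {
    "image": {
      "type": "object",
      "required": ["repository", "tag"],
      "properties": {
        "repository": { "type": "string" },
        "tag": { "type": "string" }
      }
    },
    "replicaCount": { "type": "integer", "minimum": 1 }
  }
}
```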
## Making sure your template works (local machine development) <<Helm_Development_Checking_Your_Created_Chart>>
* `helm lint`
* `helm template` <-- renders the Helm chart as a k8s resource. You could use this to ensure you're telling k8s to do what you think you're telling it
* `helm install --dry-run` <-- similar to `helm template`, but it runs through the normal install logic (see the --dry-run vs. template notes in the debugging section below, and the sketch after this list)
*
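For example (chart path, release name, and values file are placeholders):
```shell
helm lint ./mychart
helm template my-release ./mychart -f values-staging.yaml > rendered.yaml
helm install my-release ./mychart -f values-staging.yaml --dry-run
```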
## version numbering
Chart.yaml (minimal example after this list):
* `version` attribute, which is the chart version. Per convention should be incremented every time you change something, including the app version
* `appVersion` attribute: version number of the application being deployed
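A minimal sketch of those two fields (name and numbers are placeholders):
```yaml
apiVersion: v2
name: mychart
version: 1.2.3        # chart version: bump on any change to the chart
appVersion: "4.5.6"   # version of the application being deployed
```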
## manually creating a chart artifact
`helm package chartName` <-- makes a .tgz archive for the chart with the correct version number appended.
You could theoretically use `curl` to upload this to the chart repository (but you likely don't want to directly do that...)
## deploying a chart
the Helm Push plugin is a good solution here. can run this after a helm package, or have the push plugin do it for you...
## Subclassing and chart libraries
@quote-highlight[#:title "Learning Helm"
#:author "N/A"
#:page-number 0]{You may run into the situation where you are creating multiple similar charts—charts that share a lot of the same templates. For these situations, there are library charts.
}
@quote-highlight[#:title "Learning Helm"
#:author "N/A"
#:page-number 0]{It provides a blueprint that is meant to be overridden by the caller in a chart that includes this library. mylib.configmap is a special template. This is the template another chart will use. It takes mylib.configmap.tpl along with another template, yet to be defined, containing overrides, and merges them into one output. mylib.configmap uses a utility function that handles the merging and is handy to reuse.}
@quote-highlight[#:title "Learning Helm"
#:author "N/A"
#:page-number 0]{When a child chart has declared an export property, its contents can be imported directly into a parent chart.}
## tests
### built in integration / environment validation unit tests
(Sometimes also called "helm hook test")
Stored in `templates/tests/`.
@quote-highlight[#:title "Learning Helm"
#:author "N/A"
#:page-number 0]{Tests typically live in the tests subdirectory of the templates directory. Putting the tests in this directory provides a useful separation. This is a convention and not required for tests to run.}
It's just another k8s pod. It will not get deployed as a service; instead its command is run and a non-zero exit code marks the test as failed.
For example, a test can check that the webservice server your pod _should_ have launched _did_ launch.
It can be run as part of the deployment lifecycle, and on demand with `helm test`.
Interesting notes: because it's a separate pod definition, you don't have to use the docker container your normal application uses. You could use busybox and call `wget`, you could write a custom binary and put it in the container, whatever. [Example]()
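A rough sketch of such a test pod (names, image, and the probed URL are all assumptions); run it with `helm test <release-name>` after installing:
```yaml
# templates/tests/connection-test.yaml
apiVersion: v1
kind: Pod
metadata:
  name: "{{ .Release.Name }}-connection-test"
  annotations:
    "helm.sh/hook": test
spec:
  containers:
    - name: wget
      image: busybox
      command: ["wget", "-qO-", "http://{{ .Release.Name }}-web:80"]
  restartPolicy: Never
```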
@quote-highlight[#:title "Learning Helm"
#:author "N/A"
#:page-number 0]{In Helm version 2 there was a hook named test-success for running tests. Helm version 3 provides backward compatibility and will run this hook name as a test.}
### actual unit tests
See Helm plugin [helm-unittest](-unittest) where you can test your post processed YAML (ie making sure one of your if conditions resulted correctly, or whatever)
### Chart Testing
@quote-highlight[#:title "Learning Helm"
#:author "N/A"
#:page-number 0]{Chart Testing can be installed and used in various ways. For example, you can use it as a binary application on a development system or in a container within a continuous integration system. Learn more about using and setting it up for your situation on the project page.
}
See also:
* [Builtin quality for Helm charts: unit testing to the rescue](:rwilcox/b:1a1911796101)
### debugging WTF went wrong with your chart
@quote-highlight[#:title "Learning Helm"
#:author "N/A"
#:page-number 0]{Helm provides tools designed to ease debugging. Between helm get manifest and kubectl get, you have tools for comparing what Kubernetes thinks is the current object with what the chart produced. This is particularly helpful when a resource that should be managed by Helm was manually edited outside of Helm (e.g., using kubectl edit).}
@quote-highlight[#:title "Learning Helm"
#:author "N/A"
#:page-number 0]{While --dry-run is designed for debugging, helm template is designed to isolate the template rendering process of Helm from the installation or upgrade logic.}
@quote-highlight[#:title "Learning Helm"
#:author "N/A"
#:page-number 0]{The template command performs the first four phases (load the chart, determine the values, render the templates, format to YAML). But it does this with a few additional caveats:
* During helm template, Helm never contacts a remote Kubernetes server.
* The template command always acts like an installation.
* Template functions and directives that would normally require contacting a Kubernetes server will instead only return default data.
* The chart only has access to default Kubernetes kinds.}
# Introspecting a repository
## searching for an artifact / chart in a repository
helm search $repo/$artifactName
As Helm keeps a local cache of repositories, you may need to manually `helm repo update` before these queries return expected results...
By default `helm search` only returns latest version of an artifact in the repository. Use `helm search -l` to list all artifact coordinates.
## get previously stored template
### Helm v2
helm fetch $repo/$artifactName --version=$artifactVersion --untar
### Helm v3
helm pull $repo/$name --version=$artifactVersion --untar
## Get K8s resources created by a chart
### Helm 3
helm get manifest $releaseName
# Operating
## Making and Tracking cluster changes
@quote-highlight[#:title "Learning Helm"
#:author "N/A"
#:page-number 0]{Each release record contains enough information to re-create the Kubernetes objects for that revision (an important thing for helm rollback).}
@quote-highlight[#:title "Learning Helm"
#:author "N/A"
#:page-number 0]{helm uninstall command has a flag called --keep-history. Normally, a deletion event will destroy all release records associated with that installation. But when --keep-history is specified, you can see the history of an installation even after it has been deleted:}
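A hedged example of that flow (release name and revision are placeholders):
```shell
helm uninstall my-release --keep-history
helm history my-release       # release records survive the uninstall
helm rollback my-release 3    # re-create the objects from revision 3
```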
## Best Practices
@quote-note[
#:original-highlight "the recommendation is to put in resource limits and then turn them into comments."
#:title "Learning Helm"
#:author "N/A"
#:page-number 0]{Wait couldn’t you set them to (Dev machine minimum) and ??? have helm —set them on big boy targets (knowing that, practically speaking, QA resource allocations will != prod)}
## Unwedging Stuff
Sometimes you can unwedge stuff by rolling back _then_ trying your `helm upgrade`
`helm rollback $my-release $my-revision --namespace=$my-namespace`
Which might work.
You might be able to `helm delete $my-release -n $my-namespace`
### Where Helm stores state in k8s
Helm 3 stores its release state in secrets in k8s, in a format like so `sh.helm.release.v1.<RELEASE_NAME>.v<LATEST_REVISION>`
You may have to delete these too, especially if you have deleted all the k8s services, deployments, etc etc that your Helm chart creates.
[Source]()
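For example (namespace, release name, and revision are placeholders; delete these secrets with care, since it erases Helm's record of the release):
```shell
kubectl get secrets -n my-namespace | grep sh.helm.release.v1
kubectl delete secret sh.helm.release.v1.my-release.v1 -n my-namespace
```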
# In a microservice's CI/CD process
@quote-highlight[#:title "Learning Helm"
#:author "N/A"
#:page-number 0]{there is an upgrade shortcut available that will just reuse the last set of values that you sent:
`$ helm upgrade mysite bitnami/drupal --reuse-values`}
@quote-highlight[#:title "Learning Helm"
#:author "N/A"
#:page-number 0]{The --reuse-values flag will tell Helm to reload the server-side copy of the last set of values, and then use those to generate the upgrade}
@quote-highlight[#:title "Learning Helm"
#:author "N/A"
#:page-number 0]{One recommendation for using --wait in CI is to use a long --timeout (five or ten minutes) to ensure that Kubernetes has time to resolve any transient failures.}
@quote-highlight[#:title "Learning Helm"
#:author "N/A"
#:page-number 0]{ --atomic flag instead of the --wait flag. This flag causes the same behavior as --wait unless the release fails. Then, instead of marking the release as failed and exiting, it performs an automatic rollback to the last successful release. In automated systems, the --atomic flag is more resistent to outages, since it is less likely to have a failure as its end result. }
@quote-highlight[#:title "Learning Helm"
#:author "N/A"
#:page-number 0]{ --wait will track such objects, waiting until the pods they create are marked as Running by Kubernetes.}
@quote-highlight[#:title "Learning Helm"
#:author "N/A"
#:page-number 0]{But with --wait, the success criteria for an installation is modified. A chart is not considered successfully installed unless (1) the Kubernetes API server accepts the manifest and (2) all of the pods created by the chart reach the Running state before Helm’s timeout expires.}
@quote-highlight[#:title "Learning Helm"
#:author "N/A"
#:page-number 0]{The helm upgrade --install command will install a release if it does not exist already, or will upgrade a release if a release by that name is found. Underneath the hood, it works by querying Kubernetes for a release with the given name. If that release does not exist, it switches out of the upgrade logic and into the install logic.}
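Putting those flags together, a typical CI deploy step might look like this (names and paths are placeholders):
```shell
helm upgrade --install myapp ./chart \
  -f values-prod.yaml \
  --atomic --timeout 10m
```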
# Charts that depend on other charts
## On Sub Charts
You can put dependencies in the `charts/` folder. Like `charts/my-sub-dependency-chart`
From within the parent's values.yaml you can inject values into the subchart.
Like so
```yaml
my-sub-dependency-chart:
keyToOverride: value
```
(values are passed to the subchart as the bare key, no namespace)
### Global values and charts
use the `global` key in the parent's values.yaml and the value will be available under the same name everywhere, in the subcharts and the parent chart.
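A sketch of both mechanisms in one parent values.yaml (keys are illustrative):
```yaml
global:
  imageRegistry: registry.example.com   # .Values.global.imageRegistry in parent and subcharts
my-sub-dependency-chart:
  keyToOverride: value                  # seen by the subchart as .Values.keyToOverride
```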
### and Chart.yaml
[can not read parent .Chart value from subchart]()
> A subchart is considered "stand-alone", which means a subchart can never explicitly depend on its parent chart.
> For that reason, a subchart cannot access the values of its parent.
[source](/)
When I tried this in a template file I was only able to access fields on `.Chart` that were defined in the current chart, i.e. nothing exported from the parent chart.
## Dependent Charts
Chart.yaml file:
`dependencies` key: give name, version and repository
`helm dependency update` <-- updates dependencies
@quote-highlight[#:title "Learning Helm"
#:author "N/A"
#:page-number 0]{When you want to control if a single feature is enabled or disabled through a dependency, you can use the condition property on a dependency}
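A sketch of the dependencies block including a condition (name, version, and repository URL are placeholders); run `helm dependency update` afterwards:
```yaml
# Chart.yaml
dependencies:
  - name: my-sub-dependency-chart
    version: "1.2.3"
    repository: https://charts.example.com      # or a file:// path for local development
    condition: my-sub-dependency-chart.enabled  # only pulled in when this value is true
```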
@quote-highlight[#:title "Learning Helm"
#:author "N/A"
#:page-number 0]{When a child chart has declared an export property, its contents can be imported directly into a parent chart.}
@quote-highlight[#:title "Learning Helm"
#:author "N/A"
#:page-number 0]{Dependencies are specified in the Chart.yaml file. The following is the dependencies section in the Chart.yaml file for a chart named rocket:}
@quote-highlight[#:title "Learning Helm"
#:author "N/A"
#:page-number 0]{When a chart has dependencies listed under the dependencies field in Chart.yaml, a special file named Chart.lock is generated and updated each time you run the command helm dependency update. When a chart contains a Chart.lock file, operators can run helm dependency build to generate the charts/ directory without the need to renegotiate dependencies.}
## Starters
@quote-highlight[#:title "Learning Helm"
#:author "N/A"
#:page-number 0]{Starters, or starter packs, are similar to Helm charts, except that they are meant to be used as templates for new charts.}
@quote-highlight[#:title "Learning Helm"
#:author "N/A"
#:page-number 0]{Any Helm chart can be converted into a starter. The only thing that separates a starter from a standard chart is the presence of dynamic references to the chart name in a starter’s templates.}
@quote-highlight[#:title "Learning Helm"
#:author "N/A"
#:page-number 0]{To specify a custom starter, you can use the --starter option when creating a new chart:}
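For example (the starter name is a placeholder and must already be available to Helm, e.g. in its starters directory):
```shell
helm create mynewchart --starter mystarter
```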
# Helmfile
Can deploy multiple charts in a herd.
See [declaratively running helm charts using helmfile]([-to-declaratively-run-helm-charts-using-helmfile-ac78572e6088)
Can select various sections you want to act on with selectors
Can also use template helmfile subcommand to see rendered k8s charts
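A minimal helmfile.yaml sketch (release name, chart path, and the selector label are assumptions):
```yaml
releases:
  - name: myapp
    namespace: default
    chart: ./charts/myapp
    labels:
      tier: backend          # select with: helmfile -l tier=backend apply
    values:
      - values-prod.yaml
```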
# See also
* [waytoeasylearn tutorial on Helm](-introduction/)
* [learning Helm O'Reilly book](-Helm-Managing-Apps-Kubernetes/dp/1492083658)
* [Awesome List For Helm](-helm)
* [My pinboard t:helm](:rwilcox/t:helm)
| null | https://raw.githubusercontent.com/rwilcox/my-learnings-docs/8b312ae8940e5c7295b95e157a55c5a1a7074307/learning_helm.md.rkt | racket | all charts for a repo are listed in a single index
you either have access to all charts in the repo or none of them | #lang scribble/text
@(require "scribble-utils.rkt")
---
path: /learnings/helm
title: Learning Helm
---
# Table Of Contents
<!-- toc -->
# Intro / Why
Automate Version handling, rollback, installation
Templatize k8s resources, search and reuse templates
## components
* helm client (CLI)
* charts — application configuration definitions
* repositories — where charts are stored
* release — chart instances loaded into k8s
### helm client / CLI interesting facts
Can be extended with plugins
## misc
@quote-highlight[#:title "Learning Helm"
#:author "N/A"
#:page-number 0]{Helm contains a template function that enables you to look up resources in the Kubernetes cluster. The lookup template function is able to return either an individual object or a list of objects}
@quote-highlight[#:title "Learning Helm"
#:author "N/A"
#:page-number 0]{Helm charts can be cryptographically signed and verified}
## v2 Vs v3
### Differences in required supporting infrastructure
V2: Helm -> Tiller pod -> k8s cluster
V3: helm -> k8s cluster via role based access controls
### User Facing Differences
#### Chart name <<Helm_Name_Differences_In_V2_V3>>
In Helm 2: unless you provided a `--name` parameter, Helm created adjective-noun names for releases.
In Helm 3 this now uses the name of the chart, or what you override with `--name-template`_OR_ `--generate-name`
@quote-highlight[#:title "Learning Helm"
#:author "N/A"
#:page-number 0]{In Helm 2, "friendly names" were generated using adjectives and animal names. That was removed in Helm 3 due to complaints that release names were unprofessional.}
@quote-highlight[#:title "Learning Helm"
#:author "N/A"
#:page-number 0]{In Helm 3, naming has been changed. Now instance names are scoped to Kubernetes namespaces. We could install two instances named mysite as long as they each lived in a different namespace.}
# Helm chart storage (different types of repositories)
## notes
@quote-highlight[#:title "Learning Helm"
#:author "N/A"
#:page-number 0]{Chart repositories do, however, present a few key challenges:
* Chart packages with different names but the exact same raw contents are stored twice
* repository index can become extremely large, causing Helm to consume a lot of memory}
## using repositories from the CLI
Helm provides search and repo add commands for selecting different repos, searching them and getting a specific helm chart.
## public access
Helm publishes a public one [Helm official stable charts](/).
You could use helm fetch to get the public ones, inspect them, and install from your file system.
## A static site
[perhaps hosted via GitHub pages](-to-host-helm-chart-repository-on-github-b76c854e1462)
Just configure your helm CLI to have a registry that points to (the site)
just need an index.yaml file! `helm repo index .` generates this!
You can also use the raw.github URL to the repository, and add that as a remote with Github user and password
@quote-highlight[#:title "Learning Helm"
#:author "N/A"
#:page-number 0]{Chart Releaser, or cr, is a command-line tool that leverages GitHub releases for hosting chart packages. It has the ability to detect charts in a Git repo, package them, and upload each of them as artifacts to GitHub releases named after the unique chart version.}
## nexus
OCI compatible ?
### ECR
OCI compatible ?
### [chart museum]()
Installation options:
* Download a chart museum binary
* docker image
* Helm chart
Can point storage to S3, GCP, Azure Blob storage, local file system, etc.
OCI compatible ?
### Screw it, a folder on your local machine
(great for writing charts, then seeing how it applies with an actual service)
set your Chart.yaml's dependencies `repository` field to a `file://` path, e.g. `file://development-charts-or-whatever/my-specific-chart-folder-yes-you-need-this/`
# Deployments
Can see these via helm ls.
When a Helm chart is installed becomes a release (this is a Helm standard object type)
## Attributes
* **Revisions**: number of times you’ve deployed the service to this cluster (this is NOT the artifact version number AND is reset say with a new cluster)
* **name**: for more info see Helm_Name_Differences_In_V2_V3
## environmental variables for a deployment
Vs changing these one by one in k8s pods
## Reverting a deploy
`helm rollback $artifactName $revision`
## Removing a microservice completely from the cluster
`helm delete --purge $name`
# Hooks
possibilities:
* pre-install
* post-install
* pre-delete
* post-delete
* pre-upgrade
* post-upgrade
* pre-rollback
* post-rollback
just a YAML file with:
    metadata:
      annotations:
        "helm.sh/hook": "pre-install"
Hooks can be a part of deployments in addition to having the same lifecycle for Kubernetes Jobs (See Kubernetes_Jobs).
You can _also_ associate multiple jobs with a single hook! Just set the `helm.sh/hook-weight` annotation to different values to control which goes first.
## See also
* K8s_Init_Containers
*
# Templates
can run values through various operations, like quote and upper.
{{ quote .Values.some.value.here }}
[List of built in functions](/)
Can for example even look up attributes from the running k8s cluster!
Uses template functions from [Sprig template library](/)
Pipe character to send values into another function
Can use `with` to drill into a nested values object without navigating the object graph every time in a certain scope (Pascal has a similar syntax feature)
Variables are assigned by Pascal / Smalltalk assignment syntax
{{- $var := "foo" -}}
`{{-` : trim the whitespace (including the newline) before this tag
`-}}` : trim the whitespace (including the newline) after this tag
## falsiness in template language
Falsely:
* Boolean false
* numeric zero
* empty string
* nil
* empty collection
## Container Types in template language
### Dealing with arrays with dictionaries inside them
If you have a values.yaml objecting looking like this:
```yaml
myArrayOfDictionaries:
- nameOrWhateverTheValueIs: foobar
- nameOrWhateverTheValueIs: second item in the array
```
the following idiom is your friend
```{{- with (first .Values.myArrayOfDictionaries) }}
{{ .NameOrWhateverTheValueIs }}
{{- end }}
```
You could also do `{{- with ( index .Values.myArrayOfDictionaries 3 ) }}` to get the fourth item in the array
## Object Traversal In Template language
In deeply or optionally nested objects you may get a lot of `nil pointer evaluating interface {}.someField` messages. See [Helm issues about traversing deeply nested objects]()
The [empty](/#empty) function, for example, will error if something on the object path is nil. It may also error in _very_ odd places (I would have thought .Values.globals exists by default, but nope(?)).
Two ways to handle this:
`{{ empty (.Values.myDictionary | default dict).myField }}` <-- this will correctly not error and return empty for `myField` if the traversal fails.
`dig "myDictionary" "myField" .Values)` ( [documentation]() ). **BUT** `dig` only works on Dictionary objects, it will not work on arbitrary objects that use the dot accessor for field access (aka: arbitrary objects)
## template includes
_filename.tpl — traditionally starts with underscore
|
d15ab25c63f259c6f1986eb30f9bf57cda6dc847698a5f6c449bcb61aa8591f5 | dhess/sicp-solutions | ex2.62.scm | (define (union-set set1 set2)
(cond ((null? set1) set2)
((null? set2) set1)
(else (let ((x1 (car set1))
(x2 (car set2)))
(cond ((= x1 x2)
(cons x1 (union-set (cdr set1) (cdr set2))))
((< x1 x2)
(cons x1 (union-set (cdr set1) set2)))
(else
(cons x2 (union-set set1 (cdr set2)))))))))
| null | https://raw.githubusercontent.com/dhess/sicp-solutions/2cf78db98917e9cb1252efda76fddc8e45fe4140/chap2/ex2.62.scm | scheme | (define (union-set set1 set2)
(cond ((null? set1) set2)
((null? set2) set1)
(else (let ((x1 (car set1))
(x2 (car set2)))
(cond ((= x1 x2)
(cons x1 (union-set (cdr set1) (cdr set2))))
((< x1 x2)
(cons x1 (union-set (cdr set1) set2)))
(else
(cons x2 (union-set set1 (cdr set2)))))))))
|
|
a88ede46d1a55d4ed7d698e015e83381a15bc3aa3b04985499f41f5da17a6dcd | huangjs/cl | zuchk.lisp | ;;; Compiled by f2cl version:
( " f2cl1.l , v 1.215 2009/04/07 22:05:21 rtoy Exp $ "
" f2cl2.l , v 1.37 2008/02/22 22:19:33 rtoy Exp $ "
" f2cl3.l , v 1.6 2008/02/22 22:19:33 rtoy Exp $ "
" f2cl4.l , v 1.7 2008/02/22 22:19:34 rtoy Exp $ "
" f2cl5.l , v 1.200 2009/01/19 02:38:17 rtoy Exp $ "
" f2cl6.l , v 1.48 2008/08/24 00:56:27 rtoy Exp $ "
" macros.l , v 1.112 2009/01/08 12:57:19 " )
Using Lisp CMU Common Lisp 19f ( 19F )
;;;
;;; Options: ((:prune-labels nil) (:auto-save t) (:relaxed-array-decls t)
;;; (:coerce-assigns :as-needed) (:array-type ':simple-array)
;;; (:array-slicing nil) (:declare-common nil)
;;; (:float-format double-float))
(in-package :slatec)
(defun zuchk (yr yi nz ascle tol)
(declare (type (f2cl-lib:integer4) nz) (type (double-float) tol ascle yi yr))
(prog ((ss 0.0) (st 0.0) (wr 0.0) (wi 0.0))
(declare (type (double-float) wi wr st ss))
(setf nz 0)
(setf wr (abs yr))
(setf wi (abs yi))
(setf st (min wr wi))
(if (> st ascle) (go end_label))
(setf ss (max wr wi))
(setf st (/ st tol))
(if (< ss st) (setf nz 1))
(go end_label)
end_label
(return (values nil nil nz nil nil))))
(in-package #-gcl #:cl-user #+gcl "CL-USER")
#+#.(cl:if (cl:find-package '#:f2cl) '(and) '(or))
(eval-when (:load-toplevel :compile-toplevel :execute)
(setf (gethash 'fortran-to-lisp::zuchk fortran-to-lisp::*f2cl-function-info*)
(fortran-to-lisp::make-f2cl-finfo
:arg-types '((double-float) (double-float)
(fortran-to-lisp::integer4) (double-float)
(double-float))
:return-values '(nil nil fortran-to-lisp::nz nil nil)
:calls 'nil)))
| null | https://raw.githubusercontent.com/huangjs/cl/96158b3f82f82a6b7d53ef04b3b29c5c8de2dbf7/lib/maxima/src/numerical/slatec/zuchk.lisp | lisp | Compiled by f2cl version:
Options: ((:prune-labels nil) (:auto-save t) (:relaxed-array-decls t)
(:coerce-assigns :as-needed) (:array-type ':simple-array)
(:array-slicing nil) (:declare-common nil)
(:float-format double-float)) | ( " f2cl1.l , v 1.215 2009/04/07 22:05:21 rtoy Exp $ "
" f2cl2.l , v 1.37 2008/02/22 22:19:33 rtoy Exp $ "
" f2cl3.l , v 1.6 2008/02/22 22:19:33 rtoy Exp $ "
" f2cl4.l , v 1.7 2008/02/22 22:19:34 rtoy Exp $ "
" f2cl5.l , v 1.200 2009/01/19 02:38:17 rtoy Exp $ "
" f2cl6.l , v 1.48 2008/08/24 00:56:27 rtoy Exp $ "
" macros.l , v 1.112 2009/01/08 12:57:19 " )
Using Lisp CMU Common Lisp 19f ( 19F )
(in-package :slatec)
(defun zuchk (yr yi nz ascle tol)
(declare (type (f2cl-lib:integer4) nz) (type (double-float) tol ascle yi yr))
(prog ((ss 0.0) (st 0.0) (wr 0.0) (wi 0.0))
(declare (type (double-float) wi wr st ss))
(setf nz 0)
(setf wr (abs yr))
(setf wi (abs yi))
(setf st (min wr wi))
(if (> st ascle) (go end_label))
(setf ss (max wr wi))
(setf st (/ st tol))
(if (< ss st) (setf nz 1))
(go end_label)
end_label
(return (values nil nil nz nil nil))))
(in-package #-gcl #:cl-user #+gcl "CL-USER")
#+#.(cl:if (cl:find-package '#:f2cl) '(and) '(or))
(eval-when (:load-toplevel :compile-toplevel :execute)
(setf (gethash 'fortran-to-lisp::zuchk fortran-to-lisp::*f2cl-function-info*)
(fortran-to-lisp::make-f2cl-finfo
:arg-types '((double-float) (double-float)
(fortran-to-lisp::integer4) (double-float)
(double-float))
:return-values '(nil nil fortran-to-lisp::nz nil nil)
:calls 'nil)))
|
4c87f8c3025a082cec78150a6313b28431bf548f0ce2c46273647c32d3ad7cb5 | sunshineclt/Racket-Helper | homework11-2.rkt | #lang racket
;basic amb evaluator
(require racket/mpair)
(define (my-display x)
(cond ((and (mpair? x) (eq? (mcar x) 'primitive))
(display (get-list-head x 2)))
((and (pair? x) (eq? (car x) 'procedure))
(display (get-list-head x 3)))
(else (display x))))
(define (my-newline) (newline))
(define (simple-proc-obj proc-obj)
(if (mpair? proc-obj)
(get-list-head proc-obj 2)
(get-list-head proc-obj 3)))
(define (get-list-head lst n)
(if (= n 0)
'()
(if (mpair? lst)
(mcons (mcar lst) (get-list-head (mcdr lst) (- n 1)))
(cons (car lst) (get-list-head (cdr lst) (- n 1))))))
(define (get-mlist-head lst n)
(if (= n 0)
'()
(mcons (mcar lst) (get-mlist-head (mcdr lst) (- n 1)))))
(define (mymlist->list mlst)
(if (null? mlst)
'()
(if (mpair? mlst)
(let ((first (mcar mlst)))
(if (or (mpair? first) (pair? first))
(cons (mymlist->list first)
(mymlist->list (mcdr mlst)))
(cons first (mymlist->list (mcdr mlst)))))
(let ((first (car mlst)))
(if (or (mpair? first) (pair? first))
(cons (mymlist->list first)
(mymlist->list (cdr mlst)))
(cons first (mymlist->list (cdr mlst))))))))
(define (mylist->mlist lst)
(if (null? lst)
'()
(if (pair? lst)
(let ((first (car lst)))
(if (or (mpair? first) (pair? first))
(mcons (mylist->mlist first)
(mylist->mlist (cdr lst)))
(mcons first (mylist->mlist (cdr lst)))))
(let ((first (mcar lst)))
(if (or (mpair? first) (pair? first))
(mcons (mylist->mlist first)
(mylist->mlist (mcdr lst)))
(mcons first (mylist->mlist (mcdr lst))))))))
(define mcadr (lambda (x) (mcar (mcdr x))))
(define set-cdr! set-mcdr!)
(define set-car! set-mcar!)
(define (self-evaluating? exp)
(cond ((number? exp) true)
((string? exp) true)
(else false)))
(define (mtagged-list? exp tag)
(if (mpair? exp)
(eq? (mcar exp) tag)
false))
(define (tagged-list? exp tag)
(if (pair? exp)
(eq? (car exp) tag)
false))
(define (variable? exp) (symbol? exp))
(define (quoted? exp) (tagged-list? exp 'quote))
(define (text-of-quotation exp) (cadr exp))
(define (assignment? exp) (tagged-list? exp 'set!))
(define (assignment-variable exp) (cadr exp))
(define (assignment-value exp) (caddr exp))
(define (definition? exp) (tagged-list? exp 'define))
(define (if-fail? exp) (tagged-list? exp 'if-fail))
(define (all-answer? exp) (tagged-list? exp 'all-answer))
(define (let? exp) (tagged-list? exp 'let))
(define (let-body exp) (cddr exp))
(define (let-clauses exp) (cadr exp))
(define (let->combination exp)
(cons (make-lambda (map car (let-clauses exp))
(let-body exp)) (map cadr (let-clauses exp))))
(define (definition-variable exp)
(if (variable? (cadr exp))
(cadr exp)
(caadr exp)))
(define (definition-value exp)
(if (symbol? (cadr exp))
(caddr exp)
(make-lambda (cdadr exp)
(cddr exp))))
(define (lambda? exp) (tagged-list? exp 'lambda))
(define (lambda-parameters exp) (cadr exp))
(define (lambda-body exp) (cddr exp))
(define (make-lambda parameters body)
(cons 'lambda (cons parameters body)))
(define (if? exp) (tagged-list? exp 'if))
(define (if-predicate exp) (cadr exp))
(define (if-consequent exp) (caddr exp))
(define (if-alternative exp)
(if (null? (cdddr exp))
'false
(cadddr exp)))
(define (make-if predicate consequent alternative)
(list 'if predicate consequent alternative))
(define (begin? exp)
(tagged-list? exp 'begin))
(define (begin-actions exp) (cdr exp))
(define (last-exp? seq) (null? (cdr seq)))
(define (first-exp seq) (car seq))
(define (rest-exps seq) (cdr seq))
(define (make-begin seq)
(cons 'begin seq))
(define (sequence->exp seq)
(cond ((null? seq) seq)
((last-exp? seq) (first-exp seq))
(else (make-begin seq))))
(define (application? exp) (pair? exp))
(define (operator exp) (car exp))
(define (operands exp) (cdr exp))
(define (no-operands? ops) (null? ops))
(define (first-operand ops) (car ops))
(define (rest-operands ops) (cdr ops))
(define (cond? exp) (tagged-list? exp 'cond))
(define (cond-clauses exp) (cdr exp))
(define (cond-else-clause? clause)
(eq? (cond-predicate clause) 'else))
(define (cond-predicate clause) (car clause))
(define (cond-actions clause) (cdr clause))
(define (expand-clauses clauses)
(if (null? clauses)
'false
(let ((first (car clauses))
(rest (cdr clauses)))
(if (cond-else-clause? first)
(if (null? rest)
(sequence->exp (cond-actions first))
(error "ELSE clause isn't last -- COND->IF"
clauses))
(make-if (cond-predicate first)
(sequence->exp (cond-actions first))
(expand-clauses rest))))))
(define (cond->if exp)
(expand-clauses (cond-clauses exp)))
(define (true? x)
(not (eq? x false)))
(define (false? x)
(eq? x false))
(define (make-procedure parameters body env)
(list 'procedure parameters body env))
(define (compound-procedure? p)
(tagged-list? p 'procedure))
(define (procedure-parameters p)
(list->mlist (cadr p)))
(define (procedure-body p)
(caddr p))
(define (procedure-enviroment p)
(cadddr p))
(define (enclosing-environment env) (mcdr env))
(define (first-frame env) (mcar env))
(define the-empty-environment (mlist ))
(define (make-frame variables values)
(mcons variables values))
(define (frame-variables frame ) (mcar frame))
(define (frame-values frame) (mcdr frame))
(define (add-binding-to-frame! var val frame)
(set-car! frame (mcons var (mcar frame)))
(set-cdr! frame (mcons val (mcdr frame))))
(define (extend-environment vars vals base-env)
(if (= (mlength vars) (mlength vals))
(mcons (make-frame vars vals) base-env)
(if (< (mlength vars) (mlength vals))
(error "Too many arguments supplied" vars vals)
(error "Too few arguments supplied" vars vals))))
(define (lookup-variable-value var env)
(define (env-loop env)
(define (scan vars vals)
(cond ((null? vars)
(env-loop (enclosing-environment env)))
((eq? var (mcar vars))
(mcar vals))
(else (scan (mcdr vars) (mcdr vals)))))
(if (eq? env the-empty-environment)
(error "Unbound variable" var)
(let ((frame (first-frame env)))
(scan (frame-variables frame)
(frame-values frame)))))
(env-loop env))
(define (set-variable-value! var val env)
(define (env-loop env)
(define (scan vars vals)
(cond ((null? vars)
(env-loop (enclosing-environment env)))
((eq? var (mcar vars))
(set-car! vals val))
(else (scan (mcdr vars) (mcdr vals)))))
(if (eq? env the-empty-environment)
(error "Unbound variable --SET!" var)
(let ((frame (first-frame env)))
(scan (frame-variables frame)
(frame-values frame)))))
(env-loop env))
(define (define-variable! var val env)
(let ((frame (first-frame env)))
(define (scan vars vals)
(cond ((null? vars)
(add-binding-to-frame! var val frame))
((eq? var (mcar vars))
(set-car! vals val))
(else (scan (mcdr vars) (mcdr vals)))))
(scan (frame-variables frame)
(frame-values frame))))
(define (my-square x ) (* x x))
(define (apply-primitive-procedure proc args)
(apply
(primitive-implementation proc) (mymlist->list args)))
(define primitive-procedures
(mlist (mlist 'car car)
(mlist 'cdr cdr)
(mlist 'cons cons)
(mlist 'null? null?)
(mlist '+ +)
(mlist '* *)
(mlist '- -)
(mlist '/ /)
(mlist '< <)
(mlist '> >)
(mlist '= =)
(mlist 'number? number?)
(mlist 'pair? pair?)
(mlist 'not not)
(mlist 'remainder remainder)
(mlist 'length length)
(mlist 'sqrt sqrt)
(mlist 'list list)
(mlist 'symbol? symbol?)
(mlist 'eq? eq?)
(mlist 'cadr cadr)
(mlist 'append append)
(mlist 'display display)
(mlist 'newline newline)
(mlist 'not not)
(mlist 'void void)
(mlist 'my-square my-square)
))
(define (primitive-procedure-names)
(mmap mcar
primitive-procedures))
(define (primitive-procedure-objects)
(mmap (lambda (proc) (mlist 'primitive (mcadr proc)))
primitive-procedures))
(define (setup-environment )
(let ((initial-env
(extend-environment (primitive-procedure-names)
(primitive-procedure-objects)
the-empty-environment)))
(define-variable! 'true true initial-env)
(define-variable! 'false false initial-env)
initial-env))
(define (primitive-procedure? proc)
(mtagged-list? proc 'primitive))
(define (primitive-implementation proc) (mcadr proc))
(define (prompt-for-input string)
(newline) (newline) (display string) (newline))
(define (announce-output string)
(newline) (display string) (newline))
(define (user-print object)
(if (compound-procedure? object)
(display (list 'compound-procedure
(procedure-parameters object)
(procedure-body object)
'<procedure-env>))
(display object)))
(define (ambeval exp env succeed fail)
((analyze exp) env succeed fail))
(define (analyze exp)
(cond ((self-evaluating? exp)
(analyze-self-evaluating exp))
((null? exp) (lambda (env succeed fail) (succeed (void) fail))) ; (void), not ((void)), which would try to apply the void value
((quoted? exp) (analyze-quoted exp))
((variable? exp) (analyze-variable exp))
((assignment? exp) (analyze-assignment exp))
((definition? exp) (analyze-definition exp))
((if? exp) (analyze-if exp))
((if-fail? exp) (analyze-if-fail exp))
((all-answer? exp) (analyze-all-answer exp))
((lambda? exp) (analyze-lambda exp))
((begin? exp) (analyze-sequence (begin-actions exp)))
((cond? exp) (analyze (cond->if exp)))
((let? exp) (analyze (let->combination exp)))
((amb? exp) (analyze-amb exp))
((application? exp) (analyze-application exp))
(else
(error "Unknown expression type -- ANALYZE" exp))))
(define (analyze-self-evaluating exp)
(lambda (env succeed fail)
(succeed exp fail)))
(define (analyze-quoted exp)
(let ((qval (text-of-quotation exp)))
(lambda (env succeed fail)
(succeed qval fail))))
(define (analyze-variable exp)
(lambda (env succeed fail)
(succeed (lookup-variable-value exp env) fail)
))
(define (analyze-lambda exp)
(let ((vars (lambda-parameters exp))
(bproc (analyze-sequence (lambda-body exp))))
(lambda (env succeed fail)
(succeed (make-procedure vars bproc env) fail)
)))
(define (analyze-if exp)
(let ((pproc (analyze (if-predicate exp)))
(cproc (analyze (if-consequent exp)))
(aproc (analyze (if-alternative exp))))
(lambda (env succeed fail)
(pproc env
(lambda (pred-value fail2)
(if (true? pred-value)
(cproc env succeed fail2)
(aproc env succeed fail2)))
fail))))
(define (analyze-definition exp)
(let ((var (definition-variable exp))
(vproc (analyze (definition-value exp))))
(lambda (env succeed fail)
(vproc env
(lambda (val fail2)
(define-variable! var val env)
(succeed (void) fail2)
)
fail))))
(define (analyze-assignment exp)
(let ((var (assignment-variable exp))
(vproc (analyze (assignment-value exp))))
(lambda (env succeed fail)
(vproc env
(lambda (val fail2)
(let ((old-value
(lookup-variable-value var env)))
(set-variable-value! var val env)
(succeed (void)
(lambda ()
(set-variable-value! var
old-value
env)
(fail2)))))
fail))))
(define (analyze-sequence exps)
(define (sequentially a b)
(lambda (env succeed fail)
(a env
(lambda (a-value fail2)
(b env succeed fail2))
fail)))
(define (loop first-proc rest-procs)
(if (null? rest-procs)
first-proc
(loop (sequentially first-proc (car rest-procs))
(cdr rest-procs))))
(let ((procs (map analyze exps)))
(if (null? procs)
(error "Empty sequence -- ANALYZE")
(void))
(loop (car procs) (cdr procs))))
(define (analyze-application exp)
(let ((fproc (analyze (operator exp)))
(aprocs (map analyze (operands exp))))
(lambda (env succeed fail)
(fproc env
(lambda (proc fail2)
(get-args aprocs
env
(lambda (args fail3)
(execute-application
proc args succeed fail3))
fail2))
fail))))
(define (get-args aprocs env scd fail)
(if (null? aprocs)
(scd '() fail)
((car aprocs) env
(lambda (arg fail2)
(get-args (cdr aprocs)
env
(lambda (args fail3)
(scd (cons arg args)
fail3))
fail2))
fail)))
(define (execute-application proc args succeed fail)
(cond ((primitive-procedure? proc)
(let ((m (apply-primitive-procedure proc args)))
(succeed m fail)
))
((compound-procedure? proc)
((procedure-body proc)
(extend-environment (procedure-parameters proc)
(list->mlist args)
(procedure-environment proc))
succeed
fail))
(else
(error
"Unknown procedure type -- EXECUTE-APPLICATION"
proc))))
(define (amb? exp) (tagged-list? exp 'amb))
(define (amb-choices exp) (cdr exp))
(define (analyze-amb exp)
(let ((cprocs (map analyze (amb-choices exp))))
(lambda (env succeed fail)
(define (try-next choices)
(if (null? choices)
(fail)
((car choices) env
succeed
(lambda ()
(try-next (cdr choices))))))
(try-next cprocs))))
(define (analyze-if-fail exp)
(let ((first (analyze (cadr exp)))
(second (analyze (caddr exp))))
(lambda (env succeed fail)
(first env
(lambda (val fail2)
(first env succeed fail))
(lambda ()
(second env succeed fail))))))
(define (analyze-all-answer exp)
(let ((actual-exp (analyze (cadr exp))))
(lambda (env succeed fail)
(actual-exp env
(lambda (val next-alternative)
(display val)
(newline)
(next-alternative))
(lambda ()
(define (fail-loop)
(driver-loop (lambda ()
(glb-fail)
(fail-loop))))
(fail-loop))))))
(define (procedure-environment proc)
(car (cdr (cdr (cdr proc)))))
(define rq '(define (require p)
(if (not p)
(amb)
(void))))
(define glb-succeed
(lambda (val next)
(display "succeed,val = " ) (display val) (newline)
))
(define glb-fail
(lambda ()
(display "There are no more answers.") (newline)))
(define glb-env (setup-environment))
(ambeval rq glb-env (lambda (val fail) (void)) glb-fail)
(define (my-driver-loop)
(let ((input (read)))
(if (eq? input eof)
(void)
(begin (ambeval input glb-env
(lambda (val fail)
(if (eq? val (void))
(void)
(begin (display val) (newline))))
glb-fail)
(my-driver-loop)))))
;(my-driver-loop)
(define (driver-loop try-again)
(let ((input (read)))
(if (eq? input eof)
(void)
(if (eq? input 'try-again)
(try-again)
(begin
(ambeval input glb-env
(lambda (val next-alternative)
(if (eq? val (void))
(driver-loop next-alternative)
(begin (display val)
(newline)
(driver-loop next-alternative))))
(lambda ()
(glb-fail)
(define (fail-loop)
(driver-loop (lambda ()
(glb-fail)
(fail-loop))))
(fail-loop))))))))
(driver-loop (void))
| null | https://raw.githubusercontent.com/sunshineclt/Racket-Helper/bf85f38dd8d084db68265bb98d8c38bada6494ec/%E9%99%88%E4%B9%90%E5%A4%A9/Week11/homework11-2.rkt | racket | basic amb evaluator
(my-driver-loop) | #lang racket
(require racket/mpair)
(define (my-display x)
(cond ((and (mpair? x) (eq? (mcar x) 'primitive))
(display (get-list-head x 2)))
((and (pair? x) (eq? (car x) 'procedure))
(display (get-list-head x 3)))
(else (display x))))
(define (my-newline) (newline))
(define (simple-proc-obj proc-obj)
(if (mpair? proc-obj)
(get-list-head proc-obj 2)
(get-list-head proc-obj 3)))
(define (get-list-head lst n)
(if (= n 0)
'()
(if (mpair? lst)
(mcons (mcar lst) (get-list-head (mcdr lst) (- n 1)))
(cons (car lst) (get-list-head (cdr lst) (- n 1))))))
(define (get-mlist-head lst n)
(if (= n 0)
'()
(mcons (mcar lst) (get-mlist-head (mcdr lst) (- n 1)))))
(define (mymlist->list mlst)
(if (null? mlst)
'()
(if (mpair? mlst)
(let ((first (mcar mlst)))
(if (or (mpair? first) (pair? first))
(cons (mymlist->list first)
(mymlist->list (mcdr mlst)))
(cons first (mymlist->list (mcdr mlst)))))
(let ((first (car mlst)))
(if (or (mpair? first) (pair? first))
(cons (mymlist->list first)
(mymlist->list (cdr mlst)))
(cons first (mymlist->list (cdr mlst))))))))
(define (mylist->mlist lst)
(if (null? lst)
'()
(if (pair? lst)
(let ((first (car lst)))
(if (or (mpair? first) (pair? first))
(mcons (mylist->mlist first)
(mylist->mlist (cdr lst)))
(mcons first (mylist->mlist (cdr lst)))))
(let ((first (mcar lst)))
(if (or (mpair? first) (pair? first))
(mcons (mylist->mlist first)
(mylist->mlist (mcdr lst)))
(mcons first (mylist->mlist (mcdr lst))))))))
(define mcadr (lambda (x) (mcar (mcdr x))))
(define set-cdr! set-mcdr!)
(define set-car! set-mcar!)
(define (self-evaluating? exp)
(cond ((number? exp) true)
((string? exp) true)
(else false)))
(define (mtagged-list? exp tag)
(if (mpair? exp)
(eq? (mcar exp) tag)
false))
(define (tagged-list? exp tag)
(if (pair? exp)
(eq? (car exp) tag)
false))
(define (variable? exp) (symbol? exp))
(define (quoted? exp) (tagged-list? exp 'quote))
(define (text-of-quotation exp) (cadr exp))
(define (assignment? exp) (tagged-list? exp 'set!))
(define (assignment-variable exp) (cadr exp))
(define (assignment-value exp) (caddr exp))
(define (definition? exp) (tagged-list? exp 'define))
(define (if-fail? exp) (tagged-list? exp 'if-fail))
(define (all-answer? exp) (tagged-list? exp 'all-answer))
(define (let? exp) (tagged-list? exp 'let))
(define (let-body exp) (cddr exp))
(define (let-clauses exp) (cadr exp))
(define (let->combination exp)
(cons (make-lambda (map car (let-clauses exp))
(let-body exp)) (map cadr (let-clauses exp))))
(define (definition-variable exp)
(if (variable? (cadr exp))
(cadr exp)
(caadr exp)))
(define (definition-value exp)
(if (symbol? (cadr exp))
(caddr exp)
(make-lambda (cdadr exp)
(cddr exp))))
(define (lambda? exp) (tagged-list? exp 'lambda))
(define (lambda-parameters exp) (cadr exp))
(define (lambda-body exp) (cddr exp))
(define (make-lambda parameters body)
(cons 'lambda (cons parameters body)))
(define (if? exp) (tagged-list? exp 'if))
(define (if-predicate exp) (cadr exp))
(define (if-consequent exp) (caddr exp))
(define (if-alternative exp)
(if (null? (cdddr exp))
'false
(cadddr exp)))
(define (make-if predicate consequent alternative)
(list 'if predicate consequent alternative))
(define (begin? exp)
(tagged-list? exp 'begin))
(define (begin-actions exp) (cdr exp))
(define (last-exp? seq) (null? (cdr seq)))
(define (first-exp seq) (car seq))
(define (rest-exps seq) (cdr seq))
(define (make-begin seq)
(cons 'begin seq))
(define (sequence->exp seq)
(cond ((null? seq) seq)
((last-exp? seq) (first-exp seq))
(else (make-begin seq))))
(define (application? exp) (pair? exp))
(define (operator exp) (car exp))
(define (operands exp) (cdr exp))
(define (no-operands? ops) (null? ops))
(define (first-operand ops) (car ops))
(define (rest-operands ops) (cdr ops))
(define (cond? exp) (tagged-list? exp 'cond))
(define (cond-clauses exp) (cdr exp))
(define (cond-else-clause? clause)
(eq? (cond-predicate clause) 'else))
(define (cond-predicate clause) (car clause))
(define (cond-actions clause) (cdr clause))
(define (expand-clauses clauses)
(if (null? clauses)
'false
(let ((first (car clauses))
(rest (cdr clauses)))
(if (cond-else-clause? first)
(if (null? rest)
(sequence->exp (cond-actions first))
(error "ELSE clause isn't last -- COND->IF"
clauses))
(make-if (cond-predicate first)
(sequence->exp (cond-actions first))
(expand-clauses rest))))))
(define (cond->if exp)
(expand-clauses (cond-clauses exp)))
(define (true? x)
(not (eq? x false)))
(define (false? x)
(eq? x false))
(define (make-procedure parameters body env)
(list 'procedure parameters body env))
(define (compound-procedure? p)
(tagged-list? p 'procedure))
(define (procedure-parameters p)
(list->mlist (cadr p)))
(define (procedure-body p)
(caddr p))
(define (procedure-enviroment p)
(cadddr p))
(define (enclosing-environment env) (mcdr env))
(define (first-frame env) (mcar env))
(define the-empty-environment (mlist ))
(define (make-frame variables values)
(mcons variables values))
(define (frame-variables frame ) (mcar frame))
(define (frame-values frame) (mcdr frame))
(define (add-binding-to-frame! var val frame)
(set-car! frame (mcons var (mcar frame)))
(set-cdr! frame (mcons val (mcdr frame))))
(define (extend-environment vars vals base-env)
(if (= (mlength vars) (mlength vals))
(mcons (make-frame vars vals) base-env)
(if (< (mlength vars) (mlength vals))
(error "Too many arguments supplied" vars vals)
(error "Too few arguments supplied" vars vals))))
(define (lookup-variable-value var env)
(define (env-loop env)
(define (scan vars vals)
(cond ((null? vars)
(env-loop (enclosing-environment env)))
((eq? var (mcar vars))
(mcar vals))
(else (scan (mcdr vars) (mcdr vals)))))
(if (eq? env the-empty-environment)
(error "Unbound variable" var)
(let ((frame (first-frame env)))
(scan (frame-variables frame)
(frame-values frame)))))
(env-loop env))
(define (set-variable-value! var val env)
(define (env-loop env)
(define (scan vars vals)
(cond ((null? vars)
(env-loop (enclosing-environment env)))
((eq? var (mcar vars))
(set-car! vals val))
(else (scan (mcdr vars) (mcdr vals)))))
(if (eq? env the-empty-environment)
(error "Unbound variable --SET!" var)
(let ((frame (first-frame env)))
(scan (frame-variables frame)
(frame-values frame)))))
(env-loop env))
(define (define-variable! var val env)
(let ((frame (first-frame env)))
(define (scan vars vals)
(cond ((null? vars)
(add-binding-to-frame! var val frame))
((eq? var (mcar vars))
(set-car! vals val))
(else (scan (mcdr vars) (mcdr vals)))))
(scan (frame-variables frame)
(frame-values frame))))
(define (my-square x ) (* x x))
(define (apply-primitive-procedure proc args)
(apply
(primitive-implementation proc) (mymlist->list args)))
(define primitive-procedures
(mlist (mlist 'car car)
(mlist 'cdr cdr)
(mlist 'cons cons)
(mlist 'null? null?)
(mlist '+ +)
(mlist '* *)
(mlist '- -)
(mlist '/ /)
(mlist '< <)
(mlist '> >)
(mlist '= =)
(mlist 'number? number?)
(mlist 'pair? pair?)
(mlist 'not not)
(mlist 'remainder remainder)
(mlist 'length length)
(mlist 'sqrt sqrt)
(mlist 'list list)
(mlist 'symbol? symbol?)
(mlist 'eq? eq?)
(mlist 'cadr cadr)
(mlist 'append append)
(mlist 'display display)
(mlist 'newline newline)
(mlist 'not not)
(mlist 'void void)
(mlist 'my-square my-square)
))
(define (primitive-procedure-names)
(mmap mcar
primitive-procedures))
(define (primitive-procedure-objects)
(mmap (lambda (proc) (mlist 'primitive (mcadr proc)))
primitive-procedures))
(define (setup-environment )
(let ((initial-env
(extend-environment (primitive-procedure-names)
(primitive-procedure-objects)
the-empty-environment)))
(define-variable! 'true true initial-env)
(define-variable! 'false false initial-env)
initial-env))
(define (primitive-procedure? proc)
(mtagged-list? proc 'primitive))
(define (primitive-implementation proc) (mcadr proc))
(define (prompt-for-input string)
(newline) (newline) (display string) (newline))
(define (announce-output string)
(newline) (display string) (newline))
(define (user-print object)
(if (compound-procedure? object)
(display (list 'compound-procedure
(procedure-parameters object)
(procedure-body object)
'<procedure-env>))
(display object)))
(define (ambeval exp env succeed fail)
((analyze exp) env succeed fail))
(define (analyze exp)
(cond ((self-evaluating? exp)
(analyze-self-evaluating exp))
((null? exp) (lambda (env succeed fail) (succeed ((void)) fail)))
((quoted? exp) (analyze-quoted exp))
((variable? exp) (analyze-variable exp))
((assignment? exp) (analyze-assignment exp))
((definition? exp) (analyze-definition exp))
((if? exp) (analyze-if exp))
((if-fail? exp) (analyze-if-fail exp))
((all-answer? exp) (analyze-all-answer exp))
((lambda? exp) (analyze-lambda exp))
((begin? exp) (analyze-sequence (begin-actions exp)))
((cond? exp) (analyze (cond->if exp)))
((let? exp) (analyze (let->combination exp)))
((amb? exp) (analyze-amb exp))
((application? exp) (analyze-application exp))
(else
(error "Unknown expression type -- ANALYZE" exp))))
(define (analyze-self-evaluating exp)
(lambda (env succeed fail)
(succeed exp fail)))
(define (analyze-quoted exp)
(let ((qval (text-of-quotation exp)))
(lambda (env succeed fail)
(succeed qval fail))))
(define (analyze-variable exp)
(lambda (env succeed fail)
(succeed (lookup-variable-value exp env) fail)
))
(define (analyze-lambda exp)
(let ((vars (lambda-parameters exp))
(bproc (analyze-sequence (lambda-body exp))))
(lambda (env succeed fail)
(succeed (make-procedure vars bproc env) fail)
)))
(define (analyze-if exp)
(let ((pproc (analyze (if-predicate exp)))
(cproc (analyze (if-consequent exp)))
(aproc (analyze (if-alternative exp))))
(lambda (env succeed fail)
(pproc env
(lambda (pred-value fail2)
(if (true? pred-value)
(cproc env succeed fail2)
(aproc env succeed fail2)))
fail))))
(define (analyze-definition exp)
(let ((var (definition-variable exp))
(vproc (analyze (definition-value exp))))
(lambda (env succeed fail)
(vproc env
(lambda (val fail2)
(define-variable! var val env)
(succeed (void) fail2)
)
fail))))
(define (analyze-assignment exp)
(let ((var (assignment-variable exp))
(vproc (analyze (assignment-value exp))))
(lambda (env succeed fail)
(vproc env
(lambda (val fail2)
(let ((old-value
(lookup-variable-value var env)))
(set-variable-value! var val env)
(succeed (void)
(lambda ()
(set-variable-value! var
old-value
env)
(fail2)))))
fail))))
(define (analyze-sequence exps)
(define (sequentially a b)
(lambda (env succeed fail)
(a env
(lambda (a-value fail2)
(b env succeed fail2))
fail)))
(define (loop first-proc rest-procs)
(if (null? rest-procs)
first-proc
(loop (sequentially first-proc (car rest-procs))
(cdr rest-procs))))
(let ((procs (map analyze exps)))
(if (null? procs)
(error "Empty sequence -- ANALYZE")
(void))
(loop (car procs) (cdr procs))))
(define (analyze-application exp)
(let ((fproc (analyze (operator exp)))
(aprocs (map analyze (operands exp))))
(lambda (env succeed fail)
(fproc env
(lambda (proc fail2)
(get-args aprocs
env
(lambda (args fail3)
(execute-application
proc args succeed fail3))
fail2))
fail))))
(define (get-args aprocs env scd fail)
(if (null? aprocs)
(scd '() fail)
((car aprocs) env
(lambda (arg fail2)
(get-args (cdr aprocs)
env
(lambda (args fail3)
(scd (cons arg args)
fail3))
fail2))
fail)))
(define (execute-application proc args succeed fail)
(cond ((primitive-procedure? proc)
(let ((m (apply-primitive-procedure proc args)))
(succeed m fail)
))
((compound-procedure? proc)
((procedure-body proc)
(extend-environment (procedure-parameters proc)
(list->mlist args)
(procedure-environment proc))
succeed
fail))
(else
(error
"Unknown procedure type -- EXECUTE-APPLICATION"
proc))))
(define (amb? exp) (tagged-list? exp 'amb))
(define (amb-choices exp) (cdr exp))
(define (analyze-amb exp)
(let ((cprocs (map analyze (amb-choices exp))))
(lambda (env succeed fail)
(define (try-next choices)
(if (null? choices)
(fail)
((car choices) env
succeed
(lambda ()
(try-next (cdr choices))))))
(try-next cprocs))))
(define (analyze-if-fail exp)
(let ((first (analyze (cadr exp)))
(second (analyze (caddr exp))))
(lambda (env succeed fail)
(first env
(lambda (val fail2)
(first env succeed fail))
(lambda ()
(second env succeed fail))))))
(define (analyze-all-answer exp)
(let ((actual-exp (analyze (cadr exp))))
(lambda (env succeed fail)
(actual-exp env
(lambda (val next-alternative)
(display val)
(newline)
(next-alternative))
(lambda ()
(define (fail-loop)
(driver-loop (lambda ()
(glb-fail)
(fail-loop))))
(fail-loop))))))
(define (procedure-environment proc)
(car (cdr (cdr (cdr proc)))))
(define rq '(define (require p)
(if (not p)
(amb)
(void))))
(define glb-succeed
(lambda (val next)
(display "succeed,val = " ) (display val) (newline)
))
(define glb-fail
(lambda ()
(display "There are no more answers.") (newline)))
(define glb-env (setup-environment))
(ambeval rq glb-env (lambda (val fail) (void)) glb-fail)
(define (my-driver-loop)
(let ((input (read)))
(if (eq? input eof)
(void)
(begin (ambeval input glb-env
(lambda (val fail)
(if (eq? val (void))
(void)
(begin (display val) (newline))))
glb-fail)
(my-driver-loop)))))
(define (driver-loop try-again)
(let ((input (read)))
(if (eq? input eof)
(void)
(if (eq? input 'try-again)
(try-again)
(begin
(ambeval input glb-env
(lambda (val next-alternative)
(if (eq? val (void))
(driver-loop next-alternative)
(begin (display val)
(newline)
(driver-loop next-alternative))))
(lambda ()
(glb-fail)
(define (fail-loop)
(driver-loop (lambda ()
(glb-fail)
(fail-loop))))
(fail-loop))))))))
(driver-loop (void))
|
917bea59820db18f9cd20711d0b210897e1ccb3b120201045a7ff081e2cf2e4c | BrunoBonacci/mulog | cloudwatch_test.clj | (ns com.brunobonacci.mulog.publishers.cloudwatch.cloudwatch-test
(:require [com.brunobonacci.mulog :as u]
[com.brunobonacci.mulog.publishers.cloudwatch.test-publisher :as tp]
[midje.sweet :refer :all]))
(fact "publish to local cloudwatch logs service and assert the published message"
(tp/with-local-cloudwatch-publisher
(u/log ::hello :to "cloudwatch test message"))
=> (just
[(just
{:mulog/trace-id anything
:mulog/timestamp number?
:mulog/event-name "com.brunobonacci.mulog.publishers.cloudwatch.cloudwatch-test/hello",
:mulog/namespace "com.brunobonacci.mulog.publishers.cloudwatch.cloudwatch-test",
:to "cloudwatch test message"})]))
(fact "publish nested traces (events must be published in timestamp order)"
(->>
(tp/with-local-cloudwatch-publisher
(u/trace ::level1
[]
(Thread/sleep 1)
(u/trace ::level2
[]
(Thread/sleep 1)
(u/log ::level3 :to "cloudwatch test message"))))
(map :mulog/event-name))
=> ["com.brunobonacci.mulog.publishers.cloudwatch.cloudwatch-test/level1"
"com.brunobonacci.mulog.publishers.cloudwatch.cloudwatch-test/level2"
"com.brunobonacci.mulog.publishers.cloudwatch.cloudwatch-test/level3"])
| null | https://raw.githubusercontent.com/BrunoBonacci/mulog/e31f84ccf6d62d43c1c620ef5584722886e0d8a5/mulog-cloudwatch/test/com/brunobonacci/mulog/publishers/cloudwatch/cloudwatch_test.clj | clojure | (ns com.brunobonacci.mulog.publishers.cloudwatch.cloudwatch-test
(:require [com.brunobonacci.mulog :as u]
[com.brunobonacci.mulog.publishers.cloudwatch.test-publisher :as tp]
[midje.sweet :refer :all]))
(fact "publish to local cloudwatch logs service and assert the published message"
(tp/with-local-cloudwatch-publisher
(u/log ::hello :to "cloudwatch test message"))
=> (just
[(just
{:mulog/trace-id anything
:mulog/timestamp number?
:mulog/event-name "com.brunobonacci.mulog.publishers.cloudwatch.cloudwatch-test/hello",
:mulog/namespace "com.brunobonacci.mulog.publishers.cloudwatch.cloudwatch-test",
:to "cloudwatch test message"})]))
(fact "publish nested traces (events must be published in timestamp order)"
(->>
(tp/with-local-cloudwatch-publisher
(u/trace ::level1
[]
(Thread/sleep 1)
(u/trace ::level2
[]
(Thread/sleep 1)
(u/log ::level3 :to "cloudwatch test message"))))
(map :mulog/event-name))
=> ["com.brunobonacci.mulog.publishers.cloudwatch.cloudwatch-test/level1"
"com.brunobonacci.mulog.publishers.cloudwatch.cloudwatch-test/level2"
"com.brunobonacci.mulog.publishers.cloudwatch.cloudwatch-test/level3"])
|
|
5a69e7121babf72ac6ef02adb4830e59b2b1b546d899283225101271913cdffd | vonzhou/LearnYouHaskellForGreatGood | foo2.hs | foo :: Maybe String
foo = do
x <- Just 3
y <- Just "!"
Just (show x ++ y)
marySue :: Maybe Bool
marySue = do
x <- Just 9
Just (x > 8)
| null | https://raw.githubusercontent.com/vonzhou/LearnYouHaskellForGreatGood/439d848deac53ef6da6df433078b7f1dcf54d18d/chapter13/foo2.hs | haskell | foo :: Maybe String
foo = do
x <- Just 3
y <- Just "!"
Just (show x ++ y)
marySue :: Maybe Bool
marySue = do
x <- Just 9
Just (x > 8)
|
|
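-- Editor's note (not part of the original file): a minimal sketch of how the two
-- do-blocks above desugar into explicit (>>=) chains on Maybe. The names foo' and
-- marySue' are hypothetical and added here only for comparison.
foo' :: Maybe String
foo' = Just 3 >>= \x -> Just "!" >>= \y -> Just (show x ++ y)

marySue' :: Maybe Bool
marySue' = Just 9 >>= \x -> Just (x > 8)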
947107ae235aea6c2d7fd6f4d750b922cf0f6c7ab4ffc40105ff27fe8c0ab748 | timbod7/haskell-chart | Axis.hs | -----------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.Chart.Axis
-- Copyright : (c) 2006
-- License : BSD-style (see chart/COPYRIGHT)
--
-- Code to calculate and render axes.
--
module Graphics.Rendering.Chart.Axis(
module Graphics.Rendering.Chart.Axis.Types,
module Graphics.Rendering.Chart.Axis.Floating,
module Graphics.Rendering.Chart.Axis.Int,
module Graphics.Rendering.Chart.Axis.Time,
module Graphics.Rendering.Chart.Axis.Unit,
module Graphics.Rendering.Chart.Axis.Indexed,
) where
import Graphics.Rendering.Chart.Axis.Types
import Graphics.Rendering.Chart.Axis.Floating
import Graphics.Rendering.Chart.Axis.Int
import Graphics.Rendering.Chart.Axis.Time
import Graphics.Rendering.Chart.Axis.Unit
import Graphics.Rendering.Chart.Axis.Indexed
| null | https://raw.githubusercontent.com/timbod7/haskell-chart/8c5a823652ea1b4ec2adbced4a92a8161065ead6/chart/Graphics/Rendering/Chart/Axis.hs | haskell | ---------------------------------------------------------------------------
|
Module : Graphics.Rendering.Chart.Axis
License : BSD-style (see chart/COPYRIGHT)
Code to calculate and render axes.
| Copyright : ( c ) 2006
module Graphics.Rendering.Chart.Axis(
module Graphics.Rendering.Chart.Axis.Types,
module Graphics.Rendering.Chart.Axis.Floating,
module Graphics.Rendering.Chart.Axis.Int,
module Graphics.Rendering.Chart.Axis.Time,
module Graphics.Rendering.Chart.Axis.Unit,
module Graphics.Rendering.Chart.Axis.Indexed,
) where
import Graphics.Rendering.Chart.Axis.Types
import Graphics.Rendering.Chart.Axis.Floating
import Graphics.Rendering.Chart.Axis.Int
import Graphics.Rendering.Chart.Axis.Time
import Graphics.Rendering.Chart.Axis.Unit
import Graphics.Rendering.Chart.Axis.Indexed
|
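-- Editor's note (not part of the original file): Axis above is a pure re-export
-- module, so the single umbrella import below brings the same names into scope as
-- importing the six submodules one by one; the module name AxisImportSketch is
-- hypothetical.
module AxisImportSketch (module Graphics.Rendering.Chart.Axis) where

import Graphics.Rendering.Chart.Axis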
b3aa7215696e9d1f27120e8baa7f65ccf512316a8bc55f1a17ffff71119c4321 | spurious/sagittarius-scheme-mirror | basic-hash-tables.scm | -*- mode : scheme ; coding : utf-8 ; -*-
;;;
;;; SRFI-69 Basic hash tables
;;;
;;; Copyright (c) 2010-2015 < >
;;;
;;; Redistribution and use in source and binary forms, with or without
;;; modification, are permitted provided that the following conditions
;;; are met:
;;;
;;; 1. Redistributions of source code must retain the above copyright
;;; notice, this list of conditions and the following disclaimer.
;;;
;;; 2. Redistributions in binary form must reproduce the above copyright
;;; notice, this list of conditions and the following disclaimer in the
;;; documentation and/or other materials provided with the distribution.
;;;
;;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
;;; "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
;;; LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
;;; A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
;;; OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
;;; SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
;;; TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
;;; PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
;;; LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
;;; NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
;;; SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
;;;
(library (srfi :69 basic-hash-tables)
(export make-hash-table (rename (hashtable? hash-table?))
alist->hash-table
(rename
(hashtable-equivalence-function hash-table-equivalence-function)
(hashtable-hash-function hash-table-hash-function))
hash-table-ref hash-table-ref/default
(rename (hashtable-set! hash-table-set!)
(hashtable-delete! hash-table-delete!)
(hashtable-contains? hash-table-exists?))
hash-table-update!
(rename (hashtable-update! hash-table-update!/default))
(rename (hashtable-size hash-table-size)
(hashtable-keys-list hash-table-keys)
(hashtable-values-list hash-table-values))
hash-table-walk hash-table-fold
(rename (hashtable->alist hash-table->alist))
hash-table-copy hash-table-merge!
(rename (equal-hash hash)
(eq-hash hash-by-identity))
string-hash string-ci-hash)
(import (rnrs)
;; make-string-hashtable and make-equal-hashtable
(sagittarius)
(sagittarius control)
(only (util hashtables) hashtable->alist hashtable-for-each
hashtable-fold))
(define make-hash-table
(case-lambda
((eql? hash) (make-hashtable hash eql?))
((eql?)
(cond ((eq? eql? eq?) (make-eq-hashtable))
((eq? eql? eqv?) (make-eqv-hashtable))
((eq? eql? equal?) (make-equal-hashtable))
((eq? eql? string=?) (make-string-hashtable))
((eq? eql? string-ci=?) (make-hashtable string-ci=? string-ci-hash))
(else
(assertion-violation 'make-hash-table
"unknown equivalent procedure" eql?))))
(() (make-equal-hashtable))))
(define no-entry (list 'no-entry))
;; a bit different from (util hashtables)
;; maybe it's better to adjust that one to this one
(define (alist->hash-table alist . opts)
(rlet1 ht (apply make-hash-table opts)
(for-each (lambda (kv)
(hashtable-update!
ht
(car kv)
(lambda (x) (if (eq? no-entry x) (cdr kv) x))
no-entry)) alist)))
(define (failure-thunk who key)
(lambda () (error who "no association for key" key)))
(define hash-table-ref
(case-lambda
((ht key thunk)
(let ((val (hashtable-ref ht key no-entry)))
(if (eq? val no-entry)
(thunk)
val)))
((ht key)
(hash-table-ref ht key (failure-thunk 'hash-table-ref key)))))
;; builtin hashtable-ref allow not to have default
(define (hash-table-ref/default ht key default)
(hashtable-ref ht key default))
(define hash-table-update!
(case-lambda
((ht key proc thunk)
(hashtable-update! ht key
(lambda (v)
(if (eq? v no-entry)
(thunk)
(proc v)))
no-entry))
((ht key proc)
(hash-table-update! ht key proc (failure-thunk 'hash-table-update! key)))))
(define (wrong-type-argument-message expect got . nth)
(if (null? nth)
(format "expected ~a, but got ~a" expect got)
(format "expected ~a, but got ~a, as argument ~a" expect got
(car nth))))
(define (hash-table-walk table proc) (hashtable-for-each proc table))
(define (hash-table-fold table kons knil) (hashtable-fold kons table knil))
(define (hash-table-copy ht) (hashtable-copy ht #t))
(define (hash-table-merge! ht1 ht2)
(hashtable-for-each (lambda (k v) (hashtable-set! ht1 k v)) ht2)
ht1)
)
| null | https://raw.githubusercontent.com/spurious/sagittarius-scheme-mirror/53f104188934109227c01b1e9a9af5312f9ce997/sitelib/srfi/%253a69/basic-hash-tables.scm | scheme | coding : utf-8 ; -*-
SRFI-69 Basic hash tables
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
make-string-hashtable and make-equal-hashtable
a bit different from (util hashtables)
builtin hashtable-ref allow not to have default | Copyright ( c ) 2010 - 2015 < >
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED
LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING
(library (srfi :69 basic-hash-tables)
(export make-hash-table (rename (hashtable? hash-table?))
alist->hash-table
(rename
(hashtable-equivalence-function hash-table-equivalence-function)
(hashtable-hash-function hash-table-hash-function))
hash-table-ref hash-table-ref/default
(rename (hashtable-set! hash-table-set!)
(hashtable-delete! hash-table-delete!)
(hashtable-contains? hash-table-exists?))
hash-table-update!
(rename (hashtable-update! hash-table-update!/default))
(rename (hashtable-size hash-table-size)
(hashtable-keys-list hash-table-keys)
(hashtable-values-list hash-table-values))
hash-table-walk hash-table-fold
(rename (hashtable->alist hash-table->alist))
hash-table-copy hash-table-merge!
(rename (equal-hash hash)
(eq-hash hash-by-identity))
string-hash string-ci-hash)
(import (rnrs)
(sagittarius)
(sagittarius control)
(only (util hashtables) hashtable->alist hashtable-for-each
hashtable-fold))
(define make-hash-table
(case-lambda
((eql? hash) (make-hashtable hash eql?))
((eql?)
(cond ((eq? eql? eq?) (make-eq-hashtable))
((eq? eql? eqv?) (make-eqv-hashtable))
((eq? eql? equal?) (make-equal-hashtable))
((eq? eql? string=?) (make-string-hashtable))
((eq? eql? string-ci=?) (make-hashtable string-ci=? string-ci-hash))
(else
(assertion-violation 'make-hash-table
"unknown equivalent procedure" eql?))))
(() (make-equal-hashtable))))
(define no-entry (list 'no-entry))
;; maybe it's better to adjust that one to this one
(define (alist->hash-table alist . opts)
(rlet1 ht (apply make-hash-table opts)
(for-each (lambda (kv)
(hashtable-update!
ht
(car kv)
(lambda (x) (if (eq? no-entry x) (cdr kv) x))
no-entry)) alist)))
(define (failure-thunk who key)
(lambda () (error who "no association for key" key)))
(define hash-table-ref
(case-lambda
((ht key thunk)
(let ((val (hashtable-ref ht key no-entry)))
(if (eq? val no-entry)
(thunk)
val)))
((ht key)
(hash-table-ref ht key (failure-thunk 'hash-table-ref key)))))
(define (hash-table-ref/default ht key default)
(hashtable-ref ht key default))
(define hash-table-update!
(case-lambda
((ht key proc thunk)
(hashtable-update! ht key
(lambda (v)
(if (eq? v no-entry)
(thunk)
(proc v)))
no-entry))
((ht key proc)
(hash-table-update! ht key proc (failure-thunk 'hash-table-update! key)))))
(define (wrong-type-argument-message expect got . nth)
(if (null? nth)
(format "expected ~a, but got ~a" expect got)
(format "expected ~a, but got ~a, as argument ~a" expect got
(car nth))))
(define (hash-table-walk table proc) (hashtable-for-each proc table))
(define (hash-table-fold table kons knil) (hashtable-fold kons table knil))
(define (hash-table-copy ht) (hashtable-copy ht #t))
(define (hash-table-merge! ht1 ht2)
(hashtable-for-each (lambda (k v) (hashtable-set! ht1 k v)) ht2)
ht1)
)
|
20491f36fb16bf830d06529b8f914ff7a11bc1c7d78369ea34eb403dc8e00326 | carl-eastlund/dracula | hash.rkt | #lang scheme
(require "define.ss" (for-syntax syntax/parse))
(define-if-unbound (hash-has-key? table key)
(let/ec return
(hash-ref table key (lambda () (return #f)))
#t))
(define-if-unbound (hash-equal? table)
(and (hash? table)
(not (hash-eq? table))
(not (hash-eqv? table))))
(define (hash-ref/check table key)
(hash-ref table key))
(define (hash-ref/identity table key)
(hash-ref table key (lambda () key)))
(define (hash-ref/default table key default)
(hash-ref table key (lambda () default)))
(define (hash-ref/failure table key failure)
(hash-ref table key (lambda () (failure))))
(define (hash-domain table)
(for/list ([i (in-hash-keys table)]) i))
(define (hash-range table)
(for/list ([i (in-hash-values table)]) i))
(define ((hash-duplicate-error name) key value1 value2)
(error name "duplicate values for key ~e: ~e and ~e" key value1 value2))
(define (hash-union
#:combine [combine #f]
#:combine/key [combine/key
(if combine
(lambda (k x y) (combine x y))
(hash-duplicate-error 'hash-union))]
one . rest)
(for*/fold ([one one]) ([two (in-list rest)] [(k v) (in-hash two)])
(hash-set one k (if (hash-has-key? one k)
(combine/key k (hash-ref one k) v)
v))))
(define (hash-union!
#:combine [combine #f]
#:combine/key [combine/key
(if combine
(lambda (k x y) (combine x y))
(hash-duplicate-error 'hash-union))]
one . rest)
(for* ([two (in-list rest)] [(k v) (in-hash two)])
(hash-set! one k (if (hash-has-key? one k)
(combine/key k (hash-ref one k) v)
v))))
(define-syntaxes [ hash hash! ]
(let ()
(define-syntax-class key/value
#:attributes [key value]
(pattern [key:expr value:expr]))
(define-splicing-syntax-class immutable-hash-type
#:attributes [constructor]
(pattern (~seq #:eqv) #:attr constructor #'make-immutable-hasheqv)
(pattern (~seq #:eq) #:attr constructor #'make-immutable-hasheq)
(pattern (~seq (~optional #:equal) ...)
#:attr constructor #'make-immutable-hash))
(define-splicing-syntax-class mutable-hash-type
#:attributes [constructor]
(pattern (~seq #:base constructor:expr))
(pattern (~seq (~or (~once #:eqv) (~once #:weak)) ...)
#:attr constructor #'(make-weak-hasheqv))
(pattern (~seq (~or (~once #:eq) (~once #:weak)) ...)
#:attr constructor #'(make-weak-hasheq))
(pattern (~seq (~or (~optional #:equal) (~once #:weak)) ...)
#:attr constructor #'(make-weak-hash))
(pattern (~seq #:eqv) #:attr constructor #'(make-hasheqv))
(pattern (~seq #:eq) #:attr constructor #'(make-hasheq))
(pattern (~seq (~optional #:equal) ...) #:attr constructor #'(make-hash)))
(define (parse-hash stx)
(syntax-parse stx
[(_ (~seq type:immutable-hash-type) elem:key/value ...)
(syntax/loc stx
(type.constructor (list (cons elem.key elem.value) ...)))]
[(_ #:base h:expr elem:key/value ...)
(syntax/loc stx
(for/fold
([table h])
([key (in-list (list elem.key ...))]
[value (in-list (list elem.value ...))])
(hash-set table key value)))]))
(define (parse-hash! stx)
(syntax-parse stx
[(_ (~seq type:mutable-hash-type) elem:key/value ...)
(syntax/loc stx
(let ([table type.constructor])
(for ([key (in-list (list elem.key ...))]
[value (in-list (list elem.value ...))])
(hash-set! table key value))
table))]))
(values parse-hash parse-hash!)))
(provide hash hash! hash-has-key? hash-equal?)
(provide/contract
[hash-ref/identity (-> hash? any/c any/c)]
[hash-ref/default (-> hash? any/c any/c any/c)]
[hash-ref/failure (-> hash? any/c (-> any/c) any/c)]
[hash-ref/check
(->d ([table hash?] [key any/c]) ()
#:pre-cond (hash-has-key? table key)
[_ any/c])]
[hash-domain (-> hash? list?)]
[hash-range (-> hash? list?)]
[hash-union (->* [(and/c hash? immutable?)]
[#:combine
(-> any/c any/c any/c)
#:combine/key
(-> any/c any/c any/c any/c)]
#:rest (listof hash?)
(and/c hash? immutable?))]
[hash-union! (->* [(and/c hash? (not/c immutable?))]
[#:combine
(-> any/c any/c any/c)
#:combine/key
(-> any/c any/c any/c any/c)]
#:rest (listof hash?)
void?)])
| null | https://raw.githubusercontent.com/carl-eastlund/dracula/a937f4b40463779246e3544e4021c53744a33847/private/scheme/hash.rkt | racket | #lang scheme
(require "define.ss" (for-syntax syntax/parse))
(define-if-unbound (hash-has-key? table key)
(let/ec return
(hash-ref table key (lambda () (return #f)))
#t))
(define-if-unbound (hash-equal? table)
(and (hash? table)
(not (hash-eq? table))
(not (hash-eqv? table))))
(define (hash-ref/check table key)
(hash-ref table key))
(define (hash-ref/identity table key)
(hash-ref table key (lambda () key)))
(define (hash-ref/default table key default)
(hash-ref table key (lambda () default)))
(define (hash-ref/failure table key failure)
(hash-ref table key (lambda () (failure))))
(define (hash-domain table)
(for/list ([i (in-hash-keys table)]) i))
(define (hash-range table)
(for/list ([i (in-hash-values table)]) i))
(define ((hash-duplicate-error name) key value1 value2)
(error name "duplicate values for key ~e: ~e and ~e" key value1 value2))
(define (hash-union
#:combine [combine #f]
#:combine/key [combine/key
(if combine
(lambda (k x y) (combine x y))
(hash-duplicate-error 'hash-union))]
one . rest)
(for*/fold ([one one]) ([two (in-list rest)] [(k v) (in-hash two)])
(hash-set one k (if (hash-has-key? one k)
(combine/key k (hash-ref one k) v)
v))))
(define (hash-union!
#:combine [combine #f]
#:combine/key [combine/key
(if combine
(lambda (k x y) (combine x y))
(hash-duplicate-error 'hash-union))]
one . rest)
(for* ([two (in-list rest)] [(k v) (in-hash two)])
(hash-set! one k (if (hash-has-key? one k)
(combine/key k (hash-ref one k) v)
v))))
(define-syntaxes [ hash hash! ]
(let ()
(define-syntax-class key/value
#:attributes [key value]
(pattern [key:expr value:expr]))
(define-splicing-syntax-class immutable-hash-type
#:attributes [constructor]
(pattern (~seq #:eqv) #:attr constructor #'make-immutable-hasheqv)
(pattern (~seq #:eq) #:attr constructor #'make-immutable-hasheq)
(pattern (~seq (~optional #:equal) ...)
#:attr constructor #'make-immutable-hash))
(define-splicing-syntax-class mutable-hash-type
#:attributes [constructor]
(pattern (~seq #:base constructor:expr))
(pattern (~seq (~or (~once #:eqv) (~once #:weak)) ...)
#:attr constructor #'(make-weak-hasheqv))
(pattern (~seq (~or (~once #:eq) (~once #:weak)) ...)
#:attr constructor #'(make-weak-hasheq))
(pattern (~seq (~or (~optional #:equal) (~once #:weak)) ...)
#:attr constructor #'(make-weak-hash))
(pattern (~seq #:eqv) #:attr constructor #'(make-hasheqv))
(pattern (~seq #:eq) #:attr constructor #'(make-hasheq))
(pattern (~seq (~optional #:equal) ...) #:attr constructor #'(make-hash)))
(define (parse-hash stx)
(syntax-parse stx
[(_ (~seq type:immutable-hash-type) elem:key/value ...)
(syntax/loc stx
(type.constructor (list (cons elem.key elem.value) ...)))]
[(_ #:base h:expr elem:key/value ...)
(syntax/loc stx
(for/fold
([table h])
([key (in-list (list elem.key ...))]
[value (in-list (list elem.value ...))])
(hash-set table key value)))]))
(define (parse-hash! stx)
(syntax-parse stx
[(_ (~seq type:mutable-hash-type) elem:key/value ...)
(syntax/loc stx
(let ([table type.constructor])
(for ([key (in-list (list elem.key ...))]
[value (in-list (list elem.value ...))])
(hash-set! table key value))
table))]))
(values parse-hash parse-hash!)))
(provide hash hash! hash-has-key? hash-equal?)
(provide/contract
[hash-ref/identity (-> hash? any/c any/c)]
[hash-ref/default (-> hash? any/c any/c any/c)]
[hash-ref/failure (-> hash? any/c (-> any/c) any/c)]
[hash-ref/check
(->d ([table hash?] [key any/c]) ()
#:pre-cond (hash-has-key? table key)
[_ any/c])]
[hash-domain (-> hash? list?)]
[hash-range (-> hash? list?)]
[hash-union (->* [(and/c hash? immutable?)]
[#:combine
(-> any/c any/c any/c)
#:combine/key
(-> any/c any/c any/c any/c)]
#:rest (listof hash?)
(and/c hash? immutable?))]
[hash-union! (->* [(and/c hash? (not/c immutable?))]
[#:combine
(-> any/c any/c any/c)
#:combine/key
(-> any/c any/c any/c any/c)]
#:rest (listof hash?)
void?)])
|
|
1efe18ff6b602c0adc572718bd1cd50ce77d6ed4764f9ae64f804f1030f51e4d | wireapp/saml2-web-sso | Error.hs | # LANGUAGE GeneralizedNewtypeDeriving #
{-# LANGUAGE OverloadedStrings #-}
module SAML2.WebSSO.Error where
import Data.String.Conversions
import Data.Void (Void, absurd)
import Servant.Server
data Error err
= UnknownIdP LT
| Forbidden LT
| BadSamlResponseBase64Error LT
| BadSamlResponseXmlError LT
| BadSamlResponseSamlError LT
| BadSamlResponseFormFieldMissing
| BadSamlResponseIssuerMissing
| BadSamlResponseNoAssertions
| BadSamlResponseAssertionWithoutID
| BadSamlResponseInvalidSignature LT
| BadServerConfig LT
| InvalidCert LT
| UnknownError
| CustomServant ServerError
| CustomError err
deriving (Eq, Show)
type SimpleError = Error Void
toServerError :: SimpleError -> ServerError
toServerError (UnknownIdP msg) = err404 {errBody = "Unknown IdP: " <> cs msg}
toServerError (Forbidden msg) = err403 {errBody = cs msg}
-- (this should probably be 401, not 403, but according to the standard we would also need to add
-- a WWW-Authenticate header if we do that, and we are using saml, not basic auth.
-- #4xx_Client_errors)
toServerError (BadSamlResponseBase64Error msg) = err400 {errBody = "Bad response: base64 error: " <> cs msg}
toServerError (BadSamlResponseXmlError msg) = err400 {errBody = "Bad response: xml parse error: " <> cs msg}
toServerError (BadSamlResponseSamlError msg) = err400 {errBody = "Bad response: saml parse error: " <> cs msg}
toServerError BadSamlResponseFormFieldMissing = err400 {errBody = "Bad response: SAMLResponse form field missing from HTTP body"}
toServerError BadSamlResponseIssuerMissing = err400 {errBody = "Bad response: no Issuer in AuthnResponse"}
toServerError BadSamlResponseNoAssertions = err400 {errBody = "Bad response: no assertions in AuthnResponse"}
toServerError BadSamlResponseAssertionWithoutID = err400 {errBody = "Bad response: assertion without ID"}
toServerError (BadSamlResponseInvalidSignature msg) = err400 {errBody = cs msg}
toServerError (InvalidCert msg) = err400 {errBody = "Invalid certificate: " <> cs msg}
toServerError (BadServerConfig msg) = err400 {errBody = "Invalid server config: " <> cs msg}
toServerError UnknownError = err500 {errBody = "Internal server error. Please consult the logs."}
toServerError (CustomServant err) = err
toServerError (CustomError avoid) = absurd avoid
| null | https://raw.githubusercontent.com/wireapp/saml2-web-sso/ac88b934bb4a91d4d4bb90c620277188e4087043/src/SAML2/WebSSO/Error.hs | haskell | # LANGUAGE OverloadedStrings #
#4xx_Client_errors) | # LANGUAGE GeneralizedNewtypeDeriving #
module SAML2.WebSSO.Error where
import Data.String.Conversions
import Data.Void (Void, absurd)
import Servant.Server
data Error err
= UnknownIdP LT
| Forbidden LT
| BadSamlResponseBase64Error LT
| BadSamlResponseXmlError LT
| BadSamlResponseSamlError LT
| BadSamlResponseFormFieldMissing
| BadSamlResponseIssuerMissing
| BadSamlResponseNoAssertions
| BadSamlResponseAssertionWithoutID
| BadSamlResponseInvalidSignature LT
| BadServerConfig LT
| InvalidCert LT
| UnknownError
| CustomServant ServerError
| CustomError err
deriving (Eq, Show)
type SimpleError = Error Void
toServerError :: SimpleError -> ServerError
toServerError (UnknownIdP msg) = err404 {errBody = "Unknown IdP: " <> cs msg}
toServerError (Forbidden msg) = err403 {errBody = cs msg}
-- (this should probably be 401, not 403, but according to the standard we would also need to add
-- a WWW-Authenticate header if we do that, and we are using saml, not basic auth.
toServerError (BadSamlResponseBase64Error msg) = err400 {errBody = "Bad response: base64 error: " <> cs msg}
toServerError (BadSamlResponseXmlError msg) = err400 {errBody = "Bad response: xml parse error: " <> cs msg}
toServerError (BadSamlResponseSamlError msg) = err400 {errBody = "Bad response: saml parse error: " <> cs msg}
toServerError BadSamlResponseFormFieldMissing = err400 {errBody = "Bad response: SAMLResponse form field missing from HTTP body"}
toServerError BadSamlResponseIssuerMissing = err400 {errBody = "Bad response: no Issuer in AuthnResponse"}
toServerError BadSamlResponseNoAssertions = err400 {errBody = "Bad response: no assertions in AuthnResponse"}
toServerError BadSamlResponseAssertionWithoutID = err400 {errBody = "Bad response: assertion without ID"}
toServerError (BadSamlResponseInvalidSignature msg) = err400 {errBody = cs msg}
toServerError (InvalidCert msg) = err400 {errBody = "Invalid certificate: " <> cs msg}
toServerError (BadServerConfig msg) = err400 {errBody = "Invalid server config: " <> cs msg}
toServerError UnknownError = err500 {errBody = "Internal server error. Please consult the logs."}
toServerError (CustomServant err) = err
toServerError (CustomError avoid) = absurd avoid
|
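-- Editor's note (not part of the original module): a hedged sketch of how a
-- Servant handler might surface a SimpleError by converting it with toServerError
-- and throwing the result. The handler name denyExample and its error value are
-- hypothetical; only Forbidden, SimpleError and toServerError come from the module
-- above.
{-# LANGUAGE OverloadedStrings #-}
import Control.Monad.Error.Class (throwError)
import Servant.Server (Handler)
import SAML2.WebSSO.Error

denyExample :: Handler ()
denyExample = throwError (toServerError (Forbidden "access denied" :: SimpleError))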
6a2a8596cfd5ad5ccc422bfa5d545ddbb776b643cad5842288e0762407a49be4 | qitab/qmynd | command-initialize-database.lisp | ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; ;;;
;;; Free Software published under an MIT-like license. See LICENSE ;;;
;;; ;;;
;;; Copyright (c) 2012-2013 Google, Inc. All rights reserved. ;;;
;;; ;;;
;;; Original author: ;;;
;;; ;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(in-package :qmynd-impl)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; 15.6.3 command-initialize-database -- change the default schema
;; We don't actually receive this packet as a client, but it looks like this.
;; (define-packet command-initialize-database
;; ((tag :mysql-type (integer 1) :value +mysql-command-initialize-database+ :transient t :bind nil)
;; (schema-name :mysql-type (string :eof))))
;; Returns OK or ERR packet
(defun send-command-initialize-database (schema-name)
(mysql-command-init +mysql-command-initialize-database+)
(mysql-write-packet
(flexi-streams:with-output-to-sequence (s)
(write-byte +mysql-command-initialize-database+ s)
(write-sequence (babel:string-to-octets schema-name) s)))
(prog1
(parse-response (mysql-read-packet))
(setf (mysql-connection-default-schema *mysql-connection*) schema-name)))
| null | https://raw.githubusercontent.com/qitab/qmynd/7e56daf73f0ed5f49a931c01af75fb874bcf3445/src/mysql-protocol/text-protocol/command-initialize-database.lisp | lisp |
;;;
; ;
;;;
; ;
;;;
; ;
;;;
15.6.3 command-initialize-database -- change the default schema
We don't actually receive this packet as a client, but it looks like this.
(define-packet command-initialize-database
(schema-name :mysql-type (string :eof)))) |
(in-package :qmynd-impl)
;; ((tag :mysql-type (integer 1) :value +mysql-command-initialize-database+ :transient t :bind nil)
;; Returns OK or ERR packet
(defun send-command-initialize-database (schema-name)
(mysql-command-init +mysql-command-initialize-database+)
(mysql-write-packet
(flexi-streams:with-output-to-sequence (s)
(write-byte +mysql-command-initialize-database+ s)
(write-sequence (babel:string-to-octets schema-name) s)))
(prog1
(parse-response (mysql-read-packet))
(setf (mysql-connection-default-schema *mysql-connection*) schema-name)))
|
59242ec0356df563c31c8ca31c4b637d086466acbd808df241e067de75e5e9b2 | HunterYIboHu/htdp2-solution | key-event-4.3.rkt | The first three lines of this file were inserted by . They record metadata
;; about the language level of this file in a form that our tools can easily process.
#reader(lib "htdp-beginner-reader.ss" "lang")((modname key-event-4.3) (read-case-sensitive #t) (teachpacks ((lib "image.rkt" "teachpack" "2htdp") (lib "universe.rkt" "teachpack" "2htdp"))) (htdp-settings #(#t write repeating-decimal #f #t none #f ((lib "image.rkt" "teachpack" "2htdp") (lib "universe.rkt" "teachpack" "2htdp")) #f)))
; constants
(define RADIUS 10)
(define BALL
(circle RADIUS "solid" "red"))
(define WIDTH-OF-WORLD (* 20 RADIUS))
(define HEIGHT-OF-WORLD (/ WIDTH-OF-WORLD 2))
(define BACKGROUND
(empty-scene WIDTH-OF-WORLD
HEIGHT-OF-WORLD))
(define Y-BALL (- HEIGHT-OF-WORLD RADIUS))
; Position is a Number
; interpretation distance between the left margin and the ball
; Position KeyEvent -> Position
; computes the next location of the ball
(check-expect (keh 13 "left") 8)
(check-expect (keh 13 "right") 18)
(check-expect (keh 13 "a") 13)
(define (keh p k)
(cond [(string=? "left" k) (- p 5)]
[(string=? "right" k) (+ p 5)]
[else p]))
; Position -> Position
; if the Position is over right edge, then reset it to 0;
; else if the Position is over left edge, then reset it to R-EDGE
;(define (tock p)
; ... p ...)
; Position -> Image
; render the position to the BALL place in BACKGROUND
(define (render p)
(place-image BALL
p Y-BALL
BACKGROUND))
; launch
(define (roll-ball p)
(big-bang p
[on-key keh]
[to-draw render]))
(roll-ball 0)
| null | https://raw.githubusercontent.com/HunterYIboHu/htdp2-solution/6182b4c2ef650ac7059f3c143f639d09cd708516/Chapter1/Section4/key-event-4.3.rkt | racket | about the language level of this file in a form that our tools can easily process.
constants
Position is a Number
interpretation distance between the left margin and the ball
computes the next location of the ball
Position -> Position
(define (tock p)
... p ...)
Position -> Image
render the position to the BALL place in BACKGROUND
launch | The first three lines of this file were inserted by . They record metadata
#reader(lib "htdp-beginner-reader.ss" "lang")((modname key-event-4.3) (read-case-sensitive #t) (teachpacks ((lib "image.rkt" "teachpack" "2htdp") (lib "universe.rkt" "teachpack" "2htdp"))) (htdp-settings #(#t write repeating-decimal #f #t none #f ((lib "image.rkt" "teachpack" "2htdp") (lib "universe.rkt" "teachpack" "2htdp")) #f)))
(define RADIUS 10)
(define BALL
(circle RADIUS "solid" "red"))
(define WIDTH-OF-WORLD (* 20 RADIUS))
(define HEIGHT-OF-WORLD (/ WIDTH-OF-WORLD 2))
(define BACKGROUND
(empty-scene WIDTH-OF-WORLD
HEIGHT-OF-WORLD))
(define Y-BALL (- HEIGHT-OF-WORLD RADIUS))
Position KeyEvent - > Position
(check-expect (keh 13 "left") 8)
(check-expect (keh 13 "right") 18)
(check-expect (keh 13 "a") 13)
(define (keh p k)
(cond [(string=? "left" k) (- p 5)]
[(string=? "right" k) (+ p 5)]
[else p]))
else if the Position is over left edge , then reset it to R - EDGE
(define (render p)
(place-image BALL
p Y-BALL
BACKGROUND))
(define (roll-ball p)
(big-bang p
[on-key keh]
[to-draw render]))
(roll-ball 0)
|
3a706a74e8e462001f6d657be6c5027285d3d971b14cfad5b54a4391b9d973e2 | plewto/Cadejo | overwrite_warning.clj | (ns cadejo.ui.util.overwrite-warning
"Pops up confirmation dialog prior to overwriting existing file.
Overwrite warnings may be disabled by config"
(:require [cadejo.util.path :as path])
(:require [cadejo.config :as config])
(:use [seesaw.core]))
(defn overwrite-warning
([parent file-type filename]
(if (or (not (config/warn-on-file-overwrite))
(not (path/exists filename)))
true
(let [selection* (atom false)
msg (format "Overwrite %s file '%s'" file-type filename)
yes-fn (fn [d]
(swap! selection* (fn [n] true))
(return-from-dialog d true))
no-fn (fn [d]
(swap! selection* (fn [n] false))
(return-from-dialog d false))
dia (dialog
:content msg
:option-type :yes-no
:type :warning
:default-option :no
:modal? true
:parent parent
:success-fn yes-fn
:no-fn no-fn)]
(pack! dia)
(show! dia)
@selection*)))
([parent filename]
(overwrite-warning parent "" filename)))
| null | https://raw.githubusercontent.com/plewto/Cadejo/2a98610ce1f5fe01dce5f28d986a38c86677fd67/src/cadejo/ui/util/overwrite_warning.clj | clojure | (ns cadejo.ui.util.overwrite-warning
"Pops up confirmation dialog prior to overwriting existing file.
Overwrite warnings may be disabled by config"
(:require [cadejo.util.path :as path])
(:require [cadejo.config :as config])
(:use [seesaw.core]))
(defn overwrite-warning
([parent file-type filename]
(if (or (not (config/warn-on-file-overwrite))
(not (path/exists filename)))
true
(let [selection* (atom false)
msg (format "Overwrite %s file '%s'" file-type filename)
yes-fn (fn [d]
(swap! selection* (fn [n] true))
(return-from-dialog d true))
no-fn (fn [d]
(swap! selection* (fn [n] false))
(return-from-dialog d false))
dia (dialog
:content msg
:option-type :yes-no
:type :warning
:default-option :no
:modal? true
:parent parent
:success-fn yes-fn
:no-fn no-fn)]
(pack! dia)
(show! dia)
@selection*)))
([parent filename]
(overwrite-warning parent "" filename)))
|
|
7f408a1de69408c1fa7d458eb0266f362984cad0b30b87f2968ef4acb112118b | mbutterick/aoc-racket | day08.rkt | #lang scribble/lp2
@(require scribble/manual aoc-racket/helper)
@aoc-title[8]
@defmodule[aoc-racket/day08]
@link[""]{The puzzle}. Our @link-rp["day08-input.txt"]{input} consists of a list of seemingly random strings within quotation marks.
@chunk[<day08>
<day08-setup>
<day08-q1>
<day08-q2>
<day08-test>]
@isection{What's the difference between the literal length of the strings, and their length in memory?}
The puzzle relies on the fact that within strings, certain single characters — like the backslash @litchar{\} and double-quote mark @litchar{"} — are described with more than one character. Thus, the question asks us to compare the two lengths.
The literal length of the string is trivial — use @iracket[string-length]. The memory length requires interpreting a string as a Racket value, which (as seen in @secref{Day_7}) simply means using @iracket[read].
@chunk[<day08-setup>
(require racket rackunit)
(provide (all-defined-out))
]
@chunk[<day08-q1>
(define (memory-length str) (string-length (read (open-input-string str))))
(define (q1 strs)
(- (apply + (map string-length strs)) (apply + (map memory-length strs))))]
@isection{What's the difference between the re-encoded length of the literal string, and the original length?}
This question simply comes down to — do you know how to use the string-formatting functions in your programming language?
In Racket, a string can be re-encoded with @iracket[~v]. Not a very puzzling puzzle overall.
@chunk[<day08-q2>
(define (encoded-length str) (string-length (~v str)))
(define (q2 strs)
(- (apply + (map encoded-length strs)) (apply + (map string-length strs)))) ]
@section{Testing Day 8}
@chunk[<day08-test>
(module+ test
(define input-strs (file->lines "day08-input.txt"))
(check-equal? (q1 input-strs) 1333)
(check-equal? (q2 input-strs) 2046))]
| null | https://raw.githubusercontent.com/mbutterick/aoc-racket/2c6cb2f3ad876a91a82f33ce12844f7758b969d6/day08.rkt | racket | #lang scribble/lp2
@(require scribble/manual aoc-racket/helper)
@aoc-title[8]
@defmodule[aoc-racket/day08]
@link[""]{The puzzle}. Our @link-rp["day08-input.txt"]{input} consists of a list of seemingly random strings within quotation marks.
@chunk[<day08>
<day08-setup>
<day08-q1>
<day08-q2>
<day08-test>]
@isection{What's the difference between the literal length of the strings, and their length in memory?}
The puzzle relies the fact that within strings, certain single characters — like the backslash @litchar{\} and double-quote mark @litchar{"} — are described with more than one character. Thus, the question asks us to compare the two lengths.
The literal length of the string is trivial — use @iracket[string-length]. The memory length requires interpreting a string as a Racket value, which (as seen in @secref{Day_7}) simply means using @iracket[read].
@chunk[<day08-setup>
(require racket rackunit)
(provide (all-defined-out))
]
@chunk[<day08-q1>
(define (memory-length str) (string-length (read (open-input-string str))))
(define (q1 strs)
(- (apply + (map string-length strs)) (apply + (map memory-length strs))))]
@isection{What's the difference between the re-encoded length of the literal string, and the original length?}
This question simply comes down to — do you know how to use the string-formatting functions in your programming language?
In Racket, a string can be re-encoded with @iracket[~v]. Not a very puzzling puzzle overall.
@chunk[<day08-q2>
(define (encoded-length str) (string-length (~v str)))
(define (q2 strs)
(- (apply + (map encoded-length strs)) (apply + (map string-length strs)))) ]
@section{Testing Day 8}
@chunk[<day08-test>
(module+ test
(define input-strs (file->lines "day08-input.txt"))
(check-equal? (q1 input-strs) 1333)
(check-equal? (q2 input-strs) 2046))]
|
|
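-- Editor's note (not part of the original document): the same literal-length vs.
-- in-memory-length vs. re-encoded-length comparison, sketched in Haskell for
-- readers following the prose above from another language. Prelude read plays the
-- role of Racket's read and show the role of ~v; this assumes the puzzle's escape
-- syntax is also valid Haskell string syntax, and the function names simply mirror
-- the Racket definitions above.
memoryLength :: String -> Int
memoryLength s = length (read s :: String)

encodedLength :: String -> Int
encodedLength = length . show

q1, q2 :: [String] -> Int
q1 strs = sum (map length strs) - sum (map memoryLength strs)
q2 strs = sum (map encodedLength strs) - sum (map length strs)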
05bc39bbaa1563f337d36626b5ef64b406f0588a6c04b51fcc490de2379d9dbe | haskell-openal/OpenAL | QueryUtils.hs | # LANGUAGE CPP #
{-# OPTIONS_HADDOCK hide #-}
--------------------------------------------------------------------------------
-- |
-- Module : Sound.OpenAL.AL.QueryUtils
-- Copyright : (c) 2003-2016
-- License : BSD3
--
-- Maintainer : < >
-- Stability : stable
-- Portability : portable
--
-- This is a purely internal module with utilities to query OpenAL state.
--
--------------------------------------------------------------------------------
module Sound.OpenAL.AL.QueryUtils (
GetPName(..), marshalGetPName,
StringName(..), getString
) where
#if __GLASGOW_HASKELL__ >= 704
-- Make the foreign imports happy.
import Foreign.C.Types
#endif
import Foreign.Ptr ( Ptr )
import Sound.OpenAL.AL.BasicTypes
import Sound.OpenAL.AL.String
import Sound.OpenAL.Constants
--------------------------------------------------------------------------------
data GetPName =
GetDistanceModel
| GetDopplerFactor
| GetSpeedOfSound
| GetPosition
| GetVelocity
| GetGain
| GetOrientation
| GetSourceRelative
| GetSourceType
| GetLooping
| GetBuffer
| GetBuffersQueued
| GetBuffersProcessed
| GetMinGain
| GetMaxGain
| GetReferenceDistance
| GetRolloffFactor
| GetMaxDistance
| GetPitch
| GetDirection
| GetConeInnerAngle
| GetConeOuterAngle
| GetConeOuterGain
| GetSecOffset
| GetSampleOffset
| GetByteOffset
| GetSourceState
marshalGetPName :: GetPName -> ALenum
marshalGetPName x = case x of
GetDistanceModel -> al_DISTANCE_MODEL
GetDopplerFactor -> al_DOPPLER_FACTOR
GetSpeedOfSound -> al_SPEED_OF_SOUND
GetPosition -> al_POSITION
GetVelocity -> al_VELOCITY
GetGain -> al_GAIN
GetOrientation -> al_ORIENTATION
GetSourceRelative -> al_SOURCE_RELATIVE
GetSourceType -> al_SOURCE_TYPE
GetLooping -> al_LOOPING
GetBuffer -> al_BUFFER
GetBuffersQueued -> al_BUFFERS_QUEUED
GetBuffersProcessed -> al_BUFFERS_PROCESSED
GetMinGain -> al_MIN_GAIN
GetMaxGain -> al_MAX_GAIN
GetReferenceDistance -> al_REFERENCE_DISTANCE
GetRolloffFactor -> al_ROLLOFF_FACTOR
GetMaxDistance -> al_MAX_DISTANCE
GetPitch -> al_PITCH
GetDirection -> al_DIRECTION
GetConeInnerAngle -> al_CONE_INNER_ANGLE
GetConeOuterAngle -> al_CONE_OUTER_ANGLE
GetConeOuterGain -> al_CONE_OUTER_GAIN
GetSecOffset -> al_SEC_OFFSET
GetSampleOffset -> al_SAMPLE_OFFSET
GetByteOffset -> al_BYTE_OFFSET
GetSourceState -> al_SOURCE_STATE
--------------------------------------------------------------------------------
data StringName =
Vendor
| Renderer
| Version
| Extensions
| ALErrorCategory ALenum
marshalStringName :: StringName -> ALenum
marshalStringName x = case x of
Vendor -> al_VENDOR
Renderer -> al_RENDERER
Version -> al_VERSION
Extensions -> al_EXTENSIONS
ALErrorCategory e -> e
getString :: StringName -> IO String
getString n = alGetString (marshalStringName n) >>= peekALString
foreign import ccall unsafe "alGetString"
alGetString :: ALenum -> IO (Ptr ALchar)
| null | https://raw.githubusercontent.com/haskell-openal/OpenAL/5131984f172dffc43ca8b482f215d120523fb137/src/Sound/OpenAL/AL/QueryUtils.hs | haskell | # OPTIONS_HADDOCK hide #
------------------------------------------------------------------------------
|
Module : Sound.OpenAL.AL.QueryUtils
License : BSD3
Stability : stable
Portability : portable
------------------------------------------------------------------------------
Make the foreign imports happy.
------------------------------------------------------------------------------
------------------------------------------------------------------------------ | # LANGUAGE CPP #
Copyright : ( c ) 2003 - 2016
Maintainer : < >
This is a purely internal module with utilities to query OpenAL state .
module Sound.OpenAL.AL.QueryUtils (
GetPName(..), marshalGetPName,
StringName(..), getString
) where
#if __GLASGOW_HASKELL__ >= 704
import Foreign.C.Types
#endif
import Foreign.Ptr ( Ptr )
import Sound.OpenAL.AL.BasicTypes
import Sound.OpenAL.AL.String
import Sound.OpenAL.Constants
data GetPName =
GetDistanceModel
| GetDopplerFactor
| GetSpeedOfSound
| GetPosition
| GetVelocity
| GetGain
| GetOrientation
| GetSourceRelative
| GetSourceType
| GetLooping
| GetBuffer
| GetBuffersQueued
| GetBuffersProcessed
| GetMinGain
| GetMaxGain
| GetReferenceDistance
| GetRolloffFactor
| GetMaxDistance
| GetPitch
| GetDirection
| GetConeInnerAngle
| GetConeOuterAngle
| GetConeOuterGain
| GetSecOffset
| GetSampleOffset
| GetByteOffset
| GetSourceState
marshalGetPName :: GetPName -> ALenum
marshalGetPName x = case x of
GetDistanceModel -> al_DISTANCE_MODEL
GetDopplerFactor -> al_DOPPLER_FACTOR
GetSpeedOfSound -> al_SPEED_OF_SOUND
GetPosition -> al_POSITION
GetVelocity -> al_VELOCITY
GetGain -> al_GAIN
GetOrientation -> al_ORIENTATION
GetSourceRelative -> al_SOURCE_RELATIVE
GetSourceType -> al_SOURCE_TYPE
GetLooping -> al_LOOPING
GetBuffer -> al_BUFFER
GetBuffersQueued -> al_BUFFERS_QUEUED
GetBuffersProcessed -> al_BUFFERS_PROCESSED
GetMinGain -> al_MIN_GAIN
GetMaxGain -> al_MAX_GAIN
GetReferenceDistance -> al_REFERENCE_DISTANCE
GetRolloffFactor -> al_ROLLOFF_FACTOR
GetMaxDistance -> al_MAX_DISTANCE
GetPitch -> al_PITCH
GetDirection -> al_DIRECTION
GetConeInnerAngle -> al_CONE_INNER_ANGLE
GetConeOuterAngle -> al_CONE_OUTER_ANGLE
GetConeOuterGain -> al_CONE_OUTER_GAIN
GetSecOffset -> al_SEC_OFFSET
GetSampleOffset -> al_SAMPLE_OFFSET
GetByteOffset -> al_BYTE_OFFSET
GetSourceState -> al_SOURCE_STATE
data StringName =
Vendor
| Renderer
| Version
| Extensions
| ALErrorCategory ALenum
marshalStringName :: StringName -> ALenum
marshalStringName x = case x of
Vendor -> al_VENDOR
Renderer -> al_RENDERER
Version -> al_VERSION
Extensions -> al_EXTENSIONS
ALErrorCategory e -> e
getString :: StringName -> IO String
getString n = alGetString (marshalStringName n) >>= peekALString
foreign import ccall unsafe "alGetString"
alGetString :: ALenum -> IO (Ptr ALchar)
|
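-- Editor's note (not part of the original module): QueryUtils is internal to the
-- OpenAL binding, but a caller inside the package could use it roughly as below;
-- the function name printVendorInfo is hypothetical.
import Sound.OpenAL.AL.QueryUtils

printVendorInfo :: IO ()
printVendorInfo = do
  vendor  <- getString Vendor
  version <- getString Version
  putStrLn ("OpenAL vendor: " ++ vendor ++ ", version: " ++ version)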
693f4b17b5deca01b96b62f6b7e9076f26c7d415581e958d92d743da7396f12e | DavidVujic/clojurescript-amplified | avatar_stories.cljs | (ns app.stories.avatar-stories
(:require [app.components.user-avatar :as user-avatar]
[app.stories.helper :as helper]
[reagent.core :as reagent]))
(def ^:export default
(helper/->default {:title "An Avatar Component"
:component user-avatar/user-avatar
:args {:message "David"
:image-url ""}}))
(defn ^:export user-avatar [args]
(let [params (-> args helper/->params)
message (:message params)
image-url (:image-url params)]
(reagent/as-element
[user-avatar/user-avatar message image-url])))
| null | https://raw.githubusercontent.com/DavidVujic/clojurescript-amplified/3a98ed2a86a1aba49fcc55f0ca1207731df32620/src/stories/app/stories/avatar_stories.cljs | clojure | (ns app.stories.avatar-stories
(:require [app.components.user-avatar :as user-avatar]
[app.stories.helper :as helper]
[reagent.core :as reagent]))
(def ^:export default
(helper/->default {:title "An Avatar Component"
:component user-avatar/user-avatar
:args {:message "David"
:image-url ""}}))
(defn ^:export user-avatar [args]
(let [params (-> args helper/->params)
message (:message params)
image-url (:image-url params)]
(reagent/as-element
[user-avatar/user-avatar message image-url])))
|
|
8ce79b615ae7737dd740ce5814a56f38a82266e90c717f2620ffc4542d1a2fe3 | mukul-rathi/bolt | test_remove_variable_shadowing.ml | open Core
open Desugaring.Remove_variable_shadowing
open Ast.Ast_types
open Desugaring.Desugared_ast
let print_error_string = function Ok _ -> "" | Error e -> Error.to_string_hum e
let test_error_if_var_not_in_var_map () =
let expected_error =
Fmt.str "Error: no unique var name for (potentially) shadowed variable foo@." in
let result =
remove_var_shadowing_expr
(Identifier (Lexing.dummy_pos, Variable (TEVoid, Var_name.of_string "foo", [])))
[] in
Alcotest.(check string) "same error string" expected_error (print_error_string result)
let () =
let open Alcotest in
run "Remove Variable Shadowing"
[("Errors", [test_case "Var not in var map" `Quick test_error_if_var_not_in_var_map])]
| null | https://raw.githubusercontent.com/mukul-rathi/bolt/1faf19d698852fdb6af2ee005a5f036ee1c76503/tests/frontend/alcotest/desugaring/test_remove_variable_shadowing.ml | ocaml | open Core
open Desugaring.Remove_variable_shadowing
open Ast.Ast_types
open Desugaring.Desugared_ast
let print_error_string = function Ok _ -> "" | Error e -> Error.to_string_hum e
let test_error_if_var_not_in_var_map () =
let expected_error =
Fmt.str "Error: no unique var name for (potentially) shadowed variable foo@." in
let result =
remove_var_shadowing_expr
(Identifier (Lexing.dummy_pos, Variable (TEVoid, Var_name.of_string "foo", [])))
[] in
Alcotest.(check string) "same error string" expected_error (print_error_string result)
let () =
let open Alcotest in
run "Remove Variable Shadowing"
[("Errors", [test_case "Var not in var map" `Quick test_error_if_var_not_in_var_map])]
|
|
7404183ff3a625ce18c3f39cdd67475aaf2415b77b76ce5df9d2260090974d14 | rd--/hsc3 | F0.hs | | ( f0plugins )
module Sound.Sc3.Ugen.Bindings.Hw.External.F0 where
import Sound.Sc3.Common.Rate
import qualified Sound.Sc3.Ugen.Bindings.Hw.Construct as C
import Sound.Sc3.Ugen.Ugen
-- | Emulation of the sound generation hardware of the Atari TIA chip.
atari2600 :: Ugen -> Ugen -> Ugen -> Ugen -> Ugen -> Ugen -> Ugen -> Ugen
atari2600 audc0 audc1 audf0 audf1 audv0 audv1 rate = C.mkOsc ar "Atari2600" [audc0,audc1,audf0,audf1,audv0,audv1,rate] 1
-- | POKEY Chip Sound Simulator
mzPokey :: Ugen -> Ugen -> Ugen -> Ugen -> Ugen -> Ugen -> Ugen -> Ugen -> Ugen -> Ugen
mzPokey f1 c1 f2 c2 f3 c3 f4 c4 ctl = C.mkOsc ar "MZPokey" [f1,c1,f2,c2,f3,c3,f4,c4,ctl] 1
-- | A phasor that can loop.
redPhasor :: Rate -> Ugen -> Ugen -> Ugen -> Ugen -> Ugen -> Ugen -> Ugen -> Ugen
redPhasor rate trig rate_ start end loop loopstart loopend = C.mkOsc rate "RedPhasor" [trig,rate_,start,end,loop,loopstart,loopend] 1
-- | A phasor that can loop.
redPhasor2 :: Rate -> Ugen -> Ugen -> Ugen -> Ugen -> Ugen -> Ugen -> Ugen -> Ugen
redPhasor2 rate trig rate_ start end loop loopstart loopend = C.mkOsc rate "RedPhasor2" [trig,rate_,start,end,loop,loopstart,loopend] 1
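-- Hedged usage sketch (illustrative only, not part of this file): each binding
-- above constructs an ordinary hsc3 Ugen, so a node can be built by plain
-- application, e.g.
--
-- > atari2600 1 2 10 20 5 5 1 :: Ugen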
-- Local Variables:
-- truncate-lines:t
-- End:
| null | https://raw.githubusercontent.com/rd--/hsc3/024d45b6b5166e5cd3f0142fbf65aeb6ef642d46/Sound/Sc3/Ugen/Bindings/Hw/External/F0.hs | haskell | | POKEY Chip Sound Simulator
| A phasor that can loop.
| A phasor that can loop.
Local Variables:
truncate-lines:t
End: | | ( f0plugins )
module Sound.Sc3.Ugen.Bindings.Hw.External.F0 where
import Sound.Sc3.Common.Rate
import qualified Sound.Sc3.Ugen.Bindings.Hw.Construct as C
import Sound.Sc3.Ugen.Ugen
| Emulation of the sound generation hardware of the Atari TIA chip .
atari2600 :: Ugen -> Ugen -> Ugen -> Ugen -> Ugen -> Ugen -> Ugen -> Ugen
atari2600 audc0 audc1 audf0 audf1 audv0 audv1 rate = C.mkOsc ar "Atari2600" [audc0,audc1,audf0,audf1,audv0,audv1,rate] 1
mzPokey :: Ugen -> Ugen -> Ugen -> Ugen -> Ugen -> Ugen -> Ugen -> Ugen -> Ugen -> Ugen
mzPokey f1 c1 f2 c2 f3 c3 f4 c4 ctl = C.mkOsc ar "MZPokey" [f1,c1,f2,c2,f3,c3,f4,c4,ctl] 1
redPhasor :: Rate -> Ugen -> Ugen -> Ugen -> Ugen -> Ugen -> Ugen -> Ugen -> Ugen
redPhasor rate trig rate_ start end loop loopstart loopend = C.mkOsc rate "RedPhasor" [trig,rate_,start,end,loop,loopstart,loopend] 1
redPhasor2 :: Rate -> Ugen -> Ugen -> Ugen -> Ugen -> Ugen -> Ugen -> Ugen -> Ugen
redPhasor2 rate trig rate_ start end loop loopstart loopend = C.mkOsc rate "RedPhasor2" [trig,rate_,start,end,loop,loopstart,loopend] 1
|
79c7dca8519342b23f647a2fec8de33326ed1f2821fa22f354b3d70fea1d0529 | Tritlo/dynamic-haskell-plugin | Plugin.hs | Copyright ( c ) 2020 - 2021
# LANGUAGE LambdaCase #
# LANGUAGE TypeFamilies #
# LANGUAGE DataKinds #
# LANGUAGE RecordWildCards #
# LANGUAGE TupleSections #
# LANGUAGE CPP #
# LANGUAGE FlexibleContexts #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE PolyKinds #
# LANGUAGE PatternSynonyms #
# LANGUAGE TypeApplications #
# LANGUAGE ViewPatterns #
module Data.Dynamic.Plugin ( plugin, Default, TypeError(..), ErrorMessage(..),
castDyn, dynDispatch, pattern Is) where
import Control.Monad
( when, unless, guard, foldM, zipWithM, msum, filterM, replicateM )
import Data.Maybe (mapMaybe, catMaybes, fromMaybe, fromJust, listToMaybe, isJust)
import Data.Either
import Data.IORef
import Data.List (nubBy, sortOn, intersperse, or, partition, minimumBy, maximumBy, sort, find)
import Control.Arrow ((&&&))
import Data.Function (on)
import Data.Kind (Constraint)
import Data.Data (Data, toConstr)
import Prelude hiding ((<>))
import qualified Data.Set as Set
import Data.Set (Set)
import Data.Proxy
import Data.Dynamic
import Text.Read (readMaybe)
import GHC.TypeLits(TypeError(..), ErrorMessage(..))
import Data.Coerce
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import System.IO.Unsafe (unsafePerformIO)
import Bag
import FV (fvVarListVarSet, fvVarSet)
import qualified TcEnv as Tc (tcLookup)
import DsUtils
import GhcPlugins hiding (TcPlugin)
import TcRnTypes
import TcPluginM
import ErrUtils (Severity(SevWarning))
import TcEvidence
import TysPrim
import PrelNames
import TyCoRep
import ClsInst
import Class
import Inst hiding (newWanted)
import MkId
import TcMType hiding (newWanted, newFlexiTyVar, zonkTcType)
import qualified TcMType as TcM
import TcType
import CoAxiom
import Unify
import TcHsSyn
import InstEnv
-- Hole fits
import RdrName (globalRdrEnvElts)
import TcRnMonad (keepAlive, getLclEnv, getGlobalRdrEnv, getGblEnv, newSysName, setGblEnv)
import TcHoleErrors
import PrelInfo (knownKeyNames)
import Data.Graph (graphFromEdges, topSort, scc)
import DsBinds (dsHsWrapper)
import DsMonad (initDsTc)
import TcEvTerm (evCallStack)
import GHC.Hs.Expr
import Constraint
import Predicate
import GHC.TypeLits (TypeError(..),ErrorMessage(..))
-- import Data.Typeable
import Type.Reflection (SomeTypeRep(..), someTypeRep)
import Data.Dynamic
import GHC.Stack
--------------------------------------------------------------------------------
-- Exported
plugin :: Plugin
plugin = defaultPlugin { tcPlugin = Just . dynamicPlugin
, pluginRecompile = purePlugin
, installCoreToDos = coreDyn }
-- | The 'Default' type family allows us to 'default' free type variables of a given
-- kind in a constraint to the given value, i.e. if there is an instance
-- Default k for the kind k, and a is a free type variable of kind k in constraint c,
-- then a ~ Default k will be added to the context of c, and
-- Γ, a ~ Default k |- c :: Constraint checked for validity.
type family Default k :: k
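-- A user-supplied defaulting rule is just a type-family instance; the kind
-- 'Label' and the type 'L' below are hypothetical, echoing the example used in
-- the solveDefault comments further down:
--
-- > type instance Default Label = L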
-- | castDyn casts a Dynamic to any typeable value, and fails with a descriptive
-- error if the types don't match. Automatically inserted for casting Dynamic
-- values back to static values.
castDyn :: forall a . (Typeable a, HasCallStack) => Dynamic -> a
castDyn arg = fromDyn arg err
where err = error ("Couldn't match expected type '" ++ target
++ "' with actual dynamic type '" ++ actual ++ "'")
target = show (someTypeRep (Proxy :: Proxy a))
actual = show (dynTypeRep arg)
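-- Minimal usage sketch (normally the plugin inserts this call automatically;
-- invoking it by hand behaves as follows):
--
-- > castDyn (toDyn (42 :: Int)) :: Int   -- 42
-- > castDyn (toDyn 'x') :: Int           -- error: Couldn't match expected type 'Int'
-- >                                      --        with actual dynamic type 'Char'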
dynDispatch :: forall b . (Typeable b)
=> [(SomeTypeRep, Dynamic)] -- ^ Provided by the plugin
-> String -- ^ The name of the function
-> String -- ^ The name of the class
-> Dynamic -> b
dynDispatch insts fun_name class_name dispatcher =
case lookup argt insts of
Just f ->
fromDyn f
(error $ "Type mismatch when dispatching '"
++ fun_name
++ "' expecting '" ++ show targett
++"' but got '" ++ show (dynTypeRep f)
++ "' using dispatch table for '"
++ class_name ++ "'!")
_ -> error $ "No instance of '" ++ class_name ++ " " ++ show argt ++ "'"
++ " found when dispatching for '"
++ fun_name ++ " :: " ++ show targett
++ "', with 'Dynamic ~ " ++ show argt
++ "' in this context."
where argt = dynTypeRep dispatcher
targett = someTypeRep (Proxy :: Proxy b)
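-- Hedged sketch of how a dispatch-table entry is consumed (the table here is
-- hand-written for illustration; the plugin normally generates it):
--
-- > let table = [(someTypeRep (Proxy :: Proxy Int), toDyn (show :: Int -> String))]
-- >     f = dynDispatch table "show" "Show" (toDyn (1 :: Int)) :: Int -> String
-- > in f 1   -- "1"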
pattern Is :: forall a. (Typeable a) => a -> Dynamic
pattern Is res <- (fromDynamic @a -> Just res)
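-- Illustrative use of the view-pattern synonym (the function is hypothetical):
--
-- > describe :: Dynamic -> String
-- > describe d = case d of
-- >   Is (n :: Int)    -> "an Int: " ++ show n
-- >   Is (s :: String) -> "a String: " ++ s
-- >   _                -> "something else"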
--------------------------------------------------------------------------------
data Log = Log { log_pred_ty :: Type, log_loc :: CtLoc}
| LogDefault { log_pred_ty :: Type, log_loc :: CtLoc,
log_var :: Var, log_kind :: Kind, log_res :: Type }
| LogMarshal { log_pred_ty :: Type, log_loc :: CtLoc, log_to_dyn :: Bool}
| LogSDoc {log_pred_ty :: Type, log_loc :: CtLoc, log_msg :: SDoc}
logSrc :: Log -> RealSrcSpan
logSrc = ctLocSpan . log_loc
instance Ord Log where
compare a@Log{} b@Log{} =
if logSrc a == logSrc b
then (compare `on` showSDocUnsafe . ppr) a b
else (compare `on` logSrc) a b
compare Log{} _ = LT
compare _ Log{} = GT
compare a@LogDefault{} b@LogDefault{} =
if logSrc a == logSrc b
then (compare `on` showSDocUnsafe . ppr) a b
else (compare `on` logSrc) a b
compare LogDefault{} _ = LT
compare _ LogDefault{} = GT
compare a@LogMarshal{} b@LogMarshal{} =
if logSrc a == logSrc b
then (compare `on` showSDocUnsafe . ppr) a b
else (compare `on` logSrc) a b
compare LogMarshal{} _ = LT
compare _ LogMarshal{} = GT
compare a@LogSDoc{} b@LogSDoc{} =
if logSrc a == logSrc b
then (compare `on` showSDocUnsafe . ppr) a b
else (compare `on` logSrc) a b
instance Eq Log where
a@Log{} == b@Log{} =
((==) `on` logSrc) a b && (eqType `on` log_pred_ty) a b
Log{} == _ = False
a@LogDefault{} == b@LogDefault{} =
((==) `on` logSrc) a b && (eqType `on` log_pred_ty) a b
&& ((==) `on` log_var) a b
LogDefault{} == _ = False
a@LogMarshal{} == b@LogMarshal{} =
((==) `on` logSrc) a b && (eqType `on` log_pred_ty) a b
LogMarshal{} == _ = False
a@LogSDoc{} == b@LogSDoc{} =
((==) `on` logSrc) a b
&& (eqType `on` log_pred_ty) a b
&& ((==) `on` showSDocUnsafe . log_msg) a b
LogSDoc{} == _ = False
instance Outputable Log where
-- We do some extra work to pretty print the Defaulting messages
ppr Log{..}
| Just msg <- userTypeError_maybe log_pred_ty = pprUserTypeErrorTy msg
| otherwise = text "DataDynamicPlugin" <+> ppr log_pred_ty
ppr LogDefault{..} = fsep [ text "Defaulting"
-- We want to print a instead of a0
, quotes (ppr (mkTyVarTy log_var)
<+> dcolon <+> ppr log_kind)
, text "to"
, quotes (ppr log_res)
, text "in"
, quotes (ppr log_pred_ty)]
where printFlav Given = "Will default"
printFlav _ = "Defaulting"
ppr LogMarshal{..} = fsep [ text "Marshalling"
, quotes (ppr log_pred_ty)
, text (if log_to_dyn
then "to Dynamic"
else "from Dynamic") ]
ppr LogSDoc{..} = log_msg
zonkLog :: Log -> TcPluginM Log
zonkLog log@Log{..} = do zonked <- zonkTcType log_pred_ty
return $ log{log_pred_ty=zonked}
-- We don't want to zonk LogDefault, since then we can't see what variable was
-- being defaulted.
zonkLog log = return log
logToErr :: Log -> TcPluginM Ct
logToErr Log{..} = mkWanted log_loc log_pred_ty
logToErr LogDefault{..} =
sDocToTyErr [ text "Defaulting"
, quotes (ppr (mkTyVarTy log_var)
<+> dcolon <+> ppr log_kind)
, text "to"
, quotes (ppr log_res)
, text "in"
, quotes (ppr log_pred_ty)] >>= mkWanted log_loc
logToErr LogMarshal{..} =
sDocToTyErr [ text "Marshalling"
, quotes (ppr log_pred_ty)
, text (if log_to_dyn
then "to Dynamic"
else "from Dynamic") ] >>= mkWanted log_loc
logToErr LogSDoc{..} = sDocToTyErr [log_msg] >>= mkWanted log_loc
sDocToTyErr :: [SDoc] -> TcPluginM Type
sDocToTyErr docs =
do txtCon <- promoteDataCon <$> tcLookupDataCon typeErrorTextDataConName
appCon <- promoteDataCon <$> tcLookupDataCon typeErrorAppendDataConName
dflags <- unsafeTcPluginTcM getDynFlags
let txt str = mkTyConApp txtCon [mkStrLitTy $ fsLit str]
sppr = txt . showSDoc dflags . ppr
app ty1 ty2 = mkTyConApp appCon [ty1, ty2]
mkTyErr $ foldl1 app $ map sppr $ intersperse (text " ") docs
addWarning :: DynFlags -> Log -> TcPluginM ()
addWarning dflags log = tcPluginIO $ warn (ppr log)
where warn = putLogMsg dflags NoReason SevWarning
(RealSrcSpan (logSrc log)) (defaultErrStyle dflags)
data Flags = Flags { f_debug :: Bool
, f_quiet :: Bool
, f_keep_errors :: Bool
} deriving (Show)
getFlags :: [CommandLineOption] -> Flags
getFlags opts = Flags { f_debug = "debug" `elem` opts
, f_quiet = "quiet" `elem` opts
, f_keep_errors = "keep_errors" `elem` opts
}
pprOut :: Outputable a => String -> a -> TcPluginM ()
pprOut str a = do dflags <- unsafeTcPluginTcM getDynFlags
tcPluginIO $ putStrLn (str ++ " " ++ showSDoc dflags (ppr a))
dynamicPlugin :: [CommandLineOption] -> TcPlugin
dynamicPlugin opts = TcPlugin initialize solve stop
where
flags@Flags{..} = getFlags opts
initialize = do
when f_debug $ tcPluginIO $ putStrLn "Starting DataDynamicPlugin in debug mode..."
when f_debug $ tcPluginIO $ print flags
tcPluginIO $ newIORef Set.empty
solve :: IORef (Set Log) -> [Ct] -> [Ct] -> [Ct] -> TcPluginM TcPluginResult
solve warns given derived wanted = do
dflags <- unsafeTcPluginTcM getDynFlags
let pprDebug :: Outputable a => String -> a -> TcPluginM ()
pprDebug str a = when f_debug $ pprOut str a
pprDebug "Solving" empty
pprDebug "-------" empty
mapM_ (pprDebug "Given:") given
mapM_ (pprDebug "Derived:") derived
mapM_ (pprDebug "Wanted:") wanted
pprDebug "-------" empty
pluginTyCons <- getPluginTyCons
let solveWFun :: ([Ct], ([(EvTerm, Ct)],[Ct], Set Log)) -> (SolveFun, String)
-> TcPluginM ([Ct], ([(EvTerm, Ct)],[Ct], Set Log))
solveWFun (unsolved, (solved, more, logs)) (solveFun, explain) = do
(still_unsolved, (new_solved, new_more, new_logs)) <-
inspectSol <$> mapM (solveFun pluginTyCons) unsolved
mapM_ (pprDebug (explain ++ "-sols")) new_solved
mapM_ (pprDebug (explain ++ "-more")) new_more
return (still_unsolved, (solved ++ new_solved,
more ++ new_more,
logs `Set.union` new_logs))
order :: [(SolveFun, String)]
order = [ (solveDynamic, "Discharging")
, (solveDefault, "Defaulting")
, (solveDynamicTypeables, "SDTs")
, (solveDynDispatch, "Checking Dynamic Dispatch") ]
to_check = wanted ++ derived
(_, (solved_wanteds, more_cts, logs)) <-
foldM solveWFun (to_check, ([],[],Set.empty)) order
errs <- if f_keep_errors
then mapM logToErr (Set.toAscList logs)
else tcPluginIO $ modifyIORef warns (logs `Set.union`) >> mempty
return $ TcPluginOk solved_wanteds (errs ++ more_cts)
stop warns = do dflags <- unsafeTcPluginTcM getDynFlags
logs <- Set.toAscList <$> tcPluginIO (readIORef warns)
zonked_logs <- mapM zonkLog logs
unless f_quiet $ mapM_ (addWarning dflags) zonked_logs
data PluginTyCons = PTC { ptc_default :: TyCon
, ptc_dc :: DynCasts }
data DynCasts = DC { dc_typeable :: Class
, dc_dynamic :: TyCon
, dc_to_dyn :: Id
, dc_cast_dyn :: Id
, dc_has_call_stack :: TyCon
, dc_dyn_dispatch :: Id
, dc_sometyperep :: TyCon
, dc_sometyperep_dc :: DataCon
, dc_typerep :: Id }
getPluginTyCons :: TcPluginM PluginTyCons
getPluginTyCons =
do fpmRes <- findImportedModule (mkModuleName "Data.Dynamic.Plugin") Nothing
dc_dynamic <- getTyCon dYNAMIC "Dynamic"
dc_typeable <- getClass tYPEABLE_INTERNAL "Typeable"
dc_sometyperep <- getTyCon tYPEABLE_INTERNAL "SomeTypeRep"
dc_sometyperep_dc <- getDataCon tYPEABLE_INTERNAL "SomeTypeRep"
dc_typerep <- getId tYPEABLE_INTERNAL "typeRep"
dc_to_dyn <- getId dYNAMIC "toDyn"
dc_has_call_stack <- getTyCon gHC_STACK_TYPES "HasCallStack"
case fpmRes of
Found _ mod ->
do ptc_default <- getTyCon mod "Default"
dc_cast_dyn <- getId mod "castDyn"
dc_dyn_dispatch <- getId mod "dynDispatch"
let ptc_dc = DC {..}
return PTC{..}
NoPackage uid -> pprPanic "Plugin module not found (no package)!" (ppr uid)
FoundMultiple ms -> pprPanic "Multiple plugin modules found!" (ppr ms)
NotFound{..} -> pprPanic "Plugin module not found!" empty
where getTyCon mod name = lookupOrig mod (mkTcOcc name) >>= tcLookupTyCon
getDataCon mod name = lookupOrig mod (mkDataOcc name) >>= tcLookupDataCon
getPromDataCon mod name = promoteDataCon <$> getDataCon mod name
getClass mod name = lookupOrig mod (mkClsOcc name) >>= tcLookupClass
getId mod name = lookupOrig mod (mkVarOcc name) >>= tcLookupId
type Solution = Either Ct (Maybe (EvTerm, Ct), -- The solution to the Ct
[Ct], -- Possible additional work
Set Log) -- What we did
type SolveFun = PluginTyCons -> Ct -> TcPluginM Solution
wontSolve :: Ct -> TcPluginM Solution
wontSolve = return . Left
couldSolve :: Maybe (EvTerm,Ct) -> [Ct] -> Set Log -> TcPluginM Solution
couldSolve ev work logs = return (Right (ev,work,logs))
-- Defaults any ambiguous type variables of kind k to l if Default k = l
solveDefault :: SolveFun
solveDefault ptc@PTC{..} ct =
do defaults <- catMaybes <$> mapM getDefault (tyCoVarsOfCtList ct)
if null defaults then wontSolve ct
-- We make assertions that `a ~ def` for all free a in pred_ty of ct. and
-- add these as new assertions. For meta type variables (i.e. ones that
-- have been instantiated with a `forall`, e.g. `forall a. Less H a`), an
-- assert is a derived, meaning that we emit a wanted that requires no
-- evidence . E.g. when checking `forall (a :: Label) . Less H a` and we
-- have `type instance Default Label = L`, we emit a `a0 ~ L`.
-- For skolems ("rigid" type variables like the a in `True :: F a Bool`),
-- we cannot touch the variable so we cannot unify them with a derived. In
-- that case, we emit a given, saying that `a ~ L` i.e. we essentially
-- change the type of `True :: F a Bool` to `True :: a ~ L => F a Bool`.
-- Note that we cannot simply emit a given for both, since we cannot
-- mention a meta type variable in a given.
else do let (eq_tys, logs) = unzip $ map mkTyEq defaults
assert_eqs <- mapM mkAssert eq_tys
couldSolve Nothing assert_eqs (Set.fromList logs)
where mkAssert = either (mkDerived bump) (uncurry (mkGiven bump))
bump = bumpCtLocDepth $ ctLoc ct
getDefault var = fmap ((var,) . snd) <$> matchFam ptc_default [varType var]
mkTyEq (var,def) = ( if isMetaTyVar var then Left pred_ty
else Right (pred_ty, proof),
LogDefault{log_pred_ty = ctPred ct,
log_var = var, log_kind = varType var,
log_res = def, log_loc =ctLoc ct})
where EvExpr proof = mkProof "data-dynamic-default" (mkTyVarTy var) defApp
pred_ty = mkPrimEqPredRole Nominal (mkTyVarTy var) defApp
defApp = mkTyConApp ptc_default [varType var]
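-- Worked example of the defaulting step above (assumed instance): given
-- `type instance Default Label = L` and a wanted `Less H a0` where a0 :: Label
-- is ambiguous, solveDefault emits the equality `a0 ~ L`, after which the
-- constraint is checked as if the user had written `Less H L`.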
mkTyErr :: Type -> TcPluginM Type
mkTyErr msg = flip mkTyConApp [typeKind msg, msg] <$>
tcLookupTyCon errorMessageTypeErrorFamName
-- | Creates a type error with the given string at the given loc.
mkTypeErrorCt :: CtLoc -> String -> TcPluginM Ct
mkTypeErrorCt loc str =
do txtCon <- promoteDataCon <$> tcLookupDataCon typeErrorTextDataConName
appCon <- promoteDataCon <$> tcLookupDataCon typeErrorAppendDataConName
vappCon <- promoteDataCon <$> tcLookupDataCon typeErrorVAppendDataConName
let txt str = mkTyConApp txtCon [mkStrLitTy $ fsLit str]
app ty1 ty2 = mkTyConApp appCon [ty1, ty2]
vapp ty1 ty2 = mkTyConApp vappCon [ty1, ty2]
unwty = foldr1 app . map txt . intersperse " "
ty_err_ty = foldr1 vapp $ map (unwty . words) $ lines str
te <- mkTyErr ty_err_ty
mkWanted loc te
getErrMsgCon :: TcPluginM TyCon
getErrMsgCon = lookupOrig gHC_TYPELITS (mkTcOcc "ErrorMessage") >>= tcLookupTyCon
-- Utils
mkDerived :: CtLoc -> PredType -> TcPluginM Ct
mkDerived loc eq_ty = flip setCtLoc loc . CNonCanonical <$> newDerived loc eq_ty
mkWanted :: CtLoc -> PredType -> TcPluginM Ct
mkWanted loc eq_ty = flip setCtLoc loc . CNonCanonical <$> newWanted loc eq_ty
mkGiven :: CtLoc -> PredType -> EvExpr -> TcPluginM Ct
mkGiven loc eq_ty ev = flip setCtLoc loc . CNonCanonical <$> newGiven loc eq_ty ev
mkProof :: String -> Type -> Type -> EvTerm
mkProof str ty1 ty2 = evCoercion $ mkUnivCo (PluginProv str) Nominal ty1 ty2
splitEquality :: Type -> Maybe (Kind, Type, Type)
splitEquality pred =
do (tyCon, [k1, k2, ty1,ty2]) <- splitTyConApp_maybe pred
guard (tyCon == eqPrimTyCon)
guard (k1 `eqType` k2)
return (k1, ty1,ty2)
inspectSol :: Ord d => [Either a (Maybe b, [c], Set d)]
-> ([a], ([b], [c], Set d))
inspectSol xs = (ls, (catMaybes sols, concat more, Set.unions logs))
where (ls, rs) = partitionEithers xs
(sols, more, logs) = unzip3 rs
----------------------------------------------------------------
-- Marshalling to and from Dynamic
----------------------------------------------------------------
-- | Solves Γ |- (a :: Type) ~ (b :: Type) if a ~ Dynamic or b ~ Dynamic
solveDynamic :: SolveFun
solveDynamic ptc@PTC{..} ct
| Just (k1,ty1,ty2) <- splitEquality (ctPred ct) = do
let DC {..} = ptc_dc
dynamic = mkTyConApp dc_dynamic []
kIsType = tcIsLiftedTypeKind k1
isDyn ty = ty `tcEqType` dynamic
if kIsType && (isDyn ty1 || isDyn ty2)
then marshalDynamic k1 ty1 ty2 ptc ct
else wontSolve ct
| otherwise = wontSolve ct
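-- Source-level effect (hedged sketch, assumes the plugin pass is active): a
-- user can write
--
-- > mixed :: [Dynamic]
-- > mixed = [1 :: Int, "two", True]
--
-- and every (a :: Type) ~ Dynamic equality raised here is discharged by
-- wrapping the value in toDyn; reading an element back at Int inserts castDyn.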
dYNAMICPLUGINPROV :: String
dYNAMICPLUGINPROV = "data-dynamic"
marshalDynamic :: Kind -> Type -> Type -> SolveFun
marshalDynamic k1 ty1 ty2 PTC{..} ct@(CIrredCan CtWanted{ctev_dest = HoleDest coho} _) =
do let DC {..} = ptc_dc
dynamic = mkTyConApp dc_dynamic []
isDyn ty = ty `tcEqType` dynamic
relTy = if isDyn ty1 then ty2 else ty1
log = Set.singleton (LogMarshal relTy (ctLoc ct) (isDyn ty2))
hasTypeable = mkTyConApp (classTyCon dc_typeable) [k1, relTy]
hasCallStack = mkTyConApp dc_has_call_stack []
checks@[check_typeable, check_call_stack] <- mapM (mkWanted (ctLoc ct)) [hasTypeable, hasCallStack]
call_stack <- mkFromDynErrCallStack dc_cast_dyn ct $ ctEvEvId $ ctEvidence check_call_stack
let typeableDict = ctEvEvId $ ctEvidence check_typeable
evExpr = if isDyn ty1
then mkApps (Var dc_cast_dyn) [Type relTy, Var typeableDict, call_stack]
else mkApps (Var dc_to_dyn) [Type relTy, Var typeableDict]
(at1,at2) = if isDyn ty1 then (dynamic, relTy) else (relTy, dynamic)
deb <- unsafeTcPluginTcM $ mkSysLocalM (fsLit dYNAMICPLUGINPROV) (exprType evExpr)
let mkProof prov = mkUnivCo (PluginProv prov) Nominal at1 at2
if isTopTcLevel (ctLocLevel $ ctLoc ct)
then do -- setEvBind allows us to emit the evExpr we built, and since
-- we're at the top, it will be emitted as an exported variable
let prov = marshalVarToString deb
setEvBind $ mkGivenEvBind (setIdExported deb) (EvExpr evExpr)
couldSolve (Just (evCoercion (mkProof prov), ct)) checks log
else do -- we're within a function, so setting the evBinds won't actually
-- put it within scope.
let prov = dYNAMICPLUGINPROV
let_b = Let (NonRec deb evExpr)
-- By binding and seqing, we ensure that the evExpr
-- doesn't get erased.
(seqVar deb $ Coercion $ mkProof prov)
couldSolve (Just (EvExpr let_b, ct)) checks log
marshalDynamic _ _ _ _ ct = wontSolve ct
-- By applying the same function when generating the provinence and for the
-- lookup of the variable name later, we know we will find the corresponding
-- variable.
marshalVarToString :: Var -> String
marshalVarToString var = nstr ++ "_" ++ ustr
where nstr = occNameString (occName var)
ustr = show (varUnique var)
mkFromDynErrCallStack :: Id -> Ct -> EvVar -> TcPluginM EvExpr
mkFromDynErrCallStack fdid ct csDict =
flip mkCast coercion <$>
unsafeTcPluginTcM (evCallStack (EvCsPushCall name loc var))
where name = idName fdid
loc = ctLocSpan (ctLoc ct)
var = Var csDict
coercion = mkSymCo (unwrapIP (exprType var))
-- | Post-processing for Dynamics
type DynExprMap = Map (Either String Var) (Expr Var) -- These we need to find from case exprs.
-- | Here we replace the "proofs" of the casts with te actual calls to toDyn
-- and castDyn.
coreDyn :: [CommandLineOption] -> [CoreToDo] -> CoreM [CoreToDo]
coreDyn clo tds = return $ CoreDoPluginPass "DataDynamicPlugin" (bindsOnlyPass addDyn):tds
where
Flags {..} = getFlags clo
found var expr = Map.singleton var expr
addDyn :: CoreProgram -> CoreM CoreProgram
addDyn program = mapM (addDynToBind dexprs) program
where
dexprs = Map.fromList $ concatMap getDynamicCastsBind program
-- We need to find the two types of expressions, either the exported globals
-- (which we can then directly use, or the seq'd ones buried within cases
-- for locals).
-- We grab the `data-dynamic_a1bK :: A -> Dynamics` from the binds, and
-- the `case case <dyn_expr> of {DEFAULT -> <UnivCo proof>} of <covar>` from
-- the expressions, where Left <data-dynamic_var_name> and Right <covar>.
getDynamicCastsBind :: CoreBind -> [(Either String Var, Expr Var)]
getDynamicCastsBind (NonRec var expr) |
occNameString (occName var) == dYNAMICPLUGINPROV =
(Left $ marshalVarToString var, Var var):getDynamicCastsExpr expr
getDynamicCastsBind (NonRec _ expr) = getDynamicCastsExpr expr
getDynamicCastsBind (Rec as) =
-- The top level ones will never be recursive.
concatMap (getDynamicCastsExpr . snd) as
getDynamicCastsExpr :: Expr Var -> [(Either String Var, Expr Var)]
getDynamicCastsExpr (Var _) = []
getDynamicCastsExpr (Lit _) = []
getDynamicCastsExpr (App expr arg) =
concatMap getDynamicCastsExpr [expr, arg]
getDynamicCastsExpr (Lam _ expr) = getDynamicCastsExpr expr
getDynamicCastsExpr (Let bind expr) =
getDynamicCastsBind bind ++ getDynamicCastsExpr expr
getDynamicCastsExpr c@(Case expr covar _ alts) =
ecasts ++ concatMap gdcAlts alts
where gdcAlts (_,_,e) = getDynamicCastsExpr e
ecasts = case expr of
-- This is the expression built by the seqVar, though unfortunately,
-- the var itself isn't preserved. It's OK though, since we have
-- to replace the covar itself and not from the variable name.
Case dexpr _ _ [(DEFAULT, [], Coercion (UnivCo (PluginProv prov) _ _ _))] |
prov == dYNAMICPLUGINPROV -> [(Right covar, dexpr)]
_ -> getDynamicCastsExpr expr
getDynamicCastsExpr (Cast expr _) = getDynamicCastsExpr expr
getDynamicCastsExpr (Tick _ expr) = getDynamicCastsExpr expr
getDynamicCastsExpr (Type _) = []
getDynamicCastsExpr (Coercion _) = []
addDynToBind :: DynExprMap -> CoreBind -> CoreM CoreBind
addDynToBind dexprs (NonRec b expr) = NonRec b <$> addDynToExpr dexprs expr
addDynToBind dexprs (Rec as) = do
let (vs, exprs) = unzip as
nexprs <- mapM (addDynToExpr dexprs) exprs
return (Rec $ zip vs nexprs)
addDynToExpr :: DynExprMap -> Expr Var -> CoreM (Expr Var)
addDynToExpr _ e@(Var _) = pure e
addDynToExpr _ e@(Lit _) = pure e
addDynToExpr dexprs (App expr arg) =
App <$> addDynToExpr dexprs expr <*> addDynToExpr dexprs arg
addDynToExpr dexprs (Lam b expr) = Lam b <$> addDynToExpr dexprs expr
addDynToExpr dexprs (Let binds expr) = Let <$> addDynToBind dexprs binds
<*> addDynToExpr dexprs expr
addDynToExpr dexprs (Case expr b ty alts) =
(\ne na -> Case ne b ty na) <$> addDynToExpr dexprs expr
<*> mapM addDynToAlt alts
where addDynToAlt (c, bs, expr) = (c, bs,) <$> addDynToExpr dexprs expr
-- Cast is the only place that we do any work beyond just recursing over
-- the sub-expressions. Here we replace the
--   (A `cast` UnivCo (PluginProv <data-dynamic_var_name>) Nominal A Dynamic)
-- and (B `cast` SubCo <covar>) that was generated in the TcPlugin with
-- the respective (data-dynamic_var_name A) (i.e. apply the function to A)
-- and (toDyn @B ... B).
addDynToExpr dexprs orig@(Cast expr coercion) = do
nexpr <- addDynToExpr dexprs expr
case coercion of
UnivCo (PluginProv prov) _ _ _ |
Just expr <- dexprs Map.!? Left prov -> found expr nexpr
SubCo (CoVarCo co) | Just expr <- dexprs Map.!? Right co -> found expr nexpr
UnivCo (PluginProv _) _ _ _ -> pprPanic "Unfound var" $ ppr coercion
_ -> return (Cast nexpr coercion)
where found expr nexpr = do
let res = App expr nexpr
when f_debug $
liftIO $ putStrLn $ showSDocUnsafe $
text "Replacing" <+> parens (ppr orig)
<+> text "with" <+> parens (ppr res)
return res
addDynToExpr dexprs (Tick t expr) = Tick t <$> addDynToExpr dexprs expr
addDynToExpr _ e@(Type _) = pure e
addDynToExpr _ e@(Coercion _) = pure e
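-- Net effect of this Core pass (schematic, not literal compiler output):
--
-- > (x `cast` UnivCo (PluginProv "data-dynamic_a1bK") Nominal A Dynamic)
-- >   ==>  data-dynamic_a1bK x
--
-- i.e. the placeholder coercion is replaced by an application of the
-- marshalling function bound during constraint solving.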
-- | Solves Γ |- C Dynamic
solveDynDispatch :: SolveFun
solveDynDispatch ptc@PTC{..} ct | CDictCan{..} <- ct
, [arg] <- cc_tyargs
, arg `tcEqType` dynamic = do
class_insts <- flip classInstances cc_class <$> getInstEnvs
let (unsaturated, saturated) = partition (not . null . is_tvs) class_insts
class_tys = map is_tys saturated
-- We can only dispatch on singe argument classes
if not (all ((1 ==) . length) class_tys) then wontSolve ct
else do
-- Make sure we check any superclasses
scChecks <- mapM (mkWanted (ctLoc ct) .
flip piResultTys cc_tyargs .
mkSpecForAllTys (classTyVars cc_class))
$ classSCTheta cc_class
let scEvIds = map (evId . ctEvId) scChecks
args_n_checks <- mapM (methodToDynDispatch cc_class class_tys)
(classMethods cc_class)
let logs = Set.fromList $ [LogSDoc (ctPred ct) (ctLoc ct) $
fsep ([text "Building dispatch table for"
, quotes $ ppr $ ctPred ct
, text "based on"
, fsep $ map (quotes . ppr) saturated
] ++ if null unsaturated then []
else [ text "Skipping unsaturated instances"
, fsep $ map (quotes . ppr) unsaturated ])]
classCon = tyConSingleDataCon (classTyCon cc_class)
(args, checks) = unzip args_n_checks
proof = evDataConApp classCon cc_tyargs $ scEvIds ++ args
couldSolve (Just (proof, ct)) (scChecks ++ concat checks) logs
| otherwise = wontSolve ct
where
DC {..} = ptc_dc
dynamic = mkTyConApp dc_dynamic []
sometyperep = mkTyConApp dc_sometyperep []
-- | The workhorse. Creates the dictionary for C Dynamic on the fly.
methodToDynDispatch :: Class
-> [[Type]]
-> Id
-> TcPluginM (EvExpr, [Ct])
-- For a method 'loo :: Show a => Int -> a -> Int -> Int' in Foo, with instances
-- Foo A and Foo B, this will generate the following (in Core):
-- Notation: {Foo A} = The dictionary for Foo A
-- (\ (k :: Show Dynamic) (l :: Int) (m :: Dynamic) ->
-- dynDispatch @(Show Dynamic => Int -> Dynamic -> Int -> Int)
-- {Typeable (Show Dynamic => Int -> Dynamic -> Int -> Int)}
-- -- ^ Only used too lookup in the table
-- [ (SomeTypeRep (typeRep :: TypeRep A), -- In core
-- toDyn @(Show Dynamic => Int -> Dynamic -> Int -> Int)
--     {(Show Dynamic => Int -> Dynamic -> Int -> Int)}
-- (\ (k :: Show Dynamic) (l :: Int) (m :: Dynamic) ->
-- loo @A {Foo A} {Show A} l (castDyn m)))
-- , (SomeTypeRep (typeRep :: TypeRep B), -- In core
-- toDyn @(Show Dynamic => Int -> Dynamic -> Int -> Int)
--     {(Show Dynamic => Int -> Dynamic -> Int -> Int)}
-- (\ (k :: Show Dynamic) (l :: Int) (m :: Dynamic) ->
-- loo @B {Foo B} {Show B} l (castDyn m)))]
-- -- ^ The dynamic dispatch table
-- "loo"
-- -- ^ The name of the function. Used for better error messages.
-- "Foo"
-- -- ^ The name of the class. Used for better error messages.
-- (m :: Dynamic)
-- -- ^ The dynamic value to dispatch on
-- (runtimeError @(Show Dynamic) "Should never be evaluated!")
-- -- ^ Provided to please the type gods. This dictionary
-- -- is just thrown away by the function after dispatch.
-- (l :: Int)
-- -- ^ The first argument to the function, captured before
-- -- we had the dynamic we could use to know which type
-- -- to dispatch on.
-- (m :: Dynamic)
-- -- ^ The dynamic again. This will go to a castDyn to the
-- -- proper type before being evaluated at the function.
-- )
-- And similar entries for each function in the class.
--
-- When given a dynamic (Dynamic (tr :: TypeRep a) (v :: a)), dynDispatch
-- looks up (SomeTypeRep tr :: SomeTypeRep) in the dispatch table.
-- If it finds a function 'f' that matches, it converts it to the expected
-- value with 'fromDyn f', if possible, and emits a runtime error otherwise.
-- If a function with the matching type is not found, it also emits a
-- runtime error, saying that no matching instance was found.
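-- Surface-level consequence (assumed, illustrative): with such a table in
-- place, an expression like
--
-- > showIt :: Dynamic -> String
-- > showIt = show
--
-- type-checks against the generated Show Dynamic dictionary and picks the
-- concrete instance at runtime from the dynamic's TypeRep.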
methodToDynDispatch cc_class class_tys fid = do
-- Names included for better error messages.
let fname = occNameFS (getOccName fid)
cname = occNameFS (getOccName cc_class)
fun_name <- unsafeTcPluginTcM $ mkStringExprFS fname
class_name <- unsafeTcPluginTcM $ mkStringExprFS cname
let (tvs, ty) = tcSplitForAllVarBndrs (varType fid)
(res, preds) = splitPreds ty
bound_preds = map (mkForAllTys tvs) preds
dpt_ty = mkBoxedTupleTy [sometyperep, dynamic]
fill_ty = piResultTys (mkForAllTys tvs res)
enough_dynamics = replicate (length $ head class_tys) dynamic
dyn_ty = fill_ty enough_dynamics
-- Whole ty is the type minus the a in the beginning
whole_ty = funResultTy $ piResultTys (varType fid) enough_dynamics
unsatisfied_preds = map (`piResultTy` dynamic) $ drop 1 bound_preds
mkMissingDict t =
mkRuntimeErrorApp rUNTIME_ERROR_ID t "Dynamic dictionary shouldn't be evaluated!"
dynb_pred_dicts = map mkMissingDict unsatisfied_preds
dyn_pred_vars <- unsafeTcPluginTcM $ mapM (mkSysLocalM (getOccFS fid)) unsatisfied_preds
let -- | The workhorse that constructs the dispatch tables.
mkDpEl :: Type -> [CoreBndr] -> [Type] -> TcPluginM (CoreExpr, [Ct])
mkDpEl res_ty revl dts@[dp_ty] =
do (tev, check_typeable) <- checkTypeable whole_ty
(dptev, check_typeable_dp) <- checkTypeable dp_ty
check_preds <- mapM (mkWanted (ctLoc ct) . flip piResultTys dts) bound_preds
let dyn_app = mkCoreApps (Var dc_to_dyn) [Type whole_ty, Var tev]
pevs = map ctEvId check_preds
fapp = mkCoreApps (Var fid) $ Type dp_ty : map Var pevs
toFappArg :: (Type, Type, CoreBndr) -> TcPluginM (CoreExpr, [Ct])
toFappArg (t1,t2,b) | tcEqType t1 t2 = return (Var b, [])
| otherwise = do
(tev, check_typeable) <- checkTypeable t2
ccs <- mkWanted (ctLoc ct) $ mkTyConApp dc_has_call_stack []
cs <- mkFromDynErrCallStack dc_cast_dyn ct $ ctEvEvId $ ctEvidence ccs
let app = mkCoreApps (Var dc_cast_dyn)
[Type t2, Var tev, cs, Var b]
return (app,[check_typeable, ccs])
matches :: [CoreBndr] -> Type -> [(Type, Type, CoreBndr)]
matches [] _ = []
matches (b:bs) ty = (varType b, t, b):matches bs r
where (t,r) = splitFunTy ty -- Safe, binders are as long or longer.
(fappArgs, fappChecks) <- unzip <$> mapM toFappArg (matches revl res_ty)
let fapp_app = mkCoreApps fapp fappArgs
-- If the result is dependent on the type, we must wrap it in
-- a toDyn. I.e. for Ord Dynamic,
-- max :: a -> a -> a must have the type Dynamic -> Dynamic -> Dynamic
-- so we must cast the result to
--
-- NOTE: BREAKS, i.e. (A :: Dynamic) `max` (B :: Dynamic)
-- is just the latter argument.
dfapp_arg = if (exprType (lambda fapp_app) `tcEqType` whole_ty)
then lambda fapp_app
else lambda (td fapp_app)
where dfapp_arg_mb = lambda fapp_app
lambda = mkCoreLams (dyn_pred_vars ++ revl)
td x = mkCoreApps (Var dc_to_dyn) [Type dp_ty, Var dptev, x]
dfapp = mkCoreApps dyn_app [dfapp_arg]
trapp = mkCoreApps (Var dc_typerep) [Type (tcTypeKind dp_ty), Type dp_ty, Var dptev]
strapp = mkCoreApps
(Var (dataConWrapId dc_sometyperep_dc))
[Type (tcTypeKind dp_ty), Type dp_ty, trapp]
checks = [check_typeable, check_typeable_dp] ++ check_preds ++ concat fappChecks
tup = mkCoreTup [strapp, dfapp]
return (tup, checks)
mkDpEl _ _ tys = pprPanic "Multi-param typeclasses not supported!" $ ppr tys
finalize (dp:lams) res_ty = do
let revl = reverse (dp:lams)
mkFunApp a b = mkTyConApp funTyCon [tcTypeKind a,tcTypeKind b, a, b]
(tev, check_typeable) <- checkTypeable whole_ty
let saturated = filter is_saturated class_tys
is_saturated = all (not . isPredTy)
dpt_els_n_checks <- mapM (\ct -> mkDpEl (fill_ty ct) revl ct) saturated
-- To make the types match up, we must make a dictionary for each of
-- the predicates, even though these will never be used.
let (dpt_els, dpt_checks) = unzip dpt_els_n_checks
app = mkCoreApps (Var dc_dyn_dispatch)
([ Type whole_ty, evId tev, mkListExpr dpt_ty dpt_els
, fun_name, class_name, Var dp]
++ dynb_pred_dicts
++ map Var revl)
checks = check_typeable:concat dpt_checks
-- TODO: app to pred dicts
lamApp = mkCoreLams (dyn_pred_vars ++ revl) app
return (lamApp, checks)
-- We figure out all the arguments to the functions first from the type.
loop lams ty = do
case splitFunTy_maybe ty of
Just (t,r) -> do
bid <- unsafeTcPluginTcM $ mkSysLocalM (getOccFS fid) t
loop (bid:lams) r
_ -> finalize lams ty
loop [] dyn_ty
checkTypeable :: Type -> TcPluginM (EvId, Ct)
checkTypeable ty = do
c <- mkWanted (ctLoc ct) $ mkTyConApp (classTyCon dc_typeable) [tcTypeKind ty, ty]
return (ctEvId c, c)
splitPreds :: Type -> (Type, [PredType])
splitPreds ty =
case tcSplitPredFunTy_maybe ty of
Just (pt, t) -> (pt:) <$> splitPreds t
_ -> (ty, [])
-- | GHC doesn't know how to solve Typeable (Show Dynamic => Dynamic -> Int),
-- but in core it's the same as Show Dynamic -> Dynamic -> Int. So we simply
-- show that 'Show Dynamic' and 'Dynamic -> Int' are both typeable, and
-- construct the evidence that 'Show Dynamic => Dynamic -> Int' is thus
-- typeable.
solveDynamicTypeables :: SolveFun
solveDynamicTypeables ptc@PTC{..}
ct | CDictCan{..} <- ct
, cc_class == dc_typeable
, [kind, ty] <- cc_tyargs
, tcIsLiftedTypeKind kind
, (res_ty, preds@(p:ps)) <- splitPreds ty
, pts <- mapMaybe splitTyConApp_maybe preds
, all (tcEqType dynamic) $ concatMap snd pts =
do (r_typable_ev, r_typeable_ct) <- checkTypeable res_ty
-- We don't want to check the constraints here, since we won't need
-- them for the actual use, e.g. Show Dynamic, since we'll never
-- call the function at Dynamic.
-- mapM (mkWanted (ctLoc ct)) preds
t_preds <- mapM checkTypeablePred pts
let (p_evs, p_cts) = unzip t_preds
checks = r_typeable_ct:concat p_cts
classCon = tyConSingleDataCon (classTyCon cc_class)
r_ty_ev = EvExpr $ evId r_typable_ev
(final_ty, proof) = foldr conTypeable (res_ty, r_ty_ev) p_evs
couldSolve (Just (proof, ct)) checks Set.empty
| otherwise = wontSolve ct
where
DC {..} = ptc_dc
dynamic = mkTyConApp dc_dynamic []
checkTypeablePred :: (TyCon, [Type]) -> TcPluginM ((Type, EvTerm), [Ct])
checkTypeablePred (tc, tys) = do
args_typeable <- mapM checkTypeable tys
let (_, evcts) = unzip args_typeable
ev = EvTypeableTyCon tc (map (EvExpr . evId . ctEvId) evcts)
ty = mkTyConApp tc tys
return ((ty, evTypeable ty ev), evcts)
conTypeable :: (Type, EvTerm) -> (Type, EvTerm) -> (Type, EvTerm)
conTypeable (fty, fterm) (argty, argterm) =
let res_ty = mkTyConApp funTyCon [tcTypeKind fty, tcTypeKind argty, fty, argty]
r_term = evTypeable res_ty $ EvTypeableTrFun fterm argterm
in (res_ty, r_term)
checkTypeable :: Type -> TcPluginM (EvId, Ct)
checkTypeable ty = do
c <- mkWanted (ctLoc ct) $ mkTyConApp (classTyCon dc_typeable) [tcTypeKind ty, ty]
return (ctEvId c, c)
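-- Shape of the constraint handled here (illustrative): a wanted
-- Typeable (Show Dynamic => Dynamic -> Int) is split into
-- Typeable (Show Dynamic) and Typeable (Dynamic -> Int), each solved
-- separately, and the pieces are recombined with EvTypeableTrFun.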
| null | https://raw.githubusercontent.com/Tritlo/dynamic-haskell-plugin/4e0e57bcdf3b88137140cff436253fc6bfd85807/Data/Dynamic/Plugin.hs | haskell | ------------------------------------------------------------------------------
Exported
kind in a constraint to the given value, i.e. if there is an instance
Default k for and a is a free type variable of kind k in constraint c,
then a ~ Default k will be added to the context of c, and
values back to static values.
^ Provided by the plugin
^ The name of the function
^ The name of the class
------------------------------------------------------------------------------
We do some extra work to pretty print the Defaulting messages
We want to print a instead of a0
being defaulted.
The solution to the Ct
Possible additional work
What we did
Defaults any ambiguous type variables of kind k to l if Default k = l
We make assertions that `a ~ def` for all free a in pred_ty of ct. and
add these as new assertions. For meta type variables (i.e. ones that
have been instantiated with a `forall`, e.g. `forall a. Less H a`), an
assert is a derived, meaning that we emit a wanted that requires no
evidence . E.g. when checking `forall (a :: Label) . Less H a` and we
have `type instance Default Label = L`, we emit a `a0 ~ L`.
we cannot touch the variable so we cannot unify them with a derived. In
that case, we emit a given, saying that `a ~ L` i.e. we essentially
change the type of `True :: F a Bool` to `True :: a ~ L => F a Bool`.
Note that we cannot simply emit a given for both, since we cannot
mention a meta type variable in a given.
--------------------------------------------------------------
--------------------------------------------------------------
| Solves Γ |- (a :: Type) ~ (b :: Type) if a ~ Dynamic or b ~ Dynamic
setEvBind allows us to emit the evExpr we built, and since
we're at the top, it will be emitted as an exported variable
we're within a function, so setting the evBinds won't actually
put it within scope.
By binding and seqing, we ensure that the evExpr
doesn't get erased.
By applying the same function when generating the provinence and for the
lookup of the variable name later, we know we will find the corresponding
variable.
| Post-processing for Dynamics
These we need to find from case exprs.
| Here we replace the "proofs" of the casts with te actual calls to toDyn
and castDyn.
(which we can then directly use, or the seq'd ones buried within cases
for locals).
We grab the `data-dynamic_a1bK :: A -> Dynamics` from the binds, and
the `case case <dyn_expr> of {DEFAULT -> <UnivCo proof>} of <covar>` from
the expressions, where Left <data-dynamic_var_name> and Right <covar>.
The top level ones will never be recursive.
the var itself isn't preserved. It's OK though, since we have
to replace the covar itself and not from the variable name.
Cast is the only place that we do any work beyond just recursing over
the sub-expressions. Here we replace the
the respective (data-dynamic_var_name A) (i.e. apply the function to A)
and (toDyn @B ... B).
| Solves Γ |- C Dynamic
We can only dispatch on singe argument classes
Notation: {Foo A} = The dictionary for Foo A
(\ (k :: Show Dynamic) (l :: Int) (m :: Dynamic) ->
dynDispatch @(Show Dynamic => Int -> Dynamic -> Int -> Int)
{Typeable (Show Dynamic => Int -> Dynamic -> Int -> Int)}
-- ^ Only used too lookup in the table
[ (SomeTypeRep (typeRep :: TypeRep A), -- In core
toDyn @(Show Dynamic => Int -> Dynamic -> Int -> Int)
(\ (k :: Show Dynamic) (l :: Int) (m :: Dynamic) ->
loo @A {Foo A} {Show A} l (castDyn m)))
, (SomeTypeRep (typeRep :: TypeRep B), -- In core
toDyn @(Show Dynamic => Int -> Dynamic -> Int -> Int)
(\ (k :: Show Dynamic) (l :: Int) (m :: Dynamic) ->
loo @B {Foo B} {Show B} l (castDyn m)))]
-- ^ The dynamic dispatch table
"loo"
-- ^ The name of the function. Used for better error messages.
"Foo"
-- ^ The name of the class. Used for better error messages.
(m :: Dynamic)
-- ^ The dynamic value to dispatch on
(runtimeError @(Show Dynamic) "Should never be evaluated!")
-- ^ Provided to please the type gods. This dictionary
-- is just thrown away by the function after dispatch.
(l :: Int)
^ The first argument to the function , captured before
-- we had the dynamic we could use to know which type
-- to dispatch on.
(m :: Dynamic)
-- ^ The dynamic again. This will go to a castDyn to the
-- proper type before being evaluated at the function.
)
When given a dynamic (Dynamic (tr :: TypeRep a) (v :: a)), dynDispatch
looks up (SomeTypeRep tr :: SomeTypeRep) in the dispatch table.
If it finds a function 'f' that matches, it converts it to the expected
value with 'fromDyn f', if possible, and emits a runtime error otherwise.
If a function with the matching type is not found, it also emits a
runtime error, saying that no matching instance was found.
Names included for better error messages.
| The workhorse that constructs the dispatch tables.
Safe, binders are as long or longer.
If the result is dependent on the type, we must wrap it in
max :: a -> a -> a must have the type Dynamic -> Dynamic -> Dynamic
so we must cast the result to
is just the latter argument.
To make the types match up, we must make a dictionary for each of
the predicates, even though these will never be used.
but in core it's the same as Show Dynamic -> Dynamic -> Int. So we simply
show that 'Show Dynamic' and 'Dynamic -> Int' are both typeable, and
construct the evidence that 'Show Dynamic => Dynamic -> Int' is thus
typeable.
We don't want to check the constraints here, since we won't need | Copyright ( c ) 2020 - 2021
# LANGUAGE LambdaCase #
# LANGUAGE TypeFamilies #
# LANGUAGE DataKinds #
# LANGUAGE RecordWildCards #
# LANGUAGE TupleSections #
# LANGUAGE CPP #
# LANGUAGE FlexibleContexts #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE PolyKinds #
# LANGUAGE PatternSynonyms #
# LANGUAGE TypeApplications #
# LANGUAGE ViewPatterns #
module Data.Dynamic.Plugin ( plugin, Default, TypeError(..), ErrorMessage(..),
castDyn, dynDispatch, pattern Is) where
import Control.Monad
( when, unless, guard, foldM, zipWithM, msum, filterM, replicateM )
import Data.Maybe (mapMaybe, catMaybes, fromMaybe, fromJust, listToMaybe, isJust)
import Data.Either
import Data.IORef
import Data.List (nubBy, sortOn, intersperse, or, partition, minimumBy, maximumBy, sort, find)
import Control.Arrow ((&&&))
import Data.Function (on)
import Data.Kind (Constraint)
import Data.Data (Data, toConstr)
import Prelude hiding ((<>))
import qualified Data.Set as Set
import Data.Set (Set)
import Data.Proxy
import Data.Dynamic
import Text.Read (readMaybe)
import GHC.TypeLits(TypeError(..), ErrorMessage(..))
import Data.Coerce
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import System.IO.Unsafe (unsafePerformIO)
import Bag
import FV (fvVarListVarSet, fvVarSet)
import qualified TcEnv as Tc (tcLookup)
import DsUtils
import GhcPlugins hiding (TcPlugin)
import TcRnTypes
import TcPluginM
import ErrUtils (Severity(SevWarning))
import TcEvidence
import TysPrim
import PrelNames
import TyCoRep
import ClsInst
import Class
import Inst hiding (newWanted)
import MkId
import TcMType hiding (newWanted, newFlexiTyVar, zonkTcType)
import qualified TcMType as TcM
import TcType
import CoAxiom
import Unify
import TcHsSyn
import InstEnv
-- Hole fits
import RdrName (globalRdrEnvElts)
import TcRnMonad (keepAlive, getLclEnv, getGlobalRdrEnv, getGblEnv, newSysName, setGblEnv)
import TcHoleErrors
import PrelInfo (knownKeyNames)
import Data.Graph (graphFromEdges, topSort, scc)
import DsBinds (dsHsWrapper)
import DsMonad (initDsTc)
import TcEvTerm (evCallStack)
import GHC.Hs.Expr
import Constraint
import Predicate
import GHC.TypeLits (TypeError(..),ErrorMessage(..))
-- import Data.Typeable
import Type.Reflection (SomeTypeRep(..), someTypeRep)
import Data.Dynamic
import GHC.Stack
plugin :: Plugin
plugin = defaultPlugin { tcPlugin = Just . dynamicPlugin
, pluginRecompile = purePlugin
, installCoreToDos = coreDyn }
| The family allows us to ' default ' free type variables of a given
Γ , a ~ Defaul k |- c : Constraint checked for validity .
type family Default k :: k
| castDyn casts a Dynamic to any typeable value , and fails with a descriptive
error if the types do nt match . Automatically inserted for casting Dynamic
castDyn :: forall a . (Typeable a, HasCallStack) => Dynamic -> a
castDyn arg = fromDyn arg err
where err = error ("Couldn't match expected type '" ++ target
++ "' with actual dynamic type '" ++ actual ++ "'")
target = show (someTypeRep (Proxy :: Proxy a))
actual = show (dynTypeRep arg)
dynDispatch :: forall b . (Typeable b)
-> Dynamic -> b
dynDispatch insts fun_name class_name dispatcher =
case lookup argt insts of
Just f ->
fromDyn f
(error $ "Type mismatch when dispatching '"
++ fun_name
++ "' expecting '" ++ show targett
++"' but got '" ++ show (dynTypeRep f)
++ "' using dispatch table for '"
++ class_name ++ "'!")
_ -> error $ "No instance of '" ++ class_name ++ " " ++ show argt ++ "'"
++ " found when dispatching for '"
++ fun_name ++ " :: " ++ show targett
++ "', with 'Dynamic ~ " ++ show argt
++ "' in this context."
where argt = dynTypeRep dispatcher
targett = someTypeRep (Proxy :: Proxy b)
pattern Is :: forall a. (Typeable a) => a -> Dynamic
pattern Is res <- (fromDynamic @a -> Just res)
data Log = Log { log_pred_ty :: Type, log_loc :: CtLoc}
| LogDefault { log_pred_ty :: Type, log_loc :: CtLoc,
log_var :: Var, log_kind :: Kind, log_res :: Type }
| LogMarshal { log_pred_ty :: Type, log_loc :: CtLoc, log_to_dyn :: Bool}
| LogSDoc {log_pred_ty :: Type, log_loc :: CtLoc, log_msg :: SDoc}
logSrc :: Log -> RealSrcSpan
logSrc = ctLocSpan . log_loc
instance Ord Log where
compare a@Log{} b@Log{} =
if logSrc a == logSrc b
then (compare `on` showSDocUnsafe . ppr) a b
else (compare `on` logSrc) a b
compare Log{} _ = LT
compare _ Log{} = GT
compare a@LogDefault{} b@LogDefault{} =
if logSrc a == logSrc b
then (compare `on` showSDocUnsafe . ppr) a b
else (compare `on` logSrc) a b
compare LogDefault{} _ = LT
compare _ LogDefault{} = GT
compare a@LogMarshal{} b@LogMarshal{} =
if logSrc a == logSrc b
then (compare `on` showSDocUnsafe . ppr) a b
else (compare `on` logSrc) a b
compare LogMarshal{} _ = LT
compare _ LogMarshal{} = GT
compare a@LogSDoc{} b@LogSDoc{} =
if logSrc a == logSrc b
then (compare `on` showSDocUnsafe . ppr) a b
else (compare `on` logSrc) a b
instance Eq Log where
a@Log{} == b@Log{} =
((==) `on` logSrc) a b && (eqType `on` log_pred_ty) a b
Log{} == _ = False
a@LogDefault{} == b@LogDefault{} =
((==) `on` logSrc) a b && (eqType `on` log_pred_ty) a b
&& ((==) `on` log_var) a b
LogDefault{} == _ = False
a@LogMarshal{} == b@LogMarshal{} =
((==) `on` logSrc) a b && (eqType `on` log_pred_ty) a b
LogMarshal{} == _ = False
a@LogSDoc{} == b@LogSDoc{} =
((==) `on` logSrc) a b
&& (eqType `on` log_pred_ty) a b
&& ((==) `on` showSDocUnsafe . log_msg) a b
LogSDoc{} == _ = False
instance Outputable Log where
ppr Log{..}
| Just msg <- userTypeError_maybe log_pred_ty = pprUserTypeErrorTy msg
| otherwise = text "DataDynamicPlugin" <+> ppr log_pred_ty
ppr LogDefault{..} = fsep [ text "Defaulting"
, quotes (ppr (mkTyVarTy log_var)
<+> dcolon <+> ppr log_kind)
, text "to"
, quotes (ppr log_res)
, text "in"
, quotes (ppr log_pred_ty)]
where printFlav Given = "Will default"
printFlav _ = "Defaulting"
ppr LogMarshal{..} = fsep [ text "Marshalling"
, quotes (ppr log_pred_ty)
, text (if log_to_dyn
then "to Dynamic"
else "from Dynamic") ]
ppr LogSDoc{..} = log_msg
zonkLog :: Log -> TcPluginM Log
zonkLog log@Log{..} = do zonked <- zonkTcType log_pred_ty
return $ log{log_pred_ty=zonked}
We do n't want to zonk LogDefault , since then we ca n't see what variable was
zonkLog log = return log
logToErr :: Log -> TcPluginM Ct
logToErr Log{..} = mkWanted log_loc log_pred_ty
logToErr LogDefault{..} =
sDocToTyErr [ text "Defaulting"
, quotes (ppr (mkTyVarTy log_var)
<+> dcolon <+> ppr log_kind)
, text "to"
, quotes (ppr log_res)
, text "in"
, quotes (ppr log_pred_ty)] >>= mkWanted log_loc
logToErr LogMarshal{..} =
sDocToTyErr [ text "Marshalling"
, quotes (ppr log_pred_ty)
, text (if log_to_dyn
then "to Dynamic"
else "from Dynamic") ] >>= mkWanted log_loc
logToErr LogSDoc{..} = sDocToTyErr [log_msg] >>= mkWanted log_loc
sDocToTyErr :: [SDoc] -> TcPluginM Type
sDocToTyErr docs =
do txtCon <- promoteDataCon <$> tcLookupDataCon typeErrorTextDataConName
appCon <- promoteDataCon <$> tcLookupDataCon typeErrorAppendDataConName
dflags <- unsafeTcPluginTcM getDynFlags
let txt str = mkTyConApp txtCon [mkStrLitTy $ fsLit str]
sppr = txt . showSDoc dflags . ppr
app ty1 ty2 = mkTyConApp appCon [ty1, ty2]
mkTyErr $ foldl1 app $ map sppr $ intersperse (text " ") docs
addWarning :: DynFlags -> Log -> TcPluginM ()
addWarning dflags log = tcPluginIO $ warn (ppr log)
where warn = putLogMsg dflags NoReason SevWarning
(RealSrcSpan (logSrc log)) (defaultErrStyle dflags)
data Flags = Flags { f_debug :: Bool
, f_quiet :: Bool
, f_keep_errors :: Bool
} deriving (Show)
getFlags :: [CommandLineOption] -> Flags
getFlags opts = Flags { f_debug = "debug" `elem` opts
, f_quiet = "quiet" `elem` opts
, f_keep_errors = "keep_errors" `elem` opts
}
pprOut :: Outputable a => String -> a -> TcPluginM ()
pprOut str a = do dflags <- unsafeTcPluginTcM getDynFlags
tcPluginIO $ putStrLn (str ++ " " ++ showSDoc dflags (ppr a))
dynamicPlugin :: [CommandLineOption] -> TcPlugin
dynamicPlugin opts = TcPlugin initialize solve stop
where
flags@Flags{..} = getFlags opts
initialize = do
when f_debug $ tcPluginIO $ putStrLn "Starting DataDynamicPlugin in debug mode..."
when f_debug $ tcPluginIO $ print flags
tcPluginIO $ newIORef Set.empty
solve :: IORef (Set Log) -> [Ct] -> [Ct] -> [Ct] -> TcPluginM TcPluginResult
solve warns given derived wanted = do
dflags <- unsafeTcPluginTcM getDynFlags
let pprDebug :: Outputable a => String -> a -> TcPluginM ()
pprDebug str a = when f_debug $ pprOut str a
pprDebug "Solving" empty
pprDebug "-------" empty
mapM_ (pprDebug "Given:") given
mapM_ (pprDebug "Derived:") derived
mapM_ (pprDebug "Wanted:") wanted
pprDebug "-------" empty
pluginTyCons <- getPluginTyCons
let solveWFun :: ([Ct], ([(EvTerm, Ct)],[Ct], Set Log)) -> (SolveFun, String)
-> TcPluginM ([Ct], ([(EvTerm, Ct)],[Ct], Set Log))
solveWFun (unsolved, (solved, more, logs)) (solveFun, explain) = do
(still_unsolved, (new_solved, new_more, new_logs)) <-
inspectSol <$> mapM (solveFun pluginTyCons) unsolved
mapM_ (pprDebug (explain ++ "-sols")) new_solved
mapM_ (pprDebug (explain ++ "-more")) new_more
return (still_unsolved, (solved ++ new_solved,
more ++ new_more,
logs `Set.union` new_logs))
order :: [(SolveFun, String)]
order = [ (solveDynamic, "Discharging")
, (solveDefault, "Defaulting")
, (solveDynamicTypeables, "SDTs")
, (solveDynDispatch, "Checking Dynamic Dispatch") ]
to_check = wanted ++ derived
(_, (solved_wanteds, more_cts, logs)) <-
foldM solveWFun (to_check, ([],[],Set.empty)) order
errs <- if f_keep_errors
then mapM logToErr (Set.toAscList logs)
else tcPluginIO $ modifyIORef warns (logs `Set.union`) >> mempty
return $ TcPluginOk solved_wanteds (errs ++ more_cts)
stop warns = do dflags <- unsafeTcPluginTcM getDynFlags
logs <- Set.toAscList <$> tcPluginIO (readIORef warns)
zonked_logs <- mapM zonkLog logs
unless f_quiet $ mapM_ (addWarning dflags) zonked_logs
data PluginTyCons = PTC { ptc_default :: TyCon
, ptc_dc :: DynCasts }
data DynCasts = DC { dc_typeable :: Class
, dc_dynamic :: TyCon
, dc_to_dyn :: Id
, dc_cast_dyn :: Id
, dc_has_call_stack :: TyCon
, dc_dyn_dispatch :: Id
, dc_sometyperep :: TyCon
, dc_sometyperep_dc :: DataCon
, dc_typerep :: Id }
getPluginTyCons :: TcPluginM PluginTyCons
getPluginTyCons =
do fpmRes <- findImportedModule (mkModuleName "Data.Dynamic.Plugin") Nothing
dc_dynamic <- getTyCon dYNAMIC "Dynamic"
dc_typeable <- getClass tYPEABLE_INTERNAL "Typeable"
dc_sometyperep <- getTyCon tYPEABLE_INTERNAL "SomeTypeRep"
dc_sometyperep_dc <- getDataCon tYPEABLE_INTERNAL "SomeTypeRep"
dc_typerep <- getId tYPEABLE_INTERNAL "typeRep"
dc_to_dyn <- getId dYNAMIC "toDyn"
dc_has_call_stack <- getTyCon gHC_STACK_TYPES "HasCallStack"
case fpmRes of
Found _ mod ->
do ptc_default <- getTyCon mod "Default"
dc_cast_dyn <- getId mod "castDyn"
dc_dyn_dispatch <- getId mod "dynDispatch"
let ptc_dc = DC {..}
return PTC{..}
NoPackage uid -> pprPanic "Plugin module not found (no package)!" (ppr uid)
FoundMultiple ms -> pprPanic "Multiple plugin modules found!" (ppr ms)
NotFound{..} -> pprPanic "Plugin module not found!" empty
where getTyCon mod name = lookupOrig mod (mkTcOcc name) >>= tcLookupTyCon
getDataCon mod name = lookupOrig mod (mkDataOcc name) >>= tcLookupDataCon
getPromDataCon mod name = promoteDataCon <$> getDataCon mod name
getClass mod name = lookupOrig mod (mkClsOcc name) >>= tcLookupClass
getId mod name = lookupOrig mod (mkVarOcc name) >>= tcLookupId
type SolveFun = PluginTyCons -> Ct -> TcPluginM Solution
wontSolve :: Ct -> TcPluginM Solution
wontSolve = return . Left
couldSolve :: Maybe (EvTerm,Ct) -> [Ct] -> Set Log -> TcPluginM Solution
couldSolve ev work logs = return (Right (ev,work,logs))
solveDefault :: SolveFun
solveDefault ptc@PTC{..} ct =
do defaults <- catMaybes <$> mapM getDefault (tyCoVarsOfCtList ct)
if null defaults then wontSolve ct
For skolems ( " rigid " type variables like the a in ` True : : F ) ,
else do let (eq_tys, logs) = unzip $ map mkTyEq defaults
assert_eqs <- mapM mkAssert eq_tys
couldSolve Nothing assert_eqs (Set.fromList logs)
where mkAssert = either (mkDerived bump) (uncurry (mkGiven bump))
bump = bumpCtLocDepth $ ctLoc ct
getDefault var = fmap ((var,) . snd) <$> matchFam ptc_default [varType var]
mkTyEq (var,def) = ( if isMetaTyVar var then Left pred_ty
else Right (pred_ty, proof),
LogDefault{log_pred_ty = ctPred ct,
log_var = var, log_kind = varType var,
log_res = def, log_loc =ctLoc ct})
where EvExpr proof = mkProof "data-dynamic-default" (mkTyVarTy var) defApp
pred_ty = mkPrimEqPredRole Nominal (mkTyVarTy var) defApp
defApp = mkTyConApp ptc_default [varType var]
mkTyErr :: Type -> TcPluginM Type
mkTyErr msg = flip mkTyConApp [typeKind msg, msg] <$>
tcLookupTyCon errorMessageTypeErrorFamName
| Creates a type error with the given string at the given loc .
mkTypeErrorCt :: CtLoc -> String -> TcPluginM Ct
mkTypeErrorCt loc str =
do txtCon <- promoteDataCon <$> tcLookupDataCon typeErrorTextDataConName
appCon <- promoteDataCon <$> tcLookupDataCon typeErrorAppendDataConName
vappCon <- promoteDataCon <$> tcLookupDataCon typeErrorVAppendDataConName
let txt str = mkTyConApp txtCon [mkStrLitTy $ fsLit str]
app ty1 ty2 = mkTyConApp appCon [ty1, ty2]
vapp ty1 ty2 = mkTyConApp vappCon [ty1, ty2]
unwty = foldr1 app . map txt . intersperse " "
ty_err_ty = foldr1 vapp $ map (unwty . words) $ lines str
te <- mkTyErr ty_err_ty
mkWanted loc te
getErrMsgCon :: TcPluginM TyCon
getErrMsgCon = lookupOrig gHC_TYPELITS (mkTcOcc "ErrorMessage") >>= tcLookupTyCon
Utils
mkDerived :: CtLoc -> PredType -> TcPluginM Ct
mkDerived loc eq_ty = flip setCtLoc loc . CNonCanonical <$> newDerived loc eq_ty
mkWanted :: CtLoc -> PredType -> TcPluginM Ct
mkWanted loc eq_ty = flip setCtLoc loc . CNonCanonical <$> newWanted loc eq_ty
mkGiven :: CtLoc -> PredType -> EvExpr -> TcPluginM Ct
mkGiven loc eq_ty ev = flip setCtLoc loc . CNonCanonical <$> newGiven loc eq_ty ev
mkProof :: String -> Type -> Type -> EvTerm
mkProof str ty1 ty2 = evCoercion $ mkUnivCo (PluginProv str) Nominal ty1 ty2
splitEquality :: Type -> Maybe (Kind, Type, Type)
splitEquality pred =
do (tyCon, [k1, k2, ty1,ty2]) <- splitTyConApp_maybe pred
guard (tyCon == eqPrimTyCon)
guard (k1 `eqType` k2)
return (k1, ty1,ty2)
inspectSol :: Ord d => [Either a (Maybe b, [c], Set d)]
-> ([a], ([b], [c], Set d))
inspectSol xs = (ls, (catMaybes sols, concat more, Set.unions logs))
where (ls, rs) = partitionEithers xs
(sols, more, logs) = unzip3 rs
Marshalling to and from Dynamic
solveDynamic :: SolveFun
solveDynamic ptc@PTC{..} ct
| Just (k1,ty1,ty2) <- splitEquality (ctPred ct) = do
let DC {..} = ptc_dc
dynamic = mkTyConApp dc_dynamic []
kIsType = tcIsLiftedTypeKind k1
isDyn ty = ty `tcEqType` dynamic
if kIsType && (isDyn ty1 || isDyn ty2)
then marshalDynamic k1 ty1 ty2 ptc ct
else wontSolve ct
| otherwise = wontSolve ct
dYNAMICPLUGINPROV :: String
dYNAMICPLUGINPROV = "data-dynamic"
marshalDynamic :: Kind -> Type -> Type -> SolveFun
marshalDynamic k1 ty1 ty2 PTC{..} ct@(CIrredCan CtWanted{ctev_dest = HoleDest coho} _) =
do let DC {..} = ptc_dc
dynamic = mkTyConApp dc_dynamic []
isDyn ty = ty `tcEqType` dynamic
relTy = if isDyn ty1 then ty2 else ty1
log = Set.singleton (LogMarshal relTy (ctLoc ct) (isDyn ty2))
hasTypeable = mkTyConApp (classTyCon dc_typeable) [k1, relTy]
hasCallStack = mkTyConApp dc_has_call_stack []
checks@[check_typeable, check_call_stack] <- mapM (mkWanted (ctLoc ct)) [hasTypeable, hasCallStack]
call_stack <- mkFromDynErrCallStack dc_cast_dyn ct $ ctEvEvId $ ctEvidence check_call_stack
let typeableDict = ctEvEvId $ ctEvidence check_typeable
evExpr = if isDyn ty1
then mkApps (Var dc_cast_dyn) [Type relTy, Var typeableDict, call_stack]
else mkApps (Var dc_to_dyn) [Type relTy, Var typeableDict]
(at1,at2) = if isDyn ty1 then (dynamic, relTy) else (relTy, dynamic)
deb <- unsafeTcPluginTcM $ mkSysLocalM (fsLit dYNAMICPLUGINPROV) (exprType evExpr)
let mkProof prov = mkUnivCo (PluginProv prov) Nominal at1 at2
       if isTopTcLevel (ctLocLevel $ ctLoc ct)
       then do
         let prov = marshalVarToString deb
         setEvBind $ mkGivenEvBind (setIdExported deb) (EvExpr evExpr)
         couldSolve (Just (evCoercion (mkProof prov), ct)) checks log
       else do
         let prov = dYNAMICPLUGINPROV
             let_b = Let (NonRec deb evExpr)
                         (seqVar deb $ Coercion $ mkProof prov)
         couldSolve (Just (EvExpr let_b, ct)) checks log
marshalDynamic _ _ _ _ ct = wontSolve ct
marshalVarToString :: Var -> String
marshalVarToString var = nstr ++ "_" ++ ustr
where nstr = occNameString (occName var)
ustr = show (varUnique var)
mkFromDynErrCallStack :: Id -> Ct -> EvVar -> TcPluginM EvExpr
mkFromDynErrCallStack fdid ct csDict =
flip mkCast coercion <$>
unsafeTcPluginTcM (evCallStack (EvCsPushCall name loc var))
where name = idName fdid
loc = ctLocSpan (ctLoc ct)
var = Var csDict
coercion = mkSymCo (unwrapIP (exprType var))
coreDyn :: [CommandLineOption] -> [CoreToDo] -> CoreM [CoreToDo]
coreDyn clo tds = return $ CoreDoPluginPass "DataDynamicPlugin" (bindsOnlyPass addDyn):tds
where
Flags {..} = getFlags clo
found var expr = Map.singleton var expr
addDyn :: CoreProgram -> CoreM CoreProgram
addDyn program = mapM (addDynToBind dexprs) program
where
dexprs = Map.fromList $ concatMap getDynamicCastsBind program
    -- We need to find the two types of expressions, either the exported globals
getDynamicCastsBind :: CoreBind -> [(Either String Var, Expr Var)]
getDynamicCastsBind (NonRec var expr) |
occNameString (occName var) == dYNAMICPLUGINPROV =
(Left $ marshalVarToString var, Var var):getDynamicCastsExpr expr
getDynamicCastsBind (NonRec _ expr) = getDynamicCastsExpr expr
getDynamicCastsBind (Rec as) =
concatMap (getDynamicCastsExpr . snd) as
getDynamicCastsExpr :: Expr Var -> [(Either String Var, Expr Var)]
getDynamicCastsExpr (Var _) = []
getDynamicCastsExpr (Lit _) = []
getDynamicCastsExpr (App expr arg) =
concatMap getDynamicCastsExpr [expr, arg]
getDynamicCastsExpr (Lam _ expr) = getDynamicCastsExpr expr
getDynamicCastsExpr (Let bind expr) =
getDynamicCastsBind bind ++ getDynamicCastsExpr expr
getDynamicCastsExpr c@(Case expr covar _ alts) =
ecasts ++ concatMap gdcAlts alts
where gdcAlts (_,_,e) = getDynamicCastsExpr e
ecasts = case expr of
              -- This is the expression built by the seqVar, though unfortunately,
Case dexpr _ _ [(DEFAULT, [], Coercion (UnivCo (PluginProv prov) _ _ _))] |
prov == dYNAMICPLUGINPROV -> [(Right covar, dexpr)]
_ -> getDynamicCastsExpr expr
getDynamicCastsExpr (Cast expr _) = getDynamicCastsExpr expr
getDynamicCastsExpr (Tick _ expr) = getDynamicCastsExpr expr
getDynamicCastsExpr (Type _) = []
getDynamicCastsExpr (Coercion _) = []
addDynToBind :: DynExprMap -> CoreBind -> CoreM CoreBind
addDynToBind dexprs (NonRec b expr) = NonRec b <$> addDynToExpr dexprs expr
addDynToBind dexprs (Rec as) = do
let (vs, exprs) = unzip as
nexprs <- mapM (addDynToExpr dexprs) exprs
return (Rec $ zip vs nexprs)
addDynToExpr :: DynExprMap -> Expr Var -> CoreM (Expr Var)
addDynToExpr _ e@(Var _) = pure e
addDynToExpr _ e@(Lit _) = pure e
addDynToExpr dexprs (App expr arg) =
App <$> addDynToExpr dexprs expr <*> addDynToExpr dexprs arg
addDynToExpr dexprs (Lam b expr) = Lam b <$> addDynToExpr dexprs expr
addDynToExpr dexprs (Let binds expr) = Let <$> addDynToBind dexprs binds
<*> addDynToExpr dexprs expr
addDynToExpr dexprs (Case expr b ty alts) =
(\ne na -> Case ne b ty na) <$> addDynToExpr dexprs expr
<*> mapM addDynToAlt alts
where addDynToAlt (c, bs, expr) = (c, bs,) <$> addDynToExpr dexprs expr
    -- (A `cast` UnivCo (PluginProv <data-dynamic_var_name>) Nominal A Dynamic)
    -- and (B `cast` SubCo <covar>) that was generated in the TcPlugin with
addDynToExpr dexprs orig@(Cast expr coercion) = do
nexpr <- addDynToExpr dexprs expr
case coercion of
UnivCo (PluginProv prov) _ _ _ |
Just expr <- dexprs Map.!? Left prov -> found expr nexpr
SubCo (CoVarCo co) | Just expr <- dexprs Map.!? Right co -> found expr nexpr
UnivCo (PluginProv _) _ _ _ -> pprPanic "Unfound var" $ ppr coercion
_ -> return (Cast nexpr coercion)
where found expr nexpr = do
let res = App expr nexpr
when f_debug $
liftIO $ putStrLn $ showSDocUnsafe $
text "Replacing" <+> parens (ppr orig)
<+> text "with" <+> parens (ppr res)
return res
addDynToExpr dexprs (Tick t expr) = Tick t <$> addDynToExpr dexprs expr
addDynToExpr _ e@(Type _) = pure e
addDynToExpr _ e@(Coercion _) = pure e
solveDynDispatch :: SolveFun
solveDynDispatch ptc@PTC{..} ct | CDictCan{..} <- ct
, [arg] <- cc_tyargs
, arg `tcEqType` dynamic = do
class_insts <- flip classInstances cc_class <$> getInstEnvs
let (unsaturated, saturated) = partition (not . null . is_tvs) class_insts
class_tys = map is_tys saturated
if not (all ((1 ==) . length) class_tys) then wontSolve ct
else do
       -- Make sure we check any superclasses
scChecks <- mapM (mkWanted (ctLoc ct) .
flip piResultTys cc_tyargs .
mkSpecForAllTys (classTyVars cc_class))
$ classSCTheta cc_class
let scEvIds = map (evId . ctEvId) scChecks
args_n_checks <- mapM (methodToDynDispatch cc_class class_tys)
(classMethods cc_class)
let logs = Set.fromList $ [LogSDoc (ctPred ct) (ctLoc ct) $
fsep ([text "Building dispatch table for"
, quotes $ ppr $ ctPred ct
, text "based on"
, fsep $ map (quotes . ppr) saturated
] ++ if null unsaturated then []
else [ text "Skipping unsaturated instances"
, fsep $ map (quotes . ppr) unsaturated ])]
classCon = tyConSingleDataCon (classTyCon cc_class)
(args, checks) = unzip args_n_checks
proof = evDataConApp classCon cc_tyargs $ scEvIds ++ args
couldSolve (Just (proof, ct)) (scChecks ++ concat checks) logs
| otherwise = wontSolve ct
where
DC {..} = ptc_dc
dynamic = mkTyConApp dc_dynamic []
sometyperep = mkTyConApp dc_sometyperep []
-- | The workhorse. Creates the dictionary for C Dynamic on the fly.
methodToDynDispatch :: Class
-> [[Type]]
-> Id
-> TcPluginM (EvExpr, [Ct])
-- For method 'loo :: Show a => Int -> a -> Int -> Int' in with instances
-- Foo A and, this will generate the following (in Core):
--   { (Show Dynamic => Int -> Dynamic -> Int -> Int) }
--   { (Show Dynamic => Int -> Dynamic -> Int -> Int) }
-- And similar entries for each function in the class.
methodToDynDispatch cc_class class_tys fid = do
let fname = occNameFS (getOccName fid)
cname = occNameFS (getOccName cc_class)
fun_name <- unsafeTcPluginTcM $ mkStringExprFS fname
class_name <- unsafeTcPluginTcM $ mkStringExprFS cname
let (tvs, ty) = tcSplitForAllVarBndrs (varType fid)
(res, preds) = splitPreds ty
bound_preds = map (mkForAllTys tvs) preds
dpt_ty = mkBoxedTupleTy [sometyperep, dynamic]
fill_ty = piResultTys (mkForAllTys tvs res)
enough_dynamics = replicate (length $ head class_tys) dynamic
dyn_ty = fill_ty enough_dynamics
         -- Whole ty is the type minus the a in the beginning
whole_ty = funResultTy $ piResultTys (varType fid) enough_dynamics
unsatisfied_preds = map (`piResultTy` dynamic) $ drop 1 bound_preds
mkMissingDict t =
mkRuntimeErrorApp rUNTIME_ERROR_ID t "Dynamic dictonary shouldn't be evaluated!"
dynb_pred_dicts = map mkMissingDict unsatisfied_preds
dyn_pred_vars <- unsafeTcPluginTcM $ mapM (mkSysLocalM (getOccFS fid)) unsatisfied_preds
mkDpEl :: Type -> [CoreBndr] -> [Type] -> TcPluginM (CoreExpr, [Ct])
mkDpEl res_ty revl dts@[dp_ty] =
do (tev, check_typeable) <- checkTypeable whole_ty
(dptev, check_typeable_dp) <- checkTypeable dp_ty
check_preds <- mapM (mkWanted (ctLoc ct) . flip piResultTys dts) bound_preds
let dyn_app = mkCoreApps (Var dc_to_dyn) [Type whole_ty, Var tev]
pevs = map ctEvId check_preds
fapp = mkCoreApps (Var fid) $ Type dp_ty : map Var pevs
toFappArg :: (Type, Type, CoreBndr) -> TcPluginM (CoreExpr, [Ct])
toFappArg (t1,t2,b) | tcEqType t1 t2 = return (Var b, [])
| otherwise = do
(tev, check_typeable) <- checkTypeable t2
ccs <- mkWanted (ctLoc ct) $ mkTyConApp dc_has_call_stack []
cs <- mkFromDynErrCallStack dc_cast_dyn ct $ ctEvEvId $ ctEvidence ccs
let app = mkCoreApps (Var dc_cast_dyn)
[Type t2, Var tev, cs, Var b]
return (app,[check_typeable, ccs])
matches :: [CoreBndr] -> Type -> [(Type, Type, CoreBndr)]
matches [] _ = []
                matches (b:bs) ty = (varType b, t, b):matches bs r
                  where Just (t, r) = splitFunTy_maybe ty  -- assumed: peel one argument type off ty
(fappArgs, fappChecks) <- unzip <$> mapM toFappArg (matches revl res_ty)
let fapp_app = mkCoreApps fapp fappArgs
             -- a toDyn. I.e. for Ord Dynamic,
             -- NOTE BREAKS, i.e. (A :: Dynamic) (B :: Dynamic)
dfapp_arg = if (exprType (lambda fapp_app) `tcEqType` whole_ty)
then lambda fapp_app
else lambda (td fapp_app)
where dfapp_arg_mb = lambda fapp_app
lambda = mkCoreLams (dyn_pred_vars ++ revl)
td x = mkCoreApps (Var dc_to_dyn) [Type dp_ty, Var dptev, x]
dfapp = mkCoreApps dyn_app [dfapp_arg]
trapp = mkCoreApps (Var dc_typerep) [Type (tcTypeKind dp_ty), Type dp_ty, Var dptev]
strapp = mkCoreApps
(Var (dataConWrapId dc_sometyperep_dc))
[Type (tcTypeKind dp_ty), Type dp_ty, trapp]
checks = [check_typeable, check_typeable_dp] ++ check_preds ++ concat fappChecks
tup = mkCoreTup [strapp, dfapp]
return (tup, checks)
mkDpEl _ _ tys = pprPanic "Multi-param typeclasses not supported!" $ ppr tys
finalize (dp:lams) res_ty = do
let revl = reverse (dp:lams)
mkFunApp a b = mkTyConApp funTyCon [tcTypeKind a,tcTypeKind b, a, b]
(tev, check_typeable) <- checkTypeable whole_ty
let saturated = filter is_saturated class_tys
is_saturated = all (not . isPredTy)
dpt_els_n_checks <- mapM (\ct -> mkDpEl (fill_ty ct) revl ct) saturated
let (dpt_els, dpt_checks) = unzip dpt_els_n_checks
app = mkCoreApps (Var dc_dyn_dispatch)
([ Type whole_ty, evId tev, mkListExpr dpt_ty dpt_els
, fun_name, class_name, Var dp]
++ dynb_pred_dicts
++ map Var revl)
checks = check_typeable:concat dpt_checks
           -- TODO app to pred dicts
lamApp = mkCoreLams (dyn_pred_vars ++ revl) app
return (lamApp, checks)
     -- We figure out all the arguments to the functions first from the type.
loop lams ty = do
case splitFunTy_maybe ty of
Just (t,r) -> do
bid <- unsafeTcPluginTcM $ mkSysLocalM (getOccFS fid) t
loop (bid:lams) r
_ -> finalize lams ty
loop [] dyn_ty
checkTypeable :: Type -> TcPluginM (EvId, Ct)
checkTypeable ty = do
c <- mkWanted (ctLoc ct) $ mkTyConApp (classTyCon dc_typeable) [tcTypeKind ty, ty]
return (ctEvId c, c)
splitPreds :: Type -> (Type, [PredType])
splitPreds ty =
case tcSplitPredFunTy_maybe ty of
Just (pt, t) -> (pt:) <$> splitPreds t
_ -> (ty, [])
-- | GHC doesn't know how to solve (Show Dynamic => Dynamic -> Int),
solveDynamicTypeables :: SolveFun
solveDynamicTypeables ptc@PTC{..}
ct | CDictCan{..} <- ct
, cc_class == dc_typeable
, [kind, ty] <- cc_tyargs
, tcIsLiftedTypeKind kind
, (res_ty, preds@(p:ps)) <- splitPreds ty
, pts <- mapMaybe splitTyConApp_maybe preds
, all (tcEqType dynamic) $ concatMap snd pts =
do (r_typable_ev, r_typeable_ct) <- checkTypeable res_ty
     -- them for the actual e.g. Show Dynamic, since we'll never
     -- call the function at Dynamic.
     -- (mkWanted (ctLoc ct)) preds
t_preds <- mapM checkTypeablePred pts
let (p_evs, p_cts) = unzip t_preds
checks = r_typeable_ct:concat p_cts
classCon = tyConSingleDataCon (classTyCon cc_class)
r_ty_ev = EvExpr $ evId r_typable_ev
(final_ty, proof) = foldr conTypeable (res_ty, r_ty_ev) p_evs
couldSolve (Just (proof, ct)) checks Set.empty
| otherwise = wontSolve ct
where
DC {..} = ptc_dc
dynamic = mkTyConApp dc_dynamic []
checkTypeablePred :: (TyCon, [Type]) -> TcPluginM ((Type, EvTerm), [Ct])
checkTypeablePred (tc, tys) = do
args_typeable <- mapM checkTypeable tys
let (_, evcts) = unzip args_typeable
ev = EvTypeableTyCon tc (map (EvExpr . evId . ctEvId) evcts)
ty = mkTyConApp tc tys
return ((ty, evTypeable ty ev), evcts)
conTypeable :: (Type, EvTerm) -> (Type, EvTerm) -> (Type, EvTerm)
conTypeable (fty, fterm) (argty, argterm) =
let res_ty = mkTyConApp funTyCon [tcTypeKind fty, tcTypeKind argty, fty, argty]
r_term = evTypeable res_ty $ EvTypeableTrFun fterm argterm
in (res_ty, r_term)
checkTypeable :: Type -> TcPluginM (EvId, Ct)
checkTypeable ty = do
c <- mkWanted (ctLoc ct) $ mkTyConApp (classTyCon dc_typeable) [tcTypeKind ty, ty]
return (ctEvId c, c)
|
3937b198122da2c16c6192e83d1519795ffdbc07d8665d52a65aedad9477f8a1 | instedd/planwise | ident.clj | (ns planwise.model.ident
(:require [schema.core :as s]
[planwise.model.users :refer [User]]))
(def Ident
"User identity as found in the session cookie and JWE tokens"
{:user-id s/Int
:user-email s/Str
   ;; JWE tokens might include expiration information
(s/optional-key :exp) s/Int})
;; User identity related functions
;; The user identity is the user information carried around in the session
;; cookies and the JWE tokens.
(s/defn user->ident :- Ident
[user :- User]
{:user-email (:email user)
:user-id (:id user)})
(s/defn user-email :- s/Str
[user-ident :- Ident]
(:user-email user-ident))
(s/defn user-id :- s/Int
[user-ident :- Ident]
(:user-id user-ident))
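;; Usage sketch (not part of the original file; assumes a user map that
;; satisfies the User schema, e.g. one carrying :id and :email keys):
(comment
  (def u {:id 42 :email "ada@example.org"})
  (user->ident u)               ;; => {:user-email "ada@example.org", :user-id 42}
  (user-id (user->ident u))     ;; => 42
  (user-email (user->ident u))) ;; => "ada@example.org"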
ea738b4c34dd4c11407aeab507c44ddfcf3d21db4b0639022b857d514cc27ff6 | unnohideyuki/Tiger-in-Haskell | Semant.hs | module Semant where
import Debug.Trace
import qualified Data.List as List
import qualified Absyn as A
import qualified Env as E
import qualified Symbol as S
import qualified Types as T
import qualified Translate as TL
import qualified Temp
import qualified DalvikFrame as Frame
type VEnv = S.Table E.EnvEntry
type TEnv = S.Table T.Ty
type Unique = T.Unique
data ExpTy = ExpTy {expr::TL.Exp, ty::T.Ty}
data Optype = Arith | Comp | Eq
actual_ty :: Show pos => T.Ty -> pos -> T.Ty
actual_ty typ pos =
case typ of
T.NAME s t -> case t of
Just ty' -> actual_ty ty' pos
Nothing -> error $ show pos ++ "type not found (in actual_ty): " ++ s
T.ARRAY ty' u -> T.ARRAY (actual_ty ty' pos) u
_ -> typ
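-- actual_ty follows T.NAME aliases (and array element aliases) down to the
-- underlying type; the pos argument is only used for error reporting.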
type_mismatch :: (Show a, Show b, Show c) => a -> b -> c -> t
type_mismatch e a pos =
error $ show pos ++ "type mismatch: expected " ++ show e ++ ", actual " ++ show a
check_type :: Show pos => T.Ty -> T.Ty -> pos -> Bool
check_type t1 t2 pos =
let
t1' = actual_ty t1 pos
t2' = actual_ty t2 pos
in
if t1' /= t2'
then
case (t1', t2') of
(T.RECORD _ _, T.NIL) -> True
(T.NIL, T.RECORD _ _) -> True
_ -> type_mismatch t1' t2' pos
else True
must_not_reach :: t
must_not_reach =
error "fatal: must not reach here"
transProg :: VEnv-> TEnv -> A.Exp -> (T.Ty, [Frame.Frag], Temp.Temp)
transProg venv tenv prog =
let
temp = Temp.create
(mainlevel, temp') =
TL.newLevel
TL.outermost (Temp.namedLabel "main") [] temp
errdest = Temp.namedLabel "_CanNotBreak_"
(expty, level', frgs, temp'') = transExp venv tenv errdest mainlevel [] temp' prog
(stm, temp3) = TL.bodyStm (expr expty) (ty expty) temp''
frag = Frame.Proc { Frame.get_body=stm
, Frame.get_frame=TL.frame level'}
in
(ty expty, frag:frgs, temp3)
transExp :: VEnv-> TEnv -> Temp.Label -> TL.Level -> [Frame.Frag]
-> Temp.Temp
-> A.Exp
-> (ExpTy, TL.Level, [Frame.Frag], Temp.Temp)
transExp venv tenv brkdest =
let
trexp :: TL.Level -> [Frame.Frag] -> Temp.Temp
-> A.Exp
-> (ExpTy, TL.Level, [Frame.Frag], Temp.Temp)
trexp level frgs temp A.NilExp = (ExpTy (TL.nilExp) T.NIL, level, frgs, temp)
trexp level frgs temp (A.IntExp i _) = (ExpTy (TL.intExp i) T.INT, level, frgs, temp)
trexp level frgs temp (A.StringExp s _) =
(ExpTy{expr=TL.stringExp s, ty=T.STRING}, level, frgs, temp)
trexp level frgs temp A.OpExp{A.oper=oper, A.lhs=lhs, A.rhs=rhs, A.pos=pos} =
let
(ExpTy {expr=e1, ty=lty }, lv', frgs', temp') = trexp level frgs temp lhs
(ExpTy {expr=e2, ty=rty }, lv'', frgs'', temp'') = trexp lv' frgs' temp' rhs
classify op =
case op of
A.PlusOp -> Arith
A.MinusOp -> Arith
A.TimesOp -> Arith
A.DivideOp -> Arith
A.LtOp -> Comp
A.GtOp -> Comp
A.LeOp -> Comp
A.GeOp -> Comp
A.EqOp -> Eq
A.NeqOp -> Eq
check_int typ pos' =
case typ of
T.INT -> True
_ -> error $ show pos' ++ ": integer required."
check_arith = check_int lty pos && check_int rty pos
check_eq =
case actual_ty lty pos of
T.INT -> check_type lty rty pos
T.STRING -> check_type lty rty pos
T.ARRAY _ _ -> check_type lty rty pos
T.RECORD _ _ -> check_type lty rty pos
T.NIL -> check_type lty rty pos
_ -> error $ show pos ++ "type error for equality operator: " ++ show (lty, rty)
check_comp =
case lty of
T.INT -> check_type lty rty pos
T.STRING -> check_type lty rty pos
_ -> error $ show pos ++ "type error for comparison: " ++ show lty
check_result =
case classify oper of
Arith -> check_arith
Comp -> check_comp
Eq -> check_eq
trop oper' =
case oper' of
A.PlusOp -> TL.plusOp
A.MinusOp -> TL.minusOp
A.TimesOp -> TL.timesOp
A.DivideOp -> TL.divideOp
A.LtOp -> TL.ltOp
A.GtOp -> TL.gtOp
A.LeOp -> TL.leOp
A.GeOp -> TL.geOp
A.EqOp -> TL.eqOp
A.NeqOp -> TL.neqOp
(binExp, temp3) =
let
c = trop oper
in
c e1 e2 temp''
(strcmpExp, temp3s) =
let
op = trop oper
in
TL.strcmpExp e1 e2 op temp''
in
if check_result then
case lty of
T.STRING -> (ExpTy{expr=strcmpExp, ty=T.INT}, lv'', frgs'', temp3s)
_ -> (ExpTy{expr=binExp, ty=T.INT}, lv'', frgs'', temp3)
else
must_not_reach
trexp level frgs temp (A.VarExp var) = trvar level frgs temp var
trexp level frgs temp A.RecordExp{A.fields=fields, A.typ=typ, A.pos=pos} =
case S.lookup tenv typ of
Nothing -> error $ show pos ++ "record type not found: " ++ typ
Just ty' -> case actual_ty ty' pos of
T.RECORD ftys_ty u ->
let
(level', frgs', temp', ftys_exp) =
foldr
(\(sym,e',pos') (l, f, t, xs) -> case trexp l f t e' of
(expty, l', f', t') -> (l', f', t', (sym, expty, pos'):xs)
)
(level, frgs, temp, [])
fields
(cs, {- level'' -} _, frgs'', temp'') =
foldr
(\(sym,_) (cs', lv, fs, tmp) ->
case lookup sym [(s,e')|(s,e',_)<-fields] of
Just e'' -> case trexp lv fs tmp e'' of
(ExpTy{expr=expr'}, l', f', t') -> (expr':cs', l', f', t')
_ -> must_not_reach
)
([], level', frgs', temp')
ftys_ty
(e, temp3) = TL.recordExp cs temp''
in
if checkrecord ftys_ty ftys_exp pos
then
{- TODO: check level''? -}
(ExpTy {expr=e, ty=T.RECORD ftys_ty u}, level', frgs'', temp3)
else
must_not_reach
_ -> must_not_reach
where
checkrecord ftys_ty ftys_exp pos0 =
let
checker (sym, ExpTy{ty=t2}, pos') =
case lookup sym ftys_ty of
Just t1 -> check_type t1 t2 pos'
Nothing -> error $ show pos0 ++ "field not found: " ++ sym
in
(length ftys_ty == length ftys_exp) && (and $ fmap checker ftys_exp)
trexp level frgs temp (A.SeqExp exps) =
let
(lv', frgs', temp', es) =
foldr
(\exp' (l, f, t, xs) -> case trexp l f t exp' of
(e', l', f', t') -> (l', f', t', e':xs)
)
(level, frgs, temp, [])
exps
ty' = if null exps
then T.UNIT
else case last es of ExpTy{ty=typ} -> typ
(e, temp'') = TL.seqExp [e' | ExpTy{expr=e'} <- es] temp'
in
(ExpTy{expr=e, ty=ty'}, lv', frgs', temp'')
trexp level frgs temp A.AssignExp{A.vvar=var, A.exp=exp0, A.pos=pos} =
let
(ExpTy {expr=lhs, ty=vty }, lv', frgs', temp') = trvar level frgs temp var
(ExpTy {expr=rhs, ty=ety }, lv'', frgs'', temp'') = trexp lv' frgs' temp' exp0
(e, temp3) = TL.assignExp lhs rhs temp''
in
if check_type vty ety pos
then (ExpTy {expr=e, ty=T.UNIT }, lv'', frgs'', temp3)
else undefined
trexp level frgs temp A.IfExp{ A.test=test, A.thene=thenexp, A.elsee=elseexp,
A.pos=pos} =
let
(ExpTy{expr=e1, ty=testty}, lv', frgs', temp') = trexp level frgs temp test
(ExpTy{expr=e2, ty=thenty}, lv'', frgs'', temp'') = trexp lv' frgs' temp' thenexp
in
if check_type T.INT testty pos
then
case elseexp of
Just elseexp' ->
let
(ExpTy{expr=e3, ty=elsety}, lv3, frgs3, temp3) = trexp lv'' frgs'' temp'' elseexp'
(e, temp4) = TL.ifThenElse e1 e2 e3 temp3
in
if check_type thenty elsety pos then
(ExpTy{expr=e, ty=thenty}, lv3, frgs3, temp4)
else undefined
Nothing -> if check_type T.UNIT thenty pos
then
let
(e, temp3) = TL.ifThen e1 e2 temp''
in
(ExpTy{expr=e, ty=thenty}, lv'', frgs'', temp3)
else
undefined
else
undefined
trexp level frgs temp A.WhileExp{A.test=test, A.body=body, A.pos=pos} =
let
(newdest, temp') = Temp.newLabel temp
(ExpTy{expr=e1, ty=testty}, lv', frgs', temp'') = trexp level frgs temp' test
(ExpTy{expr=e2, ty=bodyty}, lv'', frgs'', temp3) =
transExp venv tenv newdest lv' frgs' temp'' body
(e, temp4) = TL.whileExp e1 e2 newdest temp3
in
if check_type T.INT testty pos && check_type T.UNIT bodyty pos
then
(ExpTy{expr=e, ty=T.UNIT}, lv'', frgs'', temp4)
else
undefined
trexp level frgs temp (A.BreakExp _) =
(ExpTy {expr=TL.breakExp brkdest, ty=T.UNIT}, level, frgs, temp)
  trexp level frgs temp A.LetExp{A.decs=decs, A.body=body
                                , A.pos = pos} =
let
transdecs (ve, te, lv, tmp, exps, fs) dec =
let
(ve', te', lv', t', exps', fs') = transDec ve te brkdest lv fs tmp dec
in
(ve', te', lv', t', exps++exps', fs')
(venv', tenv', level', temp', es, frgs') =
foldl transdecs (venv, tenv, level, temp, [], frgs) decs
(ExpTy {expr=ebody, ty=bodyty }, lv'', frgs'', temp'') =
transExp venv' tenv' brkdest level' frgs' temp' body
(e, temp3) = TL.letExp es ebody temp''
in
(ExpTy{expr=e, ty=bodyty}, lv'', frgs'', temp3)
trexp level frgs temp A.ArrayExp {A.typ=typ, A.size=size, A.init=init0,
A.pos=pos} =
case S.lookup tenv typ of
Nothing -> error $ show pos ++ "type not found: " ++ typ
Just t ->
let
ty1 = actual_ty t pos
in
case ty1 of
T.ARRAY ty' _ ->
let
(ExpTy{expr=siz, ty=sizety}, lv', frgs', temp') = trexp level frgs temp size
(ExpTy{expr=ini, ty=initty}, lv'', frgs'', temp'') = trexp lv' frgs' temp' init0
(e, temp3) = TL.arrayExp siz ini temp''
in
if check_type T.INT sizety pos && check_type ty' initty pos
then
(ExpTy {expr=e, ty=ty1}, lv'', frgs'', temp3)
else
undefined
_ -> must_not_reach
trexp level frgs temp A.ForExp{A.svar=svar, A.lo=lo, A.hi=hi, A.body=body,
A.pos=pos } =
{- translate to let/while expresion -}
let
ivar = A.SimpleVar svar pos
limitvar = A.SimpleVar "_limit" pos
decs = [A.VarDec { A.name' = svar
, A.escape' = False
, A.typ' = Nothing
, A.init' = lo
, A.pos' = pos }
,A.VarDec { A.name' = "_limit"
, A.escape' = False
, A.typ' = Nothing
, A.init' = hi
, A.pos' = pos}
]
loop = A.WhileExp { A.test = A.OpExp { A.oper = A.LeOp
, A.lhs = A.VarExp ivar
, A.rhs = A.VarExp limitvar
, A.pos = pos }
, A.body =
A.SeqExp [ body
, A.AssignExp {
A.vvar = ivar,
A.exp = A.OpExp {
A.oper = A.PlusOp,
A.lhs = A.VarExp ivar,
A.rhs = A.IntExp 1 pos,
A.pos = pos },
A.pos = pos }
]
, A.pos = pos }
in
trexp level frgs temp A.LetExp{A.decs=decs, A.body=loop, A.pos=pos}
trexp level frgs temp A.CallExp{A.func=func, A.args=args, A.pos=pos} =
case S.lookup venv func of
Nothing -> error $ show pos ++ "function not defined: " ++ func
Just (E.VarEntry _ _) ->
error $ show pos ++ "not a function: " ++ func
Just E.FunEntry{E.label=label, E.formals=formals, E.result=result} ->
let
(lv', frgs', temp', argtys) =
foldr
(\exp' (l, f, t, xs) -> case trexp l f t exp' of
(e', l', f', t') -> (l', f', t', e':xs))
(level, frgs, temp, [])
args
checkformals fmls argtys' =
let
checker (t1, ExpTy {ty=t2}) = check_type t1 t2 pos
ts = zip fmls argtys'
szcheck =
if (length fmls == length argtys') then
True
else
error $ show pos ++ "wrong number of arguments."
in
szcheck && (and $ fmap checker ts)
es = fmap expr argtys
(e, temp'') = TL.callExp label es temp'
in
if checkformals formals argtys
then
(ExpTy{expr=e, ty=actual_ty result pos}, lv', frgs', temp'')
else
undefined
trvar level frgs temp (A.SimpleVar sym pos) =
case S.lookup venv sym of
Just E.VarEntry {E.access=acc, E.ty=ty1}
-> (ExpTy {expr=TL.simpleVar acc level, ty=ty1}, level, frgs, temp)
Just _ -> error $ show pos ++ "not a variable: " ++ sym
_ -> error $ show pos ++ "undefined variable: " ++ sym
trvar level frgs temp (A.FieldVar var id' pos) =
let
(ExpTy{expr=e1, ty=ty1}, lv', frgs', temp') = trvar level frgs temp var
in
case actual_ty ty1 pos of
T.RECORD fs _ ->
case lookup id' [(s, (i, t))| (i, (s, t)) <- zip [0..] fs] of
Nothing -> error $ show pos ++ "field not found: " ++ id'
Just (i, ty') ->
let
(e, temp'') = TL.fieldVar e1 i temp'
in
(ExpTy{expr=e, ty=actual_ty ty' pos}, lv', frgs', temp'')
_ -> error $ show pos ++ "not a record: " ++ show ty1
trvar level frgs temp (A.SubscriptVar var exp0 pos) =
let
(ExpTy{expr=e1, ty=ty1}, lv', frgs', temp') = trvar level frgs temp var
in
case actual_ty ty1 pos of
T.ARRAY ty' _ ->
let
(ExpTy{expr=e2, ty=ty''}, lv'', frgs'', temp'') = trexp lv' frgs' temp' exp0
(e, temp3) = TL.subscriptVar e1 e2 temp''
in
case ty'' of
T.INT -> (ExpTy {expr=e, ty=ty'}, lv'', frgs'', temp3)
_ -> error $ show pos ++ "array subscript type:" ++ show ty''
_ -> error $ show pos ++ "not an array"
in
trexp
transTy :: S.Table T.Ty -> A.Ty -> Bool -> T.Ty
transTy tenv =
let
-- dirty hack: generate a unique number from the position.
pos2u (A.Pos l c) = fromIntegral $ l * 10000 + c
transty (A.NameTy sym _) False =
case S.lookup tenv sym of
Just typ -> typ
_ -> error "must not reach here, transy A.NameTy."
transty (A.NameTy sym pos) True =
let
follow_ty seen sym0 =
if List.all (/= sym0) seen then
case S.lookup tenv sym0 of
Just ty' ->
case ty' of
T.NAME s (Just (T.NAME s' _)) ->
T.NAME s (Just $ follow_ty (s:seen) s')
_ -> ty'
_ -> error "must not reach here, update A.NameTy. (2)"
else
{- must not reach here? -}
error $ show pos ++ "cyclic dependency': " ++ sym0
in
case S.lookup tenv sym of
Just ty' ->
case ty' of
T.NAME s _ ->
case S.lookup tenv s of
Just (T.NAME s' (Just (T.NAME s'' _))) ->
T.NAME s' (Just $ follow_ty [sym] s'')
Just ty1 -> ty1
Nothing -> must_not_reach
_ -> ty'
_ -> error "must not reach here, update A.NameTy."
transty (A.RecordTy fs pos) _ =
let
f A.Field { A.field_name = name, A.field_typ = typ } =
case S.lookup tenv typ of
Just ty' -> (name, ty')
Nothing -> error $ show pos ++ "type not defined (field): " ++ typ
in
if checkdup (fmap A.field_name fs) (fmap A.field_pos fs) then
T.RECORD (fmap f fs) (pos2u pos)
else
undefined
transty (A.ArrayTy sym pos) _ =
case S.lookup tenv sym of
Just ty' -> T.ARRAY ty' $ pos2u pos
Nothing -> error $ show pos ++ "type not defined (array): " ++ sym
in
transty
transDec :: VEnv -> TEnv -> Temp.Label -> TL.Level -> [Frame.Frag] -> Temp.Temp
-> A.Dec
-> (S.Table E.EnvEntry
, S.Table T.Ty
, TL.Level
, Temp.Temp
, [TL.Exp]
, [Frame.Frag]
)
transDec venv tenv brkdest =
let
trdec :: TL.Level -> [Frame.Frag] -> Temp.Temp
-> A.Dec
-> (S.Table E.EnvEntry
, S.Table T.Ty
, TL.Level
, Temp.Temp
, [TL.Exp]
, [Frame.Frag]
)
trdec level frgs temp A.VarDec{A.name'=name, A.typ'=typ, A.init'=init0,
A.escape'=esc, A.pos'=pos} =
let
(ExpTy{expr=rhs, ty=ty0}, lv', frgs', temp') =
transExp venv tenv brkdest level frgs temp init0
(access, lv'', temp'') = TL.allocLocal lv' esc temp'
lhs = TL.simpleVar access lv''
(e, temp3) = TL.assignExp lhs rhs temp''
ret n ty1 =
(S.insert venv n E.VarEntry {E.access=access, E.ty=ty1},
tenv, lv'', temp3, [e], frgs')
in
case typ of
Nothing -> if ty0 == T.NIL
then
error $
show pos ++ "nil can be used only in the long form."
else
ret name ty0
Just sym ->
case S.lookup tenv sym of
Nothing -> error $ show pos ++ "type not found: " ++ sym
Just ty' -> if check_type ty' ty0 pos
then
ret name ty0
else
undefined
trdec level frgs temp (A.TypeDec tdecs) =
let
{- inserting headers -}
tenv' =
foldl
(\acc (name, _, _) -> S.insert acc name (T.NAME name Nothing))
tenv
tdecs
      -- transTy 1st pass
tenv'' =
foldl
(\acc (name, typ, _) ->
case S.lookup acc name of
Just (T.NAME n _) ->
S.insert acc n $ T.NAME n (Just $ transTy acc typ False)
_ -> error "must not reach here"
)
tenv'
tdecs
      -- transTy 2nd pass: updating
tenv''' =
foldl
(\acc (name, typ, _) ->
case S.lookup acc name of
Just (T.NAME n _) ->
S.insert acc n $ T.NAME n (Just $ transTy acc typ True)
_ -> error "must not reach here."
)
tenv''
tdecs
names = fmap (\(n,_,_) -> n) tdecs
poss = fmap (\(_,_,pos) -> pos) tdecs
check_cyclic_dep [] = True
check_cyclic_dep ((name, ty0, p):xs) =
let
chkcyc seen typ pos' =
case typ of
Nothing -> error $ show pos' ++ "type not found: " ++ show ty0
Just ty' ->
case ty' of
T.NAME sym ty'' ->
if (List.all (/= sym) seen) then
chkcyc (sym:seen) ty'' pos'
else
False
_ -> True
in
case S.lookup tenv''' name of
Just (T.NAME _ typ) ->
if chkcyc [name] typ p then
check_cyclic_dep xs
else
error $ show p ++ "cyclic dependency: " ++ name
_ -> error "must not reach here."
in
if check_cyclic_dep tdecs && checkdup names poss
then
(venv, tenv''', level, temp, [], frgs)
else
undefined
trdec level frgs temp (A.FunctionDec fundecs) =
let
      -- 1st pass
transfun (ve, tt) A.FuncDec{A.name=name, A.params=params,
A.result=result, {- A.func_body=body,-}
A.func_pos=pos } =
let
rty =
case result of
Nothing -> T.UNIT
Just typ ->
case S.lookup tenv typ of
Nothing -> error $ show pos ++ "result type not found: " ++ show typ
Just t -> t
ftys =
fmap
(\A.Field { A.field_typ = typ, A.field_pos = p } ->
case S.lookup tenv typ of
Just t -> t
Nothing -> error $ show p ++ "type not found: " ++ typ)
params
(tlabel, t') = Temp.newLabel tt
label = tlabel ++ "_" ++ name
formals = fmap A.field_esc params
(lev, t'') = TL.newLevel level label formals t'
in
if checkdup (fmap A.field_name params) (fmap A.field_pos params) then
(S.insert ve name E.FunEntry { E.level = lev
, E.label = label
, E.formals = ftys
, E.result = rty
},
t'')
else
undefined
(venv', temp') = foldl transfun (venv,temp) fundecs
      -- 2nd pass
transbody
(acc, {- level -} _, tmp, fs) -- level not used?
A.FuncDec { A.name = name, A.params = params,
                    A.func_body = body, {- A.result = result, -}
A.func_pos = pos } =
let
Just E.FunEntry { E.level = lev
, E.result = rty
, E.formals = formals } =
S.lookup venv' name
transparam ve (A.Field{A.field_name=n}, t, a) =
S.insert ve n $ E.VarEntry {E.access=a, E.ty=t}
as = TL.acc_formals lev
venv_loc =
foldl transparam venv' $ zip3 params formals as
(ExpTy{expr=ebody, ty=bdty}, lv', fs', t') =
transExp venv_loc tenv brkdest lev fs tmp body
(stm, t'') = TL.bodyStm ebody bdty t'
frag = Frame.Proc { Frame.get_body=stm
, Frame.get_frame=TL.frame lv'}
in
(check_type rty bdty pos && acc, lv', t'', frag:fs')
(check_bodies, level', temp'', frgs') =
foldl transbody (True, level, temp', frgs) fundecs
in
if checkdup (fmap A.name fundecs) (fmap A.func_pos fundecs)
&& check_bodies
then
-- level (not level') should be returned here.
(venv', tenv, level, temp'', [], frgs')
else
undefined
in
trdec
checkdup :: Show pos => [String] -> [pos] -> Bool
checkdup [] _ = True
checkdup (name:ns) (pos:ps) =
if List.all (/= name) ns then
checkdup ns ps
else
error $ show pos ++ "duplicated defintion: " ++ name
checkdup (_:_) [] = error "fatal: checkdup (_:_) []"
| null | https://raw.githubusercontent.com/unnohideyuki/Tiger-in-Haskell/69fc976e64bfe7abfa842d7179e06eae740d36b9/final/src/Semant.hs | haskell | level''
TODO: check level''?
translate to let/while expresion
dirty hack: generate a unique number from the position.
must not reach here?
inserting headers
A.func_body=body,
level
level not used?
level (not level') should be returned here. | module Semant where
import Debug.Trace
import qualified Data.List as List
import qualified Absyn as A
import qualified Env as E
import qualified Symbol as S
import qualified Types as T
import qualified Translate as TL
import qualified Temp
import qualified DalvikFrame as Frame
type VEnv = S.Table E.EnvEntry
type TEnv = S.Table T.Ty
type Unique = T.Unique
data ExpTy = ExpTy {expr::TL.Exp, ty::T.Ty}
data Optype = Arith | Comp | Eq
actual_ty :: Show pos => T.Ty -> pos -> T.Ty
actual_ty typ pos =
case typ of
T.NAME s t -> case t of
Just ty' -> actual_ty ty' pos
Nothing -> error $ show pos ++ "type not found (in actual_ty): " ++ s
T.ARRAY ty' u -> T.ARRAY (actual_ty ty' pos) u
_ -> typ
type_mismatch :: (Show a, Show b, Show c) => a -> b -> c -> t
type_mismatch e a pos =
error $ show pos ++ "type mismatch: expected " ++ show e ++ ", actual " ++ show a
check_type :: Show pos => T.Ty -> T.Ty -> pos -> Bool
check_type t1 t2 pos =
let
t1' = actual_ty t1 pos
t2' = actual_ty t2 pos
in
if t1' /= t2'
then
case (t1', t2') of
(T.RECORD _ _, T.NIL) -> True
(T.NIL, T.RECORD _ _) -> True
_ -> type_mismatch t1' t2' pos
else True
must_not_reach :: t
must_not_reach =
error "fatal: must not reach here"
transProg :: VEnv-> TEnv -> A.Exp -> (T.Ty, [Frame.Frag], Temp.Temp)
transProg venv tenv prog =
let
temp = Temp.create
(mainlevel, temp') =
TL.newLevel
TL.outermost (Temp.namedLabel "main") [] temp
errdest = Temp.namedLabel "_CanNotBreak_"
(expty, level', frgs, temp'') = transExp venv tenv errdest mainlevel [] temp' prog
(stm, temp3) = TL.bodyStm (expr expty) (ty expty) temp''
frag = Frame.Proc { Frame.get_body=stm
, Frame.get_frame=TL.frame level'}
in
(ty expty, frag:frgs, temp3)
transExp :: VEnv-> TEnv -> Temp.Label -> TL.Level -> [Frame.Frag]
-> Temp.Temp
-> A.Exp
-> (ExpTy, TL.Level, [Frame.Frag], Temp.Temp)
transExp venv tenv brkdest =
let
trexp :: TL.Level -> [Frame.Frag] -> Temp.Temp
-> A.Exp
-> (ExpTy, TL.Level, [Frame.Frag], Temp.Temp)
trexp level frgs temp A.NilExp = (ExpTy (TL.nilExp) T.NIL, level, frgs, temp)
trexp level frgs temp (A.IntExp i _) = (ExpTy (TL.intExp i) T.INT, level, frgs, temp)
trexp level frgs temp (A.StringExp s _) =
(ExpTy{expr=TL.stringExp s, ty=T.STRING}, level, frgs, temp)
trexp level frgs temp A.OpExp{A.oper=oper, A.lhs=lhs, A.rhs=rhs, A.pos=pos} =
let
(ExpTy {expr=e1, ty=lty }, lv', frgs', temp') = trexp level frgs temp lhs
(ExpTy {expr=e2, ty=rty }, lv'', frgs'', temp'') = trexp lv' frgs' temp' rhs
classify op =
case op of
A.PlusOp -> Arith
A.MinusOp -> Arith
A.TimesOp -> Arith
A.DivideOp -> Arith
A.LtOp -> Comp
A.GtOp -> Comp
A.LeOp -> Comp
A.GeOp -> Comp
A.EqOp -> Eq
A.NeqOp -> Eq
check_int typ pos' =
case typ of
T.INT -> True
_ -> error $ show pos' ++ ": integer required."
check_arith = check_int lty pos && check_int rty pos
check_eq =
case actual_ty lty pos of
T.INT -> check_type lty rty pos
T.STRING -> check_type lty rty pos
T.ARRAY _ _ -> check_type lty rty pos
T.RECORD _ _ -> check_type lty rty pos
T.NIL -> check_type lty rty pos
_ -> error $ show pos ++ "type error for equality operator: " ++ show (lty, rty)
check_comp =
case lty of
T.INT -> check_type lty rty pos
T.STRING -> check_type lty rty pos
_ -> error $ show pos ++ "type error for comparison: " ++ show lty
check_result =
case classify oper of
Arith -> check_arith
Comp -> check_comp
Eq -> check_eq
trop oper' =
case oper' of
A.PlusOp -> TL.plusOp
A.MinusOp -> TL.minusOp
A.TimesOp -> TL.timesOp
A.DivideOp -> TL.divideOp
A.LtOp -> TL.ltOp
A.GtOp -> TL.gtOp
A.LeOp -> TL.leOp
A.GeOp -> TL.geOp
A.EqOp -> TL.eqOp
A.NeqOp -> TL.neqOp
(binExp, temp3) =
let
c = trop oper
in
c e1 e2 temp''
(strcmpExp, temp3s) =
let
op = trop oper
in
TL.strcmpExp e1 e2 op temp''
in
if check_result then
case lty of
T.STRING -> (ExpTy{expr=strcmpExp, ty=T.INT}, lv'', frgs'', temp3s)
_ -> (ExpTy{expr=binExp, ty=T.INT}, lv'', frgs'', temp3)
else
must_not_reach
trexp level frgs temp (A.VarExp var) = trvar level frgs temp var
trexp level frgs temp A.RecordExp{A.fields=fields, A.typ=typ, A.pos=pos} =
case S.lookup tenv typ of
Nothing -> error $ show pos ++ "record type not found: " ++ typ
Just ty' -> case actual_ty ty' pos of
T.RECORD ftys_ty u ->
let
(level', frgs', temp', ftys_exp) =
foldr
(\(sym,e',pos') (l, f, t, xs) -> case trexp l f t e' of
(expty, l', f', t') -> (l', f', t', (sym, expty, pos'):xs)
)
(level, frgs, temp, [])
fields
foldr
(\(sym,_) (cs', lv, fs, tmp) ->
case lookup sym [(s,e')|(s,e',_)<-fields] of
Just e'' -> case trexp lv fs tmp e'' of
(ExpTy{expr=expr'}, l', f', t') -> (expr':cs', l', f', t')
_ -> must_not_reach
)
([], level', frgs', temp')
ftys_ty
(e, temp3) = TL.recordExp cs temp''
in
if checkrecord ftys_ty ftys_exp pos
then
(ExpTy {expr=e, ty=T.RECORD ftys_ty u}, level', frgs'', temp3)
else
must_not_reach
_ -> must_not_reach
where
checkrecord ftys_ty ftys_exp pos0 =
let
checker (sym, ExpTy{ty=t2}, pos') =
case lookup sym ftys_ty of
Just t1 -> check_type t1 t2 pos'
Nothing -> error $ show pos0 ++ "field not found: " ++ sym
in
(length ftys_ty == length ftys_exp) && (and $ fmap checker ftys_exp)
trexp level frgs temp (A.SeqExp exps) =
let
(lv', frgs', temp', es) =
foldr
(\exp' (l, f, t, xs) -> case trexp l f t exp' of
(e', l', f', t') -> (l', f', t', e':xs)
)
(level, frgs, temp, [])
exps
ty' = if null exps
then T.UNIT
else case last es of ExpTy{ty=typ} -> typ
(e, temp'') = TL.seqExp [e' | ExpTy{expr=e'} <- es] temp'
in
(ExpTy{expr=e, ty=ty'}, lv', frgs', temp'')
trexp level frgs temp A.AssignExp{A.vvar=var, A.exp=exp0, A.pos=pos} =
let
(ExpTy {expr=lhs, ty=vty }, lv', frgs', temp') = trvar level frgs temp var
(ExpTy {expr=rhs, ty=ety }, lv'', frgs'', temp'') = trexp lv' frgs' temp' exp0
(e, temp3) = TL.assignExp lhs rhs temp''
in
if check_type vty ety pos
then (ExpTy {expr=e, ty=T.UNIT }, lv'', frgs'', temp3)
else undefined
trexp level frgs temp A.IfExp{ A.test=test, A.thene=thenexp, A.elsee=elseexp,
A.pos=pos} =
let
(ExpTy{expr=e1, ty=testty}, lv', frgs', temp') = trexp level frgs temp test
(ExpTy{expr=e2, ty=thenty}, lv'', frgs'', temp'') = trexp lv' frgs' temp' thenexp
in
if check_type T.INT testty pos
then
case elseexp of
Just elseexp' ->
let
(ExpTy{expr=e3, ty=elsety}, lv3, frgs3, temp3) = trexp lv'' frgs'' temp'' elseexp'
(e, temp4) = TL.ifThenElse e1 e2 e3 temp3
in
if check_type thenty elsety pos then
(ExpTy{expr=e, ty=thenty}, lv3, frgs3, temp4)
else undefined
Nothing -> if check_type T.UNIT thenty pos
then
let
(e, temp3) = TL.ifThen e1 e2 temp''
in
(ExpTy{expr=e, ty=thenty}, lv'', frgs'', temp3)
else
undefined
else
undefined
trexp level frgs temp A.WhileExp{A.test=test, A.body=body, A.pos=pos} =
let
(newdest, temp') = Temp.newLabel temp
(ExpTy{expr=e1, ty=testty}, lv', frgs', temp'') = trexp level frgs temp' test
(ExpTy{expr=e2, ty=bodyty}, lv'', frgs'', temp3) =
transExp venv tenv newdest lv' frgs' temp'' body
(e, temp4) = TL.whileExp e1 e2 newdest temp3
in
if check_type T.INT testty pos && check_type T.UNIT bodyty pos
then
(ExpTy{expr=e, ty=T.UNIT}, lv'', frgs'', temp4)
else
undefined
trexp level frgs temp (A.BreakExp _) =
(ExpTy {expr=TL.breakExp brkdest, ty=T.UNIT}, level, frgs, temp)
, A.pos = pos
let
transdecs (ve, te, lv, tmp, exps, fs) dec =
let
(ve', te', lv', t', exps', fs') = transDec ve te brkdest lv fs tmp dec
in
(ve', te', lv', t', exps++exps', fs')
(venv', tenv', level', temp', es, frgs') =
foldl transdecs (venv, tenv, level, temp, [], frgs) decs
(ExpTy {expr=ebody, ty=bodyty }, lv'', frgs'', temp'') =
transExp venv' tenv' brkdest level' frgs' temp' body
(e, temp3) = TL.letExp es ebody temp''
in
(ExpTy{expr=e, ty=bodyty}, lv'', frgs'', temp3)
trexp level frgs temp A.ArrayExp {A.typ=typ, A.size=size, A.init=init0,
A.pos=pos} =
case S.lookup tenv typ of
Nothing -> error $ show pos ++ "type not found: " ++ typ
Just t ->
let
ty1 = actual_ty t pos
in
case ty1 of
T.ARRAY ty' _ ->
let
(ExpTy{expr=siz, ty=sizety}, lv', frgs', temp') = trexp level frgs temp size
(ExpTy{expr=ini, ty=initty}, lv'', frgs'', temp'') = trexp lv' frgs' temp' init0
(e, temp3) = TL.arrayExp siz ini temp''
in
if check_type T.INT sizety pos && check_type ty' initty pos
then
(ExpTy {expr=e, ty=ty1}, lv'', frgs'', temp3)
else
undefined
_ -> must_not_reach
trexp level frgs temp A.ForExp{A.svar=svar, A.lo=lo, A.hi=hi, A.body=body,
A.pos=pos } =
let
ivar = A.SimpleVar svar pos
limitvar = A.SimpleVar "_limit" pos
decs = [A.VarDec { A.name' = svar
, A.escape' = False
, A.typ' = Nothing
, A.init' = lo
, A.pos' = pos }
,A.VarDec { A.name' = "_limit"
, A.escape' = False
, A.typ' = Nothing
, A.init' = hi
, A.pos' = pos}
]
loop = A.WhileExp { A.test = A.OpExp { A.oper = A.LeOp
, A.lhs = A.VarExp ivar
, A.rhs = A.VarExp limitvar
, A.pos = pos }
, A.body =
A.SeqExp [ body
, A.AssignExp {
A.vvar = ivar,
A.exp = A.OpExp {
A.oper = A.PlusOp,
A.lhs = A.VarExp ivar,
A.rhs = A.IntExp 1 pos,
A.pos = pos },
A.pos = pos }
]
, A.pos = pos }
in
trexp level frgs temp A.LetExp{A.decs=decs, A.body=loop, A.pos=pos}
trexp level frgs temp A.CallExp{A.func=func, A.args=args, A.pos=pos} =
case S.lookup venv func of
Nothing -> error $ show pos ++ "function not defined: " ++ func
Just (E.VarEntry _ _) ->
error $ show pos ++ "not a function: " ++ func
Just E.FunEntry{E.label=label, E.formals=formals, E.result=result} ->
let
(lv', frgs', temp', argtys) =
foldr
(\exp' (l, f, t, xs) -> case trexp l f t exp' of
(e', l', f', t') -> (l', f', t', e':xs))
(level, frgs, temp, [])
args
checkformals fmls argtys' =
let
checker (t1, ExpTy {ty=t2}) = check_type t1 t2 pos
ts = zip fmls argtys'
szcheck =
if (length fmls == length argtys') then
True
else
error $ show pos ++ "wrong number of arguments."
in
szcheck && (and $ fmap checker ts)
es = fmap expr argtys
(e, temp'') = TL.callExp label es temp'
in
if checkformals formals argtys
then
(ExpTy{expr=e, ty=actual_ty result pos}, lv', frgs', temp'')
else
undefined
trvar level frgs temp (A.SimpleVar sym pos) =
case S.lookup venv sym of
Just E.VarEntry {E.access=acc, E.ty=ty1}
-> (ExpTy {expr=TL.simpleVar acc level, ty=ty1}, level, frgs, temp)
Just _ -> error $ show pos ++ "not a variable: " ++ sym
_ -> error $ show pos ++ "undefined variable: " ++ sym
trvar level frgs temp (A.FieldVar var id' pos) =
let
(ExpTy{expr=e1, ty=ty1}, lv', frgs', temp') = trvar level frgs temp var
in
case actual_ty ty1 pos of
T.RECORD fs _ ->
case lookup id' [(s, (i, t))| (i, (s, t)) <- zip [0..] fs] of
Nothing -> error $ show pos ++ "field not found: " ++ id'
Just (i, ty') ->
let
(e, temp'') = TL.fieldVar e1 i temp'
in
(ExpTy{expr=e, ty=actual_ty ty' pos}, lv', frgs', temp'')
_ -> error $ show pos ++ "not a record: " ++ show ty1
trvar level frgs temp (A.SubscriptVar var exp0 pos) =
let
(ExpTy{expr=e1, ty=ty1}, lv', frgs', temp') = trvar level frgs temp var
in
case actual_ty ty1 pos of
T.ARRAY ty' _ ->
let
(ExpTy{expr=e2, ty=ty''}, lv'', frgs'', temp'') = trexp lv' frgs' temp' exp0
(e, temp3) = TL.subscriptVar e1 e2 temp''
in
case ty'' of
T.INT -> (ExpTy {expr=e, ty=ty'}, lv'', frgs'', temp3)
_ -> error $ show pos ++ "array subscript type:" ++ show ty''
_ -> error $ show pos ++ "not an array"
in
trexp
transTy :: S.Table T.Ty -> A.Ty -> Bool -> T.Ty
transTy tenv =
let
pos2u (A.Pos l c) = fromIntegral $ l * 10000 + c
transty (A.NameTy sym _) False =
case S.lookup tenv sym of
Just typ -> typ
_ -> error "must not reach here, transy A.NameTy."
transty (A.NameTy sym pos) True =
let
follow_ty seen sym0 =
if List.all (/= sym0) seen then
case S.lookup tenv sym0 of
Just ty' ->
case ty' of
T.NAME s (Just (T.NAME s' _)) ->
T.NAME s (Just $ follow_ty (s:seen) s')
_ -> ty'
_ -> error "must not reach here, update A.NameTy. (2)"
else
error $ show pos ++ "cyclic dependency': " ++ sym0
in
case S.lookup tenv sym of
Just ty' ->
case ty' of
T.NAME s _ ->
case S.lookup tenv s of
Just (T.NAME s' (Just (T.NAME s'' _))) ->
T.NAME s' (Just $ follow_ty [sym] s'')
Just ty1 -> ty1
Nothing -> must_not_reach
_ -> ty'
_ -> error "must not reach here, update A.NameTy."
transty (A.RecordTy fs pos) _ =
let
f A.Field { A.field_name = name, A.field_typ = typ } =
case S.lookup tenv typ of
Just ty' -> (name, ty')
Nothing -> error $ show pos ++ "type not defined (field): " ++ typ
in
if checkdup (fmap A.field_name fs) (fmap A.field_pos fs) then
T.RECORD (fmap f fs) (pos2u pos)
else
undefined
transty (A.ArrayTy sym pos) _ =
case S.lookup tenv sym of
Just ty' -> T.ARRAY ty' $ pos2u pos
Nothing -> error $ show pos ++ "type not defined (array): " ++ sym
in
transty
transDec :: VEnv -> TEnv -> Temp.Label -> TL.Level -> [Frame.Frag] -> Temp.Temp
-> A.Dec
-> (S.Table E.EnvEntry
, S.Table T.Ty
, TL.Level
, Temp.Temp
, [TL.Exp]
, [Frame.Frag]
)
transDec venv tenv brkdest =
let
trdec :: TL.Level -> [Frame.Frag] -> Temp.Temp
-> A.Dec
-> (S.Table E.EnvEntry
, S.Table T.Ty
, TL.Level
, Temp.Temp
, [TL.Exp]
, [Frame.Frag]
)
trdec level frgs temp A.VarDec{A.name'=name, A.typ'=typ, A.init'=init0,
A.escape'=esc, A.pos'=pos} =
let
(ExpTy{expr=rhs, ty=ty0}, lv', frgs', temp') =
transExp venv tenv brkdest level frgs temp init0
(access, lv'', temp'') = TL.allocLocal lv' esc temp'
lhs = TL.simpleVar access lv''
(e, temp3) = TL.assignExp lhs rhs temp''
ret n ty1 =
(S.insert venv n E.VarEntry {E.access=access, E.ty=ty1},
tenv, lv'', temp3, [e], frgs')
in
case typ of
Nothing -> if ty0 == T.NIL
then
error $
show pos ++ "nil can be used only in the long form."
else
ret name ty0
Just sym ->
case S.lookup tenv sym of
Nothing -> error $ show pos ++ "type not found: " ++ sym
Just ty' -> if check_type ty' ty0 pos
then
ret name ty0
else
undefined
trdec level frgs temp (A.TypeDec tdecs) =
let
tenv' =
foldl
(\acc (name, _, _) -> S.insert acc name (T.NAME name Nothing))
tenv
tdecs
transTy 1st pass
tenv'' =
foldl
(\acc (name, typ, _) ->
case S.lookup acc name of
Just (T.NAME n _) ->
S.insert acc n $ T.NAME n (Just $ transTy acc typ False)
_ -> error "must not reach here"
)
tenv'
tdecs
transTy 2nd pass : updating
tenv''' =
foldl
(\acc (name, typ, _) ->
case S.lookup acc name of
Just (T.NAME n _) ->
S.insert acc n $ T.NAME n (Just $ transTy acc typ True)
_ -> error "must not reach here."
)
tenv''
tdecs
names = fmap (\(n,_,_) -> n) tdecs
poss = fmap (\(_,_,pos) -> pos) tdecs
check_cyclic_dep [] = True
check_cyclic_dep ((name, ty0, p):xs) =
let
chkcyc seen typ pos' =
case typ of
Nothing -> error $ show pos' ++ "type not found: " ++ show ty0
Just ty' ->
case ty' of
T.NAME sym ty'' ->
if (List.all (/= sym) seen) then
chkcyc (sym:seen) ty'' pos'
else
False
_ -> True
in
case S.lookup tenv''' name of
Just (T.NAME _ typ) ->
if chkcyc [name] typ p then
check_cyclic_dep xs
else
error $ show p ++ "cyclic dependency: " ++ name
_ -> error "must not reach here."
in
if check_cyclic_dep tdecs && checkdup names poss
then
(venv, tenv''', level, temp, [], frgs)
else
undefined
trdec level frgs temp (A.FunctionDec fundecs) =
let
1st pass
transfun (ve, tt) A.FuncDec{A.name=name, A.params=params,
A.func_pos=pos } =
let
rty =
case result of
Nothing -> T.UNIT
Just typ ->
case S.lookup tenv typ of
Nothing -> error $ show pos ++ "result type not found: " ++ show typ
Just t -> t
ftys =
fmap
(\A.Field { A.field_typ = typ, A.field_pos = p } ->
case S.lookup tenv typ of
Just t -> t
Nothing -> error $ show p ++ "type not found: " ++ typ)
params
(tlabel, t') = Temp.newLabel tt
label = tlabel ++ "_" ++ name
formals = fmap A.field_esc params
(lev, t'') = TL.newLevel level label formals t'
in
if checkdup (fmap A.field_name params) (fmap A.field_pos params) then
(S.insert ve name E.FunEntry { E.level = lev
, E.label = label
, E.formals = ftys
, E.result = rty
},
t'')
else
undefined
(venv', temp') = foldl transfun (venv,temp) fundecs
2nd pass
transbody
A.FuncDec { A.name = name, A.params = params,
= result ,
A.func_pos = pos } =
let
Just E.FunEntry { E.level = lev
, E.result = rty
, E.formals = formals } =
S.lookup venv' name
transparam ve (A.Field{A.field_name=n}, t, a) =
S.insert ve n $ E.VarEntry {E.access=a, E.ty=t}
as = TL.acc_formals lev
venv_loc =
foldl transparam venv' $ zip3 params formals as
(ExpTy{expr=ebody, ty=bdty}, lv', fs', t') =
transExp venv_loc tenv brkdest lev fs tmp body
(stm, t'') = TL.bodyStm ebody bdty t'
frag = Frame.Proc { Frame.get_body=stm
, Frame.get_frame=TL.frame lv'}
in
(check_type rty bdty pos && acc, lv', t'', frag:fs')
(check_bodies, level', temp'', frgs') =
foldl transbody (True, level, temp', frgs) fundecs
in
if checkdup (fmap A.name fundecs) (fmap A.func_pos fundecs)
&& check_bodies
then
(venv', tenv, level, temp'', [], frgs')
else
undefined
in
trdec
checkdup :: Show pos => [String] -> [pos] -> Bool
checkdup [] _ = True
checkdup (name:ns) (pos:ps) =
if List.all (/= name) ns then
checkdup ns ps
else
error $ show pos ++ "duplicated defintion: " ++ name
checkdup (_:_) [] = error "fatal: checkdup (_:_) []"
|
caee701dce4dd15e8a0ae6736de156b303ed046c0789c665e1a8c17a66f7a124 | gsakkas/rite | 3021.ml |
let pipe fs = let f a x a = a in let base = 0 in List.fold_left f base fs;;
(* fix
let pipe fs = let f a x a = a in let base x = x in List.fold_left f base fs;;
*)
(* changed spans
(2,45)-(2,46)
fun x -> x
LamG VarPatG VarG
*)
(* type error slice
(2,15)-(2,74)
(2,21)-(2,30)
(2,23)-(2,30)
(2,25)-(2,30)
(2,34)-(2,74)
(2,45)-(2,46)
(2,50)-(2,64)
(2,50)-(2,74)
(2,65)-(2,66)
(2,67)-(2,71)
*)
|
810e181b841c2c24ac6dda4b2bcff1d91fc2d0a038bfa1e4862149088dab2404 | haskell-github/github | CommitsSpec.hs | {-# LANGUAGE OverloadedStrings #-}
module GitHub.CommitsSpec where
import GitHub.Auth (Auth (..))
import GitHub.Endpoints.Repos.Commits (commitSha, commitsForR, diffR, mkCommitName, FetchCount (..))
import GitHub.Request (github)
import Control.Monad (forM_)
import Data.Either.Compat (isRight)
import Data.List (nub, sort)
import Data.String (fromString)
import System.Environment (lookupEnv)
import Test.Hspec (Spec, describe, it, pendingWith, shouldBe,
shouldSatisfy)
import qualified Data.Vector as V
fromRightS :: Show a => Either a b -> b
fromRightS (Right b) = b
fromRightS (Left a) = error $ "Expected a Right and got a Left" ++ show a
withAuth :: (Auth -> IO ()) -> IO ()
withAuth action = do
mtoken <- lookupEnv "GITHUB_TOKEN"
case mtoken of
Nothing -> pendingWith "no GITHUB_TOKEN"
Just token -> action (OAuth $ fromString token)
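-- Usage sketch (hypothetical, mirrors the specs below):
--   withAuth $ \auth ->
--     github auth commitsForR "haskell-github" "github" (FetchAtLeast 1) >>= print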
spec :: Spec
spec = do
describe "commitsFor" $ do
it "works" $ withAuth $ \auth -> do
cs <- github auth commitsForR "haskell-github" "github" FetchAll
cs `shouldSatisfy` isRight
V.length (fromRightS cs) `shouldSatisfy` (> 300)
      -- Page size is 30, so we get 60 commits
it "limits the response" $ withAuth $ \auth -> do
cs <- github auth commitsForR "haskell-github" "github" (FetchAtLeast 40)
cs `shouldSatisfy` isRight
let cs' = fromRightS cs
V.length cs' `shouldSatisfy` (< 70)
let hashes = sort $ map commitSha $ V.toList cs'
hashes `shouldBe` nub hashes
describe "diff" $ do
it "works" $ withAuth $ \auth -> do
cs <- github auth commitsForR "haskell-github" "github" (FetchAtLeast 30)
cs `shouldSatisfy` isRight
let commits = take 10 . V.toList . fromRightS $ cs
let pairs = zip commits $ drop 1 commits
forM_ pairs $ \(a, b) -> do
d <- github auth diffR "haskell-github" "github" (commitSha a) (commitSha b)
d `shouldSatisfy` isRight
it "issue #155" $ withAuth $ \auth -> do
d <- github auth diffR "nomeata" "codespeed" (mkCommitName "ghc") (mkCommitName "tobami:master")
d `shouldSatisfy` isRight
-- diff that includes a commit where a submodule is removed
it "issue #339" $ withAuth $ \auth -> do
d <- github auth diffR "scott-fleischman" "repo-remove-submodule" "d03c152482169d809be9b1eab71dcf64d7405f76" "42cfd732b20cd093534f246e630b309186eb485d"
d `shouldSatisfy` isRight
| null | https://raw.githubusercontent.com/haskell-github/github/d9ac0c7ffbcc720a24d06f0a96ea4e3891316d1a/spec/GitHub/CommitsSpec.hs | haskell | # LANGUAGE OverloadedStrings #
diff that includes a commit where a submodule is removed | module GitHub.CommitsSpec where
import GitHub.Auth (Auth (..))
import GitHub.Endpoints.Repos.Commits (commitSha, commitsForR, diffR, mkCommitName, FetchCount (..))
import GitHub.Request (github)
import Control.Monad (forM_)
import Data.Either.Compat (isRight)
import Data.List (nub, sort)
import Data.String (fromString)
import System.Environment (lookupEnv)
import Test.Hspec (Spec, describe, it, pendingWith, shouldBe,
shouldSatisfy)
import qualified Data.Vector as V
fromRightS :: Show a => Either a b -> b
fromRightS (Right b) = b
fromRightS (Left a) = error $ "Expected a Right and got a Left" ++ show a
withAuth :: (Auth -> IO ()) -> IO ()
withAuth action = do
mtoken <- lookupEnv "GITHUB_TOKEN"
case mtoken of
Nothing -> pendingWith "no GITHUB_TOKEN"
Just token -> action (OAuth $ fromString token)
spec :: Spec
spec = do
describe "commitsFor" $ do
it "works" $ withAuth $ \auth -> do
cs <- github auth commitsForR "haskell-github" "github" FetchAll
cs `shouldSatisfy` isRight
V.length (fromRightS cs) `shouldSatisfy` (> 300)
Page size is 30 , so we get 60 commits
it "limits the response" $ withAuth $ \auth -> do
cs <- github auth commitsForR "haskell-github" "github" (FetchAtLeast 40)
cs `shouldSatisfy` isRight
let cs' = fromRightS cs
V.length cs' `shouldSatisfy` (< 70)
let hashes = sort $ map commitSha $ V.toList cs'
hashes `shouldBe` nub hashes
describe "diff" $ do
it "works" $ withAuth $ \auth -> do
cs <- github auth commitsForR "haskell-github" "github" (FetchAtLeast 30)
cs `shouldSatisfy` isRight
let commits = take 10 . V.toList . fromRightS $ cs
let pairs = zip commits $ drop 1 commits
forM_ pairs $ \(a, b) -> do
d <- github auth diffR "haskell-github" "github" (commitSha a) (commitSha b)
d `shouldSatisfy` isRight
it "issue #155" $ withAuth $ \auth -> do
d <- github auth diffR "nomeata" "codespeed" (mkCommitName "ghc") (mkCommitName "tobami:master")
d `shouldSatisfy` isRight
it "issue #339" $ withAuth $ \auth -> do
d <- github auth diffR "scott-fleischman" "repo-remove-submodule" "d03c152482169d809be9b1eab71dcf64d7405f76" "42cfd732b20cd093534f246e630b309186eb485d"
d `shouldSatisfy` isRight
|
5ec21a05b573537842b698ce129e639084d52da7f51e4464ff9b60efc337b80e | onedata/op-worker | dbsync_events.erl | %%%-------------------------------------------------------------------
%%% @author Rafal Slota
%%% (C) 2017 ACK CYFRONET AGH
%%% This software is released under the MIT license
%%% cited in 'LICENSE.txt'.
%%% @end
%%%-------------------------------------------------------------------
%%% @doc DBSync hooks.
%%% @end
%%%-------------------------------------------------------------------
-module(dbsync_events).
-author("Rafal Slota").
-include("modules/datastore/datastore_models.hrl").
-include("modules/fslogic/fslogic_common.hrl").
-include_lib("ctool/include/logging.hrl").
-include_lib("ctool/include/errors.hrl").
%% API
-export([change_replicated/2]).
%%%===================================================================
%%% API
%%%===================================================================
%%--------------------------------------------------------------------
%% @doc
%% Wrapper for change_replicated_internal, ignoring unsupported spaces.
%% @end
%%--------------------------------------------------------------------
-spec change_replicated(SpaceId :: binary(), undefined | datastore:doc()) ->
any().
change_replicated(_SpaceId, undefined) ->
ok;
change_replicated(SpaceId, Change) ->
true = dbsync_utils:is_supported(SpaceId, [oneprovider:get_id()]),
change_replicated_internal(SpaceId, Change).
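%% Usage sketch (assumption, not part of the original module): this entry point
%% is typically called by the dbsync machinery once per replicated document,
%% e.g. dbsync_events:change_replicated(SpaceId, Doc) where Doc is a
%% #document{} record such as one holding a #file_meta{} value.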
%%%===================================================================
%%% Internal functions
%%%===================================================================
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Hook that runs just after change was replicated from remote provider.
%% Return value and any errors are ignored.
%% @end
%%--------------------------------------------------------------------
-spec change_replicated_internal(od_space:id(), datastore:doc()) ->
any() | no_return().
change_replicated_internal(SpaceId, #document{
value = #file_meta{}
} = FileDoc) ->
file_meta_change_replicated(SpaceId, FileDoc);
change_replicated_internal(SpaceId, #document{
deleted = false,
value = #file_location{uuid = FileUuid}
} = Doc) ->
?debug("change_replicated_internal: changed file_location ~p", [FileUuid]),
FileCtx = file_ctx:new_by_uuid(FileUuid, SpaceId),
ok = replica_dbsync_hook:on_file_location_change(FileCtx, Doc);
change_replicated_internal(SpaceId, #document{
key = FileUuid,
value = #times{} = Record,
deleted = true
}) ->
?debug("change_replicated_internal: deleted times ~p", [FileUuid]),
FileCtx = file_ctx:new_by_uuid(FileUuid, SpaceId),
dir_update_time_stats:report_update_of_nearest_dir(file_ctx:get_logical_guid_const(FileCtx), Record),
% Emit event in case of changed times / deleted file_meta propagation race
(catch fslogic_event_emitter:emit_file_removed(FileCtx, []));
change_replicated_internal(SpaceId, #document{
key = FileUuid,
value = #times{} = Record
}) ->
?debug("change_replicated_internal: changed times ~p", [FileUuid]),
FileCtx = file_ctx:new_by_uuid(FileUuid, SpaceId),
dir_update_time_stats:report_update_of_nearest_dir(file_ctx:get_logical_guid_const(FileCtx), Record),
(catch fslogic_event_emitter:emit_sizeless_file_attrs_changed(FileCtx));
change_replicated_internal(_SpaceId, #document{
key = FileUuid,
value = #custom_metadata{}
}) ->
?debug("change_replicated_internal: changed custom_metadata ~p", [FileUuid]);
change_replicated_internal(_SpaceId, Transfer = #document{
key = TransferId,
value = #transfer{}
}) ->
?debug("change_replicated_internal: changed transfer ~p", [TransferId]),
transfer_changes:handle(Transfer);
change_replicated_internal(_SpaceId, ReplicaDeletion = #document{
key = MsgId,
value = #replica_deletion{}
}) ->
?debug("change_replicated_internal: changed replica_deletion ~p", [MsgId]),
replica_deletion_changes:handle(ReplicaDeletion);
change_replicated_internal(_SpaceId, Index = #document{
key = IndexId,
value = #index{}
}) ->
?debug("change_replicated_internal: changed index ~p", [IndexId]),
view_changes:handle(Index);
change_replicated_internal(_SpaceId, #document{value = #traverse_task{}} = Task) ->
traverse:on_task_change(Task, oneprovider:get_id_or_undefined());
change_replicated_internal(_SpaceId, #document{key = JobId, value = #tree_traverse_job{}} = Doc) ->
case tree_traverse:get_job(Doc) of
{ok, Job, PoolName, TaskId} ->
traverse:on_job_change(Job, JobId, PoolName, TaskId, oneprovider:get_id_or_undefined());
?ERROR_NOT_FOUND ->
% TODO VFS-6391 fix race with file_meta
ok
end;
change_replicated_internal(SpaceId, QosEntry = #document{
key = QosEntryId,
value = #qos_entry{}
}) ->
?debug("change_replicated_internal: qos_entry ~p", [QosEntryId]),
qos_logic:handle_qos_entry_change(SpaceId, QosEntry);
change_replicated_internal(SpaceId, ArchiveRecallDetails = #document{
key = RecallId,
value = #archive_recall_details{}
}) ->
?debug("change_replicated_internal: archive_recall_details ~p", [RecallId]),
archive_recall_details:handle_remote_change(SpaceId, ArchiveRecallDetails);
change_replicated_internal(SpaceId, #document{value = #links_forest{key = LinkKey, model = Model}}) ->
?debug("change_replicated_internal: links_forest ~p", [LinkKey]),
link_replicated(Model, LinkKey, SpaceId);
change_replicated_internal(SpaceId, #document{value = #links_node{key = LinkKey, model = Model}}) ->
?debug("change_replicated_internal: links_node ~p", [LinkKey]),
link_replicated(Model, LinkKey, SpaceId);
change_replicated_internal(SpaceId, #document{value = #links_mask{key = LinkKey, model = Model}}) ->
?debug("change_replicated_internal: links_mask ~p", [LinkKey]),
link_replicated(Model, LinkKey, SpaceId);
change_replicated_internal(_SpaceId, _Change) ->
ok.
%% @private
-spec file_meta_change_replicated(od_space:id(), datastore:doc()) ->
any() | no_return().
file_meta_change_replicated(SpaceId, #document{
key = FileUuid,
value = #file_meta{deleted = Del1, type = ?LINK_TYPE},
deleted = Del2
} = LinkDoc) when Del1 or Del2 ->
?debug("file_meta_change_replicated: deleted hardlink file_meta ~p", [FileUuid]),
case file_meta:get_including_deleted(fslogic_file_id:ensure_referenced_uuid(FileUuid)) of
{ok, ReferencedDoc} ->
{ok, MergedDoc} = file_meta_hardlinks:merge_link_and_file_doc(LinkDoc, ReferencedDoc),
FileCtx = file_ctx:new_by_doc(MergedDoc, SpaceId),
fslogic_delete:handle_remotely_deleted_file(FileCtx);
Error ->
% TODO VFS-7531 - Handle dbsync events for hardlinks when referenced file_meta is missing
?warning("file_meta_change_replicated: deleted hardlink file_meta ~p - posthook failed with error ~p",
[FileUuid, Error])
end;
file_meta_change_replicated(SpaceId, #document{
key = FileUuid,
value = #file_meta{mode = CurrentMode, deleted = Del1},
deleted = Del2
} = FileDoc) when Del1 or Del2 ->
?debug("file_meta_change_replicated: deleted file_meta ~p", [FileUuid]),
FileCtx = file_ctx:new_by_doc(FileDoc, SpaceId),
{ok, FileCtx2} = sd_utils:chmod(FileCtx, CurrentMode),
fslogic_delete:handle_remotely_deleted_file(FileCtx2),
ok;
file_meta_change_replicated(SpaceId, #document{
key = FileUuid,
value = #file_meta{mode = CurrentMode, type = ?REGULAR_FILE_TYPE}
} = FileDoc) ->
?debug("file_meta_change_replicated: changed file_meta ~p", [FileUuid]),
FileCtx = file_ctx:new_by_doc(FileDoc, SpaceId),
{ok, FileCtx2} = sd_utils:chmod(FileCtx, CurrentMode),
ok = fslogic_event_emitter:emit_file_attr_changed(FileCtx2, []),
ok = file_meta_posthooks:execute_hooks(FileUuid, doc);
file_meta_change_replicated(SpaceId, #document{
key = FileUuid,
deleted = false,
value = #file_meta{type = ?LINK_TYPE}
} = LinkDoc) ->
?debug("file_meta_change_replicated: changed hardlink file_meta ~p", [FileUuid]),
case file_meta:get_including_deleted(fslogic_file_id:ensure_referenced_uuid(FileUuid)) of
{ok, ReferencedDoc} ->
{ok, MergedDoc} = file_meta_hardlinks:merge_link_and_file_doc(LinkDoc, ReferencedDoc),
FileCtx = file_ctx:new_by_doc(MergedDoc, SpaceId),
% TODO VFS-7914 - Do not invalidate cache, when it is not needed
ok = qos_logic:invalidate_cache_and_reconcile(FileCtx),
ok = fslogic_event_emitter:emit_file_attr_changed(FileCtx, []);
Error ->
% TODO VFS-7531 - Handle dbsync events for hardlinks when referenced file_meta is missing
?warning("file_meta_change_replicated: deleted hardlink file_meta ~p - posthook failed with error ~p",
[FileUuid, Error])
end;
file_meta_change_replicated(SpaceId, #document{
key = FileUuid,
deleted = false,
value = #file_meta{mode = CurrentMode}
} = FileDoc) ->
?debug("file_meta_change_replicated: changed file_meta ~p", [FileUuid]),
FileCtx = file_ctx:new_by_doc(FileDoc, SpaceId),
{ok, FileCtx2} = sd_utils:chmod(FileCtx, CurrentMode),
ok = fslogic_event_emitter:emit_file_attr_changed(FileCtx2, []),
ok = file_meta_posthooks:execute_hooks(FileUuid, doc).
%% @private
-spec link_replicated(module(), datastore:key(), od_space:id()) ->
any() | no_return().
link_replicated(file_meta, LinkKey, SpaceId) ->
dir_size_stats:report_remote_links_change(LinkKey, SpaceId),
case datastore_model:get_generic_key(file_meta, LinkKey) of
undefined ->
% Legacy keys are not supported as it is impossible to retrieve GenericKey
ok;
GenericKey ->
file_meta_posthooks:execute_hooks(GenericKey, link)
end;
link_replicated(_Model, _LinkKey_, _SpaceId) ->
ok.
| null | https://raw.githubusercontent.com/onedata/op-worker/c43c9ffd9aa76b309b5140fa94fa0da06c9d719f/src/modules/dbsync/dbsync_events.erl | erlang | -------------------------------------------------------------------
@end
-------------------------------------------------------------------
@end
-------------------------------------------------------------------
API
===================================================================
API
===================================================================
--------------------------------------------------------------------
@doc
Wrapper for change_replicated_internal, ignoring unsupported spaces.
@end
--------------------------------------------------------------------
===================================================================
===================================================================
--------------------------------------------------------------------
@doc
Hook that runs just after change was replicated from remote provider.
Return value and any errors are ignored.
@end
--------------------------------------------------------------------
TODO VFS-6391 fix race with file_meta
TODO VFS-7531 - Handle dbsync events for hardlinks when referenced file_meta is missing
TODO VFS-7914 - Do not invalidate cache, when it is not needed
TODO VFS-7531 - Handle dbsync events for hardlinks when referenced file_meta is missing | @author
%%% (C) 2017 ACK CYFRONET AGH
%%% This software is released under the MIT license
%%% cited in 'LICENSE.txt'.
%%% @doc DBSync hooks.
-module(dbsync_events).
-author("Rafal Slota").
-include("modules/datastore/datastore_models.hrl").
-include("modules/fslogic/fslogic_common.hrl").
-include_lib("ctool/include/logging.hrl").
-include_lib("ctool/include/errors.hrl").
-export([change_replicated/2]).
-spec change_replicated(SpaceId :: binary(), undefined | datastore:doc()) ->
any().
change_replicated(_SpaceId, undefined) ->
ok;
change_replicated(SpaceId, Change) ->
true = dbsync_utils:is_supported(SpaceId, [oneprovider:get_id()]),
change_replicated_internal(SpaceId, Change).
%%% Internal functions
%% @private
-spec change_replicated_internal(od_space:id(), datastore:doc()) ->
any() | no_return().
change_replicated_internal(SpaceId, #document{
value = #file_meta{}
} = FileDoc) ->
file_meta_change_replicated(SpaceId, FileDoc);
change_replicated_internal(SpaceId, #document{
deleted = false,
value = #file_location{uuid = FileUuid}
} = Doc) ->
?debug("change_replicated_internal: changed file_location ~p", [FileUuid]),
FileCtx = file_ctx:new_by_uuid(FileUuid, SpaceId),
ok = replica_dbsync_hook:on_file_location_change(FileCtx, Doc);
change_replicated_internal(SpaceId, #document{
key = FileUuid,
value = #times{} = Record,
deleted = true
}) ->
?debug("change_replicated_internal: deleted times ~p", [FileUuid]),
FileCtx = file_ctx:new_by_uuid(FileUuid, SpaceId),
dir_update_time_stats:report_update_of_nearest_dir(file_ctx:get_logical_guid_const(FileCtx), Record),
% Emit event in case of changed times / deleted file_meta propagation race
(catch fslogic_event_emitter:emit_file_removed(FileCtx, []));
change_replicated_internal(SpaceId, #document{
key = FileUuid,
value = #times{} = Record
}) ->
?debug("change_replicated_internal: changed times ~p", [FileUuid]),
FileCtx = file_ctx:new_by_uuid(FileUuid, SpaceId),
dir_update_time_stats:report_update_of_nearest_dir(file_ctx:get_logical_guid_const(FileCtx), Record),
(catch fslogic_event_emitter:emit_sizeless_file_attrs_changed(FileCtx));
change_replicated_internal(_SpaceId, #document{
key = FileUuid,
value = #custom_metadata{}
}) ->
?debug("change_replicated_internal: changed custom_metadata ~p", [FileUuid]);
change_replicated_internal(_SpaceId, Transfer = #document{
key = TransferId,
value = #transfer{}
}) ->
?debug("change_replicated_internal: changed transfer ~p", [TransferId]),
transfer_changes:handle(Transfer);
change_replicated_internal(_SpaceId, ReplicaDeletion = #document{
key = MsgId,
value = #replica_deletion{}
}) ->
?debug("change_replicated_internal: changed replica_deletion ~p", [MsgId]),
replica_deletion_changes:handle(ReplicaDeletion);
change_replicated_internal(_SpaceId, Index = #document{
key = IndexId,
value = #index{}
}) ->
?debug("change_replicated_internal: changed index ~p", [IndexId]),
view_changes:handle(Index);
change_replicated_internal(_SpaceId, #document{value = #traverse_task{}} = Task) ->
traverse:on_task_change(Task, oneprovider:get_id_or_undefined());
change_replicated_internal(_SpaceId, #document{key = JobId, value = #tree_traverse_job{}} = Doc) ->
case tree_traverse:get_job(Doc) of
{ok, Job, PoolName, TaskId} ->
traverse:on_job_change(Job, JobId, PoolName, TaskId, oneprovider:get_id_or_undefined());
?ERROR_NOT_FOUND ->
ok
end;
change_replicated_internal(SpaceId, QosEntry = #document{
key = QosEntryId,
value = #qos_entry{}
}) ->
?debug("change_replicated_internal: qos_entry ~p", [QosEntryId]),
qos_logic:handle_qos_entry_change(SpaceId, QosEntry);
change_replicated_internal(SpaceId, ArchiveRecallDetails = #document{
key = RecallId,
value = #archive_recall_details{}
}) ->
?debug("change_replicated_internal: archive_recall_details ~p", [RecallId]),
archive_recall_details:handle_remote_change(SpaceId, ArchiveRecallDetails);
change_replicated_internal(SpaceId, #document{value = #links_forest{key = LinkKey, model = Model}}) ->
?debug("change_replicated_internal: links_forest ~p", [LinkKey]),
link_replicated(Model, LinkKey, SpaceId);
change_replicated_internal(SpaceId, #document{value = #links_node{key = LinkKey, model = Model}}) ->
?debug("change_replicated_internal: links_node ~p", [LinkKey]),
link_replicated(Model, LinkKey, SpaceId);
change_replicated_internal(SpaceId, #document{value = #links_mask{key = LinkKey, model = Model}}) ->
?debug("change_replicated_internal: links_mask ~p", [LinkKey]),
link_replicated(Model, LinkKey, SpaceId);
change_replicated_internal(_SpaceId, _Change) ->
ok.
%% @private
-spec file_meta_change_replicated(od_space:id(), datastore:doc()) ->
any() | no_return().
file_meta_change_replicated(SpaceId, #document{
key = FileUuid,
value = #file_meta{deleted = Del1, type = ?LINK_TYPE},
deleted = Del2
} = LinkDoc) when Del1 or Del2 ->
?debug("file_meta_change_replicated: deleted hardlink file_meta ~p", [FileUuid]),
case file_meta:get_including_deleted(fslogic_file_id:ensure_referenced_uuid(FileUuid)) of
{ok, ReferencedDoc} ->
{ok, MergedDoc} = file_meta_hardlinks:merge_link_and_file_doc(LinkDoc, ReferencedDoc),
FileCtx = file_ctx:new_by_doc(MergedDoc, SpaceId),
fslogic_delete:handle_remotely_deleted_file(FileCtx);
Error ->
?warning("file_meta_change_replicated: deleted hardlink file_meta ~p - posthook failed with error ~p",
[FileUuid, Error])
end;
file_meta_change_replicated(SpaceId, #document{
key = FileUuid,
value = #file_meta{mode = CurrentMode, deleted = Del1},
deleted = Del2
} = FileDoc) when Del1 or Del2 ->
?debug("file_meta_change_replicated: deleted file_meta ~p", [FileUuid]),
FileCtx = file_ctx:new_by_doc(FileDoc, SpaceId),
{ok, FileCtx2} = sd_utils:chmod(FileCtx, CurrentMode),
fslogic_delete:handle_remotely_deleted_file(FileCtx2),
ok;
file_meta_change_replicated(SpaceId, #document{
key = FileUuid,
value = #file_meta{mode = CurrentMode, type = ?REGULAR_FILE_TYPE}
} = FileDoc) ->
?debug("file_meta_change_replicated: changed file_meta ~p", [FileUuid]),
FileCtx = file_ctx:new_by_doc(FileDoc, SpaceId),
{ok, FileCtx2} = sd_utils:chmod(FileCtx, CurrentMode),
ok = fslogic_event_emitter:emit_file_attr_changed(FileCtx2, []),
ok = file_meta_posthooks:execute_hooks(FileUuid, doc);
file_meta_change_replicated(SpaceId, #document{
key = FileUuid,
deleted = false,
value = #file_meta{type = ?LINK_TYPE}
} = LinkDoc) ->
?debug("file_meta_change_replicated: changed hardlink file_meta ~p", [FileUuid]),
case file_meta:get_including_deleted(fslogic_file_id:ensure_referenced_uuid(FileUuid)) of
{ok, ReferencedDoc} ->
{ok, MergedDoc} = file_meta_hardlinks:merge_link_and_file_doc(LinkDoc, ReferencedDoc),
FileCtx = file_ctx:new_by_doc(MergedDoc, SpaceId),
ok = qos_logic:invalidate_cache_and_reconcile(FileCtx),
ok = fslogic_event_emitter:emit_file_attr_changed(FileCtx, []);
Error ->
?warning("file_meta_change_replicated: deleted hardlink file_meta ~p - posthook failed with error ~p",
[FileUuid, Error])
end;
file_meta_change_replicated(SpaceId, #document{
key = FileUuid,
deleted = false,
value = #file_meta{mode = CurrentMode}
} = FileDoc) ->
?debug("file_meta_change_replicated: changed file_meta ~p", [FileUuid]),
FileCtx = file_ctx:new_by_doc(FileDoc, SpaceId),
{ok, FileCtx2} = sd_utils:chmod(FileCtx, CurrentMode),
ok = fslogic_event_emitter:emit_file_attr_changed(FileCtx2, []),
ok = file_meta_posthooks:execute_hooks(FileUuid, doc).
%% @private
-spec link_replicated(module(), datastore:key(), od_space:id()) ->
any() | no_return().
link_replicated(file_meta, LinkKey, SpaceId) ->
dir_size_stats:report_remote_links_change(LinkKey, SpaceId),
case datastore_model:get_generic_key(file_meta, LinkKey) of
undefined ->
% Legacy keys are not supported as it is impossible to retrieve GenericKey
ok;
GenericKey ->
file_meta_posthooks:execute_hooks(GenericKey, link)
end;
link_replicated(_Model, _LinkKey_, _SpaceId) ->
ok.
|
b34d14b4158cab1847f1dcaf62590785d44177cbe578af922d5cabb0edfeda37 | fffej/clojure-snippets | bloom.clj | (ns bloom
(:use bitarray)
(:use clojure.test)
(:import (java.security MessageDigest)))
(defn pad [n s]
(let [padding (- n (count s))]
(apply str (concat (apply str (repeat padding "0")) s))))
(defn md5-hash [s]
(let [m (MessageDigest/getInstance "MD5")]
(.update m (.getBytes (str s)) 0 (count s))
(let [x (.toString (BigInteger. 1 (.digest m)) 16)]
(pad 32 x))))
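;; Each function in md5-hashes below decodes a different 3-hex-digit slice of
;; the padded MD5 digest, giving four independent bit indexes in 0..0xFFF.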
(def md5-hashes
(list
(fn [x] (BigInteger. (apply str (take 3 (md5-hash x))) 16))
(fn [x] (BigInteger. (apply str (take 3 (drop 4 (md5-hash x)))) 16))
(fn [x] (BigInteger. (apply str (take 3 (drop 8 (md5-hash x)))) 16))
(fn [x] (BigInteger. (apply str (take 3 (drop 12 (md5-hash x)))) 16))))
(defstruct bloom-filter :hashfns :value)
(defn make-bloom-filter
([n] (struct bloom-filter md5-hashes (bit-array n)))
([n fns] (struct bloom-filter fns (bit-array n))))
(defn add!
[bloom n]
(let [hashes (map (fn [x] (x n)) (bloom :hashfns))]
(doseq [x hashes] (set-bit! (bloom :value) x 1))
bloom))
(defn query
[bloom n]
(let [hashes (map (fn [x] (x n)) (bloom :hashfns))]
(reduce bit-and (map (fn [z] (get-bit (bloom :value) z)) hashes))))
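;; Usage sketch (not part of the original file): build a filter sized for the
;; 3-hex-digit hashes above, add a key, then probe membership; query returns 1
;; when every hashed bit is set and 0 otherwise.
(comment
  (let [bloom (make-bloom-filter 0xFFF)]
    (add! bloom "hello")
    [(query bloom "hello")      ; => 1 (probably present)
     (query bloom "goodbye")])) ; => almost certainly 0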
(deftest test-bloom
(let [teststrs (map (fn [x] (str x)) (range 0 1000))
bloom (make-bloom-filter 0xFFF)]
(doseq [x teststrs]
(is (= 0 (query bloom x)))
(add! bloom x)
(is (= 0 (query bloom (str "not" x))))
(is (query bloom x)))))
| null | https://raw.githubusercontent.com/fffej/clojure-snippets/bf2e04502da88e4ecba8d4e4ffe9012f3984d451/bloom.clj | clojure | (ns bloom
(:use bitarray)
(:use clojure.test)
(:import (java.security MessageDigest)))
(defn pad [n s]
(let [padding (- n (count s))]
(apply str (concat (apply str (repeat padding "0")) s))))
(defn md5-hash [s]
(let [m (MessageDigest/getInstance "MD5")]
(.update m (.getBytes (str s)) 0 (count s))
(let [x (.toString (BigInteger. 1 (.digest m)) 16)]
(pad 32 x))))
(def md5-hashes
(list
(fn [x] (BigInteger. (apply str (take 3 (md5-hash x))) 16))
(fn [x] (BigInteger. (apply str (take 3 (drop 4 (md5-hash x)))) 16))
(fn [x] (BigInteger. (apply str (take 3 (drop 8 (md5-hash x)))) 16))
(fn [x] (BigInteger. (apply str (take 3 (drop 12 (md5-hash x)))) 16))))
(defstruct bloom-filter :hashfns :value)
(defn make-bloom-filter
([n] (struct bloom-filter md5-hashes (bit-array n)))
([n fns] (struct bloom-filter fns (bit-array n))))
(defn add!
[bloom n]
(let [hashes (map (fn [x] (x n)) (bloom :hashfns))]
(doseq [x hashes] (set-bit! (bloom :value) x 1))
bloom))
(defn query
[bloom n]
(let [hashes (map (fn [x] (x n)) (bloom :hashfns))]
(reduce bit-and (map (fn [z] (get-bit (bloom :value) z)) hashes))))
(deftest test-bloom
(let [teststrs (map (fn [x] (str x)) (range 0 1000))
bloom (make-bloom-filter 0xFFF)]
(doseq [x teststrs]
(is (= 0 (query bloom x)))
(add! bloom x)
(is (= 0 (query bloom (str "not" x))))
(is (query bloom x)))))
|
|
3910a7dab036237125e9f5d7d7660454ae3fc8a41be011c19c6f05567bd04147 | bootstrapworld/curr | GameTemplate.rkt | The first three lines of this file were inserted by . They record metadata
;; about the language level of this file in a form that our tools can easily process.
#reader(lib "htdp-beginner-reader.ss" "lang")((modname GameTemplate) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f ())))
(require "Teachpacks/bootstrap-teachpack.rkt")
;; DATA:
;; The World is a :
; define-struct:
;; STARTING WORLD
;; GRAPHICS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; GRAPHICS FUNCTIONS:
;; draw-world: world -> Image
;; place DANGER, TARGET, CLOUD and PLAYER onto BACKGROUND at the right coordinates
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; UPDATING FUNCTIONS:
;; update-world: world -> world
;; What does your update-world function do?
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; KEY EVENTS:
;; keypress: world string -> world
;; What does your keypress function do?
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; TESTS FOR COND:
;; off-left? : number -> boolean
;; Checks whether an object has gone off the left side of the screen
;; off-right? : number -> boolean
;; Checks whether an object has gone off the right side of the screen
;; line-length : number number -> number
;; Finds 1D distance
;; distance : number number number number -> number
;; Finds the 2D distance between two points
;; collide? : number number number number -> boolean
;; determines whether two objects are within 50 pixels of each other (see the sketch below)
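;; A possible solution sketch for these helpers (not part of the original
;; template; it simply follows the contracts above, so the names are the
;; template's own and the bodies are one reasonable choice):
;; (define (line-length a b) (abs (- a b)))
;; (define (distance px py cx cy)
;;   (sqrt (+ (sqr (line-length px cx)) (sqr (line-length py cy)))))
;; (define (collide? px py cx cy) (< (distance px py cx cy) 50))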
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; big-bang using the START world
;; on a tick-event, use update-world
;; on a draw-event, use draw-world
;; on a key-event, use keypress
;(big-bang START
; (on-tick update-world)
; (on-draw draw-world)
; ) | null | https://raw.githubusercontent.com/bootstrapworld/curr/443015255eacc1c902a29978df0e3e8e8f3b9430/courses/reactive/resources/source-files/GameTemplate.rkt | racket | about the language level of this file in a form that our tools can easily process.
DATA:
define-struct:
STARTING WORLD
GRAPHICS
GRAPHICS FUNCTIONS:
draw-world: world -> Image
UPDATING FUNCTIONS:
update-world: world -> world
What does your update-world function do?
KEY EVENTS:
keypress: world string -> world
What does your keypress function do?
TESTS FOR COND:
off-left? : number -> boolean
Checks whether an object has gone off the left side of the screen
off-right? : number -> boolean
Checks whether an object has gone off the right side of the screen
line-length : number number -> number
Finds 1D distance
distance : number number number number -> number
collide? : number number number number -> boolean
on a tick-event, use update-world
on a draw-event, use draw-world
on a key-event, use keypress
(big-bang START
(on-tick update-world)
(on-draw draw-world)
) | The first three lines of this file were inserted by . They record metadata
#reader(lib "htdp-beginner-reader.ss" "lang")((modname GameTemplate) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f ())))
(require "Teachpacks/bootstrap-teachpack.rkt")
The World is a :
place DANGER , TARGET , CLOUD and PLAYER onto BACKGROUND at the right coordinates
Finds the 2D distance between two points
determines whether two objects are within 50 pixels of eachother
big - bang using the START world
|
3045aa0cede83d7bdaf321e8d8367b9b9946fd0b5720b2a1eadc77d0d56824ed | logicmoo/logicmoo_nlu | dagunify.lsp | ;;; % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % %
;;; % Example code from the book "Natural Language Processing in LISP" %
;;; % published by %
;;; % Copyright (c) 1989, . %
;;; % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % %
;;;
;;; dagunify.lsp [Chapter 7] unification for dags
(uses 'subst)
;;; This library file provides the following utilities for operating
;;; on dags:
;;; (GET_VALUE feature dag subst1) returns (value subst2)
;;; (COMBINE_VALUES) returns substitution2/nil;
;;; (FIND_FEATURE_VALUE feature dag substitution) returns value/'ANY;
;;;(unify dag1 dag2) returns substitution/nil;
;;;(simplify_features subst dag1) returns dag2;
;;; (PUT_VALUE_IN (feature value) dag subst1) returns (subst2 remainder_dag)
;;; Unification
(defun unify (dag1 dag2)
(combine_values dag1 dag2 empty_subst))
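;;; Example (illustrative; the exact variable and substitution representations
;;; come from the subst library loaded above):
;;; (unify '((cat np) (& r1)) '((num sing) (& r2)))
;;; succeeds with a substitution under which both dags carry cat=np and
;;; num=sing, whereas unifying ((cat np) ...) with ((cat vp) ...) returns nil.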
(defun combine_values (dag1 dag2 substitution)
(let* (
(realdag1 (lookup_subst dag1 substitution))
(realdag2 (lookup_subst dag2 substitution)))
(if (equal realdag1 realdag2)
substitution
(if (isvar realdag1)
(add_subst realdag1 realdag2 substitution)
(if (isvar realdag2)
(add_subst realdag2 realdag1 substitution)
(if (and (listp realdag1) (listp realdag2))
;; make sure that everything in dag1 is in dag2
(do
((subst substitution))
((isvar realdag1)
;; finally put the rest of dag2 at the end of dag1
;; (as long as subst is not nil)
(and subst (add_subst realdag1 realdag2 subst)))
(let* (
(feature (caar realdag1))
(value (lookup_subst (cadar realdag1) subst)))
(if (equal feature '&)
(setq realdag1 value)
(let (
(subst_dag2 (put_value_in (list feature value) realdag2 subst)))
(setq realdag2 (cadr subst_dag2))
(setq subst (car subst_dag2))
(setq realdag1 (cdr realdag1))
(if (null subst) (return nil))))))
nil))))))
;;; Go through a dag and add the feature-value pair in FPAIR,
;;; adding to the substitution substitution
;;; if necessary. This function returns in a list:
;;;
;;; a) the new value of substitution
;;; b) the rest of the dag
;;; (i.e. everything except that one feature-value pair)
(defun put_value_in (fpair dag substitution)
(let*
((realdag (lookup_subst dag substitution)))
(if (consp realdag)
(let ((value (assoc (car fpair) realdag)))
(if value
;; dag already has a value for that feature
(list
(combine_values (cadr value) (cadr fpair) substitution)
(delete_feature_entry (car fpair) realdag))
;; try the continuation entry
(let (
(rest (lookup_subst (cadar (last realdag)) substitution))
(first (butlast realdag)))
(if (isvar rest)
;; continuation is empty
(let ((newrest (newvar)))
(list
(add_subst rest (list fpair (list '& newrest)) substitution)
(append first (list (list '& newrest))))))
;; continuation non-empty - recurse
(let ((subst_rest (put_value_in fpair rest substitution)))
(list
(car subst_rest)
(append first (cadr subst_rest)))))))
(if (isvar realdag)
;; variable as dag - add to substitution
(let ((newrest (newvar)))
(list
(add_subst realdag (list fpair (list '& newrest)) substitution)
(list (list '& newrest))))
(error "Cannot find feature value in atom ~S" (list (car fpair) dag))))))
;;; delete the entry for a given feature in a dag
;;; (guaranteed to come before the continuation entry)
(defun delete_feature_entry (feature dag)
(if (equal feature (caar dag))
(cdr dag)
(cons (car dag) (delete_feature_entry feature (cdr dag)))))
;;; Get value of a feature, adding to the substitution if necessary
;;; return a list consisting of a value and a new substitution
(defun get_value (feature dag substitution)
(let* (
(realdag (apply_subst substitution dag))
(value (and (consp realdag) (assoc feature realdag))))
(if value
(list (cadr value) substitution)
(if (isvar realdag)
(let* ((newrest (newvar)) (newvalue (newvar)))
(list
newvalue
(add_subst
realdag
(list (list feature newvalue) (list '& newrest))
substitution)))
(if (consp realdag)
(let ((rest (apply_subst substitution (cadar (last realdag)))))
(if (isvar rest)
(let* ((newrest (newvar)) (newvalue (newvar)))
(list
newvalue
(add_subst
rest
(list (list feature newvalue) (list '& newrest))
substitution)))
(get_value feature rest substitution)))
'(() ()))))))
;;; find the value associated with a feature in a dag
;;; return ANY if there is no recorded value
(defun find_feature_value (feature dag substitution)
(let ((realdag (lookup_subst dag substitution)))
(if (consp realdag)
(let
((value (assoc feature realdag)))
(if value
(lookup_subst (cadr value) substitution)
(let ((rest (lookup_subst (cadar (last realdag)) substitution)))
(if (isvar rest)
'ANY
(find_feature_value feature rest substitution)))
))
'ANY)))
;;; Version of apply_subst which produces a
;;; new version of a dag which has all the remainders
;;; 'flattened out'
(defun simplify_features (substitution dag)
(let ((realdag (lookup_subst dag substitution)))
(if (consp realdag)
(simplify_features_list substitution realdag)
realdag)))
(defun simplify_features_list (substitution dag)
(if (null dag)
'()
(if (equal (caar dag) '&)
(let ((remainder (lookup_subst (cadar dag) substitution)))
(if (isvar remainder)
(list (list '& remainder))
(simplify_features_list substitution remainder)))
(cons
(list (caar dag) (simplify_features substitution (cadar dag)))
(simplify_features_list substitution (cdr dag))))))
| null | https://raw.githubusercontent.com/logicmoo/logicmoo_nlu/c066897f55b3ff45aa9155ebcf799fda9741bf74/ext/nlp_book/lisp/dagunify.lsp | lisp | % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % %
% % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % %
dagunify.lsp [Chapter 7] unification for dags
This library file provides the following utilities for operating
(unify dag1 dag2) returns substitution/nil;
(simplify_features subst dag1) returns dag2;
make sure that everything in dag1 is in dag2
finally put the rest of dag2 at the end of dag1
(as long as subst is not nil)
adding to the substitution substitution
if necessary. This function returns in a list:
a) the new value of substitution
try the continuation entry
continuation is empty
continuation non-empty - recurse
(guaranteed to come before the continuation entry)
Get value of a feature, adding to the substitution if necessary
return a list consisting of a value and a new substitution
return ANY if there is no recorded value
Version of apply_subst which produces a
'flattened out' | % Example code from the book " Natural Language Processing in LISP " %
% published by %
% Copyright ( c ) 1989 , . %
(uses 'subst)
on :
( GET_VALUE feature dag subst1 ) returns ( value subst2 )
( PUT_VALUE_IN ( feature value ) dag subst1 ) returns ( subst2 remainder_dag )
Unification
(defun unify (dag1 dag2)
(combine_values dag1 dag2 empty_subst))
(defun combine_values (dag1 dag2 substitution)
(let* (
(realdag1 (lookup_subst dag1 substitution))
(realdag2 (lookup_subst dag2 substitution)))
(if (equal realdag1 realdag2)
substitution
(if (isvar realdag1)
(add_subst realdag1 realdag2 substitution)
(if (isvar realdag2)
(add_subst realdag2 realdag1 substitution)
(if (and (listp realdag1) (listp realdag2))
(do
((subst substitution))
((isvar realdag1)
(and subst (add_subst realdag1 realdag2 subst)))
(let* (
(feature (caar realdag1))
(value (lookup_subst (cadar realdag1) subst)))
(if (equal feature '&)
(setq realdag1 value)
(let (
(subst_dag2 (put_value_in (list feature value) realdag2 subst)))
(setq realdag2 (cadr subst_dag2))
(setq subst (car subst_dag2))
(setq realdag1 (cdr realdag1))
(if (null subst) (return nil))))))
nil))))))
Go through a dag and add the feature - value pair in FPAIR ,
b ) the rest of the dag
( i.e. everything except that one feature - value pair )
(defun put_value_in (fpair dag substitution)
(let*
((realdag (lookup_subst dag substitution)))
(if (consp realdag)
(let ((value (assoc (car fpair) realdag)))
(if value
dag already has a value for that feature
(list
(combine_values (cadr value) (cadr fpair) substitution)
(delete_feature_entry (car fpair) realdag))
(let (
(rest (lookup_subst (cadar (last realdag)) substitution))
(first (butlast realdag)))
(if (isvar rest)
(let ((newrest (newvar)))
(list
(add_subst rest (list fpair (list '& newrest)) substitution)
(append first (list (list '& newrest))))))
(let ((subst_rest (put_value_in fpair rest substitution)))
(list
(car subst_rest)
(append first (cadr subst_rest)))))))
(if (isvar realdag)
variable as dag - add to substitution
(let ((newrest (newvar)))
(list
(add_subst realdag (list fpair (list '& newrest)) substitution)
(list (list '& newrest))))
(error "Cannot find feature value in atom ~S" (list (car fpair) dag))))))
delete the entry for a given feature in a dag
(defun delete_feature_entry (feature dag)
(if (equal feature (caar dag))
(cdr dag)
(cons (car dag) (delete_feature_entry feature (cdr dag)))))
(defun get_value (feature dag substitution)
(let* (
(realdag (apply_subst substitution dag))
(value (and (consp realdag) (assoc feature realdag))))
(if value
(list (cadr value) substitution)
(if (isvar realdag)
(let* ((newrest (newvar)) (newvalue (newvar)))
(list
newvalue
(add_subst
realdag
(list (list feature newvalue) (list '& newrest))
substitution)))
(if (consp realdag)
(let ((rest (apply_subst substitution (cadar (last realdag)))))
(if (isvar rest)
(let* ((newrest (newvar)) (newvalue (newvar)))
(list
newvalue
(add_subst
rest
(list (list feature newvalue) (list '& newrest))
substitution)))
(get_value feature rest substitution)))
'(() ()))))))
find the value associated with a feature in a dag
(defun find_feature_value (feature dag substitution)
(let ((realdag (lookup_subst dag substitution)))
(if (consp realdag)
(let
((value (assoc feature realdag)))
(if value
(lookup_subst (cadr value) substitution)
(let ((rest (lookup_subst (cadar (last realdag)) substitution)))
(if (isvar rest)
'ANY
(find_feature_value feature rest substitution)))
))
'ANY)))
new version of a dag which has all the remainders
(defun simplify_features (substitution dag)
(let ((realdag (lookup_subst dag substitution)))
(if (consp realdag)
(simplify_features_list substitution realdag)
realdag)))
(defun simplify_features_list (substitution dag)
(if (null dag)
'()
(if (equal (caar dag) '&)
(let ((remainder (lookup_subst (cadar dag) substitution)))
(if (isvar remainder)
(list (list '& remainder))
(simplify_features_list substitution remainder)))
(cons
(list (caar dag) (simplify_features substitution (cadar dag)))
(simplify_features_list substitution (cdr dag))))))
|
bb51c3bb8e7a14e648f33dc5ae13fa6aef9d5bdb4b6cc3e486f786fdf22fc0b2 | slipstream/SlipStreamServer | session_template_oidc_lifecycle_test.clj | (ns com.sixsq.slipstream.ssclj.resources.session-template-oidc-lifecycle-test
(:require
[clojure.test :refer :all]
[com.sixsq.slipstream.ssclj.app.params :as p]
[com.sixsq.slipstream.ssclj.resources.common.utils :as u]
[com.sixsq.slipstream.ssclj.resources.lifecycle-test-utils :as ltu]
[com.sixsq.slipstream.ssclj.resources.session-template :as st]
[com.sixsq.slipstream.ssclj.resources.session-template-lifecycle-test-utils :as stu]
[com.sixsq.slipstream.ssclj.resources.session-template-oidc :as oidc]
[com.sixsq.slipstream.ssclj.util.metadata-test-utils :as mdtu]))
(use-fixtures :each ltu/with-test-server-fixture)
(def base-uri (str p/service-context (u/de-camelcase st/resource-name)))
(def valid-template {:method oidc/authn-method
:instance oidc/authn-method
:name "OpenID Connect"
:description "External Authentication via OpenID Connect Protocol"
:acl st/resource-acl})
(deftest check-metadata
(mdtu/check-metadata-exists (str st/resource-url "-" oidc/resource-url)))
(deftest lifecycle
(stu/session-template-lifecycle base-uri valid-template))
| null | https://raw.githubusercontent.com/slipstream/SlipStreamServer/3ee5c516877699746c61c48fc72779fe3d4e4652/cimi/test/com/sixsq/slipstream/ssclj/resources/session_template_oidc_lifecycle_test.clj | clojure | (ns com.sixsq.slipstream.ssclj.resources.session-template-oidc-lifecycle-test
(:require
[clojure.test :refer :all]
[com.sixsq.slipstream.ssclj.app.params :as p]
[com.sixsq.slipstream.ssclj.resources.common.utils :as u]
[com.sixsq.slipstream.ssclj.resources.lifecycle-test-utils :as ltu]
[com.sixsq.slipstream.ssclj.resources.session-template :as st]
[com.sixsq.slipstream.ssclj.resources.session-template-lifecycle-test-utils :as stu]
[com.sixsq.slipstream.ssclj.resources.session-template-oidc :as oidc]
[com.sixsq.slipstream.ssclj.util.metadata-test-utils :as mdtu]))
(use-fixtures :each ltu/with-test-server-fixture)
(def base-uri (str p/service-context (u/de-camelcase st/resource-name)))
(def valid-template {:method oidc/authn-method
:instance oidc/authn-method
:name "OpenID Connect"
:description "External Authentication via OpenID Connect Protocol"
:acl st/resource-acl})
(deftest check-metadata
(mdtu/check-metadata-exists (str st/resource-url "-" oidc/resource-url)))
(deftest lifecycle
(stu/session-template-lifecycle base-uri valid-template))
|
|
2c04cbc9977f70a7db20b2f4ae835484c2c462c6c1e0d62d42630d1cf91ee789 | albertov/bindings-gdal | OGRFieldInstances.hs | # OPTIONS_GHC -fno - warn - orphans #
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE OverloadedStrings #-}
module GDAL.Internal.OGRFieldInstances () where
#include "bindings.h"
import Data.ByteString (ByteString)
import Data.Int
import Data.Word
import Data.Monoid (mempty, (<>))
import Data.Proxy (Proxy(Proxy))
import Data.Text (Text, pack, unpack)
import Data.Time
import Data.Typeable (Typeable, typeOf)
import Data.Vector.Generic (convert)
import qualified Data.Vector.Storable as St
import qualified Data.Vector.Unboxed as U
import qualified Data.Vector as V
import GDAL.Internal.OGRFeature
#define ogrField(ty,oft,tyCon,to,from) \
instance OGRField (ty) where { \
; fieldDef _ = FieldDef oft Nothing Nothing Nothing False \
; toField = tyCon . from \
; fromField (tyCon v) = Right (to v) \
; fromField f = defaultFromField f};
#define ogrMonoidField(ty,oft,tyCon,to,from) \
instance OGRField (ty) where { \
; fieldDef _ = FieldDef oft Nothing Nothing Nothing False \
; toField = tyCon . from \
; fromField (tyCon v) = Right (to v) \
; fromField OGRNullField = Right mempty \
; fromField f = defaultFromField f};
#define integralElem(A,B,C) ogrField (A,B,C,fromIntegral,fromIntegral)
#define integralList(A,B,C,to,from) \
ogrMonoidField (A,B,C,(to . St.map fromIntegral),St.map fromIntegral . from)
#define integral(A) \
integralElem(A,OFTInteger,OGRInteger) \
integralList([A],OFTIntegerList,OGRIntegerList,St.toList,St.fromList) \
integralList(St.Vector A,OFTIntegerList,OGRIntegerList,id,id) \
integralList(U.Vector A,OFTIntegerList,OGRIntegerList,convert,convert) \
integralList(V.Vector A,OFTIntegerList,OGRIntegerList,convert,convert)
#define integral64(A) \
integralElem(A,OFTInteger64,OGRInteger64) \
integralList([A],OFTInteger64List,OGRInteger64List,St.toList,St.fromList) \
integralList(St.Vector A,OFTInteger64List,OGRInteger64List,id,id) \
integralList(U.Vector A,OFTInteger64List,OGRInteger64List,convert,convert) \
integralList(V.Vector A,OFTInteger64List,OGRInteger64List,convert,convert)
#define realElem(A) ogrField (A,OFTReal,OGRReal,realToFrac,realToFrac)
#define realList(A,to,from) ogrMonoidField (A,OFTRealList,OGRRealList \
,(to . St.map realToFrac) \
,St.map realToFrac . from)
#define real(A) \
realElem(A) \
realList([A], St.toList, St.fromList) \
realList(St.Vector A, id, id) \
realList(U.Vector A, convert, convert) \
realList(V.Vector A, convert, convert)
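-- Illustrative expansion (written out here for exposition, not generated
-- code): the 'integral(Int16)' call further down produces, roughly,
--
-- > instance OGRField Int16 where
-- >   fieldDef _               = FieldDef OFTInteger Nothing Nothing Nothing False
-- >   toField                  = OGRInteger . fromIntegral
-- >   fromField (OGRInteger v) = Right (fromIntegral v)
-- >   fromField f              = defaultFromField f
--
-- plus the matching list instances for [Int16] and the vector types.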
--
-- OGRField instances
--
defaultFromField :: forall a. Typeable a => Field -> Either Text a
defaultFromField f = Left ("Unexpected '" <> typeName <> "' field: " <> tShow f)
where
typeName = tShow (typeOf (undefined :: a))
tShow :: Show b => b -> Text
tShow = pack . show
instance OGRField a => OGRField (Maybe a) where
fieldDef _ = (fieldDef (Proxy :: Proxy a)) {fldNullable = True}
toField Nothing = OGRNullField
toField (Just a) = toField a
fromField OGRNullField = Right Nothing
fromField a = fmap Just (fromField a)
#if SUPPORTS_WORD_FIELDS
integral64(Int)
integral64(Word)
#endif
#if SUPPORTS_64_BIT_INT_FIELDS
integral64(Int64)
integral64(Word64)
#endif
integral(Int8)
integral(Word8)
integral(Int16)
integral(Word16)
integral(Int32)
integral(Word32)
real(Float)
real(Double)
ogrMonoidField(Text,OFTString,OGRString,id,id)
ogrMonoidField([Text],OFTStringList,OGRStringList,V.toList,V.fromList)
ogrMonoidField(V.Vector Text,OFTStringList,OGRStringList,id,id)
ogrMonoidField(String,OFTString,OGRString,unpack,pack)
ogrMonoidField([String],OFTStringList,OGRStringList,(V.toList . V.map unpack),(V.map pack . V.fromList))
ogrMonoidField(V.Vector String,OFTStringList,OGRStringList,(V.convert . V.map unpack),(V.map pack . V.convert))
ogrMonoidField(ByteString,OFTBinary,OGRBinary,id,id)
instance OGRField UTCTime where
fieldDef _ =
FieldDef OFTDateTime Nothing Nothing Nothing False
toField u =
OGRDateTime (utcToLocalTime utc u) (KnownTimeZone utc)
fromField (OGRDateTime lt (KnownTimeZone tz)) =
Right (localTimeToUTC tz lt)
fromField (OGRDateTime _ ogrtz) =
Left ("UTCTime: Unexpected timezone '" <> tShow ogrtz <> "'")
fromField f = defaultFromField f
instance OGRField LocalTime where
fieldDef _ =
FieldDef OFTDateTime Nothing Nothing Nothing False
toField u =
OGRDateTime u LocalTimeZone
fromField (OGRDateTime lt LocalTimeZone) =
Right lt
fromField (OGRDateTime _ ogrtz) =
Left ("LocalTime: Unexpected timezone '" <> tShow ogrtz <> "'")
fromField f = defaultFromField f
instance OGRField ZonedTime where
fieldDef _ =
FieldDef OFTDateTime Nothing Nothing Nothing False
toField (ZonedTime lt tz) =
OGRDateTime lt (KnownTimeZone tz)
fromField (OGRDateTime lt (KnownTimeZone tz)) =
Right (ZonedTime lt tz)
fromField (OGRDateTime _ ogrtz) =
Left ("ZonedTime: Unexpected timezone '" <> tShow ogrtz <> "'")
fromField f = defaultFromField f
ogrField(Day,OFTDate,OGRDate,id,id)
ogrField(TimeOfDay,OFTTime,OGRTime,id,id)
ogrField(DiffTime,OFTTime,OGRTime,timeOfDayToTime,timeToTimeOfDay)
| null | https://raw.githubusercontent.com/albertov/bindings-gdal/f91087e06a569fc6dc81b4c22e58b5c9a1dcdc73/src/GDAL/Internal/OGRFieldInstances.hs | haskell | # LANGUAGE OverloadedStrings #
| # OPTIONS_GHC -fno - warn - orphans #
# LANGUAGE CPP #
# LANGUAGE FlexibleInstances #
# LANGUAGE ScopedTypeVariables #
module GDAL.Internal.OGRFieldInstances () where
#include "bindings.h"
import Data.ByteString (ByteString)
import Data.Int
import Data.Word
import Data.Monoid (mempty, (<>))
import Data.Proxy (Proxy(Proxy))
import Data.Text (Text, pack, unpack)
import Data.Time
import Data.Typeable (Typeable, typeOf)
import Data.Vector.Generic (convert)
import qualified Data.Vector.Storable as St
import qualified Data.Vector.Unboxed as U
import qualified Data.Vector as V
import GDAL.Internal.OGRFeature
#define ogrField(ty,oft,tyCon,to,from) \
instance OGRField (ty) where { \
; fieldDef _ = FieldDef oft Nothing Nothing Nothing False \
; toField = tyCon . from \
; fromField (tyCon v) = Right (to v) \
; fromField f = defaultFromField f};
#define ogrMonoidField(ty,oft,tyCon,to,from) \
instance OGRField (ty) where { \
; fieldDef _ = FieldDef oft Nothing Nothing Nothing False \
; toField = tyCon . from \
; fromField (tyCon v) = Right (to v) \
; fromField OGRNullField = Right mempty \
; fromField f = defaultFromField f};
#define integralElem(A,B,C) ogrField (A,B,C,fromIntegral,fromIntegral)
#define integralList(A,B,C,to,from) \
ogrMonoidField (A,B,C,(to . St.map fromIntegral),St.map fromIntegral . from)
#define integral(A) \
integralElem(A,OFTInteger,OGRInteger) \
integralList([A],OFTIntegerList,OGRIntegerList,St.toList,St.fromList) \
integralList(St.Vector A,OFTIntegerList,OGRIntegerList,id,id) \
integralList(U.Vector A,OFTIntegerList,OGRIntegerList,convert,convert) \
integralList(V.Vector A,OFTIntegerList,OGRIntegerList,convert,convert)
#define integral64(A) \
integralElem(A,OFTInteger64,OGRInteger64) \
integralList([A],OFTInteger64List,OGRInteger64List,St.toList,St.fromList) \
integralList(St.Vector A,OFTInteger64List,OGRInteger64List,id,id) \
integralList(U.Vector A,OFTInteger64List,OGRInteger64List,convert,convert) \
integralList(V.Vector A,OFTInteger64List,OGRInteger64List,convert,convert)
#define realElem(A) ogrField (A,OFTReal,OGRReal,realToFrac,realToFrac)
#define realList(A,to,from) ogrMonoidField (A,OFTRealList,OGRRealList \
,(to . St.map realToFrac) \
,St.map realToFrac . from)
#define real(A) \
realElem(A) \
realList([A], St.toList, St.fromList) \
realList(St.Vector A, id, id) \
realList(U.Vector A, convert, convert) \
realList(V.Vector A, convert, convert)
-- OGRField instances
defaultFromField :: forall a. Typeable a => Field -> Either Text a
defaultFromField f = Left ("Unexpected '" <> typeName <> "' field: " <> tShow f)
where
typeName = tShow (typeOf (undefined :: a))
tShow :: Show b => b -> Text
tShow = pack . show
instance OGRField a => OGRField (Maybe a) where
fieldDef _ = (fieldDef (Proxy :: Proxy a)) {fldNullable = True}
toField Nothing = OGRNullField
toField (Just a) = toField a
fromField OGRNullField = Right Nothing
fromField a = fmap Just (fromField a)
#if SUPPORTS_WORD_FIELDS
integral64(Int)
integral64(Word)
#endif
#if SUPPORTS_64_BIT_INT_FIELDS
integral64(Int64)
integral64(Word64)
#endif
integral(Int8)
integral(Word8)
integral(Int16)
integral(Word16)
integral(Int32)
integral(Word32)
real(Float)
real(Double)
ogrMonoidField(Text,OFTString,OGRString,id,id)
ogrMonoidField([Text],OFTStringList,OGRStringList,V.toList,V.fromList)
ogrMonoidField(V.Vector Text,OFTStringList,OGRStringList,id,id)
ogrMonoidField(String,OFTString,OGRString,unpack,pack)
ogrMonoidField([String],OFTStringList,OGRStringList,(V.toList . V.map unpack),(V.map pack . V.fromList))
ogrMonoidField(V.Vector String,OFTStringList,OGRStringList,(V.convert . V.map unpack),(V.map pack . V.convert))
ogrMonoidField(ByteString,OFTBinary,OGRBinary,id,id)
instance OGRField UTCTime where
fieldDef _ =
FieldDef OFTDateTime Nothing Nothing Nothing False
toField u =
OGRDateTime (utcToLocalTime utc u) (KnownTimeZone utc)
fromField (OGRDateTime lt (KnownTimeZone tz)) =
Right (localTimeToUTC tz lt)
fromField (OGRDateTime _ ogrtz) =
Left ("UTCTime: Unexpected timezone '" <> tShow ogrtz <> "'")
fromField f = defaultFromField f
instance OGRField LocalTime where
fieldDef _ =
FieldDef OFTDateTime Nothing Nothing Nothing False
toField u =
OGRDateTime u LocalTimeZone
fromField (OGRDateTime lt LocalTimeZone) =
Right lt
fromField (OGRDateTime _ ogrtz) =
Left ("LocalTime: Unexpected timezone '" <> tShow ogrtz <> "'")
fromField f = defaultFromField f
instance OGRField ZonedTime where
fieldDef _ =
FieldDef OFTDateTime Nothing Nothing Nothing False
toField (ZonedTime lt tz) =
OGRDateTime lt (KnownTimeZone tz)
fromField (OGRDateTime lt (KnownTimeZone tz)) =
Right (ZonedTime lt tz)
fromField (OGRDateTime _ ogrtz) =
Left ("ZonedTime: Unexpected timezone '" <> tShow ogrtz <> "'")
fromField f = defaultFromField f
ogrField(Day,OFTDate,OGRDate,id,id)
ogrField(TimeOfDay,OFTTime,OGRTime,id,id)
ogrField(DiffTime,OFTTime,OGRTime,timeOfDayToTime,timeToTimeOfDay)
|
225b39edc522d487c13b385279da253f5e5e4f13b6ca47fefec5db18e78e297b | kthielen/stlcc | InstCFG.hs |
module ASM.InstCFG where
import ASM.Type
import ASM.Term
import ASM.Liveness
import Util.Annotated
import Util.CFG
import Util.Sequence
import Util.Num
import Util.Tuples
import Util.Recursion
import qualified Data.Map as Map
block_reg_refs :: (Annotation a, Eq a) => BBlock (Inst a) -> [Reg a]
block_reg_refs (BBlock is _ _ _) = unique $ concat $ map inst_reg_refs is
cfg_reg_refs :: (Annotation a, Eq a) => CFG (Inst a) -> [Reg a]
cfg_reg_refs cfg = unique $ concat $ map block_reg_refs cfg
rename_registers :: (Annotation a, Eq a) => CFG (Inst a) -> Map.Map (Reg a) (Reg a) -> CFG (Inst a)
rename_registers bs vn = map rename_block bs where
rename_block (BBlock is m n o) = BBlock (map rename_inst is) m n o
rename_inst (Op p o dsts srcs f jmps) = Op p o (map rename_loc dsts) (map rename_loc srcs) f jmps
rename_inst (RMov p ty dst src) = RMov p ty (subst dst) (subst src)
rename_inst p = p
rename_loc (OAReg p ty s) = OAReg p ty (subst s)
rename_loc (OARMemOff p ty s x) = OARMemOff p ty (subst s) x
rename_loc p = p
subst s = either id (const s) $ choice (Map.lookup s vn')
vn' = Map.fromList [(s0, s1) | (Reg _ s0, Reg _ s1) <- Map.toList vn]
normalizeMoves :: (Annotation a, Eq a) => CFG (Inst a) -> CFG (Inst a)
normalizeMoves bs = map normalizeBlock bs where
normalizeBlock (BBlock is m n o) = BBlock [normalize i | i <- is, not (redundant i)] m n o
normalize (RMov p ty dst src) = realmov p (OAReg p ty dst) (OAReg p ty src)
normalize x = x
redundant (RMov _ ty dst src) | dst == src = True
redundant (Op _ "mov" [dst] [src] f j) | dst == src = True
redundant _ = False
next_frame_slot :: (Annotation a, Eq a) => CFG (Inst a) -> Int
next_frame_slot bs = 1 + (seqMax 0 (map bmax bs)) where
bmax (BBlock is _ _ _) = seqMax 0 (map imax is)
imax (Op _ _ ds ss _ _) = seqMax (seqMax 0 (map oaslot ds)) (map oaslot ss)
imax _ = 0
oaslot (OAFrameSlot _ _ i _) = i
oaslot _ = 0
spill_registers :: (Annotation a, Eq a) => CFG (Inst a) -> [Reg a] -> Map.Map (Reg a) (Reg a) -> CFG (Inst a)
spill_registers bs vs mvs = orderTrace $ reverse $ first $ foldl spill_block ([], 0) bs where
spill_block (bs', n) (BBlock is m k o) = ((BBlock is' m k o):bs', n') where (is', n') = spill_insts is n
spill_insts (i:is) n = (i' ++ is', n'') where (i', n') = spill_inst i n; (is', n'') = spill_insts is n'
spill_insts [] n = ([], n)
spill_inst i n = spill_rewrite_inst_regs ss i n
ss = choose_stack_slots bs vs mvs
spill_rewrite_inst_regs :: Annotation a => Map.Map (Reg a) Int -> Inst a -> Int -> ([Inst a], Int)
spill_rewrite_inst_regs ss i n = (pfx ++ [i''] ++ sfx, n'') where
(pfx, i', n') = foldl spill_read ([], i, n) uses
(i'', sfx, n'') = foldl spill_write (i', [], n') defs
uses = marked_regs (inst_reg_uses i)
defs = marked_regs (inst_reg_defs i)
marked_regs rseq = [(r, s) | (r, Just s) <- [(r, Map.lookup r ss) | r <- rseq]]
spill_read (pfx, i, n) (r, s) = (pfx ++ pfx', i', n') where (pfx', i', n') = spill_inst_read i r s n
spill_write (i, sfx, n) (r, s) = (i', sfx' ++ sfx, n') where (i', sfx', n') = spill_inst_write i r s n
spill_inst_read :: Annotation a => Inst a -> Reg a -> Int -> Int -> ([Inst a], Inst a, Int)
spill_inst_read (Op p "tailcall" [] srcs f []) (Reg ty r) s n = ([], i', n) where
i' = Op p "tailcall" [] (map (rewriteRegister r (OAFrameSlot p ty s Local)) srcs) f []
spill_inst_read i (Reg ty r) s n = (prefix, rewrite i, n + 1) where
prefix = [mov na treg tspill]
tregname = "spr" ++ r ++ show n
treg = OAReg na ty tregname
tspill = OAFrameSlot na ty s Local
rewrite (Op p o dsts srcs f lbls) = Op p o dsts (map (rename_register r tregname) srcs) f lbls
rewrite (RMov p ty' dst src) | src == r = RMov p ty' dst tregname
rewrite p@(RMov _ _ _ _) = p
na = nullAnnotation
spill_inst_write :: Annotation a => Inst a -> Reg a -> Int -> Int -> (Inst a, [Inst a], Int)
spill_inst_write i (Reg ty r) s n = (rewrite i, suffix, n + 1) where
suffix = [mov na tspill treg]
tregname = "spw" ++ r ++ show n
treg = OAReg na ty tregname
tspill = OAFrameSlot na ty s Local
rewrite (Op p o dsts srcs f lbls) = Op p o (map (rename_register r tregname) dsts) srcs f lbls
rewrite (RMov p ty' dst src) | dst == r = RMov p ty' tregname src
rewrite p@(RMov _ _ _ _) = p
na = nullAnnotation
rename_register :: String -> String -> OpArg a -> OpArg a
rename_register f t (OAReg p ty s) | s == f = OAReg p ty t
rename_register f t (OARMemOff p ty s n) | s == f = OARMemOff p ty t n
rename_register _ _ x = x
rewriteRegister :: String -> OpArg a -> OpArg a -> OpArg a
rewriteRegister r x (OAReg _ _ r') | r == r' = x
rewriteRegister _ _ x = x
choose_stack_slots :: (Annotation a, Eq a) => CFG (Inst a) -> [Reg a] -> Map.Map (Reg a) (Reg a) -> Map.Map (Reg a) Int
choose_stack_slots bs vs mvs = first $ fixedPoint merge_mvs (vs', Map.toList mvs) where
n = next_frame_slot bs
vs' = Map.fromList $ mapi (\n' vn -> (vn, n' + n)) vs
merge_mvs (d, mvs) = foldl merge_mv (d, []) mvs
merge_mv p@(d, iv) (s0, s1) = merge_def (merge_def p s0 s1 (Map.lookup s1 d)) s1 s0 (Map.lookup s0 d)
merge_def (d, iv) n r (Just x) = (Map.insert n x d, iv)
merge_def (d, iv) n r Nothing = (d, (n,r):iv)
------ better register spilling
type StackLocs a = Map.Map (Reg a) Int
type CoalescedRegs a = Map.Map (Reg a) (Reg a)
spillInterference :: InterferenceGraph a -> [Reg a] -> InterferenceGraph a
spillInterference ig regs = foldl (add_edge Interfere) ig' [(r1, r2) | r1 <- regs, r2 <- regs, r1 /= r2] where
ig' = foldl createVar ig regs
spillRegisters :: (Annotation a, Eq a) => CFG (Inst a) -> InterferenceGraph a -> [Reg a] -> CoalescedRegs a -> (CFG (Inst a), InterferenceGraph a)
spillRegisters bs ig vs mvs = (orderTrace (reverse bs'), ig'') where
(bs', _, ig'') = foldl spillBlock ([], 0, ig') bs
ss = chooseStackSlots bs vs mvs
ig' = Map.foldWithKey (\r _ ig' -> killVar ig' r) ig ss
spillBlock (bs', n, ig') (BBlock is m k o) = (BBlock is' m k o : bs', n', ig'') where
(is', n', ig'') = spillInsts is n ig'
spillInsts [] n ig = ([], n, ig)
spillInsts (i:is) n ig = (i' ++ is', n'', ig'') where
(i', n', ig') = spillRewriteInstRegs ss i n ig
(is', n'', ig'') = spillInsts is n' ig'
spillRewriteInstRegs :: Annotation a => StackLocs a -> Inst a -> Int -> InterferenceGraph a -> ([Inst a], Int, InterferenceGraph a)
spillRewriteInstRegs ss i n ig = (is, n'', ig') where
is = pfx ++ [i''] ++ sfx
ig' = spillInterference ig (unique (concatMap inst_reg_refs is))
(pfx, i', n') = foldl spillRead ([], i, n) uses
(i'', sfx, n'') = foldl spillWrite (i', [], n') defs
uses = markedRegs (inst_reg_uses i)
defs = markedRegs (inst_reg_defs i)
markedRegs rseq = [(r, s) | (r, Just s) <- [(r, Map.lookup r ss) | r <- rseq]]
spillRead (pfx, i, n) (r, s) = (pfx ++ pfx', i', n') where
(pfx', i', n') = spillInstRead i r s n
spillWrite (i, sfx, n) (r, s) = (i', sfx' ++ sfx, n') where
(i', sfx', n') = spillInstWrite i r s n
spillInstRead :: Annotation a => Inst a -> Reg a -> Int -> Int -> ([Inst a], Inst a, Int)
spillInstRead (Op p "tailcall" [] srcs f []) (Reg ty r) s n = ([], i', n) where
i' = Op p "tailcall" [] (map (rewriteRegister r (OAFrameSlot nullAnnotation ty s Local)) srcs) f []
spillInstRead i (Reg ty r) s n = (prefix, rewrite i, n + 1) where
prefix = [mov na treg tspill]
tregname = "spr" ++ r ++ show n
treg = OAReg na ty tregname
tspill = OAFrameSlot na ty s Local
rewrite (Op p o dsts srcs f lbls) = Op p o dsts (map (rename_register r tregname) srcs) f lbls
rewrite (RMov p ty' dst src) | src == r = RMov p ty' dst tregname
rewrite p@(RMov _ _ _ _) = p
na = nullAnnotation
spillInstWrite :: Annotation a => Inst a -> Reg a -> Int -> Int -> (Inst a, [Inst a], Int)
spillInstWrite i (Reg ty r) s n = (rewrite i, suffix, n + 1) where
suffix = [mov na tspill treg]
tregname = "spw" ++ r ++ show n
treg = OAReg na ty tregname
tspill = OAFrameSlot na ty s Local
rewrite (Op p o dsts srcs f lbls) = Op p o (map (rename_register r tregname) dsts) srcs f lbls
rewrite (RMov p ty' dst src) | dst == r = RMov p ty' tregname src
rewrite p@(RMov _ _ _ _) = p
na = nullAnnotation
chooseStackSlots :: (Annotation a, Eq a) => CFG (Inst a) -> [Reg a] -> CoalescedRegs a -> StackLocs a
chooseStackSlots bs vs mvs = ss where
(ss, _) = fixedPoint mergeMoves (vs', Map.toList mvs)
n = next_frame_slot bs
vs' = Map.fromList (mapi (\n' vn -> (vn, n' + n)) vs)
mergeMoves (d, mvs) = foldl mergeMove (d, []) mvs
mergeMove p@(d, iv) (s0, s1) = mergeDef (mergeDef p s0 s1 (Map.lookup s1 d)) s1 s0 (Map.lookup s0 d)
mergeDef (d, iv) n r (Just x) = (Map.insert n x d, iv)
mergeDef (d, iv) n r Nothing = (d, (n,r):iv)
-- add a prolog/epilog to allocate local variables and preserve callee-save registers
-- also, commit stack frame references to actual offsets against the sp register
finalizeFunction :: Annotation a => Eq a => CFG (Inst a) -> CFG (Inst a)
finalizeFunction [] = []
finalizeFunction bs = orderTrace bs''' where
bs''' = [BBlock prolog 0 0 0] ++ bs'' ++ [BBlock epilog (1 + length bs) 0 0]
bs'' = map (renameNamedBlock ibname entrance) [BBlock is (n + 1) m o | BBlock is n m o <- bs']
bs' = concatMapCFG patchExit (assignFrameOffsets frefs sregs bs)
patchExit (Op p "ret" _ _ _ _) = [jmp p exit]
patchExit (Op p "tailcall" _ [OAConst _ (CInt _ cid), x] _ _) = deallocFrame (Just cid) frefs sregs ++ [jmpArg p x]
patchExit x = [x]
rootBlock = block_by_id 0 bs
ibname = block_name rootBlock
entrance = "#" ++ ibname ++ "_entrance"
prolog = [LblDef na ibname] ++ allocFrame frefs sregs ++ [jmp na entrance]
epilog = [LblDef na exit] ++ deallocFrame Nothing frefs sregs ++ [ret na (argFrameSize frefs)]
exit = "#" ++ ibname ++ "_exit"
frefs = findFrameRefs bs
sregs = findSaveRegs bs
na = nullAnnotation
-- produces the code to initialize a call frame
allocFrame :: Annotation a => FrameRefs a -> [String] -> [Inst a]
allocFrame frefs sregs = alloc (localFrameSize frefs + tailFrameExpansion frefs) ++ saves where
alloc 0 = []
alloc n = [sub na (OAReg na (TPrim na "int") "sp") (OAConst na (CInt na n))]
saves = [push na (OAReg na (TPrim na "int") r) | r <- sregs]
na = nullAnnotation
-- produces the code to uninitialize a call frame optionally trimming the stack frame for a tail call
deallocFrame :: Annotation a => Maybe Int -> FrameRefs a -> [String] -> [Inst a]
deallocFrame tailctx frefs sregs = restores ++ dealloc (deallocSize tailctx) where
deallocSize (Just cid) = localFrameSize frefs + tailCallExpansion frefs cid
deallocSize Nothing = localFrameSize frefs + tailFrameExpansion frefs
restores = [pop na (OAReg na (TPrim na "int") r) | r <- reverse sregs]
dealloc 0 = []
dealloc n = [add na (OAReg na (TPrim na "int") "sp") (OAConst na (CInt na n))]
na = nullAnnotation
-- add extra stack space when tail-calling a function with a larger frame
tailFrameExpansion :: Annotation a => FrameRefs a -> Int
tailFrameExpansion frefs = ef (tailFrameSize frefs - argFrameSize frefs) where
ef n | n > 0 = n
ef _ = 0
-- the stack space required to make a particular tail call
tailCallExpansion :: Annotation a => FrameRefs a -> Int -> Int
tailCallExpansion frefs cid = ef (tailFrameSize frefs - tailCallSize frefs cid) + ef (argFrameSize frefs - tailFrameSize frefs) where
ef n | n > 0 = n
ef _ = 0
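-- In other words, for a call site cid this is
--   max 0 (tailFrameSize - tailCallSize cid) + max 0 (argFrameSize - tailFrameSize),
-- the shortfall of this particular call against the largest tail frame, plus any surplus
-- of the argument frame over that largest tail frame.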
-- reduce excess stack space when tail-calling a function with a smaller frame
trimTailFrame :: Annotation a => FrameRefs a -> Int -> [Inst a]
trimTailFrame frefs cid = tf (argFrameSize frefs - tailCallSize frefs cid) where
na = nullAnnotation
tf n | n > 0 = [add na (OAReg na (TPrim na "int") "sp") (OAConst na (CInt na n))]
tf _ = []
-- rewrite frame slot references to explicit memory offsets from the stack pointer
assignFrameOffsets :: (Annotation a, Eq a) => FrameRefs a -> [String] -> CFG (Inst a) -> CFG (Inst a)
assignFrameOffsets frefs@(args, locals, tailas) sregs bs = mapRewriteArgs assignFrameOffset bs where
assignFrameOffset off (OAFrameSlot p ty sid (TailParam cid)) = OARMemOff p ty "sp" (off + sregoff + tailFrameOffset toffs cid sid + tailFrameExpansion frefs)
assignFrameOffset off (OAFrameSlot p ty sid _) = OARMemOff p ty "sp" (off + sregoff + frameOffset sid + tailFrameExpansion frefs)
assignFrameOffset _ x = x
tailArgStart = argFrameSize frefs + sizeof (TPrim na "int") + localFrameSize frefs
toffs = tailFrameOffsets tailArgStart frefs
frameOffset sid = uj (Map.lookup sid foffs)
foffs = frameOffsets frefs
sregoff = sizeof (TPrim na "int") * length sregs
na = relatedNullAnnotation (cfg_node bs 0)
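-- Note: after this pass a frame slot is addressed relative to sp roughly as
--   sp + <amount already pushed for a call> + <callee-save area> + <slot offset>
--      + tailFrameExpansion
-- where the callee-save area is sizeof int * length sregs and tail-call arguments are
-- laid out starting just past the locals (tailArgStart).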
-- find references to local variables and arguments in order
type SlotTys a = Map.Map Int (Ty a)
type TailSlotFrames a = Map.Map Int (SlotTys a)
type FrameRefs a = (SlotTys a, SlotTys a, TailSlotFrames a)
findFrameRefs :: (Annotation a, Eq a) => CFG (Inst a) -> FrameRefs a
findFrameRefs bs = (order args, order locals, tailas) where
order x = Map.fromList x
(args, locals, tailas) = foldl blockGather ([], [], Map.empty) bs
blockGather (args, locals, tailas) (BBlock is m k o) = foldl instGather (args, locals, tailas) is
-- for 0-arity tail calls (like to the 'failed_match' built-in function), make sure that the tail call is tracked
instGather (args, locals, tailas) (Op _ "tailcall" _ [OAConst _ (CInt _ cid), _] _ _) = (args, locals, def (Map.lookup cid tailas)) where
def (Just _) = tailas
def Nothing = Map.insert cid Map.empty tailas
-- track the number and type of frame slots for parameters, local variables, and for each tail call
instGather (args, locals, tailas) i = (args ++ args', locals ++ locals', foldl tinsert tailas tailas') where
args' = [(n, ty) | OAFrameSlot _ ty n Param <- instArgs i]
locals' = [(n, ty) | OAFrameSlot _ ty n Local <- instArgs i]
tailas' = [(cid, n, ty) | OAFrameSlot _ ty n (TailParam cid) <- instArgs i, n >= 0]
tinsert m (cid, n, ty) = insertMapVal m cid n ty
argFrameSize :: Annotation a => FrameRefs a -> Int
argFrameSize (args, _, _) = frameSize args
localFrameSize :: Annotation a => FrameRefs a -> Int
localFrameSize (_, locals, _) = frameSize locals
tailFrameSize :: Annotation a => FrameRefs a -> Int
tailFrameSize (_, _, tailas) = seqMax 0 [frameSize frame | (_, frame) <- Map.toList tailas]
tailCallSize :: Annotation a => FrameRefs a -> Int -> Int
tailCallSize (_, _, tailas) cid = frameSize (uj (Map.lookup cid tailas))
frameSize :: Annotation a => SlotTys a -> Int
frameSize ss = sum [sizeof t | (_, t) <- Map.toList ss]
-- given a sequence of frame variables, determines the offset of each within the frame
type FrameOffsets = Map.Map Int Int
type TailFrameOffsets = Map.Map Int FrameOffsets
frameOffsets :: Annotation a => FrameRefs a -> FrameOffsets
frameOffsets (args, locals, _) = snd $ foldl computeOffset (0, Map.empty) (reverse $ Map.toList args ++ [(-1, TPrim nullAnnotation "int")] ++ Map.toList locals) where
computeOffset (off, foMap) (i, ty) = (off + sizeof ty, Map.insert i off foMap)
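-- Illustrative layout (assuming sizeof int = 4 and distinct slot ids): with args
-- {0 -> int} and locals {1 -> int, 2 -> int}, the fold walks the reversed locals, a
-- one-int placeholder (id -1, presumably the return address) and then the args,
-- giving offsets 2 -> 0, 1 -> 4, -1 -> 8 and 0 -> 12 from offset 0 upward.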
tailFrameOffsets :: Annotation a => Int -> FrameRefs a -> TailFrameOffsets
tailFrameOffsets start (_, _, tsfs) = Map.map tailCallOffsets tsfs where
tailCallOffsets fvs = snd $ foldl computeOffset (start, Map.empty) (Map.toList fvs ++ [(-1, TPrim nullAnnotation "int")])
computeOffset (off, foMap) (i, ty) = (off', Map.insert i off' foMap) where off' = off - sizeof ty
tailFrameOffset :: TailFrameOffsets -> Int -> Int -> Int
tailFrameOffset tfo cid sid = uj (map2Lookup tfo cid sid)
-- allow the rewriting of stack frame offsets within a sequence of pushes for a function call
mapRewriteArgs :: (Annotation a, Eq a) => (Int -> OpArg a -> OpArg a) -> CFG (Inst a) -> CFG (Inst a)
mapRewriteArgs f bs = map rewrite bs where
rewrite (BBlock is m k o) = BBlock (reverse $ second $ foldl rewritei (0, []) is) m k o
rewritei (off, is') i = (clear i off + doff i, rewriteargs off i : is')
clear (Op _ "call" _ _ _ _) _ = 0
clear _ x = x
doff (Op _ "push" _ [r] _ _) = sizeof (argTy r)
doff _ = 0
rewriteargs off (Op p o dsts srcs sf lbls) = Op p o (map (f off) dsts) (map (f off) srcs) sf lbls
rewriteargs _ x = x
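-- Note: the accumulator threaded through rewritei is the amount already pushed for an
-- in-progress call sequence: each "push" adds sizeof (argTy r) after the current
-- instruction is rewritten, and a "call" resets it to 0, so slot references between
-- pushes are biased by the arguments pushed so far.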
-- find the set of callee-save registers written in a body of code
findSaveRegs :: (Annotation a, Eq a) => CFG (Inst a) -> [String]
findSaveRegs bs = unique rs where
rs = foldr gather_brefs [] bs
gather_brefs (BBlock is _ _ _) rs = foldr gather_irefs rs is
gather_irefs i rs = [r | Reg _ r <- inst_reg_defs i, not (r `elem` cregs)] ++ rs
-- display the stack frame structure of a control-flow graph
frameRefDiag :: (Annotation a, Eq a) => CFG (Inst a) -> String
frameRefDiag bs =
"digraph G {\n" ++
" label = \"Stack Alignments\";\n" ++
" img [shape=\"Mrecord\" label=<" ++ showFrameRefs bs ++ ">];\n" ++
"}\n"
showFrameRefs :: (Annotation a, Eq a) => CFG (Inst a) -> String
showFrameRefs bs = frameTable where
frameTable = "<table>" ++ concat [frameBlock name frame ["red","blue"] | (name, frame) <- ("arguments", args) : map (\(i,v) -> ("tail call #" ++ show i, v)) (Map.toList tailcs)] ++ "</table>"
(args, locals, tailcs) = findFrameRefs bs
sregs = findSaveRegs bs
frameBlock :: Annotation a => String -> SlotTys a -> [String] -> String
frameBlock name f colors = "<tr><td>" ++ name ++ "</td>" ++ concat [tyBlock ty c | ((_, ty), c) <- zip (Map.toList f) (cycle colors)] ++ "</tr>"
tyBlock :: Annotation a => Ty a -> String -> String
tyBlock ty color = concat (take (sizeof ty) (repeat ("<td bgcolor=\"" ++ color ++ "\"> </td>")))
| null | https://raw.githubusercontent.com/kthielen/stlcc/369492daad6498a93c00f5924a99ceb65b5f1062/ASM/InstCFG.hs | haskell | ---- better register spilling
add a prolog/epilog to allocate local variables and preserve callee-save registers
also, commit stack frame references to actual offsets against the sp register
produces the code to initialize a call frame
produces the code to uninitialize a call frame optionally trimming the stack frame for a tail call
add extra stack space when tail-calling a function with a larger frame
the stack space required to make a particular tail call
reduce excess stack space when tail-calling a function with a smaller frame
rewrite frame slot references to explicit memory offsets from the stack pointer
find references to local variables and arguments in order
for 0-arity tail calls (like to the 'failed_match' built-in function), make sure that the tail call is tracked
track the number and type of frame slots for parameters, local variables, and for each tail call
given a sequence of frame variables, determines the offset of each within the frame
allow the rewriting of stack frame offsets within a sequence of pushes for a function call
find the set of callee-save registers written in a body of code
display the stack frame structure of a control-flow graph
|
module ASM.InstCFG where
import ASM.Type
import ASM.Term
import ASM.Liveness
import Util.Annotated
import Util.CFG
import Util.Sequence
import Util.Num
import Util.Tuples
import Util.Recursion
import qualified Data.Map as Map
block_reg_refs :: (Annotation a, Eq a) => BBlock (Inst a) -> [Reg a]
block_reg_refs (BBlock is _ _ _) = unique $ concat $ map inst_reg_refs is
cfg_reg_refs :: (Annotation a, Eq a) => CFG (Inst a) -> [Reg a]
cfg_reg_refs cfg = unique $ concat $ map block_reg_refs cfg
rename_registers :: (Annotation a, Eq a) => CFG (Inst a) -> Map.Map (Reg a) (Reg a) -> CFG (Inst a)
rename_registers bs vn = map rename_block bs where
rename_block (BBlock is m n o) = BBlock (map rename_inst is) m n o
rename_inst (Op p o dsts srcs f jmps) = Op p o (map rename_loc dsts) (map rename_loc srcs) f jmps
rename_inst (RMov p ty dst src) = RMov p ty (subst dst) (subst src)
rename_inst p = p
rename_loc (OAReg p ty s) = OAReg p ty (subst s)
rename_loc (OARMemOff p ty s x) = OARMemOff p ty (subst s) x
rename_loc p = p
subst s = either id (const s) $ choice (Map.lookup s vn')
vn' = Map.fromList [(s0, s1) | (Reg _ s0, Reg _ s1) <- Map.toList vn]
normalizeMoves :: (Annotation a, Eq a) => CFG (Inst a) -> CFG (Inst a)
normalizeMoves bs = map normalizeBlock bs where
normalizeBlock (BBlock is m n o) = BBlock [normalize i | i <- is, not (redundant i)] m n o
normalize (RMov p ty dst src) = realmov p (OAReg p ty dst) (OAReg p ty src)
normalize x = x
redundant (RMov _ ty dst src) | dst == src = True
redundant (Op _ "mov" [dst] [src] f j) | dst == src = True
redundant _ = False
next_frame_slot :: (Annotation a, Eq a) => CFG (Inst a) -> Int
next_frame_slot bs = 1 + (seqMax 0 (map bmax bs)) where
bmax (BBlock is _ _ _) = seqMax 0 (map imax is)
imax (Op _ _ ds ss _ _) = seqMax (seqMax 0 (map oaslot ds)) (map oaslot ss)
imax _ = 0
oaslot (OAFrameSlot _ _ i _) = i
oaslot _ = 0
spill_registers :: (Annotation a, Eq a) => CFG (Inst a) -> [Reg a] -> Map.Map (Reg a) (Reg a) -> CFG (Inst a)
spill_registers bs vs mvs = orderTrace $ reverse $ first $ foldl spill_block ([], 0) bs where
spill_block (bs', n) (BBlock is m k o) = ((BBlock is' m k o):bs', n') where (is', n') = spill_insts is n
spill_insts (i:is) n = (i' ++ is', n'') where (i', n') = spill_inst i n; (is', n'') = spill_insts is n'
spill_insts [] n = ([], n)
spill_inst i n = spill_rewrite_inst_regs ss i n
ss = choose_stack_slots bs vs mvs
spill_rewrite_inst_regs :: Annotation a => Map.Map (Reg a) Int -> Inst a -> Int -> ([Inst a], Int)
spill_rewrite_inst_regs ss i n = (pfx ++ [i''] ++ sfx, n'') where
(pfx, i', n') = foldl spill_read ([], i, n) uses
(i'', sfx, n'') = foldl spill_write (i', [], n') defs
uses = marked_regs (inst_reg_uses i)
defs = marked_regs (inst_reg_defs i)
marked_regs rseq = [(r, s) | (r, Just s) <- [(r, Map.lookup r ss) | r <- rseq]]
spill_read (pfx, i, n) (r, s) = (pfx ++ pfx', i', n') where (pfx', i', n') = spill_inst_read i r s n
spill_write (i, sfx, n) (r, s) = (i', sfx' ++ sfx, n') where (i', sfx', n') = spill_inst_write i r s n
spill_inst_read :: Annotation a => Inst a -> Reg a -> Int -> Int -> ([Inst a], Inst a, Int)
spill_inst_read (Op p "tailcall" [] srcs f []) (Reg ty r) s n = ([], i', n) where
i' = Op p "tailcall" [] (map (rewriteRegister r (OAFrameSlot p ty s Local)) srcs) f []
spill_inst_read i (Reg ty r) s n = (prefix, rewrite i, n + 1) where
prefix = [mov na treg tspill]
tregname = "spr" ++ r ++ show n
treg = OAReg na ty tregname
tspill = OAFrameSlot na ty s Local
rewrite (Op p o dsts srcs f lbls) = Op p o dsts (map (rename_register r tregname) srcs) f lbls
rewrite (RMov p ty' dst src) | src == r = RMov p ty' dst tregname
rewrite p@(RMov _ _ _ _) = p
na = nullAnnotation
spill_inst_write :: Annotation a => Inst a -> Reg a -> Int -> Int -> (Inst a, [Inst a], Int)
spill_inst_write i (Reg ty r) s n = (rewrite i, suffix, n + 1) where
suffix = [mov na tspill treg]
tregname = "spw" ++ r ++ show n
treg = OAReg na ty tregname
tspill = OAFrameSlot na ty s Local
rewrite (Op p o dsts srcs f lbls) = Op p o (map (rename_register r tregname) dsts) srcs f lbls
rewrite (RMov p ty' dst src) | dst == r = RMov p ty' tregname src
rewrite p@(RMov _ _ _ _) = p
na = nullAnnotation
rename_register :: String -> String -> OpArg a -> OpArg a
rename_register f t (OAReg p ty s) | s == f = OAReg p ty t
rename_register f t (OARMemOff p ty s n) | s == f = OARMemOff p ty t n
rename_register _ _ x = x
rewriteRegister :: String -> OpArg a -> OpArg a -> OpArg a
rewriteRegister r x (OAReg _ _ r') | r == r' = x
rewriteRegister _ _ x = x
choose_stack_slots :: (Annotation a, Eq a) => CFG (Inst a) -> [Reg a] -> Map.Map (Reg a) (Reg a) -> Map.Map (Reg a) Int
choose_stack_slots bs vs mvs = first $ fixedPoint merge_mvs (vs', Map.toList mvs) where
n = next_frame_slot bs
vs' = Map.fromList $ mapi (\n' vn -> (vn, n' + n)) vs
merge_mvs (d, mvs) = foldl merge_mv (d, []) mvs
merge_mv p@(d, iv) (s0, s1) = merge_def (merge_def p s0 s1 (Map.lookup s1 d)) s1 s0 (Map.lookup s0 d)
merge_def (d, iv) n r (Just x) = (Map.insert n x d, iv)
merge_def (d, iv) n r Nothing = (d, (n,r):iv)
type StackLocs a = Map.Map (Reg a) Int
type CoalescedRegs a = Map.Map (Reg a) (Reg a)
spillInterference :: InterferenceGraph a -> [Reg a] -> InterferenceGraph a
spillInterference ig regs = foldl (add_edge Interfere) ig' [(r1, r2) | r1 <- regs, r2 <- regs, r1 /= r2] where
ig' = foldl createVar ig regs
spillRegisters :: (Annotation a, Eq a) => CFG (Inst a) -> InterferenceGraph a -> [Reg a] -> CoalescedRegs a -> (CFG (Inst a), InterferenceGraph a)
spillRegisters bs ig vs mvs = (orderTrace (reverse bs'), ig'') where
(bs', _, ig'') = foldl spillBlock ([], 0, ig') bs
ss = chooseStackSlots bs vs mvs
ig' = Map.foldWithKey (\r _ ig' -> killVar ig' r) ig ss
spillBlock (bs', n, ig') (BBlock is m k o) = (BBlock is' m k o : bs', n', ig'') where
(is', n', ig'') = spillInsts is n ig'
spillInsts [] n ig = ([], n, ig)
spillInsts (i:is) n ig = (i' ++ is', n'', ig'') where
(i', n', ig') = spillRewriteInstRegs ss i n ig
(is', n'', ig'') = spillInsts is n' ig'
spillRewriteInstRegs :: Annotation a => StackLocs a -> Inst a -> Int -> InterferenceGraph a -> ([Inst a], Int, InterferenceGraph a)
spillRewriteInstRegs ss i n ig = (is, n'', ig') where
is = pfx ++ [i''] ++ sfx
ig' = spillInterference ig (unique (concatMap inst_reg_refs is))
(pfx, i', n') = foldl spillRead ([], i, n) uses
(i'', sfx, n'') = foldl spillWrite (i', [], n') defs
uses = markedRegs (inst_reg_uses i)
defs = markedRegs (inst_reg_defs i)
markedRegs rseq = [(r, s) | (r, Just s) <- [(r, Map.lookup r ss) | r <- rseq]]
spillRead (pfx, i, n) (r, s) = (pfx ++ pfx', i', n') where
(pfx', i', n') = spillInstRead i r s n
spillWrite (i, sfx, n) (r, s) = (i', sfx' ++ sfx, n') where
(i', sfx', n') = spillInstWrite i r s n
spillInstRead :: Annotation a => Inst a -> Reg a -> Int -> Int -> ([Inst a], Inst a, Int)
spillInstRead (Op p "tailcall" [] srcs f []) (Reg ty r) s n = ([], i', n) where
i' = Op p "tailcall" [] (map (rewriteRegister r (OAFrameSlot nullAnnotation ty s Local)) srcs) f []
spillInstRead i (Reg ty r) s n = (prefix, rewrite i, n + 1) where
prefix = [mov na treg tspill]
tregname = "spr" ++ r ++ show n
treg = OAReg na ty tregname
tspill = OAFrameSlot na ty s Local
rewrite (Op p o dsts srcs f lbls) = Op p o dsts (map (rename_register r tregname) srcs) f lbls
rewrite (RMov p ty' dst src) | src == r = RMov p ty' dst tregname
rewrite p@(RMov _ _ _ _) = p
na = nullAnnotation
spillInstWrite :: Annotation a => Inst a -> Reg a -> Int -> Int -> (Inst a, [Inst a], Int)
spillInstWrite i (Reg ty r) s n = (rewrite i, suffix, n + 1) where
suffix = [mov na tspill treg]
tregname = "spw" ++ r ++ show n
treg = OAReg na ty tregname
tspill = OAFrameSlot na ty s Local
rewrite (Op p o dsts srcs f lbls) = Op p o (map (rename_register r tregname) dsts) srcs f lbls
rewrite (RMov p ty' dst src) | dst == r = RMov p ty' tregname src
rewrite p@(RMov _ _ _ _) = p
na = nullAnnotation
chooseStackSlots :: (Annotation a, Eq a) => CFG (Inst a) -> [Reg a] -> CoalescedRegs a -> StackLocs a
chooseStackSlots bs vs mvs = ss where
(ss, _) = fixedPoint mergeMoves (vs', Map.toList mvs)
n = next_frame_slot bs
vs' = Map.fromList (mapi (\n' vn -> (vn, n' + n)) vs)
mergeMoves (d, mvs) = foldl mergeMove (d, []) mvs
mergeMove p@(d, iv) (s0, s1) = mergeDef (mergeDef p s0 s1 (Map.lookup s1 d)) s1 s0 (Map.lookup s0 d)
mergeDef (d, iv) n r (Just x) = (Map.insert n x d, iv)
mergeDef (d, iv) n r Nothing = (d, (n,r):iv)
finalizeFunction :: Annotation a => Eq a => CFG (Inst a) -> CFG (Inst a)
finalizeFunction [] = []
finalizeFunction bs = orderTrace bs''' where
bs''' = [BBlock prolog 0 0 0] ++ bs'' ++ [BBlock epilog (1 + length bs) 0 0]
bs'' = map (renameNamedBlock ibname entrance) [BBlock is (n + 1) m o | BBlock is n m o <- bs']
bs' = concatMapCFG patchExit (assignFrameOffsets frefs sregs bs)
patchExit (Op p "ret" _ _ _ _) = [jmp p exit]
patchExit (Op p "tailcall" _ [OAConst _ (CInt _ cid), x] _ _) = deallocFrame (Just cid) frefs sregs ++ [jmpArg p x]
patchExit x = [x]
rootBlock = block_by_id 0 bs
ibname = block_name rootBlock
entrance = "#" ++ ibname ++ "_entrance"
prolog = [LblDef na ibname] ++ allocFrame frefs sregs ++ [jmp na entrance]
epilog = [LblDef na exit] ++ deallocFrame Nothing frefs sregs ++ [ret na (argFrameSize frefs)]
exit = "#" ++ ibname ++ "_exit"
frefs = findFrameRefs bs
sregs = findSaveRegs bs
na = nullAnnotation
allocFrame :: Annotation a => FrameRefs a -> [String] -> [Inst a]
allocFrame frefs sregs = alloc (localFrameSize frefs + tailFrameExpansion frefs) ++ saves where
alloc 0 = []
alloc n = [sub na (OAReg na (TPrim na "int") "sp") (OAConst na (CInt na n))]
saves = [push na (OAReg na (TPrim na "int") r) | r <- sregs]
na = nullAnnotation
deallocFrame :: Annotation a => Maybe Int -> FrameRefs a -> [String] -> [Inst a]
deallocFrame tailctx frefs sregs = restores ++ dealloc (deallocSize tailctx) where
deallocSize (Just cid) = localFrameSize frefs + tailCallExpansion frefs cid
deallocSize Nothing = localFrameSize frefs + tailFrameExpansion frefs
restores = [pop na (OAReg na (TPrim na "int") r) | r <- reverse sregs]
dealloc 0 = []
dealloc n = [add na (OAReg na (TPrim na "int") "sp") (OAConst na (CInt na n))]
na = nullAnnotation
tailFrameExpansion :: Annotation a => FrameRefs a -> Int
tailFrameExpansion frefs = ef (tailFrameSize frefs - argFrameSize frefs) where
ef n | n > 0 = n
ef _ = 0
tailCallExpansion :: Annotation a => FrameRefs a -> Int -> Int
tailCallExpansion frefs cid = ef (tailFrameSize frefs - tailCallSize frefs cid) + ef (argFrameSize frefs - tailFrameSize frefs) where
ef n | n > 0 = n
ef _ = 0
trimTailFrame :: Annotation a => FrameRefs a -> Int -> [Inst a]
trimTailFrame frefs cid = tf (argFrameSize frefs - tailCallSize frefs cid) where
na = nullAnnotation
tf n | n > 0 = [add na (OAReg na (TPrim na "int") "sp") (OAConst na (CInt na n))]
tf _ = []
assignFrameOffsets :: (Annotation a, Eq a) => FrameRefs a -> [String] -> CFG (Inst a) -> CFG (Inst a)
assignFrameOffsets frefs@(args, locals, tailas) sregs bs = mapRewriteArgs assignFrameOffset bs where
assignFrameOffset off (OAFrameSlot p ty sid (TailParam cid)) = OARMemOff p ty "sp" (off + sregoff + tailFrameOffset toffs cid sid + tailFrameExpansion frefs)
assignFrameOffset off (OAFrameSlot p ty sid _) = OARMemOff p ty "sp" (off + sregoff + frameOffset sid + tailFrameExpansion frefs)
assignFrameOffset _ x = x
tailArgStart = argFrameSize frefs + sizeof (TPrim na "int") + localFrameSize frefs
toffs = tailFrameOffsets tailArgStart frefs
frameOffset sid = uj (Map.lookup sid foffs)
foffs = frameOffsets frefs
sregoff = sizeof (TPrim na "int") * length sregs
na = relatedNullAnnotation (cfg_node bs 0)
type SlotTys a = Map.Map Int (Ty a)
type TailSlotFrames a = Map.Map Int (SlotTys a)
type FrameRefs a = (SlotTys a, SlotTys a, TailSlotFrames a)
findFrameRefs :: (Annotation a, Eq a) => CFG (Inst a) -> FrameRefs a
findFrameRefs bs = (order args, order locals, tailas) where
order x = Map.fromList x
(args, locals, tailas) = foldl blockGather ([], [], Map.empty) bs
blockGather (args, locals, tailas) (BBlock is m k o) = foldl instGather (args, locals, tailas) is
instGather (args, locals, tailas) (Op _ "tailcall" _ [OAConst _ (CInt _ cid), _] _ _) = (args, locals, def (Map.lookup cid tailas)) where
def (Just _) = tailas
def Nothing = Map.insert cid Map.empty tailas
instGather (args, locals, tailas) i = (args ++ args', locals ++ locals', foldl tinsert tailas tailas') where
args' = [(n, ty) | OAFrameSlot _ ty n Param <- instArgs i]
locals' = [(n, ty) | OAFrameSlot _ ty n Local <- instArgs i]
tailas' = [(cid, n, ty) | OAFrameSlot _ ty n (TailParam cid) <- instArgs i, n >= 0]
tinsert m (cid, n, ty) = insertMapVal m cid n ty
argFrameSize :: Annotation a => FrameRefs a -> Int
argFrameSize (args, _, _) = frameSize args
localFrameSize :: Annotation a => FrameRefs a -> Int
localFrameSize (_, locals, _) = frameSize locals
tailFrameSize :: Annotation a => FrameRefs a -> Int
tailFrameSize (_, _, tailas) = seqMax 0 [frameSize frame | (_, frame) <- Map.toList tailas]
tailCallSize :: Annotation a => FrameRefs a -> Int -> Int
tailCallSize (_, _, tailas) cid = frameSize (uj (Map.lookup cid tailas))
frameSize :: Annotation a => SlotTys a -> Int
frameSize ss = sum [sizeof t | (_, t) <- Map.toList ss]
type FrameOffsets = Map.Map Int Int
type TailFrameOffsets = Map.Map Int FrameOffsets
frameOffsets :: Annotation a => FrameRefs a -> FrameOffsets
frameOffsets (args, locals, _) = snd $ foldl computeOffset (0, Map.empty) (reverse $ Map.toList args ++ [(-1, TPrim nullAnnotation "int")] ++ Map.toList locals) where
computeOffset (off, foMap) (i, ty) = (off + sizeof ty, Map.insert i off foMap)
tailFrameOffsets :: Annotation a => Int -> FrameRefs a -> TailFrameOffsets
tailFrameOffsets start (_, _, tsfs) = Map.map tailCallOffsets tsfs where
tailCallOffsets fvs = snd $ foldl computeOffset (start, Map.empty) (Map.toList fvs ++ [(-1, TPrim nullAnnotation "int")])
computeOffset (off, foMap) (i, ty) = (off', Map.insert i off' foMap) where off' = off - sizeof ty
tailFrameOffset :: TailFrameOffsets -> Int -> Int -> Int
tailFrameOffset tfo cid sid = uj (map2Lookup tfo cid sid)
mapRewriteArgs :: (Annotation a, Eq a) => (Int -> OpArg a -> OpArg a) -> CFG (Inst a) -> CFG (Inst a)
mapRewriteArgs f bs = map rewrite bs where
rewrite (BBlock is m k o) = BBlock (reverse $ second $ foldl rewritei (0, []) is) m k o
rewritei (off, is') i = (clear i off + doff i, rewriteargs off i : is')
clear (Op _ "call" _ _ _ _) _ = 0
clear _ x = x
doff (Op _ "push" _ [r] _ _) = sizeof (argTy r)
doff _ = 0
rewriteargs off (Op p o dsts srcs sf lbls) = Op p o (map (f off) dsts) (map (f off) srcs) sf lbls
rewriteargs _ x = x
findSaveRegs :: (Annotation a, Eq a) => CFG (Inst a) -> [String]
findSaveRegs bs = unique rs where
rs = foldr gather_brefs [] bs
gather_brefs (BBlock is _ _ _) rs = foldr gather_irefs rs is
gather_irefs i rs = [r | Reg _ r <- inst_reg_defs i, not (r `elem` cregs)] ++ rs
frameRefDiag :: (Annotation a, Eq a) => CFG (Inst a) -> String
frameRefDiag bs =
"digraph G {\n" ++
" label = \"Stack Alignments\";\n" ++
" img [shape=\"Mrecord\" label=<" ++ showFrameRefs bs ++ ">];\n" ++
"}\n"
showFrameRefs :: (Annotation a, Eq a) => CFG (Inst a) -> String
showFrameRefs bs = frameTable where
frameTable = "<table>" ++ concat [frameBlock name frame ["red","blue"] | (name, frame) <- ("arguments", args) : map (\(i,v) -> ("tail call #" ++ show i, v)) (Map.toList tailcs)] ++ "</table>"
(args, locals, tailcs) = findFrameRefs bs
sregs = findSaveRegs bs
frameBlock :: Annotation a => String -> SlotTys a -> [String] -> String
frameBlock name f colors = "<tr><td>" ++ name ++ "</td>" ++ concat [tyBlock ty c | ((_, ty), c) <- zip (Map.toList f) (cycle colors)] ++ "</tr>"
tyBlock :: Annotation a => Ty a -> String -> String
tyBlock ty color = concat (take (sizeof ty) (repeat ("<td bgcolor=\"" ++ color ++ "\"> </td>")))
|
a5bd8a4751d9d0192ea8c18206c65ebcfaddbcc5ee59b034f63655a9adc764ca | soren-n/bidi-higher-rank-poly | Set.ml | open Extra
type 'a set = 'a AVL.tree
let make = AVL.make_null
let is_empty set = (AVL.get_count set) = 0
let is_member = AVL.is_member
let get_member = AVL.get_member
let size = AVL.get_count
let add = AVL.insert
let remove = AVL.remove
let to_list = AVL.to_list
let from_list = AVL.from_list
let fold empty_case item_case set =
List.fold empty_case item_case (to_list set)
let union order xs ys =
let open AVL in
let open Order in
let _cont k x xs = k (x :: xs) in
let rec _visit xs ys return =
match xs, ys with
| [], _ -> return ys
| _, [] -> return xs
| x :: xs', y :: ys' ->
match order x y with
| EQ -> _visit xs' ys' (_cont return x)
| LT -> _visit xs' ys (_cont return x)
| GT -> _visit xs ys' (_cont return y)
in
from_list (_visit (to_list xs) (to_list ys) identity)
let difference order xs ys =
let open AVL in
let open Order in
let _cont k x xs = k (x :: xs) in
let rec _visit xs ys return =
match xs, ys with
| [], _ | _, [] -> return xs
| x :: xs', y :: ys' ->
match order x y with
| EQ -> _visit xs' ys' return
| LT -> _visit xs' ys (_cont return x)
| GT -> _visit xs' ys' (_cont return x)
in
from_list (_visit (to_list xs) (to_list ys) identity)
let intersection order xs ys =
let open AVL in
let open Order in
let _cont k x xs = k (x :: xs) in
let rec _visit xs ys return =
match xs, ys with
| [], _ | _, [] -> return []
| x :: xs', y :: ys' ->
match order x y with
| EQ -> _visit xs' ys' (_cont return x)
| LT -> _visit xs' ys return
| GT -> _visit xs ys' return
in
from_list (_visit (to_list xs) (to_list ys) identity)
let first values = AVL.get_leftmost values
let first_unsafe values =
match AVL.get_leftmost values with
| None -> assert false
| Some value -> value
let last values = AVL.get_rightmost values
let last_unsafe values =
match AVL.get_rightmost values with
| None -> assert false
| Some value -> value
| null | https://raw.githubusercontent.com/soren-n/bidi-higher-rank-poly/c0957759657b30a52235560d1d5f40e9bd2569b3/util/lib/Set.ml | ocaml | open Extra
type 'a set = 'a AVL.tree
let make = AVL.make_null
let is_empty set = (AVL.get_count set) = 0
let is_member = AVL.is_member
let get_member = AVL.get_member
let size = AVL.get_count
let add = AVL.insert
let remove = AVL.remove
let to_list = AVL.to_list
let from_list = AVL.from_list
let fold empty_case item_case set =
List.fold empty_case item_case (to_list set)
let union order xs ys =
let open AVL in
let open Order in
let _cont k x xs = k (x :: xs) in
let rec _visit xs ys return =
match xs, ys with
| [], _ -> return ys
| _, [] -> return xs
| x :: xs', y :: ys' ->
match order x y with
| EQ -> _visit xs' ys' (_cont return x)
| LT -> _visit xs' ys (_cont return x)
| GT -> _visit xs ys' (_cont return y)
in
from_list (_visit (to_list xs) (to_list ys) identity)
let difference order xs ys =
let open AVL in
let open Order in
let _cont k x xs = k (x :: xs) in
let rec _visit xs ys return =
match xs, ys with
| [], _ | _, [] -> return xs
| x :: xs', y :: ys' ->
match order x y with
| EQ -> _visit xs' ys' return
| LT -> _visit xs' ys (_cont return x)
| GT -> _visit xs' ys' (_cont return x)
in
from_list (_visit (to_list xs) (to_list ys) identity)
let intersection order xs ys =
let open AVL in
let open Order in
let _cont k x xs = k (x :: xs) in
let rec _visit xs ys return =
match xs, ys with
| [], _ | _, [] -> return []
| x :: xs', y :: ys' ->
match order x y with
| EQ -> _visit xs' ys' (_cont return x)
| LT -> _visit xs' ys return
| GT -> _visit xs ys' return
in
from_list (_visit (to_list xs) (to_list ys) identity)
let first values = AVL.get_leftmost values
let first_unsafe values =
match AVL.get_leftmost values with
| None -> assert false
| Some value -> value
let last values = AVL.get_rightmost values
let last_unsafe values =
match AVL.get_rightmost values with
| None -> assert false
| Some value -> value
|
|
400e32a83b80c8e00eb31604f40b73a53502e821b5e95bc2acb3884aa7e20450 | gfour/gic | myex5.hs | result = fib 5;
fib n = if n <= 1 then 1 else fib(n-1) + fib(n-2)
" result " = CALL ( 0,"fib_n__0 " ) ( " fib " )
" fib " = " if " [ " < = " [ ARG 0 0,"1 " ] , " 1 " , " + " [ CALL ( 0,"fib_n__1 " ) ( " fib"),CALL ( 0,"fib_n__2 " ) ( " fib " ) ] ]
" fib_n__0 " = SAVE ( 0,0 ) ACT_0 " 2 "
" fib_n__1 " = SAVE ( 0,0 ) ACT_0 " - " [ ARG 0 0,"1 " ]
" fib_n__2 " = SAVE ( 0,0 ) ACT_0 " - " [ ARG 0 0,"2 " ]
"result" = CALL (0,"fib_n__0") ("fib")
"fib" = "if" ["<=" [ARG 0 0,"1" ],"1" ,"+" [CALL (0,"fib_n__1") ("fib"),CALL (0,"fib_n__2") ("fib")]]
"fib_n__0" = SAVE (0,0) ACT_0 "2"
"fib_n__1" = SAVE (0,0) ACT_0 "-" [ARG 0 0,"1" ]
"fib_n__2" = SAVE (0,0) ACT_0 "-" [ARG 0 0,"2" ]
-}
| null | https://raw.githubusercontent.com/gfour/gic/d5f2e506b31a1a28e02ca54af9610b3d8d618e9a/Examples/Num/myex5.hs | haskell | result = fib 5;
fib n = if n <= 1 then 1 else fib(n-1) + fib(n-2)
" result " = CALL ( 0,"fib_n__0 " ) ( " fib " )
" fib " = " if " [ " < = " [ ARG 0 0,"1 " ] , " 1 " , " + " [ CALL ( 0,"fib_n__1 " ) ( " fib"),CALL ( 0,"fib_n__2 " ) ( " fib " ) ] ]
" fib_n__0 " = SAVE ( 0,0 ) ACT_0 " 2 "
" fib_n__1 " = SAVE ( 0,0 ) ACT_0 " - " [ ARG 0 0,"1 " ]
" fib_n__2 " = SAVE ( 0,0 ) ACT_0 " - " [ ARG 0 0,"2 " ]
"result" = CALL (0,"fib_n__0") ("fib")
"fib" = "if" ["<=" [ARG 0 0,"1" ],"1" ,"+" [CALL (0,"fib_n__1") ("fib"),CALL (0,"fib_n__2") ("fib")]]
"fib_n__0" = SAVE (0,0) ACT_0 "2"
"fib_n__1" = SAVE (0,0) ACT_0 "-" [ARG 0 0,"1" ]
"fib_n__2" = SAVE (0,0) ACT_0 "-" [ARG 0 0,"2" ]
-}
|
|
91ed754c3691cc6fefd523bf52fe50c306b8063f3b41fca2caec94361c794fc4 | copumpkin/java | Raw.hs | # LANGUAGE TemplateHaskell #
{-# OPTIONS_GHC -funbox-strict-fields #-}
module Java.ClassFormat.Raw where
import Data.Int
import Data.Word
import qualified Data.Text as T
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
import qualified Data.Vector as V
import qualified Data.Vector.Unboxed as U
import qualified Data.IntMap as IM
import Control.Lens.TH
import Data.Eliminator.TH
import Java.Bytecode.Raw
}
data = ConstantValue -- exactly one
| Synthetic
| Signature -- 49.0 or above
| Deprecated
| RuntimeVisibleAnnotations -- 49.0 or above
| RuntimeInvisibleAnnotations -- 49.0 or above
data MethodAttribute
= Code -- native / abstract = 0 , otherwise = 1
| Exceptions
| Signature -- 49.0 or above
| Deprecated
| RuntimeVisibleAnnotations -- 49.0 or above
| RuntimeInvisibleAnnotations -- 49.0 or above
| RuntimeVisibleParameterAnnotations -- 49.0 or above
| RuntimeInvisibleParameterAnnotations -- 49.0 or above
| AnnotationDefault -- 49.0 or above
data ClassAttribute
= InnerClasses
| EnclosingMethod
| Synthetic
| Signature -- 49.0 or above
| SourceFile
| SourceDebugExtension
| Deprecated
| RuntimeVisibleAnnotations -- 49.0 or above
| RuntimeInvisibleAnnotations -- 49.0 or above
| BootstrapMethods -- 51.0 or above
data CodeAttribute
= LineNumberTable
| LocalVariableTable
| LocalVariableTypeTable
| StackMapTable -- 50.0 or above
-- ignore others
data FieldAttribute
= ConstantValue -- exactly one
| Synthetic
| Signature -- 49.0 or above
| Deprecated
| RuntimeVisibleAnnotations -- 49.0 or above
| RuntimeInvisibleAnnotations -- 49.0 or above
data MethodAttribute
= Code -- native/abstract = 0, otherwise = 1
| Exceptions
| Signature -- 49.0 or above
| Deprecated
| RuntimeVisibleAnnotations -- 49.0 or above
| RuntimeInvisibleAnnotations -- 49.0 or above
| RuntimeVisibleParameterAnnotations -- 49.0 or above
| RuntimeInvisibleParameterAnnotations -- 49.0 or above
| AnnotationDefault -- 49.0 or above
data ClassAttribute
= InnerClasses
| EnclosingMethod
| Synthetic
| Signature -- 49.0 or above
| SourceFile
| SourceDebugExtension
| Deprecated
| RuntimeVisibleAnnotations -- 49.0 or above
| RuntimeInvisibleAnnotations -- 49.0 or above
| BootstrapMethods -- 51.0 or above
data CodeAttribute
= LineNumberTable
| LocalVariableTable
| LocalVariableTypeTable
| StackMapTable -- 50.0 or above
-- ignore others
-}
data ReferenceKind
= Ref_getField
| Ref_getStatic
| Ref_putField
| Ref_putStatic
| Ref_invokeVirtual
| Ref_invokeStatic
| Ref_invokeSpecial
| Ref_newInvokeSpecial
| Ref_invokeInterface
deriving (Eq, Show)
data Constant
Use Text , but I 'm too lazy to figure out their modified UTF-8 right now
| Integer !Word32
| Float !Float
| Long !Word64
| Double !Double
| ClassName !Con2
| String !Con2
| FieldRef !Con2 !Con2
| MethodRef !Con2 !Con2
| InterfaceMethodRef !Con2 !Con2
| NameAndType !Con2 !Con2
| MethodHandle !ReferenceKind !Con2
| MethodType !Con2
| InvokeDynamic !Word16 !Con2
deriving (Eq, Show)
data Exception = Exception { startEx :: !Word16, endEx :: !Word16, handler :: !Word16, catchTypeIndex :: !Con2 } deriving (Eq, Show)
data CodeAttribute = CodeAttribute { maxStack :: !Word16, maxLocals :: !Word16, code :: !BL.ByteString, exceptionTable :: !(V.Vector Exception), codeAttributes :: !(V.Vector Attribute) } deriving (Eq, Show)
data InnerClass = InnerClass { innerClassInfo :: !Con2, outerClassInfo :: !Con2, innerClassName :: !Con2, innerClassAccessFlags :: !Word16 } deriving (Eq, Show)
data LocalVariable = LocalVariable { startPc :: !Word16, length :: !Word16, localVariableName :: !Con2, infoIndex :: !Con2, index :: !Word16 } deriving (Eq, Show)
data Annotation = Annotation { typeIndex :: !Con2, elementValuePairs :: !(V.Vector (Con2, Value)) } deriving (Eq, Show)
With better picklers , I could just factor out the into ConstVal ...
data Value
= ConstBoolVal !Con2
| ConstCharVal !Con2
| ConstFloatVal !Con2
| ConstDoubleVal !Con2
| ConstByteVal !Con2
| ConstShortVal !Con2
| ConstIntVal !Con2
| ConstLongVal !Con2
| ConstStringVal !Con2
| EnumVal !Con2 !Con2
| ClassVal !Con2
| AnnotationVal !Annotation
| ArrayVal !(V.Vector Value)
deriving (Eq, Show)
data Attribute
= ConstantValue Con2
| Code !CodeAttribute
| StackMapTable
| Exceptions !(V.Vector Con2)
| InnerClasses !(V.Vector InnerClass)
| EnclosingMethod !Con2 !Con2
| Synthetic -- empty
| Signature !Con2
| SourceFile !Con2
| SourceDebugExtension !BL.ByteString
| LineNumberTable !(U.Vector (Word16, Word16))
| LocalVariableTable !(V.Vector LocalVariable)
| LocalVariableTypeTable !(V.Vector LocalVariable)
| Deprecated -- empty
| RuntimeVisibleAnnotations !(V.Vector Annotation)
| RuntimeInvisibleAnnotations !(V.Vector Annotation)
| RuntimeVisibleParameterAnnotations !(V.Vector (V.Vector Annotation))
| RuntimeInvisibleParameterAnnotations !(V.Vector (V.Vector Annotation))
| AnnotationDefault !Value
| BootstrapMethods
| Custom !BL.ByteString
deriving (Eq, Show)
-- field_info and method_info are identical structures, called Entity here
data Entity = Entity { entityAccessFlags :: !Word16, entityName :: !Con2, entityDescriptor :: !Con2, entityAttributes :: !(V.Vector Attribute) } deriving (Eq, Show)
data Class = Class
{ majorVersion :: {-# UNPACK #-} !Word16
, minorVersion :: {-# UNPACK #-} !Word16
, constantPool :: !(IM.IntMap Constant)
, classAccessFlags :: {-# UNPACK #-} !Word16
, className :: {-# UNPACK #-} !Con2
, superClassName :: {-# UNPACK #-} !Con2
, interfaces :: !(U.Vector Con2)
, fields :: !(V.Vector Entity)
, methods :: !(V.Vector Entity)
, attributes :: !(V.Vector Attribute)
} deriving (Eq, Show)
mkElim ''Class
mkElim ''Entity
mkElim ''Attribute
mkElim ''Value
mkElim ''Annotation
mkElim ''LocalVariable
mkElim ''InnerClass
mkElim ''CodeAttribute
mkElim ''Exception
mkElim ''Constant
mkElim ''ReferenceKind
makePrisms ''Constant | null | https://raw.githubusercontent.com/copumpkin/java/ca5ecc025eef4463ef5f0f33e6873707aeb88bd9/src/Java/ClassFormat/Raw.hs | haskell | # OPTIONS_GHC -funbox-strict-fields #
exactly one
49.0 or above
49.0 or above
49.0 or above
native / abstract = 0 , otherwise = 1
49.0 or above
49.0 or above
49.0 or above
49.0 or above
49.0 or above
49.0 or above
49.0 or above
49.0 or above
49.0 or above
51.0 or above
50.0 or above
ignore others
exactly one
49.0 or above
49.0 or above
49.0 or above
native/abstract = 0, otherwise = 1
49.0 or above
49.0 or above
49.0 or above
49.0 or above
49.0 or above
49.0 or above
49.0 or above
49.0 or above
49.0 or above
51.0 or above
50.0 or above
ignore others
empty
empty
field_info and method_info are identical structures, called Entity here
# UNPACK #
# UNPACK #
# UNPACK #
# UNPACK #
# UNPACK # | # LANGUAGE TemplateHaskell #
module Java.ClassFormat.Raw where
import Data.Int
import Data.Word
import qualified Data.Text as T
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
import qualified Data.Vector as V
import qualified Data.Vector.Unboxed as U
import qualified Data.IntMap as IM
import Control.Lens.TH
import Data.Eliminator.TH
import Java.Bytecode.Raw
}
| Synthetic
| Deprecated
data MethodAttribute
| Exceptions
| Deprecated
data ClassAttribute
= InnerClasses
| EnclosingMethod
| Synthetic
| SourceFile
| SourceDebugExtension
| Deprecated
data CodeAttribute
= LineNumberTable
| LocalVariableTable
| LocalVariableTypeTable
data FieldAttribute
| Synthetic
| Deprecated
data MethodAttribute
| Exceptions
| Deprecated
data ClassAttribute
= InnerClasses
| EnclosingMethod
| Synthetic
| SourceFile
| SourceDebugExtension
| Deprecated
data CodeAttribute
= LineNumberTable
| LocalVariableTable
| LocalVariableTypeTable
-}
data ReferenceKind
= Ref_getField
| Ref_getStatic
| Ref_putField
| Ref_putStatic
| Ref_invokeVirtual
| Ref_invokeStatic
| Ref_invokeSpecial
| Ref_newInvokeSpecial
| Ref_invokeInterface
deriving (Eq, Show)
data Constant
Use Text , but I 'm too lazy to figure out their modified UTF-8 right now
| Integer !Word32
| Float !Float
| Long !Word64
| Double !Double
| ClassName !Con2
| String !Con2
| FieldRef !Con2 !Con2
| MethodRef !Con2 !Con2
| InterfaceMethodRef !Con2 !Con2
| NameAndType !Con2 !Con2
| MethodHandle !ReferenceKind !Con2
| MethodType !Con2
| InvokeDynamic !Word16 !Con2
deriving (Eq, Show)
data Exception = Exception { startEx :: !Word16, endEx :: !Word16, handler :: !Word16, catchTypeIndex :: !Con2 } deriving (Eq, Show)
data CodeAttribute = CodeAttribute { maxStack :: !Word16, maxLocals :: !Word16, code :: !BL.ByteString, exceptionTable :: !(V.Vector Exception), codeAttributes :: !(V.Vector Attribute) } deriving (Eq, Show)
data InnerClass = InnerClass { innerClassInfo :: !Con2, outerClassInfo :: !Con2, innerClassName :: !Con2, innerClassAccessFlags :: !Word16 } deriving (Eq, Show)
data LocalVariable = LocalVariable { startPc :: !Word16, length :: !Word16, localVariableName :: !Con2, infoIndex :: !Con2, index :: !Word16 } deriving (Eq, Show)
data Annotation = Annotation { typeIndex :: !Con2, elementValuePairs :: !(V.Vector (Con2, Value)) } deriving (Eq, Show)
With better picklers , I could just factor out the into ConstVal ...
data Value
= ConstBoolVal !Con2
| ConstCharVal !Con2
| ConstFloatVal !Con2
| ConstDoubleVal !Con2
| ConstByteVal !Con2
| ConstShortVal !Con2
| ConstIntVal !Con2
| ConstLongVal !Con2
| ConstStringVal !Con2
| EnumVal !Con2 !Con2
| ClassVal !Con2
| AnnotationVal !Annotation
| ArrayVal !(V.Vector Value)
deriving (Eq, Show)
data Attribute
= ConstantValue Con2
| Code !CodeAttribute
| StackMapTable
| Exceptions !(V.Vector Con2)
| InnerClasses !(V.Vector InnerClass)
| EnclosingMethod !Con2 !Con2
| Signature !Con2
| SourceFile !Con2
| SourceDebugExtension !BL.ByteString
| LineNumberTable !(U.Vector (Word16, Word16))
| LocalVariableTable !(V.Vector LocalVariable)
| LocalVariableTypeTable !(V.Vector LocalVariable)
| RuntimeVisibleAnnotations !(V.Vector Annotation)
| RuntimeInvisibleAnnotations !(V.Vector Annotation)
| RuntimeVisibleParameterAnnotations !(V.Vector (V.Vector Annotation))
| RuntimeInvisibleParameterAnnotations !(V.Vector (V.Vector Annotation))
| AnnotationDefault !Value
| BootstrapMethods
| Custom !BL.ByteString
deriving (Eq, Show)
data Entity = Entity { entityAccessFlags :: !Word16, entityName :: !Con2, entityDescriptor :: !Con2, entityAttributes :: !(V.Vector Attribute) } deriving (Eq, Show)
data Class = Class
, constantPool :: !(IM.IntMap Constant)
, interfaces :: !(U.Vector Con2)
, fields :: !(V.Vector Entity)
, methods :: !(V.Vector Entity)
, attributes :: !(V.Vector Attribute)
} deriving (Eq, Show)
mkElim ''Class
mkElim ''Entity
mkElim ''Attribute
mkElim ''Value
mkElim ''Annotation
mkElim ''LocalVariable
mkElim ''InnerClass
mkElim ''CodeAttribute
mkElim ''Exception
mkElim ''Constant
mkElim ''ReferenceKind
makePrisms ''Constant |
b1fca81351f861046ded6a56584a83e7c9d80dc3d44debddce1bec0a50cbccdc | cloojure/tupelo | set.cljc | Copyright ( c ) . All rights reserved .
The use and distribution terms for this software are covered by the Eclipse Public License 1.0
; (-1.0.php) which can be found in the file epl-v10.html at
; the root of this distribution. By using this software in any fashion, you are agreeing to be
; bound by the terms of this license. You must not remove this notice, or any other, from this
; software.
(ns tupelo.set
"Tupelo - Making Clojure even sweeter"
(:refer-clojure :exclude [remove])
(:require
[clojure.set]
[schema.core :as s]
))
# todo wrap these functions and throw if non - set argument found !
;-----------------------------------------------------------------------------
for convenience of requiring only 1 ns
(def difference clojure.set/difference)
(def index clojure.set/index)
(def intersection clojure.set/intersection)
(def join clojure.set/join)
(def map-invert clojure.set/map-invert)
(def project clojure.set/project)
(def rename clojure.set/rename)
(def rename-keys clojure.set/rename-keys)
(def select clojure.set/select)
(def subset? clojure.set/subset?)
(def superset? clojure.set/superset?)
;-----------------------------------------------------------------------------
(defn union [& args]
(assert (every? set? args))
(apply clojure.set/union args))
(s/defn add :- #{s/Any}
"Adds a value to a set, creating the set if necessary."
[tgt-set :- (s/maybe #{s/Any})
& values :- [s/Any]]
(let [result (or tgt-set #{})]
(apply clojure.core/conj result values)))
(s/defn remove :- #{s/Any}
"Removes a values from a set iff present, creating the set if necessary."
[tgt-set :- (s/maybe #{s/Any})
& values :- [s/Any]]
(let [result (or tgt-set #{})]
(apply clojure.core/disj result values))) ; disj from empty set is a noop
| null | https://raw.githubusercontent.com/cloojure/tupelo/e7b5d216ba12d775a968baf445cebde8be9faa8f/src/cljc/tupelo/set.cljc | clojure | (-1.0.php) which can be found in the file epl-v10.html at
the root of this distribution. By using this software in any fashion, you are agreeing to be
bound by the terms of this license. You must not remove this notice, or any other, from this
software.
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
disj from empty set is a noop | Copyright ( c ) . All rights reserved .
The use and distribution terms for this software are covered by the Eclipse Public License 1.0
(ns tupelo.set
"Tupelo - Making Clojure even sweeter"
(:refer-clojure :exclude [remove])
(:require
[clojure.set]
[schema.core :as s]
))
# todo wrap these functions and throw if non - set argument found !
for convenience of requiring only 1 ns
(def difference clojure.set/difference)
(def index clojure.set/index)
(def intersection clojure.set/intersection)
(def join clojure.set/join)
(def map-invert clojure.set/map-invert)
(def project clojure.set/project)
(def rename clojure.set/rename)
(def rename-keys clojure.set/rename-keys)
(def select clojure.set/select)
(def subset? clojure.set/subset?)
(def superset? clojure.set/superset?)
(defn union [& args]
(assert (every? set? args))
(apply clojure.set/union args))
(s/defn add :- #{s/Any}
"Adds a value to a set, creating the set if necessary."
[tgt-set :- (s/maybe #{s/Any})
& values :- [s/Any]]
(let [result (or tgt-set #{})]
(apply clojure.core/conj result values)))
(s/defn remove :- #{s/Any}
"Removes a values from a set iff present, creating the set if necessary."
[tgt-set :- (s/maybe #{s/Any})
& values :- [s/Any]]
(let [result (or tgt-set #{})]
|
78f51d44df77a2be035d945131e33d565e68141dafe849c41360096ed061a5a4 | metametadata/carry | actions.cljs | (ns app.actions
(:require [datascript.core :as d]
[cljs.core.match :refer-macros [match]]))
(defn -update-db
[model tx-data]
(update model :db d/db-with tx-data))
(defn on-action
[model action]
(println " action" action)
(match action
[:receive-products tx-data]
(assoc model :db (d/db-with (-> model :db :schema d/empty-db)
tx-data))
[:add-to-cart id]
(let [inventory (:product/inventory (d/entity (:db model) id))
quantity (d/q '[:find ?q .
:in $ ?id
:where
[?e :order-line/product ?id]
[?e :order-line/quantity ?q]]
(:db model)
id)]
(assert (pos? inventory))
(-update-db model [{:db/id id :product/inventory (dec inventory)}
{:order-line/product id
:order-line/quantity ((fnil inc 0) quantity)}]))
:checkout-request
(assoc model :checking-out? true)
:checkout-success
(-> model
(assoc :checking-out? false)
(-update-db (map #(-> [:db.fn/retractEntity %])
(d/q '[:find [?e ...]
:where [?e :order-line/product]]
(:db model))))))) | null | https://raw.githubusercontent.com/metametadata/carry/fa5c7cd0d8f1b71edca70330acc97c6245638efb/examples/shopping-cart/src/app/actions.cljs | clojure | (ns app.actions
(:require [datascript.core :as d]
[cljs.core.match :refer-macros [match]]))
(defn -update-db
[model tx-data]
(update model :db d/db-with tx-data))
(defn on-action
[model action]
(println " action" action)
(match action
[:receive-products tx-data]
(assoc model :db (d/db-with (-> model :db :schema d/empty-db)
tx-data))
[:add-to-cart id]
(let [inventory (:product/inventory (d/entity (:db model) id))
quantity (d/q '[:find ?q .
:in $ ?id
:where
[?e :order-line/product ?id]
[?e :order-line/quantity ?q]]
(:db model)
id)]
(assert (pos? inventory))
(-update-db model [{:db/id id :product/inventory (dec inventory)}
{:order-line/product id
:order-line/quantity ((fnil inc 0) quantity)}]))
:checkout-request
(assoc model :checking-out? true)
:checkout-success
(-> model
(assoc :checking-out? false)
(-update-db (map #(-> [:db.fn/retractEntity %])
(d/q '[:find [?e ...]
:where [?e :order-line/product]]
(:db model))))))) |
|
cfac5bb574fb7d440c372439198b10a2265cd8db290d5227918520031d917d9b | ralsei/sawzall | info.rkt | #lang info
(define collection "sawzall-test")
(define test-omit-paths '("./info.rkt" "./test-data.rkt"))
(define test-responsibles '((all )))
(define pkg-desc "Tests for Sawzall")
(define version "1.0")
(define deps '("base"
"data-frame"
"rackunit-lib"
"sawzall-lib"
"threading-lib"))
| null | https://raw.githubusercontent.com/ralsei/sawzall/fb414abf103d8dc8fca0e8e8ea061a81e207a358/sawzall-test/info.rkt | racket | #lang info
(define collection "sawzall-test")
(define test-omit-paths '("./info.rkt" "./test-data.rkt"))
(define test-responsibles '((all )))
(define pkg-desc "Tests for Sawzall")
(define version "1.0")
(define deps '("base"
"data-frame"
"rackunit-lib"
"sawzall-lib"
"threading-lib"))
|
|
8d40672cc66899a56d4d85b36c1a38500ff13a3265e632e13815e4c389913c2d | aloiscochard/codec-jvm | ConstPool.hs | module Codec.JVM.ConstPool where
import Control.Monad (join)
import Data.Binary.Put (Put, putByteString, putWord8, putWord16be)
import Data.Map.Strict (Map)
import Data.Text.Encoding (encodeUtf8)
import qualified Data.List as L
import qualified Data.Map.Strict as M
import qualified Data.Text as T
import Codec.JVM.Const
import Codec.JVM.Internal (putI16, putI32)
import Codec.JVM.Types
newtype CIx = CIx Int
newtype ConstPool = ConstPool (Map Const Int)
deriving Show
mkConstPool :: [Const] -> ConstPool
mkConstPool defs = ConstPool . snd $ L.foldl' f (0, M.empty) defs where
f acc c = L.foldl' f' acc $ unpack c where
f' (i, xs) y = if M.member y xs then (i, xs) else (i + 1, M.insert y i xs)
run :: ConstPool -> [Const]
run (ConstPool xs) = fmap fst $ L.sortOn snd $ M.toList xs
size :: ConstPool -> Int
size (ConstPool xs) = M.size xs
index :: Const -> ConstPool -> Maybe CIx
index def (ConstPool xs) = CIx . (+) 1 <$> M.lookup def xs
ix :: CIx -> Int
ix (CIx x) = x
unsafeIndex :: Const -> ConstPool -> CIx
unsafeIndex def cp = maybe (error $ join ["Constant '", show def, "'not found."]) id $ index def cp
unpack :: Const -> [Const]
unpack (CClass cn) = unpackClassName cn
unpack c@(CValue (CString str)) = [c, CUTF8 str]
unpack (CFieldRef ref) = unpackFieldRef ref
unpack (CMethodRef ref) = unpackMethodRef ref
unpack (CNameAndType nd) = unpackNameAndType nd
unpack c = [c]
unpackClassName :: IClassName -> [Const]
unpackClassName cn@(IClassName str) = [CClass cn, CUTF8 str]
unpackFieldDesc :: UName -> FieldDesc -> [Const]
unpackFieldDesc n (FieldDesc t) = unpackNameAndType (NameAndDesc n $ Desc t)
unpackFieldRef :: FieldRef -> [Const]
unpackFieldRef ref@(FieldRef cn n ft) =
CFieldRef ref:unpackClassName cn ++ unpackFieldDesc n (mkFieldDesc ft)
unpackMethodRef :: MethodRef -> [Const]
unpackMethodRef ref@(MethodRef cn n fts rt) =
CMethodRef ref:unpackClassName cn ++ unpackNameAndType (NameAndDesc n $ Desc (mkMethodDesc' fts rt))
unpackNameAndType :: NameAndDesc -> [Const]
unpackNameAndType nd@(NameAndDesc (UName str0) (Desc str1)) = [CNameAndType nd, CUTF8 str0, CUTF8 str1]
putIx :: ConstPool -> Const -> Put
putIx cp c = putWord16be . fromIntegral . ix $ unsafeIndex c cp
putConstPool :: ConstPool -> Put
putConstPool cp = mapM_ putConst $ run cp where
putConst c = do
putWord8 . constTag $ c
case c of
(CUTF8 str) -> do
putI16 (T.length str)
putByteString $ encodeUtf8 str
(CValue (CInteger i)) ->
putI32 i
(CValue (CString str)) ->
putIx' $ CUTF8 str
(CClass (IClassName str)) ->
putIx' $ CUTF8 str
(CFieldRef (FieldRef cn n ft)) -> do
putRef cn n $ mkFieldDesc' ft
(CMethodRef (MethodRef cn n fts rt)) ->
putRef cn n $ mkMethodDesc' fts rt
(CNameAndType (NameAndDesc (UName n) (Desc d))) -> do
putIx' $ CUTF8 n
putIx' $ CUTF8 d
where
putRef cn n d = do
putIx' $ CClass cn
putIx' . CNameAndType $ NameAndDesc n (Desc d)
putIx' = putIx cp
| null | https://raw.githubusercontent.com/aloiscochard/codec-jvm/2e5b73a3eb35620ae49216c8c12f0bec82bcbe26/src/Codec/JVM/ConstPool.hs | haskell | module Codec.JVM.ConstPool where
import Control.Monad (join)
import Data.Binary.Put (Put, putByteString, putWord8, putWord16be)
import Data.Map.Strict (Map)
import Data.Text.Encoding (encodeUtf8)
import qualified Data.List as L
import qualified Data.Map.Strict as M
import qualified Data.Text as T
import Codec.JVM.Const
import Codec.JVM.Internal (putI16, putI32)
import Codec.JVM.Types
newtype CIx = CIx Int
newtype ConstPool = ConstPool (Map Const Int)
deriving Show
mkConstPool :: [Const] -> ConstPool
mkConstPool defs = ConstPool . snd $ L.foldl' f (0, M.empty) defs where
f acc c = L.foldl' f' acc $ unpack c where
f' (i, xs) y = if M.member y xs then (i, xs) else (i + 1, M.insert y i xs)
run :: ConstPool -> [Const]
run (ConstPool xs) = fmap fst $ L.sortOn snd $ M.toList xs
size :: ConstPool -> Int
size (ConstPool xs) = M.size xs
index :: Const -> ConstPool -> Maybe CIx
index def (ConstPool xs) = CIx . (+) 1 <$> M.lookup def xs
ix :: CIx -> Int
ix (CIx x) = x
unsafeIndex :: Const -> ConstPool -> CIx
unsafeIndex def cp = maybe (error $ join ["Constant '", show def, "'not found."]) id $ index def cp
unpack :: Const -> [Const]
unpack (CClass cn) = unpackClassName cn
unpack c@(CValue (CString str)) = [c, CUTF8 str]
unpack (CFieldRef ref) = unpackFieldRef ref
unpack (CMethodRef ref) = unpackMethodRef ref
unpack (CNameAndType nd) = unpackNameAndType nd
unpack c = [c]
unpackClassName :: IClassName -> [Const]
unpackClassName cn@(IClassName str) = [CClass cn, CUTF8 str]
unpackFieldDesc :: UName -> FieldDesc -> [Const]
unpackFieldDesc n (FieldDesc t) = unpackNameAndType (NameAndDesc n $ Desc t)
unpackFieldRef :: FieldRef -> [Const]
unpackFieldRef ref@(FieldRef cn n ft) =
CFieldRef ref:unpackClassName cn ++ unpackFieldDesc n (mkFieldDesc ft)
unpackMethodRef :: MethodRef -> [Const]
unpackMethodRef ref@(MethodRef cn n fts rt) =
CMethodRef ref:unpackClassName cn ++ unpackNameAndType (NameAndDesc n $ Desc (mkMethodDesc' fts rt))
unpackNameAndType :: NameAndDesc -> [Const]
unpackNameAndType nd@(NameAndDesc (UName str0) (Desc str1)) = [CNameAndType nd, CUTF8 str0, CUTF8 str1]
putIx :: ConstPool -> Const -> Put
putIx cp c = putWord16be . fromIntegral . ix $ unsafeIndex c cp
putConstPool :: ConstPool -> Put
putConstPool cp = mapM_ putConst $ run cp where
putConst c = do
putWord8 . constTag $ c
case c of
(CUTF8 str) -> do
putI16 (T.length str)
putByteString $ encodeUtf8 str
(CValue (CInteger i)) ->
putI32 i
(CValue (CString str)) ->
putIx' $ CUTF8 str
(CClass (IClassName str)) ->
putIx' $ CUTF8 str
(CFieldRef (FieldRef cn n ft)) -> do
putRef cn n $ mkFieldDesc' ft
(CMethodRef (MethodRef cn n fts rt)) ->
putRef cn n $ mkMethodDesc' fts rt
(CNameAndType (NameAndDesc (UName n) (Desc d))) -> do
putIx' $ CUTF8 n
putIx' $ CUTF8 d
where
putRef cn n d = do
putIx' $ CClass cn
putIx' . CNameAndType $ NameAndDesc n (Desc d)
putIx' = putIx cp
|
|
77009edbb59f5429318c8f3ec0ee3ced52beeba264afc825bf14a264517fc691 | lemmih/lhc | Reverse.hs | module Main where
import LHC.Prim
import LHC.Prelude
main :: IO ()
main = putStrLn (showInt (last (reverse longList)))
longList :: [Int]
longList = replicate 2000 0
entrypoint :: ()
entrypoint = unsafePerformIO main
| null | https://raw.githubusercontent.com/lemmih/lhc/53bfa57b9b7275b7737dcf9dd620533d0261be66/examples/Reverse.hs | haskell | module Main where
import LHC.Prim
import LHC.Prelude
main :: IO ()
main = putStrLn (showInt (last (reverse longList)))
longList :: [Int]
longList = replicate 2000 0
entrypoint :: ()
entrypoint = unsafePerformIO main
|