text
stringlengths
12
786k
(* Trace-instrumented ledger application: runs [apply_transaction] inside an
   O1trace "apply_transaction" span so the time spent applying [txn] to
   ledger [ledger] is attributed correctly when tracing is enabled. *)
let apply_transaction' ~constraint_constants ~txn_state_view ledger txn =
  O1trace.sync_thread "apply_transaction" (fun () ->
      apply_transaction ~constraint_constants ~txn_state_view ledger txn)
(* Apply [txn] to ledger [l] via the traced [apply_transaction'] and project
   the applied transaction down to its user-command status.

   Fix: the original text applied the [Result.map ...] result to a spurious
   trailing [txn] argument ([Result.map ~f:... (...) txn]), which
   over-applies the function and cannot type-check.  The stray argument is
   removed. *)
let apply_transaction ~constraint_constants ~txn_state_view l txn =
  Result.map
    ~f:Transaction_applied.user_command_status
    (apply_transaction' l ~constraint_constants ~txn_state_view txn)
module Parties_segment_witness = struct
  open Mina_base
  open Mina_ledger
  open Currency

  (* Witness for proving one segment of a parties (zkApp) command:
     - [global_ledger]: sparse ledger the segment starts from;
     - [local_state_init]: initial local state of the parties logic
       (current stack frame, call stack, fee-excess, etc.);
     - [start_parties]: commands still to be started in this segment;
     - [state_body], [init_stack]: protocol-state body and pending-coinbase
       init stack the proof is checked against.

     Fixes vs. original text: a stray [t] before
     [Signed_poly.Stable.V1.t] and a duplicated [to_yojson] token after the
     deriving attribute were syntax errors and are removed. *)
  [%%versioned
  module Stable = struct
    module V1 = struct
      type t =
        { global_ledger : Sparse_ledger.Stable.V2.t
        ; local_state_init :
            ( ( Token_id.Stable.V1.t
              , unit Parties.Call_forest.With_hashes.Stable.V1.t )
              Stack_frame.Stable.V1.t
            , ( ( ( Token_id.Stable.V1.t
                  , unit Parties.Call_forest.With_hashes.Stable.V1.t )
                  Stack_frame.Stable.V1.t
                , Stack_frame.Digest.Stable.V1.t )
                With_hash.Stable.V1.t
              , Call_stack_digest.Stable.V1.t )
              With_stack_hash.Stable.V1.t
              list
            , Token_id.Stable.V1.t
            , (Amount.Stable.V1.t, Sgn.Stable.V1.t) Signed_poly.Stable.V1.t
            , Sparse_ledger.Stable.V2.t
            , bool
            , Kimchi_backend.Pasta.Basic.Fp.Stable.V1.t
            , Transaction_status.Failure.Collection.Stable.V1.t )
            Mina_transaction_logic.Parties_logic.Local_state.Stable.V1.t
        ; start_parties :
            ( Parties.Stable.V1.t
            , Kimchi_backend.Pasta.Basic.Fp.Stable.V1.t )
            Mina_transaction_logic.Parties_logic.Start_data.Stable.V1.t
            list
        ; state_body : Mina_state.Protocol_state.Body.Value.Stable.V2.t
        ; init_stack : Mina_base.Pending_coinbase.Stack_versioned.Stable.V1.t
        }
      [@@deriving sexp, to_yojson]

      let to_latest = Fn.id
    end
  end]
end

(* Witness for a single non-parties transaction: the transaction itself, the
   sparse ledger it applies against, the protocol-state body, the
   pending-coinbase init stack, and the expected application status. *)
[%%versioned
module Stable = struct
  module V2 = struct
    type t =
      { transaction : Mina_transaction.Transaction.Stable.V2.t
      ; ledger : Mina_ledger.Sparse_ledger.Stable.V2.t
      ; protocol_state_body : Mina_state.Protocol_state.Body.Value.Stable.V2.t
      ; init_stack : Mina_base.Pending_coinbase.Stack_versioned.Stable.V1.t
      ; status : Mina_base.Transaction_status.Stable.V2.t
      }
    [@@deriving sexp, to_yojson]

    let to_latest = Fn.id
  end
end]
(* Build, apply and check a transfer of [amount] from [src] to [dst] on the
   incremental block [b]:
   - the operation is given a fixed 3000-unit gas limit and optional [fee];
   - [src] must be debited by [fee + amount] (plus the origination burn when
     [with_burn] is set), [dst] credited by [amount];
   - [expect_failure] is forwarded to [Incremental.add_operation] for
     negative tests.
   Returns the updated block together with the applied operation. *)
let transfer_and_check_balances ?(with_burn = false) ~loc b ?(fee = Tez.zero)
    ?expect_failure src dst amount =
  let open Lwt_tzresult_syntax in
  let*? amount_fee = fee +? amount in
  let* bal_src = Context.Contract.balance (I b) src in
  let* bal_dst = Context.Contract.balance (I b) dst in
  let* op =
    Op.transaction
      ~gas_limit:(Alpha_context.Gas.Arith.integral_of_int_exn 3000)
      (I b) ~fee src dst amount
  in
  let* b = Incremental.add_operation ?expect_failure b op in
  let* {parametric = {origination_size; cost_per_byte; _}; _} =
    Context.get_constants (I b)
  in
  (* Burn charged when the destination account has to be originated. *)
  let*? origination_burn = cost_per_byte *? Int64.of_int origination_size in
  let*? amount_fee_burn = amount_fee +? origination_burn in
  let amount_fee_maybe_burn = if with_burn then amount_fee_burn else amount_fee in
  let* () =
    Assert.balance_was_debited ~loc (I b) src bal_src amount_fee_maybe_burn
  in
  let+ () = Assert.balance_was_credited ~loc (I b) dst bal_dst amount in
  (b, op)
(* Perform [n] identical transfers from [source] to [dest], threading the
   incremental block through each checked transfer and discarding the
   individual operations. *)
let n_transactions n b ?fee source dest amount =
  List.fold_left_es
    (fun b _ ->
      transfer_and_check_balances ~loc:__LOC__ b ?fee source dest amount
      >|=? fun (b, _) -> b)
    b
    (1 -- n)
(* HTTP body transfer encoding/decoding over an abstract [IO] monad.
   Provides readers/writers for the three transfer modes: [Chunked]
   (RFC-style chunked coding), [Fixed] (Content-Length bounded) and
   [Unknown] (read until EOF). *)
module Make (IO : S.IO) = struct
  open IO

  type reader = unit -> Transfer.chunk IO.t
  type writer = string -> unit IO.t

  module Chunked = struct
    (* Bytes of the current chunk still unread after consuming [chunk]. *)
    let remaining_length chunk remaining =
      let read_len = Int64.of_int (String.length chunk) in
      Int64.sub remaining read_len

    (* Read at most 32 KiB of the current chunk in one go. *)
    let read_chunk ic size =
      let max_read_len = Int64.of_int 0x8000 in
      let len = min size max_read_len in
      read ic (Int64.to_int len)

    (* Parse a chunk-size line such as "1ab;ext=v", ignoring extensions.
       Returns [None] when the hex size is malformed. *)
    let parse_chunksize chunk_size_hex =
      let hex =
        try String.sub chunk_size_hex 0 (String.index chunk_size_hex ';')
        with _ -> chunk_size_hex
      in
      try Some (Int64.of_string ("0x" ^ hex)) with _ -> None

    (* Skip trailer lines after the terminating zero-sized chunk. *)
    let rec junk_until_empty_line ic =
      read_line ic >>= function
      | None | Some "" -> return Done
      | Some _trailer -> junk_until_empty_line ic

    let read ~remaining ic () =
      let read_chunk_fragment () =
        read_chunk ic !remaining >>= fun chunk ->
        remaining := remaining_length chunk !remaining;
        (* At the end of a chunk, consume the trailing CRLF line. *)
        (if !remaining = 0L then read_line ic else return None) >>= fun _ ->
        return chunk
      in
      if !remaining = 0L then
        (* At a chunk boundary: read the next chunk-size line. *)
        read_line ic >>= function
        | None -> return Done
        | Some chunk_size_hex -> (
            match parse_chunksize chunk_size_hex with
            | None -> return Done
            | Some 0L -> junk_until_empty_line ic
            | Some count -> (
                remaining := count;
                read_chunk_fragment () >>= function
                | "" -> return Done
                | buf -> return (Chunk buf)))
      else
        read_chunk_fragment () >>= function
        | "" -> return Done
        | buf -> return (Chunk buf)

    (* Emit one chunk: "<size-hex>\r\n<payload>\r\n"; empty buffers are
       suppressed since a zero-length chunk would terminate the body. *)
    let write oc buf =
      let len = String.length buf in
      if len <> 0 then
        write oc (Printf.sprintf "%x\r\n" len) >>= fun () ->
        write oc buf >>= fun () -> write oc "\r\n"
      else return ()
  end

  module Fixed = struct
    (* Read up to 32 KiB at a time until [remaining] bytes are consumed;
       the last piece is tagged [Final_chunk]. *)
    let read ~remaining ic () =
      match !remaining with
      | 0L -> return Done
      | len -> (
          let max_read_len = Int64.of_int 0x8000 in
          let read_len = Int64.to_int (min len max_read_len) in
          read ic read_len >>= function
          | "" -> return Done
          | buf ->
              remaining := Int64.sub !remaining (Int64.of_int (String.length buf));
              return
                (match !remaining with
                | 0L -> Final_chunk buf
                | _ -> Chunk buf))

    let write = write
  end

  module Unknown = struct
    (* No length information: read 4 KiB slices until EOF. *)
    let read ic () =
      read ic 4096 >>= fun buf ->
      if buf = "" then return Done else return (Chunk buf)

    let write = write
  end

  let write_and_flush fn oc buf = fn oc buf >>= fun () -> IO.flush oc

  let make_reader = function
    | Chunked -> Chunked.read ~remaining:(ref 0L)
    | Fixed len -> Fixed.read ~remaining:(ref len)
    | Unknown -> Unknown.read

  let write_ignore_blank writer io s =
    if String.length s = 0 then return () else writer io s

  let make_writer ?(flush = false) mode =
    (match flush with
    | false -> (
        match mode with
        | Chunked -> Chunked.write
        | Fixed _ -> Fixed.write
        | Unknown -> Unknown.write)
    | true -> (
        match mode with
        | Chunked -> write_and_flush Chunked.write
        | Fixed _ -> write_and_flush Fixed.write
        | Unknown -> write_and_flush Unknown.write))
    |> write_ignore_blank

  let read reader = reader ()
  let write writer buf = writer buf
end
(* Interface driving [Make]: a stateful AST transformer with per-node hooks.
   The [transform_*_children] hooks decide whether recursion descends into a
   node's children before the node hook itself runs. *)
module type Transformer = sig
  type t

  (* Whether to transform the children of this expression. *)
  val transform_expression_children : t -> Expression.t -> bool

  (* Rewrite a single expression node. *)
  val expression : t -> Expression.t -> Expression.t

  (* Returns the state to use for the children and whether to descend. *)
  val transform_children : t -> Statement.t -> t * bool

  (* Rewrite a statement into zero or more replacement statements. *)
  val statement : t -> Statement.t -> t * Statement.t list
end
(* Reduced interface for [MakeStatementTransformer]: only statements are
   rewritten; expressions are left untouched. *)
module type StatementTransformer = sig
  type t

  (* Rewrite a statement into zero or more replacement statements. *)
  val statement : t -> Statement.t -> t * Statement.t list
end
(* Identity transformer: descends everywhere and changes nothing.  Intended
   to be [include]d by concrete transformers that override a subset of hooks.

   Fix: the original text fused the signature and implementation — the
   [end = struct] separating them was missing, leaving [let] bindings inside
   a [sig], which is a syntax error. *)
module Identity : sig
  val transform_expression_children : 't -> Expression.t -> bool

  val expression : 't -> Expression.t -> Expression.t

  val transform_children : 't -> Statement.t -> 't * bool

  val statement : 't -> Statement.t -> 't * Statement.t list
end = struct
  let transform_expression_children _ _ = true

  let expression _ expression = expression

  let transform_children state _ = state, true

  let statement state statement = state, [statement]
end
(* Generic deep AST transformer.  [transform] walks a [Source.t] bottom-up,
   threading [Transformer.t] state through a mutable cell.  Expression
   transforms restore the pre-descent state afterwards; statement transforms
   keep the state returned by [Transformer.statement]. *)
module Make (Transformer : Transformer) = struct
  type result = {
    state : Transformer.t;
    source : Source.t;
  }

  let source { source; _ } = source

  let transform state source =
    let state = ref state in
    (* Order-preserving map built with a tail-recursive fold. *)
    let transform_list list ~f =
      let accumulate list element = f element :: list in
      List.fold_left list ~f:accumulate ~init:[] |> List.rev
    in
    let transform_argument { Call.Argument.name; value } ~transform_expression =
      { Call.Argument.name; value = transform_expression value }
    in
    let transform_parameter
        ({ Node.value = { Parameter.name; value; annotation }; _ } as node)
        ~transform_expression
      =
      {
        node with
        Node.value =
          {
            Parameter.name;
            value = value >>| transform_expression;
            annotation = annotation >>| transform_expression;
          };
      }
    in
    let transform_generator
        { Comprehension.Generator.target; iterator; conditions; async }
        ~transform_expression
      =
      {
        Comprehension.Generator.target = transform_expression target;
        iterator = transform_expression iterator;
        conditions = transform_list conditions ~f:transform_expression;
        async;
      }
    in
    let transform_entry { Dictionary.Entry.key; value } ~transform_expression =
      {
        Dictionary.Entry.key = transform_expression key;
        value = transform_expression value;
      }
    in
    let transform_substring substring ~transform_expression =
      match substring with
      | Substring.Format expression ->
          Substring.Format (transform_expression expression)
      | Substring.Literal _ -> substring
    in
    let rec transform_expression expression =
      let transform_children value =
        match value with
        | Expression.Await expression ->
            Expression.Await (transform_expression expression)
        | BooleanOperator { BooleanOperator.left; operator; right } ->
            BooleanOperator
              {
                BooleanOperator.left = transform_expression left;
                operator;
                right = transform_expression right;
              }
        | Call { callee; arguments } ->
            Call
              {
                callee = transform_expression callee;
                arguments = transform_arguments arguments;
              }
        | ComparisonOperator { ComparisonOperator.left; operator; right } ->
            ComparisonOperator
              {
                ComparisonOperator.left = transform_expression left;
                operator;
                right = transform_expression right;
              }
        | Constant _ -> value
        | Dictionary { Dictionary.entries; keywords } ->
            Dictionary
              {
                Dictionary.entries =
                  transform_list entries ~f:(transform_entry ~transform_expression);
                keywords = transform_list keywords ~f:transform_expression;
              }
        | DictionaryComprehension { Comprehension.element; generators } ->
            DictionaryComprehension
              {
                Comprehension.element = transform_entry element ~transform_expression;
                generators =
                  transform_list generators ~f:(transform_generator ~transform_expression);
              }
        | Generator { Comprehension.element; generators } ->
            Generator
              {
                Comprehension.element = transform_expression element;
                generators =
                  transform_list generators ~f:(transform_generator ~transform_expression);
              }
        | FormatString substrings ->
            FormatString
              (transform_list substrings ~f:(transform_substring ~transform_expression))
        | Lambda { Lambda.parameters; body } ->
            Lambda
              {
                Lambda.parameters =
                  transform_list parameters ~f:(transform_parameter ~transform_expression);
                body = transform_expression body;
              }
        | List elements -> List (transform_list elements ~f:transform_expression)
        | ListComprehension { Comprehension.element; generators } ->
            ListComprehension
              {
                Comprehension.element = transform_expression element;
                generators =
                  transform_list generators ~f:(transform_generator ~transform_expression);
              }
        | Name (Name.Identifier _) -> value
        | Name (Name.Attribute ({ base; _ } as name)) ->
            Name (Name.Attribute { name with base = transform_expression base })
        | Set elements -> Set (transform_list elements ~f:transform_expression)
        | SetComprehension { Comprehension.element; generators } ->
            SetComprehension
              {
                Comprehension.element = transform_expression element;
                generators =
                  transform_list generators ~f:(transform_generator ~transform_expression);
              }
        | Starred starred ->
            let starred =
              match starred with
              | Starred.Once expression -> Starred.Once (transform_expression expression)
              | Starred.Twice expression -> Starred.Twice (transform_expression expression)
            in
            Starred starred
        | Ternary { Ternary.target; test; alternative } ->
            Ternary
              {
                Ternary.target = transform_expression target;
                test = transform_expression test;
                alternative = transform_expression alternative;
              }
        | Tuple elements -> Tuple (transform_list elements ~f:transform_expression)
        | UnaryOperator { UnaryOperator.operator; operand } ->
            UnaryOperator
              { UnaryOperator.operator; operand = transform_expression operand }
        | WalrusOperator { target; value } ->
            WalrusOperator
              { target = transform_expression target; value = transform_expression value }
        | Expression.Yield expression ->
            Expression.Yield (expression >>| transform_expression)
        | Expression.YieldFrom expression ->
            Expression.YieldFrom (expression |> transform_expression)
      in
      let initial_state = !state in
      let expression =
        if Transformer.transform_expression_children !state expression then
          { expression with Node.value = transform_children (Node.value expression) }
        else
          expression
      in
      let expression = Transformer.expression !state expression in
      (* Expression hooks must not leak state changes to siblings. *)
      state := initial_state;
      expression
    and transform_arguments arguments =
      let transform_argument { Call.Argument.name; value } =
        { Call.Argument.name; value = transform_expression value }
      in
      transform_list arguments ~f:transform_argument
    in
    let rec transform_statement statement =
      let transform_children value =
        match value with
        | Statement.Assign { Assign.target; annotation; value } ->
            Statement.Assign
              {
                Assign.target = transform_expression target;
                annotation = annotation >>| transform_expression;
                value = transform_expression value;
              }
        | Assert { Assert.test; message; origin } ->
            Assert
              {
                Assert.test = transform_expression test;
                message = message >>| transform_expression;
                origin;
              }
        | Break -> value
        | Class { Class.name; base_arguments; body; decorators; top_level_unbound_names } ->
            Class
              {
                Class.name;
                base_arguments =
                  transform_list base_arguments ~f:(transform_argument ~transform_expression);
                body = transform_list body ~f:transform_statement |> List.concat;
                decorators = transform_list decorators ~f:transform_expression;
                top_level_unbound_names;
              }
        | Continue -> value
        | Define { signature; captures; unbound_names; body } ->
            let transform_signature
                {
                  Define.Signature.name;
                  parameters;
                  decorators;
                  return_annotation;
                  async;
                  parent;
                  nesting_define;
                  generator;
                }
              =
              {
                Define.Signature.name;
                parameters =
                  transform_list parameters ~f:(transform_parameter ~transform_expression);
                decorators = transform_list decorators ~f:transform_expression;
                return_annotation = return_annotation >>| transform_expression;
                async;
                parent;
                nesting_define;
                generator;
              }
            in
            let transform_capture { Define.Capture.name; kind } =
              let transform_kind = function
                | Define.Capture.Kind.Annotation annotation ->
                    let annotation = Option.map annotation ~f:transform_expression in
                    Define.Capture.Kind.Annotation annotation
                | Define.Capture.Kind.DefineSignature value ->
                    let value = transform_signature value in
                    Define.Capture.Kind.DefineSignature value
                | Define.Capture.Kind.(Self _ | ClassSelf _) as kind -> kind
              in
              { Define.Capture.name; kind = transform_kind kind }
            in
            Define
              {
                signature = transform_signature signature;
                captures = List.map captures ~f:transform_capture;
                unbound_names;
                body = transform_list body ~f:transform_statement |> List.concat;
              }
        | Delete expressions -> Delete (List.map expressions ~f:transform_expression)
        | Expression expression -> Expression (transform_expression expression)
        | For { For.target; iterator; body; orelse; async } ->
            For
              {
                For.target = transform_expression target;
                iterator = transform_expression iterator;
                body = transform_list body ~f:transform_statement |> List.concat;
                orelse = transform_list orelse ~f:transform_statement |> List.concat;
                async;
              }
        | Global _ -> value
        | If { If.test; body; orelse } ->
            If
              {
                If.test = transform_expression test;
                body = transform_list body ~f:transform_statement |> List.concat;
                orelse = transform_list orelse ~f:transform_statement |> List.concat;
              }
        | Import _ -> value
        | Match { Match.subject; cases } ->
            let rec transform_pattern { Node.value; location } =
              let value =
                match value with
                | Match.Pattern.MatchAs { pattern; name } ->
                    Match.Pattern.MatchAs { pattern = pattern >>| transform_pattern; name }
                | MatchClass { class_name; patterns; keyword_attributes; keyword_patterns } ->
                    MatchClass
                      {
                        class_name;
                        patterns = transform_list patterns ~f:transform_pattern;
                        keyword_attributes;
                        keyword_patterns =
                          transform_list keyword_patterns ~f:transform_pattern;
                      }
                | MatchMapping { keys; patterns; rest } ->
                    MatchMapping
                      {
                        keys = transform_list keys ~f:transform_expression;
                        patterns = transform_list patterns ~f:transform_pattern;
                        rest;
                      }
                | MatchOr patterns -> MatchOr (transform_list patterns ~f:transform_pattern)
                | MatchSequence patterns ->
                    MatchSequence (transform_list patterns ~f:transform_pattern)
                | MatchSingleton constant -> (
                    (* Run the expression hook on the constant; if it stops
                       being a constant the pattern degrades to MatchValue. *)
                    let expression =
                      transform_expression
                        { Node.value = Expression.Constant constant; location }
                    in
                    match expression.value with
                    | Expression.Constant constant -> MatchSingleton constant
                    | _ -> MatchValue expression)
                | MatchStar maybe_identifier -> MatchStar maybe_identifier
                | MatchValue expression -> MatchValue expression
                | MatchWildcard -> MatchWildcard
              in
              { Node.value; location }
            in
            let transform_case { Match.Case.pattern; guard; body } =
              {
                Match.Case.pattern = transform_pattern pattern;
                guard = guard >>| transform_expression;
                body = transform_list body ~f:transform_statement |> List.concat;
              }
            in
            Match
              {
                Match.subject = transform_expression subject;
                cases = transform_list cases ~f:transform_case;
              }
        | Nonlocal _ -> value
        | Pass -> value
        | Raise { Raise.expression; from } ->
            Raise
              {
                Raise.expression = expression >>| transform_expression;
                from = from >>| transform_expression;
              }
        | Return ({ Return.expression; _ } as return) ->
            Return { return with Return.expression = expression >>| transform_expression }
        | Try { Try.body; handlers; orelse; finally } ->
            let transform_handler { Try.Handler.kind; name; body } =
              {
                Try.Handler.kind = kind >>| transform_expression;
                name;
                body = transform_list body ~f:transform_statement |> List.concat;
              }
            in
            let body = transform_list body ~f:transform_statement |> List.concat in
            let handlers = transform_list handlers ~f:transform_handler in
            let orelse = transform_list orelse ~f:transform_statement |> List.concat in
            let finally = transform_list finally ~f:transform_statement |> List.concat in
            Try { Try.body; handlers; orelse; finally }
        | With { With.items; body; async } ->
            let transform_item (item, alias) =
              transform_expression item, alias >>| transform_expression
            in
            With
              {
                With.items = transform_list items ~f:transform_item;
                body = transform_list body ~f:transform_statement |> List.concat;
                async;
              }
        | While { While.test; body; orelse } ->
            While
              {
                While.test = transform_expression test;
                body = transform_list body ~f:transform_statement |> List.concat;
                orelse = transform_list orelse ~f:transform_statement |> List.concat;
              }
      in
      let statement =
        let parent_state, should_transform_children =
          Transformer.transform_children !state statement
        in
        if should_transform_children then (
          state := parent_state;
          { statement with Node.value = transform_children (Node.value statement) })
        else
          statement
      in
      let new_state, statements = Transformer.statement !state statement in
      state := new_state;
      statements
    in
    let statements =
      transform_list source.Source.statements ~f:transform_statement |> List.concat
    in
    { state = !state; source = { source with Source.statements } }
end
(* Statement-only deep transformer: recursively rewrites statement bodies
   while leaving all expressions untouched.  State is threaded through a
   mutable cell in source order. *)
module MakeStatementTransformer (Transformer : StatementTransformer) = struct
  type result = {
    state : Transformer.t;
    source : Source.t;
  }

  let source { source; _ } = source

  let transform state source =
    let state = ref state in
    let open Statement in
    let rec transform_statement { Node.location; value } =
      let value =
        match value with
        (* Leaf statements: no nested statement bodies to rewrite. *)
        | Assign _
        | Assert _
        | Break
        | Continue
        | Delete _
        | Expression _
        | Global _
        | Import _
        | Pass
        | Raise _
        | Return _
        | Nonlocal _ ->
            value
        | Class ({ Class.body; _ } as value) ->
            Class { value with Class.body = List.concat_map ~f:transform_statement body }
        | Define ({ Define.body; _ } as value) ->
            Define { value with Define.body = List.concat_map ~f:transform_statement body }
        | With ({ With.body; _ } as value) ->
            With { value with With.body = List.concat_map ~f:transform_statement body }
        | For ({ For.body; orelse; _ } as value) ->
            let body = List.concat_map ~f:transform_statement body in
            let orelse = List.concat_map ~f:transform_statement orelse in
            For { value with For.body; orelse }
        | If ({ If.body; orelse; _ } as value) ->
            let body = List.concat_map ~f:transform_statement body in
            let orelse = List.concat_map ~f:transform_statement orelse in
            If { value with If.body; orelse }
        | Match ({ Match.cases; _ } as value) ->
            let transform_case ({ Match.Case.body; _ } as value) =
              { value with Match.Case.body = List.concat_map ~f:transform_statement body }
            in
            Match { value with Match.cases = List.map ~f:transform_case cases }
        | While ({ While.body; orelse; _ } as value) ->
            let body = List.concat_map ~f:transform_statement body in
            let orelse = List.concat_map ~f:transform_statement orelse in
            While { value with While.body; orelse }
        | Try { Try.body; handlers; orelse; finally } ->
            let transform_handler ({ Try.Handler.body; _ } as value) =
              { value with Try.Handler.body = List.concat_map ~f:transform_statement body }
            in
            let body = List.concat_map ~f:transform_statement body in
            let handlers = List.map ~f:transform_handler handlers in
            let orelse = List.concat_map ~f:transform_statement orelse in
            let finally = List.concat_map ~f:transform_statement finally in
            Try { Try.body; handlers; orelse; finally }
      in
      let new_state, statements = Transformer.statement !state { Node.location; value } in
      state := new_state;
      statements
    in
    let statements = List.concat_map ~f:transform_statement source.Source.statements in
    { state = !state; source = { source with Source.statements } }
end
(* Apply [transform] to every expression node nested inside [statement] by
   wrapping it in a one-statement source, running a stateless [Make]
   transformer over it, and unwrapping the single resulting statement. *)
let transform_expressions ~transform statement =
  let module TransformExpressions = Make (struct
    type t = unit

    let transform_expression_children _ _ = true

    let expression _ { Node.value; location } =
      { Node.value = transform value; location }

    let transform_children state _ = state, true

    let statement state statement = state, [statement]
  end)
  in
  TransformExpressions.transform
    ()
    (Source.create [Node.create_with_default_location statement])
  |> (fun { TransformExpressions.source; _ } -> source)
  |> Source.statements
  |> function
  | [{ Node.value = statement; _ }] -> statement
  | _ -> failwith "expected single statement"
(* Strip sanitization markers from every identifier and attribute name in
   [expression], leaving the expression otherwise untouched. *)
let sanitize_expression expression =
  let transform = function
    | Expression.Name (Name.Identifier identifier) ->
        Expression.Name (Name.Identifier (Identifier.sanitized identifier))
    | Name (Name.Attribute ({ attribute; _ } as attribute_expression)) ->
        Name
          (Name.Attribute
             { attribute_expression with attribute = Identifier.sanitized attribute })
    | expression -> expression
  in
  match transform_expressions ~transform (Statement.Expression expression) with
  | Statement.Expression expression -> expression
  | _ -> expression
(* Sanitize a statement: first sanitize all nested expressions, then sanitize
   define signatures (function name and parameter names).  Falls back to the
   input when the round-trip does not yield exactly one statement. *)
let sanitize_statement statement =
  let module SanitizeSignatures = MakeStatementTransformer (struct
    type t = unit

    let statement state = function
      | {
          Node.value =
            Statement.Define
              ({ Define.signature = { Define.Signature.name; parameters; _ } as signature
               ; _
               } as define);
          _;
        } as statement ->
          let transform_parameter
              ({ Node.value = { Parameter.name; _ } as parameter; _ } as parameter_node)
            =
            { parameter_node with
              value = { parameter with name = Identifier.sanitized name }
            }
          in
          ( state,
            [
              {
                statement with
                value =
                  Statement.Define
                    {
                      define with
                      signature =
                        {
                          signature with
                          name = Reference.sanitized name;
                          parameters = List.map parameters ~f:transform_parameter;
                        };
                    };
              };
            ] )
      | statement -> state, [statement]
  end)
  in
  let sanitized_statement =
    let sanitize_expression expression =
      Node.create_with_default_location expression |> sanitize_expression |> Node.value
    in
    let sanitize_signatures statement =
      SanitizeSignatures.transform
        ()
        (Source.create [Node.create_with_default_location statement])
    in
    transform_expressions ~transform:sanitize_expression statement
    |> sanitize_signatures
    |> SanitizeSignatures.source
    |> Source.statements
  in
  match sanitized_statement with
  | [{ Node.value = statement; _ }] -> statement
  | _ -> statement
(* Table-of-contents extraction and rendering for Omd markdown documents:
   collect headings, check their nesting, build a tree by level, and render
   it as nested Tyxml <ul>/<li> lists. *)
module Toc = struct
  open Tyxml

  type t = heading list
  and heading = H of int * string

  let to_string (H (size, text)) = "H" ^ string_of_int size ^ " " ^ text

  let get_int = function H (i, _) -> i

  (* Collect all plain-text headings of [doc], in document order. *)
  let toc doc =
    let open Omd in
    let rec loop acc = function
      | [] -> List.rev acc
      | (b : attributes block) :: bs -> (
          match b with
          | Heading (_attrs, s, il) -> (
              match il with
              | Text (_, heading) -> loop (H (s, heading) :: acc) bs
              | _ -> loop acc bs)
          | _ -> loop acc bs)
    in
    loop [] doc

  (* Give every plain-text heading an "id" attribute derived from its text so
     the TOC anchors can link to it. *)
  let transform doc =
    let open Omd in
    let f (b : attributes block) =
      match b with
      | Heading (attrs, s, il) -> (
          match il with
          | Text (_, heading) ->
              Heading (("id", Utils.title_to_dirname heading) :: attrs, s, il)
          | _ -> b)
      | _ -> b
    in
    List.map f doc

  type 'a tree = Br of 'a * 'a tree list

  (* Fold the flat heading list into a tree keyed on heading level, using a
     7-slot working array (levels 0-6); [tidy] folds deeper slots back into
     their parents. *)
  let to_tree lst =
    let arr = Array.init 7 (fun i -> Br (H (i, ""), [])) in
    let rec tidy arr until = function
      | n when n <= until -> ()
      | n ->
          let t = arr.(n) in
          let (Br (v, lst)) = arr.(n - 1) in
          arr.(n - 1) <- Br (v, lst @ [t]);
          tidy arr until (n - 1)
    in
    let rec aux last = function
      | [] ->
          tidy arr 0 last;
          arr.(0)
      | [a] ->
          let x = get_int a in
          arr.(x) <- Br (a, []);
          aux x []
      | a :: b :: xs ->
          let x = get_int a in
          let y = get_int b in
          let _t = arr.(x) in
          let (Br (p, lst)) = arr.(x - 1) in
          if x = y then (
            arr.(x - 1) <- Br (p, lst @ [Br (a, [])]);
            aux x (b :: xs))
          else if x > y then (
            arr.(x) <- Br (a, []);
            tidy arr (y - 1) x;
            aux x (b :: xs))
          else (
            arr.(x) <- Br (a, []);
            aux x (b :: xs))
    in
    aux 0 lst

  (* Render one heading as an anchor linking to its generated id; headings
     with empty text (array padding) render to nothing. *)
  let map_to_item h =
    let to_elt cl link txt =
      [%html "<a class=" cl " href=" ("#" ^ link) ">" [Html.txt txt] "</a>"]
    in
    match h with
    | H (i, txt) ->
        if String.equal txt "" then []
        else
          [ to_elt
              ["toc-link"; "toc-item-" ^ string_of_int i]
              (Utils.title_to_dirname txt)
              txt ]

  let rec preorder = function
    | Br (v, []) ->
        [%html "<ul class='toc'><li class='toc-li'>" (map_to_item v) "</li></ul>"]
    | Br (v, lst) ->
        [%html
          "<ul class='toc'><li>"
            (map_to_item v @ List.fold_left (fun acc v -> acc @ [preorder v]) [] lst)
            "</li></ul>"]

  let pp ppf t =
    List.iter (fun h -> Format.pp_print_string ppf (to_string h ^ "\n")) t

  (* Check that heading levels never skip (an increase must be by exactly 1).
     Fix: the original stopped scanning as soon as a level stayed equal or
     decreased, so violations later in the list were never detected; we now
     always recurse over the tail. *)
  let rec accessibility = function
    | x :: y :: ys ->
        let a = get_int x in
        let b = get_int y in
        if a < b && b - a <> 1 then
          raise (Failure "Failed because of inproper heading nesting")
        else accessibility (y :: ys)
    | _ -> ()

  let to_html toc =
    (try accessibility toc
     with Failure t ->
       print_endline "== Failed Heading List ==";
       pp Format.std_formatter toc;
       raise (Failure t));
    let tree = to_tree toc in
    preorder tree |> fun list ->
    [%html "<details><summary>Table of Contents</summary>" [list] "</details>"]
end
(* Markdown image rewriting: replaces image paragraphs with responsive
   <img> HTML generated by [Responsive.Images], configured via [conf]. *)
module Image = struct
  type t = {
    conf : Image.Transform.conf;
    path : Fpath.t;
    responsive : Responsive.Images.t;
  }

  let v ~quality ~path ~dst responsive =
    {
      conf = Image.Transform.{ quality; dst; rename = Fun.id; files = [] };
      path;
      responsive;
    }

  let transform t blocks =
    let open Omd in
    let f (b : attributes block) =
      (* Serialise a Tyxml element to a raw HTML block. *)
      let make_img img =
        let html = Fmt.str "%a" (Tyxml.Html.pp_elt ()) img in
        Html ([], html)
      in
      match b with
      | Paragraph (attrs, il) -> (
          match il with
          | Omd.Image (_, { label = Omd.Text (_, alt); destination; _ }) ->
              let conf = { t.conf with files = [Fpath.v destination] } in
              let conf = Responsive.Images.{ conf; root = t.path } in
              let img =
                Responsive.Images.v ~alt ~conf t.responsive |> List.hd |> snd
              in
              Paragraph (attrs, make_img img)
          | _ -> b)
      | _ -> b
    in
    List.map f blocks
end
(* Test transformer: increments every integer constant by one; [final]
   exposes the accumulated state.

   Fix: the original text fused signature and implementation — the
   [end = struct] between them was missing, which is a syntax error. *)
module ModifyingTransformer : sig
  type t = int

  include Transform.Transformer with type t := t

  val final : t -> int
end = struct
  include Transform.Identity

  type t = int

  let final count = count

  let expression _ = function
    | { Node.location; value = Expression.Constant (Constant.Integer number) } ->
        { Node.location; value = Expression.Constant (Constant.Integer (number + 1)) }
    | expression -> expression
end
(* Like [ModifyingTransformer] but refuses to descend into statement
   children, so only top-level expressions are modified.

   Fix: restored the missing [end = struct] between sig and struct. *)
module ShallowModifyingTransformer : sig
  type t = int

  include Transform.Transformer with type t := t
end = struct
  include Transform.Identity
  include ModifyingTransformer

  let transform_children state _ = state, false
end
(* Deep and shallow modifying transforms used by the tests below. *)
module ModifyingTransform = Transform.Make (ModifyingTransformer)
module ShallowModifyingTransform = Transform.Make (ShallowModifyingTransformer)
(* Run the (deep or shallow) modifying transform over [statements] starting
   from state 0, and assert both the transformed source and the final
   accumulated state. *)
let assert_modifying_source ?(shallow = false) statements expected_statements expected_sum =
  let state, modified =
    if shallow then
      let { ShallowModifyingTransform.state; source } =
        ShallowModifyingTransform.transform 0 (Source.create statements)
      in
      state, source
    else
      let { ModifyingTransform.state; source } =
        ModifyingTransform.transform 0 (Source.create statements)
      in
      state, source
  in
  assert_source_equal (Source.create expected_statements) modified;
  assert_equal expected_sum (ModifyingTransformer.final state) ~printer:string_of_int
(* Exercises [assert_modifying_source]: plain constants, a walrus operator,
   nested [If] statements (deep transform increments every integer constant),
   the same nested [If] with [~shallow:true] (expected output unchanged,
   since the shallow transformer does not descend), and a [Match] statement
   whose subject, singleton pattern and guard are all incremented.
   NOTE(review): this fixture is preserved verbatim from the detokenized
   source; the [" ! a "] walrus target looks like a mangled [!"a"] node
   constructor — confirm against the original test file. *)
let test_transform _ = assert_modifying_source [ + Statement . Expression ( + Expression . Constant ( Constant . Integer 1 ) ) ; + Statement . Expression ( + Expression . Constant ( Constant . Integer 2 ) ) ; ] [ + Statement . Expression ( + Expression . Constant ( Constant . Integer 2 ) ) ; + Statement . Expression ( + Expression . Constant ( Constant . Integer 3 ) ) ; ] 0 ; assert_modifying_source [ + Statement . Expression ( + Expression . WalrusOperator { target = " ! a " ; value = + Expression . Constant ( Constant . Integer 1 ) } ) ; ] [ + Statement . Expression ( + Expression . WalrusOperator { target = " ! a " ; value = + Expression . Constant ( Constant . Integer 2 ) } ) ; ] 0 ; assert_modifying_source [ + Statement . If { If . test = + Expression . Constant ( Constant . Integer 1 ) ; body = [ + Statement . If { If . test = + Expression . Constant ( Constant . Integer 2 ) ; body = [ + Statement . Expression ( + Expression . Constant ( Constant . Integer 3 ) ) ] ; orelse = [ + Statement . Expression ( + Expression . Constant ( Constant . Integer 4 ) ) ] ; } ; ] ; orelse = [ + Statement . Expression ( + Expression . Constant ( Constant . Integer 5 ) ) ] ; } ; ] [ + Statement . If { If . test = + Expression . Constant ( Constant . Integer 2 ) ; body = [ + Statement . If { If . test = + Expression . Constant ( Constant . Integer 3 ) ; body = [ + Statement . Expression ( + Expression . Constant ( Constant . Integer 4 ) ) ] ; orelse = [ + Statement . Expression ( + Expression . Constant ( Constant . Integer 5 ) ) ] ; } ; ] ; orelse = [ + Statement . Expression ( + Expression . Constant ( Constant . Integer 6 ) ) ] ; } ; ] 0 ; assert_modifying_source ~ shallow : true [ + Statement . If { If . test = + Expression . Constant ( Constant . Integer 1 ) ; body = [ + Statement . If { If . test = + Expression . Constant ( Constant . Integer 2 ) ; body = [ + Statement . Expression ( + Expression . Constant ( Constant . Integer 3 ) ) ] ; orelse = [ + Statement . 
Expression ( + Expression . Constant ( Constant . Integer 4 ) ) ] ; } ; ] ; orelse = [ + Statement . Expression ( + Expression . Constant ( Constant . Integer 5 ) ) ] ; } ; ] [ + Statement . If { If . test = + Expression . Constant ( Constant . Integer 1 ) ; body = [ + Statement . If { If . test = + Expression . Constant ( Constant . Integer 2 ) ; body = [ + Statement . Expression ( + Expression . Constant ( Constant . Integer 3 ) ) ] ; orelse = [ + Statement . Expression ( + Expression . Constant ( Constant . Integer 4 ) ) ] ; } ; ] ; orelse = [ + Statement . Expression ( + Expression . Constant ( Constant . Integer 5 ) ) ] ; } ; ] 0 ; assert_modifying_source [ + Statement . Match { Match . subject = + Expression . Constant ( Constant . Integer 0 ) ; cases = [ { Match . Case . pattern = + Match . Pattern . MatchSingleton ( Constant . Integer 2 ) ; guard = Some ( + Expression . Constant ( Constant . Integer 4 ) ) ; body = [ ] ; } ; ] ; } ; ] [ + Statement . Match { Match . subject = + Expression . Constant ( Constant . Integer 1 ) ; cases = [ { Match . Case . pattern = + Match . Pattern . MatchSingleton ( Constant . Integer 3 ) ; guard = Some ( + Expression . Constant ( Constant . Integer 5 ) ) ; body = [ ] ; } ; ] ; } ; ] 0
(* A transformer that duplicates every statement it visits: [statement]
   returns the statement twice, so each pass doubles the statement count.
   Inherits every other hook from [Transform.Identity].
   NOTE(review): the [end = struct] separating signature from structure was
   lost in extraction; reconstructed here — confirm against the original. *)
module ExpandingTransformer : sig
  type t = unit

  include Transform.Transformer with type t := t
end = struct
  include Transform.Identity

  type t = unit

  let statement state statement = state, [statement; statement]
end
(* Same duplication behavior as [ExpandingTransformer], but
   [transform_children] always answers [false], so child statements are
   never descended into — only top-level statements get duplicated.
   NOTE(review): the [end = struct] boundary was lost in extraction;
   reconstructed here — confirm against the original. *)
module ShallowExpandingTransformer : sig
  type t = unit

  include Transform.Transformer with type t := t
end = struct
  include Transform.Identity
  include ExpandingTransformer

  let transform_children state _ = state, false
end
(* Deep transform: duplicates statements at every nesting level. *)
module ExpandingTransform = Transform . Make ( ExpandingTransformer )
(* Shallow transform: duplicates only top-level statements. *)
module ShallowExpandingTransform = Transform . Make ( ShallowExpandingTransformer )
(* Runs the (deep or shallow) expanding transform over [statements] and
   asserts the result equals [expected_statements].
   [?shallow] selects [ShallowExpandingTransform]; defaults to the deep
   [ExpandingTransform]. *)
let assert_expanded_source ?(shallow = false) statements expected_statements =
  let actual =
    match shallow with
    | true ->
        ShallowExpandingTransform.transform () (Source.create statements)
        |> ShallowExpandingTransform.source
    | false ->
        ExpandingTransform.transform () (Source.create statements)
        |> ExpandingTransform.source
  in
  assert_source_equal (Source.create expected_statements) actual
(* Verifies statement duplication: flat float-constant statements are doubled
   in both deep and shallow mode; for a nested [If], deep mode duplicates the
   [If] itself AND its body/orelse statements, while [~shallow:true]
   duplicates only the outer [If] and leaves its children untouched. *)
let test_expansion _ = assert_expanded_source [ + Statement . Expression ( + Expression . Constant ( Constant . Float 1 . 0 ) ) ; + Statement . Expression ( + Expression . Constant ( Constant . Float 2 . 0 ) ) ; ] [ + Statement . Expression ( + Expression . Constant ( Constant . Float 1 . 0 ) ) ; + Statement . Expression ( + Expression . Constant ( Constant . Float 1 . 0 ) ) ; + Statement . Expression ( + Expression . Constant ( Constant . Float 2 . 0 ) ) ; + Statement . Expression ( + Expression . Constant ( Constant . Float 2 . 0 ) ) ; ] ; assert_expanded_source ~ shallow : true [ + Statement . Expression ( + Expression . Constant ( Constant . Float 1 . 0 ) ) ; + Statement . Expression ( + Expression . Constant ( Constant . Float 2 . 0 ) ) ; ] [ + Statement . Expression ( + Expression . Constant ( Constant . Float 1 . 0 ) ) ; + Statement . Expression ( + Expression . Constant ( Constant . Float 1 . 0 ) ) ; + Statement . Expression ( + Expression . Constant ( Constant . Float 2 . 0 ) ) ; + Statement . Expression ( + Expression . Constant ( Constant . Float 2 . 0 ) ) ; ] ; assert_expanded_source [ + Statement . If { If . test = + Expression . Constant ( Constant . Integer 1 ) ; body = [ + Statement . Expression ( + Expression . Constant ( Constant . Integer 3 ) ) ] ; orelse = [ + Statement . Expression ( + Expression . Constant ( Constant . Integer 5 ) ) ] ; } ; ] [ + Statement . If { If . test = + Expression . Constant ( Constant . Integer 1 ) ; body = [ + Statement . Expression ( + Expression . Constant ( Constant . Integer 3 ) ) ; + Statement . Expression ( + Expression . Constant ( Constant . Integer 3 ) ) ; ] ; orelse = [ + Statement . Expression ( + Expression . Constant ( Constant . Integer 5 ) ) ; + Statement . Expression ( + Expression . Constant ( Constant . Integer 5 ) ) ; ] ; } ; + Statement . If { If . test = + Expression . Constant ( Constant . Integer 1 ) ; body = [ + Statement . Expression ( + Expression . Constant ( Constant . 
Integer 3 ) ) ; + Statement . Expression ( + Expression . Constant ( Constant . Integer 3 ) ) ; ] ; orelse = [ + Statement . Expression ( + Expression . Constant ( Constant . Integer 5 ) ) ; + Statement . Expression ( + Expression . Constant ( Constant . Integer 5 ) ) ; ] ; } ; ] ; assert_expanded_source ~ shallow : true [ + Statement . If { If . test = + Expression . Constant ( Constant . Integer 1 ) ; body = [ + Statement . Expression ( + Expression . Constant ( Constant . Integer 3 ) ) ] ; orelse = [ + Statement . Expression ( + Expression . Constant ( Constant . Integer 5 ) ) ] ; } ; ] [ + Statement . If { If . test = + Expression . Constant ( Constant . Integer 1 ) ; body = [ + Statement . Expression ( + Expression . Constant ( Constant . Integer 3 ) ) ] ; orelse = [ + Statement . Expression ( + Expression . Constant ( Constant . Integer 5 ) ) ] ; } ; + Statement . If { If . test = + Expression . Constant ( Constant . Integer 1 ) ; body = [ + Statement . Expression ( + Expression . Constant ( Constant . Integer 3 ) ) ] ; orelse = [ + Statement . Expression ( + Expression . Constant ( Constant . Integer 5 ) ) ] ; } ; ]
(* An expanding transformer whose [transform_children] always stops descent:
   duplication applies only at the top level, so the nested [if] bodies in the
   parsed source appear unchanged in the duplicated output.
   NOTE(review): the [{| ... |}] heredocs below were mangled by extraction
   ([{ | ... } |]) and their embedded Python indentation was lost; kept
   byte-identical — restore from the original source. *)
let test_expansion_with_stop _ = let module StoppingExpandingTransformer : sig type t = unit include Transform . Transformer with type t := t end = struct include ExpandingTransformer let transform_children state _ = state , false end in let module StoppingExpandingTransform = Transform . Make ( StoppingExpandingTransformer ) in let assert_expanded_source_with_stop source expected_source = let modified = StoppingExpandingTransform . transform ( ) ( parse source ) |> StoppingExpandingTransform . source in assert_source_equal ~ location_insensitive : true ( parse expected_source ) modified in assert_expanded_source_with_stop { | if ( 1 ) : if ( 2 ) : 3 else : 4 else : if ( 5 ) : 6 else : 7 } | { | if ( 1 ) : if ( 2 ) : 3 else : 4 else : if ( 5 ) : 6 else : 7 if ( 1 ) : if ( 2 ) : 3 else : 4 else : if ( 5 ) : 6 else : 7 } |
(* Counts visited statements with a transformer that increments an int state
   per statement and leaves the source unchanged. Deep mode counts nested
   statements (e.g. 7 for the doubly-nested [if]); shallow mode stops at the
   top level (count 1). NOTE(review): the final assertion goes through
   [ModifyingTransformer.final], defined elsewhere in this file — presumably
   the identity on int; confirm. Heredoc contents are extraction-mangled;
   kept byte-identical. *)
let test_double_count _ = let module DoubleCounterTransformer : sig type t = int include Transform . Transformer with type t := t end = struct include Transform . Identity type t = int let statement count statement = count + 1 , [ statement ] end in let module ShallowDoubleCounterTransformer : sig type t = int include Transform . Transformer with type t := t end = struct include Transform . Identity include DoubleCounterTransformer let transform_children state _ = state , false end in let module DoubleCounterTransform = Transform . Make ( DoubleCounterTransformer ) in let module ShallowDoubleCounterTransform = Transform . Make ( ShallowDoubleCounterTransformer ) in let assert_double_count ( ? shallow = false ) source expected_sum = let state , modified = if shallow then let { ShallowDoubleCounterTransform . state ; source } = ShallowDoubleCounterTransform . transform 0 ( parse source ) in state , source else let { DoubleCounterTransform . state ; source } = DoubleCounterTransform . transform 0 ( parse source ) in state , source in assert_source_equal ( parse source ) modified ; assert_equal expected_sum ( ModifyingTransformer . final state ) ~ printer : string_of_int in assert_double_count { | 1 . 0 2 . 0 } | 2 ; assert_double_count ~ shallow : true { | 1 . 0 2 . 0 } | 2 ; assert_double_count { | if ( 1 ) : 3 else : 5 } | 3 ; assert_double_count ~ shallow : true { | if ( 1 ) : 3 else : 5 } | 1 ; assert_double_count { | if ( 1 ) : if ( 2 ) : 3 else : 4 else : if ( 5 ) : 6 else : 7 } | 7 ; assert_double_count ~ shallow : true { | if ( 1 ) : if ( 2 ) : 3 else : 4 else : if ( 5 ) : 6 else : 7 } | 1
(* Conditional descent: [transform_children] turns counting on when entering a
   [Class] body ([is_nested = true]) and otherwise preserves the current
   nesting flag, so only statements lexically inside a class are counted.
   [statement] resets the flag when it pops back out to a [Class] statement
   itself. Expected counts: 0 outside classes, 1 for one body statement,
   3 for a class containing a method. *)
let test_conditional_count _ = let module NestedCounterTransformer : sig type t = { is_nested : bool ; statement_count : int ; } include Transform . Transformer with type t := t end = struct type t = { is_nested : bool ; statement_count : int ; } let transform_expression_children _ _ = false let transform_children ( { is_nested ; _ } as state ) statement = match Node . value statement with | Statement . Class _ -> { state with is_nested = true } , true | _ -> state , is_nested let expression _ expression = expression let statement { statement_count ; is_nested } statement = let is_nested , statement_count = match Node . value statement with | Statement . Class _ -> false , statement_count | _ -> is_nested , if is_nested then statement_count + 1 else statement_count in { is_nested ; statement_count } , [ statement ] end in let module NestedCounterTransform = Transform . Make ( NestedCounterTransformer ) in let assert_conditional_count source expected_statement_count = let statement_count { NestedCounterTransformer . statement_count ; _ } = statement_count in let state , modified = let { NestedCounterTransform . state ; source } = NestedCounterTransform . transform { statement_count = 0 ; is_nested = false } ( parse source ) in state , source in assert_source_equal ( parse source ) modified ; assert_equal expected_statement_count ( statement_count state ) in assert_conditional_count { | 1 . 0 2 . 0 } | 0 ; assert_conditional_count { | 1 . 0 class Foo : 2 . 0 } | 1 ; assert_conditional_count { | 1 . 0 class Foo : 2 . 0 def method ( self ) -> None : 3 . 0 4 . 0 } | 3 ; ( )
(* Statement-level transform via [Transform.MakeStatementTransformer]: every
   integer-constant assignment [x = n] is rewritten to [x = n + 1] while the
   accumulator sums the original [n]s; all other statements pass through
   untouched. The expected sum for the sample program is 28. *)
let test_statement_transformer _ = let module ModifyingStatementTransformer : sig type t = int include Transform . StatementTransformer with type t := t val final : t -> int end = struct type t = int let final count = count let statement count { Node . location ; value } = let count , value = match value with | Statement . Assign ( { Assign . value = { Node . value = Constant ( Constant . Integer number ) ; _ } as value ; _ } as assign ) -> ( count + number , Statement . Assign { assign with Assign . value = { value with Node . value = Constant ( Constant . Integer ( number + 1 ) ) } ; } ) | _ -> count , value in count , [ { Node . location ; value } ] end in let module Transform = Transform . MakeStatementTransformer ( ModifyingStatementTransformer ) in let assert_transform source expected expected_sum = let { Transform . state ; source = modified } = Transform . transform 0 ( parse source ) in assert_source_equal ( parse expected ) modified ; assert_equal expected_sum ( ModifyingStatementTransformer . final state ) ~ printer : string_of_int in assert_transform { | def foo ( ) : x = 1 y = 2 2 3 + 4 x = 3 y = 4 if 1 == 3 : x = 5 else : if a > b : y = 6 class C : z = 7 match x : case 1 : w = 0 } | { | def foo ( ) : x = 2 y = 3 2 3 + 4 x = 4 y = 5 if 1 == 3 : x = 6 else : if a > b : y = 7 class C : z = 8 match x : case 1 : w = 1 } | 28
(* [Transform.transform_expressions] with a rewriter that truncates every
   call's argument list to its first argument (calls with no arguments and
   non-call expressions pass through). Comparison is location-insensitive and
   failures print sexp-formatted statements. *)
let test_transform_expression _ = let keep_first_argument = function | Expression . Call { Call . callee = { Node . value = Name ( Identifier given_callee_name ) ; location } ; arguments = argument :: _ ; } -> Expression . Call { Call . callee = { Node . value = Name ( Identifier given_callee_name ) ; location } ; arguments = [ argument ] ; } | expression -> expression in let assert_transform given expected = match parse given |> Source . statements , parse expected |> Source . statements with | [ { Node . value = given ; _ } ] , [ { Node . value = expected ; _ } ] -> let actual = Transform . transform_expressions ~ transform : keep_first_argument given in let printer x = [ % sexp_of : Statement . statement ] x |> Sexp . to_string_hum in assert_equal ~ cmp ( : fun left right -> Statement . location_insensitive_compare ( Node . create_with_default_location left ) ( Node . create_with_default_location right ) = 0 ) ~ printer ~ pp_diff ( : diff ~ print ( : fun format x -> Format . fprintf format " % s " ( printer x ) ) ) expected actual | _ -> failwith " expected one statement each " in assert_transform { | def foo ( ) : bar ( 1 , 2 ) x = bar ( bar ( 1 , bar ( 2 , 3 ) ) , bar ( 4 , 5 ) ) some_other_function ( ) } | { | def foo ( ) : bar ( 1 ) x = bar ( bar ( 1 ) ) some_other_function ( ) } ; | ( )
let test_sanitize_statement _ = let assert_sanitized statements expected = let given_statement , expected_statement = match statements , parse expected |> Source . statements with | [ { Node . value = given_statement ; _ } ] , [ { Node . value = expected_statement ; _ } ] -> given_statement , expected_statement | _ -> failwith " Expected defines " in assert_equal ~ cmp ( : fun left right -> Statement . location_insensitive_compare left right = 0 ) ~ printer [ :% show : Statement . t ] ( expected_statement |> Node . create_with_default_location ) ( Transform . sanitize_statement given_statement |> Node . create_with_default_location ) in assert_sanitized [ + Statement . Define { Define . signature = { Define . Signature . name = " !&$ local_test ? foo $ bar " ; parameters = [ { + Parameter . name = " $ parameter $ a " ; value = None ; annotation = Some " ! int " } ] ; decorators = [ ] ; return_annotation = Some " ! int " ; async = false ; generator = false ; parent = None ; nesting_define = None ; } ; captures = [ ] ; unbound_names = [ ] ; body = [ + Statement . Assign { Assign . target = " !$ local_test ? foo ? bar $ my_kwargs " ; annotation = None ; value = + Expression . Dictionary { Dictionary . entries = [ { Dictionary . Entry . key = + Expression . Constant ( Constant . String ( StringLiteral . create " a " ) ) ; value = " !$ parameter $ a " ; } ; ] ; keywords = [ ] ; } ; } ; + Statement . Expression ( + Expression . Call { Call . callee = " ! print " ; arguments = [ { Call . Argument . name = None ; value = " !$ local_test ? foo ? bar $ my_kwargs " } ] ; } ) ; + Statement . Return { Return . is_implicit = false ; expression = Some ( + Expression . Call { Call . callee = " !$ local_test ? foo $ baz " ; arguments = [ { Call . Argument . name = None ; value = " !$ parameter $ a " } ] ; } ) ; } ; ] ; } ; ] { | def bar ( a : int ) -> int : my_kwargs = { " a " : a } print ( my_kwargs ) return baz ( a ) } ; | assert_sanitized [ + Statement . Define { Define . 
signature = { Define . Signature . name = " !& bar " ; parameters = [ { + Parameter . name = " a " ; value = None ; annotation = Some " ! int " } ] ; decorators = [ ] ; return_annotation = Some " ! int " ; async = false ; generator = false ; parent = None ; nesting_define = None ; } ; captures = [ ] ; unbound_names = [ ] ; body = [ + Statement . Assign { Assign . target = " ! my_kwargs " ; annotation = None ; value = + Expression . Dictionary { Dictionary . entries = [ { Dictionary . Entry . key = + Expression . Constant ( Constant . String ( StringLiteral . create " a " ) ) ; value = " ! a " ; } ; ] ; keywords = [ ] ; } ; } ; + Statement . Expression ( + Expression . Call { Call . callee = " ! print " ; arguments = [ { Call . Argument . name = None ; value = " ! my_kwargs " } ] ; } ) ; + Statement . Return { Return . is_implicit = false ; expression = Some ( + Expression . Call { Call . callee = " ! baz " ; arguments = [ { Call . Argument . name = None ; value = " ! a " } ] ; } ) ; } ; ] ; } ; ] { | def bar ( a : int ) -> int : my_kwargs = { " a " : a } print ( my_kwargs ) return baz ( a ) } ; | ( )
(* OUnit entry point: registers every transform test under the "transform"
   suite and runs it. *)
let () =
  let suite =
    [
      "transform" >:: test_transform;
      "expansion" >:: test_expansion;
      "expansion_with_stop" >:: test_expansion_with_stop;
      "statement_double_counter" >:: test_double_count;
      "statement_conditional_counter" >:: test_conditional_count;
      "statement_transformer" >:: test_statement_transformer;
      "transform_expression" >:: test_transform_expression;
      "sanitize_statement" >:: test_sanitize_statement;
    ]
  in
  "transform" >::: suite |> Test.run
(* Partial-evaluation pass over a tagless-final EDSL: terms whose shape is
   statically known ([Apply (Lambda f, v)], [List_map] over a literal list,
   [Fst]/[Snd] of a literal [Pair]) are reduced at optimization time; anything
   opaque is kept as [Unknown] and handed back to [Input].
   Fixes vs. the previous version: the [Fst (Pair (a, b))] and
   [Snd (Pair (a, b))] branches bound a variable they never used (compiler
   unused-variable warning); those binders are now wildcards. The functor's
   [= struct] boundary, lost in extraction, is restored. *)
module Apply_optimization_framework (Input : Semantics.Bioinformatics_base) =
struct
  module Transformation_types = struct
    (* ['a from] is the backend representation we eventually lower into. *)
    type 'a from = 'a Input.repr

    (* GADT of partially-known terms; [Unknown] wraps an opaque backend
       value. *)
    type 'a term =
      | Unknown : 'a from -> 'a term
      | Apply : ('a -> 'b) term * 'a term -> 'b term
      | Lambda : ('a term -> 'b term) -> ('a -> 'b) term
      | List_make : 'a term list -> 'a list term
      | List_map : ('a list term * ('a -> 'b) term) -> 'b list term
      | Pair : 'a term * 'b term -> ('a * 'b) term
      | Fst : ('a * 'b) term -> 'a term
      | Snd : ('a * 'b) term -> 'b term

    let fwd x = Unknown x

    (* [bwd] lowers a term back to the backend, beta-reducing and
       projecting where the structure is statically visible. The
       statically-known cases must precede the generic ones. *)
    let rec bwd : type a. a term -> a from = function
      | Apply (Lambda f, v) -> bwd (f v)
      | Apply (other, v) -> Input.apply (bwd other) (bwd v)
      | List_map (List_make l, Lambda f) ->
          Input.list (List.map ~f:(fun x -> bwd (f x)) l)
      | List_map (x, f) -> Input.list_map ~f:(bwd f) (bwd x)
      | Lambda f -> Input.lambda (fun x -> bwd (f (fwd x)))
      | List_make l -> Input.list (List.map ~f:bwd l)
      | Fst (Pair (a, _)) -> bwd a
      | Snd (Pair (_, b)) -> bwd b
      | Pair (a, b) -> Input.pair (bwd a) (bwd b)
      | Fst b -> Input.pair_first (bwd b)
      | Snd b -> Input.pair_second (bwd b)
      | Unknown x -> x
  end

  module Transformation =
    Optimization_framework.Define_transformation (Transformation_types)

  open Transformation

  (* Constructors for the subset of the language this pass understands. *)
  module Language_delta = struct
    open Transformation_types

    let apply f x = Apply (f, x)
    let lambda f = Lambda f
    let list l = List_make l
    let list_map l ~f = List_map (l, f)
    let pair a b = Pair (a, b)
    let pair_first p = Fst p
    let pair_second p = Snd p
  end
end
(* Wires the partial-evaluation pass into the generic optimizer and re-exports
   the pass's term constructors. *)
module Apply (Input : Semantics.Bioinformatics_base) = struct
  module The_pass = Apply_optimization_framework (Input)

  include
    Optimization_framework.Generic_optimizer (The_pass.Transformation) (Input)

  include The_pass.Language_delta
end
(* Extends [Big_int] with sexp conversions so it can participate in
   [with sexp] derivations. *)
module Big_int = struct
  include Big_int

  (* Raised when a sexp is not a single atom convertible to a big_int. *)
  exception Error

  let sexp_of_big_int bi = Sexp.Atom (Big_int.string_of_big_int bi)

  let big_int_of_sexp = function
    | Sexp.Atom str -> Big_int.big_int_of_string str
    | Sexp.List _ -> raise Error
end
(* The transit value universe as a polymorphic variant: scalar ground types
   (null/string/bool/int/bigint/float/bytes), containers (array, map, list,
   set), and transit extension types (uuid, keyword, symbol, time, uri,
   tagged [`Extension]). Uses the camlp4-era [with sexp, compare] deriving
   syntax — predates ppx. *)
module T = struct type t = [ | ` Null | ` String of String . t | ` Bool of Bool . t | ` Int of Int64 . t | ` BigInt of Big_int . big_int | ` Float of Float . t | ` Bytes of string | ` Array of t list | ` Map of ( t , t ) Map . Poly . t | ` UUID of Uuid . t | ` Keyword of String . t | ` Symbol of String . t | ` Time of Time . t | ` URI of String . t | ` List of t list | ` Set of t Set . Poly . t | ` Extension of string * t ] with sexp , compare end
(* Encode/decode transit cache codes: indices 0..1935 rendered as one or two
   characters in a 44-symbol alphabet starting at ASCII 48, prefixed with
   "^" on the wire.
   Fixes vs. the previous version: [of_int] now validates the range BEFORE
   computing digits, and the error message no longer misstates the bound
   (the old text said "larger than 1936" although the check fires for any
   i > 1935). *)
module CacheCode = struct
  exception Invalid_cache_code of string

  (* First character of the cache alphabet: ASCII '0'. *)
  let base_char_index = 48

  (* Radix of the cache encoding. *)
  let cache_code_digits = 44

  (* Largest encodable index: 44 * 44 - 1 = 1935. *)
  let max_count = (cache_code_digits * cache_code_digits) - 1

  (* [to_int s] decodes a one- or two-character code (without its "^"
     prefix) into its index.
     @raise Invalid_cache_code for any other length. *)
  let to_int s =
    match String.length s with
    | 1 -> Char.to_int s.[0] - base_char_index
    | 2 ->
        ((Char.to_int s.[0] - base_char_index) * cache_code_digits)
        + (Char.to_int s.[1] - base_char_index)
    | _ -> raise (Invalid_cache_code s)

  (* [of_int i] encodes index [i] as a "^"-prefixed cache code.
     @raise Invalid_cache_code when [i] > [max_count]. *)
  let of_int i =
    if i > max_count then
      raise
        (Invalid_cache_code "Integer exceeds the maximum cache index (1935)")
    else
      let hi = i / cache_code_digits in
      let lo = i % cache_code_digits in
      if hi = 0 then
        "^" ^ String.of_char (Char.of_int_exn (lo + base_char_index))
      else
        "^"
        ^ String.of_char (Char.of_int_exn (hi + base_char_index))
        ^ String.of_char (Char.of_int_exn (lo + base_char_index))
end
(* Transit JSON reader: a YAJL-driven pull parser that folds events into a
   [Context.t] — a zipper-like stack ([Array]/[MapKey]/[MapValue]/[Tagged])
   plus a read cache for repeated keys/keywords/symbols. Tags ("~#list",
   "~#set", "~#cmap", quote) wrap their payload and are resolved when the
   enclosing array/map is popped. [decode_tagged] handles "~X"-prefixed
   scalars; [decode_string] dispatches on the first two characters
   (cache refs "^…", map-as-array marker, tags, escaped strings).
   NOTE(review): extraction damage is visible here — e.g. [track_tag]'s own
   body appears fused with the [add] definition, and char literals like
   ['^'] print as [' ' ^]; text kept byte-identical, restore from the
   original source before relying on exact token order. *)
module Reader = struct exception Internal_error exception Parse_error of string module Context = struct type tag = | Quote | List | Set | CMap | Unknown of string with sexp let tag_of_string = function | " ' " ~# -> Quote | " ~# list " -> List | " ~# set " -> Set | " ~# cmap " -> CMap | s -> Unknown ( String . drop_prefix s 2 ) module Transit_cache = struct type entry = ETransit of T . t | ETag of tag type t = { m : entry Int . Map . t ; c : int } let empty = { m = Int . Map . empty ; c = 0 } let track { m ; c } s = let m ' = Int . Map . add m ~ key : c ~ data : s in if c > CacheCode . max_count then empty else { m = m ' ; c = c + 1 } let track_transit x v = track x ( ETransit v ) let track_tag x v = track x ( ETag v ) let find_exn { m ; _ } x = Int . Map . find_exn m x end module Cache = Transit_cache type context = | Empty | Focused of T . t | Array of T . t list * context | MapKey of ( T . t * T . t ) list * context | MapValue of T . t * ( T . t * T . t ) list * context | Tagged of tag * context with sexp type t = Cache . t * context let empty = ( Cache . empty , Empty ) let result ( _ , ctx ) = match ctx with | Focused v -> v | _ -> raise Internal_error let track_transit ( cache , ctx ) s x = if String . length s > 3 then ( Cache . track_transit cache x , ctx ) else ( cache , ctx ) let track_tag ( cache , ctx ) s x = let rec add ( cache , ctx ) e = match ctx with | Empty -> ( cache , Focused e ) | Focused _ -> raise Internal_error | Array ( es , ctx ) -> ( cache , Array ( e :: es , ctx ) ) | Tagged ( t , inner ) -> let ( cache ' , inner ' ) = add ( cache , inner ) e in ( cache ' , Tagged ( t , inner ' ) ) | MapKey ( es , ctx ) -> ( match e with | ` String s when String . length s > 3 -> ( Cache . 
track_transit cache e , MapValue ( e , es , ctx ) ) | _ -> ( cache , MapValue ( e , es , ctx ) ) ) | MapValue ( k , es , ctx ) -> ( cache , MapKey ( ( k , e ) :: es , ctx ) ) let push_map_as_array ( cache , ctx ) = match ctx with | Array ( [ ] , parent ) -> ( cache , MapKey ( [ ] , parent ) ) | _ -> raise ( Parse_error " Map - as - array marker in wrong location " ) ! let push_array ( cache , ctx ) = ( cache , Array ( [ ] , ctx ) ) let push_map ( cache , ctx ) = ( cache , MapKey ( [ ] , ctx ) ) let push_tagged ( cache , ctx ) t = match ctx with | Array ( [ ] , _ ) as cx -> ( cache , Tagged ( t , cx ) ) | MapKey ( [ ] , parent ) -> ( cache , Tagged ( t , MapValue ( ` Null , [ ] , parent ) ) ) | _ -> raise ( Parse_error " Array tag , but not parsing an array " ) let rec pairup = function | [ ] -> [ ] | k :: v :: rest -> ( k , v ) :: ( pairup rest ) | _ -> raise ( Parse_error " Odd number of pairs " ) let pop_map ( cache , ctx ) = match ctx with | MapKey ( res , parent ) -> add ( cache , parent ) ( ` Map ( Map . Poly . of_alist_exn res ) ) | Tagged ( Quote , MapKey ( [ _ , res ] , parent ) ) -> add ( cache , parent ) res | Tagged ( Set , MapKey ( [ _ , ` Array res ] , parent ) ) -> add ( cache , parent ) ( ` Set ( Set . Poly . of_list res ) ) | Tagged ( List , MapKey ( [ _ , ` Array res ] , parent ) ) -> add ( cache , parent ) ( ` List res ) | Tagged ( CMap , MapKey ( [ _ , ` Array res ] , parent ) ) -> add ( cache , parent ) ( ` Map ( Map . Poly . 
of_alist_exn ( pairup res ) ) ) | Tagged ( Unknown t , MapKey ( [ _ , res ] , parent ) ) -> add ( cache , parent ) ( ` Extension ( t , res ) ) | Tagged ( _ , _ ) -> raise Internal_error | MapValue ( _ , _ , _ ) -> raise ( Parse_error " end_of_map called with an “ free ” key " ) | Array ( _ , _ ) -> raise ( Parse_error " end_of_map in array context " ) | Empty -> raise ( Parse_error " end_of_map called in empty context " ) | Focused _ -> raise ( Parse_error " end_of_map called in focused context " ) let pop_array ( cache , ctx ) = match ctx with | Array ( res , parent ) -> add ( cache , parent ) ( ` Array ( List . rev res ) ) | Tagged ( List , Array ( [ ` Array res ] , parent ) ) -> add ( cache , parent ) ( ` List res ) | Tagged ( List , Array ( _ , _ ) ) -> raise ( Parse_error " Wrong ~# list encoding " ) | Tagged ( Set , Array ( [ ` Array res ] , parent ) ) -> add ( cache , parent ) ( ` Set ( Set . Poly . of_list res ) ) | Tagged ( Set , Array ( _ , _ ) ) -> raise ( Parse_error " Wrong ~# set encoding " ) | Tagged ( Quote , Array ( [ res ] , parent ) ) -> add ( cache , parent ) res | Tagged ( Quote , Array ( _ , _ ) ) -> raise ( Parse_error " Quote with multi - elem array " ) | Tagged ( CMap , Array ( [ ` Array res ] , parent ) ) -> add ( cache , parent ) ( ` Map ( Map . Poly . of_alist_exn ( pairup res ) ) ) | Tagged ( Unknown t , Array ( [ res ] , parent ) ) -> add ( cache , parent ) ( ` Extension ( t , res ) ) | Tagged ( _ , _ ) -> raise Internal_error | MapValue ( _ , _ , _ ) -> raise ( Parse_error " Odd number of k / v pairs in map - as - array / cmap " ) | MapKey ( res , parent ) -> add ( cache , parent ) ( ` Map ( Map . Poly . of_alist_exn res ) ) | Empty -> raise ( Parse_error " end_of_array called in Empty context " ) | Focused _ -> raise ( Parse_error " end_of_array called in Focused context " ) let cache_lookup ( ( cache , _ ) as cx ) s = match CacheCode . to_int s |> Cache . find_exn cache with | Cache . ETransit x -> add cx x | Cache . 
ETag t -> push_tagged cx t end let decode_tagged s = function | ' _ ' -> ` Null | ' s ' -> ` String s | ' ' ? -> ( match s with | " t " -> ` Bool true | " f " -> ` Bool false | _ -> raise ( Parse_error " decode_tagged ? case " ) ) | ' i ' -> ` Int ( Int64 . of_string s ) | ' n ' -> let i = Big_int . big_int_of_string s in ( try ` Int ( Big_int . int64_of_big_int i ) with Failure " nativeint_of_big_int " -> ` BigInt i ) | ' d ' -> ` Float ( Float . of_string s ) | ' b ' -> ` Bytes ( Base64 . decode_string s ) | ' u ' -> ( ` UUID ( Uuid . of_string s ) ) | ' r ' -> ( ` URI s ) | ' ' : -> ( ` Keyword s ) | ' ' $ -> ( ` Symbol s ) | ' m ' -> let f = Big_int . float_of_big_int ( Big_int . big_int_of_string s ) in ( ` Time ( Time . of_float ( f . / 1000 . 0 ) ) ) | ' t ' -> let tp = Time . of_string s in ` Time tp | t -> ` Extension ( String . of_char t , ` String s ) let decode_string s ctx head = let track x = Context . add ( Context . track_transit ctx s x ) x in match head with | ( ' ' , ^ ' ' ) -> Context . push_map_as_array ctx | ( ' ' , ^ _ ) -> Context . cache_lookup ctx ( String . drop_prefix s 1 ) | ( ' ' , ~ ' ' ) ~ -> ` String ( String . drop_prefix s 1 ) |> Context . add ctx | ( ' ' , ~ ' ' ) ^ -> ` String ( String . drop_prefix s 1 ) |> Context . add ctx | ( ' ' , ~ ' ' ) # -> let array_tag = Context . tag_of_string s in Context . push_tagged ( Context . track_tag ctx s array_tag ) array_tag | ( ' ' , ~ t ) -> ( match decode_tagged ( String . drop_prefix s 2 ) t with | ` Symbol sy -> track ( ` Symbol sy ) | ` Keyword k -> track ( ` Keyword k ) | value -> Context . add ctx value ) | _ -> Context . add ctx ( ` String s ) let on_null ctx = Context . add ctx ` Null let on_bool ctx b = Context . add ctx ( ` Bool b ) let on_int ctx i = Context . add ctx ( ` Int i ) let on_float ctx f = Context . add ctx ( ` Float f ) let on_string ctx buf offset len = let str = String . sub buf ~ pos : offset ~ len : len in match String . length str with | 0 | 1 -> Context . 
add ctx ( ` String str ) | _ -> decode_string str ctx ( str . [ 0 ] , str . [ 1 ] ) module JSON = struct let callbacks = { YAJL . on_null = on_null ; on_bool = on_bool ; on_number = ` Parse_numbers ( ( ` Int64 on_int ) , on_float ) ; on_string = on_string ; on_start_map = Context . push_map ; on_map_key = on_string ; on_end_map = Context . pop_map ; on_start_array = Context . push_array ; on_end_array = Context . pop_array ; } let from_string str = let p = YAJL . make_parser callbacks Context . empty in let ( ) = YAJL . parse p str in YAJL . complete_parse p |> Context . result end end
(* Transit JSON writer: serializes a [T.t] through a YAJL generator using a
   hand-rolled state monad [Ctx] threading (write-cache, generator). Map keys
   longer than three characters are cached and replaced by "^"-codes on
   repeat. Maps with composite keys are emitted as "~#cmap" arrays, otherwise
   as map-as-array with the "^ " marker. Scalars at top level are wrapped in a
   quote tag. Integers outside the 53-bit float-safe range are emitted as
   "~i"-strings so JavaScript readers do not lose precision.
   NOTE(review): string literals such as [" " ~: ^ k] are extraction-mangled
   forms of expressions like ["~:" ^ k]; text kept byte-identical — restore
   from the original source before relying on exact literals. *)
module Writer = struct exception Todo module Cache = struct type t = { count : int ; cache : int String . Map . t } let empty = { count = 0 ; cache = String . Map . empty } let find { cache ; _ } = String . Map . find cache let track { cache ; count } key = if String . length key > 3 then if count > CacheCode . max_count then empty else { count = count + 1 ; cache = String . Map . add cache ~ key ~ data : count } else { cache ; count } end module CtxBase = struct type ' a t = ( Cache . t * YAJL . gen -> ' a * Cache . t * YAJL . gen ) let runState f init = f init let return x = fun ( c , g ) -> ( x , c , g ) let bind act1 fact2 s = let ( iv , ic , ig ) = runState act1 s in let act2 = fact2 iv in runState act2 ( ic , ig ) let map = ` Define_using_bind end module Ctx = struct include Monad . Make ( CtxBase ) let runState f init = f init let get_gen ( c , g ) = ( g , c , g ) let get_cache ( c , g ) = ( c , c , g ) let put_cache c ' ( _c , g ) = ( ( ) , c ' , g ) end let int_53_bit_upper = Int64 . of_int 9007199254740992 let int_53_bit_lower = Int64 . of_int ( - 9007199254740992 ) let to_string t = let open Ctx . Monad_infix in let ( ) >> f1 f2 = f1 >>= ( fun ( ) -> f2 ) in let string x = Ctx . get_gen >>= ( fun gen -> YAJL . gen_string gen x ; Ctx . return ( ) ) in let int i = Ctx . get_gen >>= ( fun gen -> YAJL . gen_int64 gen i ; Ctx . return ( ) ) in let null = Ctx . get_gen >>= ( fun gen -> YAJL . gen_null gen ; Ctx . return ( ) ) in let bool b = Ctx . get_gen >>= ( fun gen -> YAJL . gen_bool gen b ; Ctx . return ( ) ) in let float f = Ctx . get_gen >>= ( fun gen -> YAJL . gen_float gen f ; Ctx . return ( ) ) in let start_array = Ctx . get_gen >>= ( fun gen -> Ctx . return ( YAJL . gen_start_array gen ) ) in let end_array = Ctx . get_gen >>= ( fun gen -> Ctx . return ( YAJL . gen_end_array gen ) ) in let track str = Ctx . get_cache >>= ( fun c -> match Cache . find c str with | None -> let c ' = Cache . track c str in Ctx . 
put_cache c ' >> string str | Some i -> string ( CacheCode . of_int i ) ) in let is_composite_map m = let keys = Map . Poly . keys m in let composite_key = function | ` Map _ -> true | ` Array _ -> true | ` Set _ -> true | ` List _ -> true | _ -> false in List . exists keys ~ f : composite_key in let rec array_tagged tag x = start_array >> track tag >> write_json x ~ string_key : false >> end_array and write_map m = let f ( key , data ) = write_json key ~ string_key : true >> write_json data ~ string_key : false in let l = Map . Poly . to_alist m in if is_composite_map m then begin start_array >> string " ~# cmap " >> start_array >> Ctx . all_ignore ( List . map l ~ f ) >> end_array >> end_array end else begin start_array >> string " ^ " >> Ctx . all_ignore ( List . map l ~ f ) >> end_array end and write_json x ~ string_key = let string = if string_key then track else string in match x with | ` Null -> null | ` Bool b -> bool b | ` String " " -> string " " | ` String s -> let str = match s . [ 0 ] with | ' ' ~ -> " " ~ ^ s | ' ' ^ -> " " ~ ^ s | _ -> s in if string_key then track str else string str | ` Int i -> if string_key then track ( " ~ i " ^ Int64 . to_string i ) else if i < int_53_bit_upper && i > int_53_bit_lower then int i else string ( " ~ i " ^ Int64 . to_string i ) | ` BigInt n -> string ( " ~ n " ^ Big_int . string_of_big_int n ) | ` Keyword k -> track ( " " ~: ^ k ) | ` Symbol symb -> track ( " " ~$ ^ symb ) | ` UUID uuid -> string ( " ~ u " ^ Uuid . to_string uuid ) | ` URI s -> string ( " ~ r " ^ s ) | ` Time t -> let i = ( Time . to_float t ) . * 1000 . 0 |> Float . to_int64 in string ( " ~ m " ^ Int64 . to_string i ) | ` Float f -> if string_key then track ( " ~ d " ^ Float . to_string f ) else float f | ` Array ts -> begin start_array >> Ctx . all_ignore ( List . 
map ts ~ f ( : fun x -> write_json x ~ string_key : false ) ) >> end_array end | ` List ts -> array_tagged " ~# list " ( ` Array ts ) | ` Set s -> array_tagged " ~# set " ( ` Array ( Set . Poly . to_list s ) ) | ` Map m -> write_map m | ` Extension ( tag , x ) -> start_array >> string ( " " ~# ^ tag ) >> write_json x ~ string_key : string_key >> end_array | _ -> raise Todo in let quote x = start_array >> string " ' " ~# >> write_json x ~ string_key : false >> end_array in let write_json_toplevel = function | ` Null -> quote ` Null | ` Bool b -> quote ( ` Bool b ) | ` String s -> quote ( ` String s ) | ` Float f -> quote ( ` Float f ) | ` Int i -> quote ( ` Int i ) | ` Time t -> quote ( ` Time t ) | ` Keyword kw -> quote ( ` Keyword kw ) | ` Symbol symb -> quote ( ` Symbol symb ) | ` URI u -> quote ( ` URI u ) | ` UUID uuid -> quote ( ` UUID uuid ) | x -> write_json x ~ string_key : false in let write x = let gen = YAJL . make_gen ( ) in let ( ( ) , _ , _ ) = Ctx . runState ( write_json_toplevel x ) ( Cache . empty , gen ) in let ( buf , pos , len ) = YAJL . gen_get_buf gen in let res = String . sub buf ~ pos ~ len in YAJL . gen_clear gen ; res in write t end
(* Dependency signature for the [Make] functor below: the concrete
   [Transition_frontier] implementation to prove chains against. *)
module type Inputs_intf = sig module Transition_frontier : module type of Transition_frontier end
(* Transition-chain prover: given a state hash, finds the corresponding
   validated block either in the live frontier or in the root-history
   extension, then walks parent links backwards via [Merkle_list_prover] to
   produce (oldest state hash, list of state-body hashes) proving the chain.
   NOTE(review): this line carries heavy extraction damage — duplicated
   tokens such as ") Inputs_intf :", repeated "validated_transition" and
   "parent_hash", and a stray "end )"; kept byte-identical, reconcile with
   the original source before editing logic. *)
module Make ( Inputs : Inputs_intf ) Inputs_intf : Mina_intf . Transition_chain_prover_intf with type transition_frontier := Inputs . Transition_frontier . t = struct open Inputs let find_in_root_history frontier state_hash = let open Transition_frontier . Extensions in let open Option . Let_syntax in let root_history = get_extension ( Transition_frontier . extensions frontier ) frontier Root_history in let % map root_data = Root_history . lookup root_history state_hash in External_transition . Validated . lower @@ Frontier_base . Root_data . Historical . transition root_data module Merkle_list = Merkle_list_prover . Make_ident ( struct type value = Mina_block . Validated . t type context = Transition_frontier . t type proof_elem = State_body_hash . t let to_proof_elem = Mina_block . Validated . state_body_hash let get_previous ~ context transition = let parent_hash = transition |> Mina_block . Validated . forget |> With_hash . data |> Mina_block . header |> Mina_block . Header . protocol_state |> Protocol_state . previous_state_hash in let open Option . Let_syntax in Option . merge Transition_frontier ( . find context parent_hash >>| Breadcrumb . validated_transition ) validated_transition ( find_in_root_history context parent_hash ) parent_hash ~ f : Fn . const end ) end let prove ? length ~ frontier state_hash = let open Option . Let_syntax in let % map requested_transition = Option . merge Transition_frontier ( . find frontier state_hash >>| Breadcrumb . validated_transition ) validated_transition ( find_in_root_history frontier state_hash ) state_hash ~ f : Fn . const in let first_transition , merkle_list = Merkle_list . prove ? length ~ context : frontier requested_transition in ( Mina_block . Validated . state_hash first_transition , merkle_list ) merkle_list end module Transition_frontier = Transition_frontier end ) end
(** [global_max_length genesis_constants] is the maximum frontier length,
    i.e. the consensus constant [k] from the protocol's genesis constants.
    (Fixed a duplicated [t] token left by a formatting/extraction pass.) *)
let global_max_length (genesis_constants : Genesis_constants.t) =
  genesis_constants.protocol.k
(* Process-global FIFO accumulating blocks that failed validation. *)
let rejected_blocks = Queue.create ()
(* Process-global FIFO accumulating blocks that passed validation. *)
let validated_blocks = Queue.create ()
(** The transition frontier: the in-memory [full_frontier] plus the
    persistence handles ([persistent_root]/[persistent_frontier] factories and
    their open instances), catchup bookkeeping, extensions, and the [closed]
    ivar filled once {!close} has completed. *)
type t =
  { logger : Logger.t
  ; verifier : Verifier.t
  ; consensus_local_state : Consensus.Data.Local_state.t
  ; catchup_tree : Catchup_tree.t
  ; full_frontier : Full_frontier.t
  ; persistent_root : Persistent_root.t
  ; persistent_root_instance : Persistent_root.Instance.t
  ; persistent_frontier : Persistent_frontier.t
  ; persistent_frontier_instance : Persistent_frontier.Instance.t
  ; extensions : Extensions.t
  ; genesis_state_hash : State_hash.t
  ; closed : unit Ivar.t
  }
(* Accessor for the frontier's catchup bookkeeping tree. *)
let catchup_tree { catchup_tree; _ } = catchup_tree
(* Structured log event emitted when a breadcrumb's user commands are logged.
   (Removed a duplicated [register_event] token left by a formatting pass.) *)
type Structured_log_events.t += Added_breadcrumb_user_commands
  [@@deriving register_event]
(* Structured log event carrying the JSON diffs about to be applied to the
   full frontier. The [msg] template interpolates the [diffs] payload.
   (Normalized the msg string, which had spaces injected by a formatting
   pass; upstream text is "Applying diffs: $diffs".) *)
type Structured_log_events.t += Applying_diffs of { diffs : Yojson.Safe.t list }
  [@@deriving register_event { msg = "Applying diffs: $diffs" }]
(** [genesis_root_data ~precomputed_values] builds the limited root data for
    the genesis block: the lifted genesis transition, an empty scan state, an
    empty pending-coinbase collection at the configured depth, and no extra
    protocol states. Used when (re)initializing the persistent frontier.
    (De-duplicated tokens introduced by a formatting/extraction pass.) *)
let genesis_root_data ~precomputed_values =
  let transition =
    External_transition.Validated.lift @@ Mina_block.Validated.lift
    @@ Mina_block.genesis ~precomputed_values
  in
  let constraint_constants = precomputed_values.constraint_constants in
  let scan_state = Staged_ledger.Scan_state.empty ~constraint_constants () in
  let protocol_states = [] in
  let pending_coinbase =
    Or_error.ok_exn
      (Pending_coinbase.create
         ~depth:constraint_constants.pending_coinbase_depth () )
  in
  Root_data.Limited.create ~transition ~scan_state ~pending_coinbase
    ~protocol_states
(** Load the frontier from the persistent root/frontier instances and start
    the persistence sync job.

    Steps: read the persisted root identifier (failure here is a programmer
    error — the identifier should have been written already), fast-forward the
    persistent frontier to it, load the full frontier and extensions from
    disk, then start the background sync. Returns the assembled {!t}.
    Errors: [`Bootstrap_required] if fast-forward says so, [`Failure _] on any
    other problem (including a sync job already running).
    (De-duplicated tokens and repaired a quote/paren swap in one error string,
    both introduced by a formatting/extraction pass.) *)
let load_from_persistence_and_start ~logger ~verifier ~consensus_local_state
    ~max_length ~persistent_root ~persistent_root_instance ~persistent_frontier
    ~persistent_frontier_instance ~precomputed_values ~catchup_mode
    ignore_consensus_local_state =
  let open Deferred.Result.Let_syntax in
  let root_identifier =
    match
      Persistent_root.Instance.load_root_identifier persistent_root_instance
    with
    | Some root_identifier ->
        root_identifier
    | None ->
        failwith
          "no persistent root identifier found (should have been written \
           already)"
  in
  let%bind () =
    Deferred.return
      ( match
          Persistent_frontier.Instance.fast_forward
            persistent_frontier_instance root_identifier
        with
      | Ok () ->
          [%log info] "Fast forward successful" ;
          Ok ()
      | Error `Sync_cannot_be_running ->
          Error (`Failure "sync job is already running on persistent frontier")
      | Error `Bootstrap_required ->
          Error `Bootstrap_required
      | Error (`Failure msg) ->
          [%log fatal]
            ~metadata:
              [ ("target_root", Root_identifier.to_yojson root_identifier) ]
            "Unable to fast forward persistent frontier: %s" msg ;
          Error (`Failure msg) )
  in
  let%bind full_frontier, extensions =
    Deferred.map
      (Persistent_frontier.Instance.load_full_frontier
         persistent_frontier_instance ~max_length
         ~root_ledger:
           (Persistent_root.Instance.snarked_ledger persistent_root_instance)
         ~consensus_local_state ~ignore_consensus_local_state
         ~precomputed_values ~persistent_root_instance )
      ~f:
        (Result.map_error ~f:(function
          | `Sync_cannot_be_running ->
              `Failure "sync job is already running on persistent frontier"
          | `Failure _ as err ->
              err ) )
  in
  [%log info] "Loaded full frontier and extensions" ;
  let%map () =
    Deferred.return
      ( Persistent_frontier.Instance.start_sync
          ~constraint_constants:precomputed_values.constraint_constants
          ~persistent_root_instance persistent_frontier_instance
      |> Result.map_error ~f:(function
           | `Sync_cannot_be_running ->
               `Failure "sync job is already running on persistent frontier"
           | `Not_found _ as err ->
               `Failure
                 (Persistent_frontier.Database.Error.not_found_message err) ) )
  in
  { logger
  ; catchup_tree =
      Catchup_tree.create catchup_mode ~root:(Full_frontier.root full_frontier)
  ; verifier
  ; consensus_local_state
  ; full_frontier
  ; persistent_root
  ; persistent_root_instance
  ; persistent_frontier
  ; persistent_frontier_instance
  ; extensions
  ; closed = Ivar.create ()
  ; genesis_state_hash =
      (Precomputed_values.genesis_state_hashes precomputed_values).state_hash
  }
(** Load (or rebuild) the transition frontier with an explicit [max_length].

    Checks the persistent frontier database; if it is missing, outdated, or
    built from a different genesis state, resets both persistent root and
    frontier to genesis and retries. If the database is corrupt, destroys and
    recreates it once (guarded by [retry_with_fresh_db], which is cleared on
    the recursive call so this cannot loop). On a healthy database, loads and
    starts the frontier via {!load_from_persistence_and_start}, falling back
    to a reset on [`Failure _] when retries are permitted.
    (De-duplicated tokens and repaired `~metadata[:` spacing swaps introduced
    by a formatting/extraction pass.) *)
let rec load_with_max_length :
       max_length:int
    -> ?retry_with_fresh_db:bool
    -> logger:Logger.t
    -> verifier:Verifier.t
    -> consensus_local_state:Consensus.Data.Local_state.t
    -> persistent_root:Persistent_root.t
    -> persistent_frontier:Persistent_frontier.t
    -> precomputed_values:Precomputed_values.t
    -> catchup_mode:[ `Normal | `Super ]
    -> unit
    -> ( t
       , [> `Bootstrap_required
         | `Persistent_frontier_malformed
         | `Failure of string ] )
       Deferred.Result.t =
 fun ~max_length ?(retry_with_fresh_db = true) ~logger ~verifier
     ~consensus_local_state ~persistent_root ~persistent_frontier
     ~precomputed_values ~catchup_mode () ->
  let open Deferred.Let_syntax in
  (* Open the persistent root and hand both instances to
     [load_from_persistence_and_start]; on any failure, tear down whatever was
     opened so no instance leaks. *)
  let continue persistent_frontier_instance ~ignore_consensus_local_state
      ~snarked_ledger_hash =
    match
      Persistent_root.load_from_disk_exn persistent_root ~snarked_ledger_hash
        ~logger
    with
    | Error _ as err ->
        let%map () =
          Persistent_frontier.Instance.destroy persistent_frontier_instance
        in
        err
    | Ok persistent_root_instance -> (
        match%bind
          load_from_persistence_and_start ~logger ~verifier
            ~consensus_local_state ~max_length ~persistent_root
            ~persistent_root_instance ~catchup_mode ~persistent_frontier
            ~persistent_frontier_instance ~precomputed_values
            ignore_consensus_local_state
        with
        | Ok _ as result ->
            return result
        | Error _ as err ->
            let%map () =
              Persistent_frontier.Instance.destroy persistent_frontier_instance
            in
            Persistent_root.Instance.close persistent_root_instance ;
            err )
  in
  let persistent_frontier_instance =
    Persistent_frontier.create_instance_exn persistent_frontier
  in
  (* Wipe both persistence layers back to genesis, then continue with a fresh
     frontier instance. *)
  let reset_and_continue ?(destroy_frontier_instance = true) () =
    let%bind () =
      if destroy_frontier_instance then
        Persistent_frontier.Instance.destroy persistent_frontier_instance
      else return ()
    in
    let%bind () =
      Persistent_frontier.reset_database_exn persistent_frontier
        ~root_data:(genesis_root_data ~precomputed_values)
        ~genesis_state_hash:
          (State_hash.With_state_hashes.state_hash
             precomputed_values.protocol_state_with_hashes )
    in
    Persistent_root.reset_to_genesis_exn persistent_root ~precomputed_values ;
    let genesis_ledger_hash =
      Precomputed_values.genesis_ledger precomputed_values
      |> Lazy.force |> Ledger.merkle_root
      |> Frozen_ledger_hash.of_ledger_hash
    in
    continue
      (Persistent_frontier.create_instance_exn persistent_frontier)
      ~ignore_consensus_local_state:false
      ~snarked_ledger_hash:genesis_ledger_hash
  in
  match
    Persistent_frontier.Instance.check_database
      ~genesis_state_hash:
        (State_hash.With_state_hashes.state_hash
           precomputed_values.protocol_state_with_hashes )
      persistent_frontier_instance
  with
  | Error `Not_initialized ->
      [%log info] "persistent frontier database does not exist" ;
      reset_and_continue ()
  | Error `Invalid_version ->
      [%log info] "persistent frontier database out of date" ;
      reset_and_continue ()
  | Error (`Genesis_state_mismatch persisted_genesis_state_hash) ->
      [%log info]
        "Genesis state in persisted frontier $persisted_state_hash differs \
         from the current genesis state $precomputed_state_hash"
        ~metadata:
          [ ( "persisted_state_hash"
            , State_hash.to_yojson persisted_genesis_state_hash )
          ; ( "precomputed_state_hash"
            , State_hash.to_yojson
                (State_hash.With_state_hashes.state_hash
                   precomputed_values.protocol_state_with_hashes ) )
          ] ;
      reset_and_continue ()
  | Error (`Corrupt err) ->
      [%log error] "Persistent frontier database is corrupt: %s"
        (Persistent_frontier.Database.Error.message err) ;
      if retry_with_fresh_db then (
        [%log info] "destroying old persistent frontier database" ;
        let%bind () =
          Persistent_frontier.Instance.destroy persistent_frontier_instance
        in
        let%bind () =
          Persistent_frontier.destroy_database_exn persistent_frontier
        in
        (* Exactly one retry: [retry_with_fresh_db] is false below. *)
        load_with_max_length ~max_length ~logger ~verifier
          ~consensus_local_state ~persistent_root ~persistent_frontier
          ~retry_with_fresh_db:false ~precomputed_values ~catchup_mode ()
        >>| Result.map_error ~f:(function
              | `Persistent_frontier_malformed ->
                  `Failure
                    "failed to destroy and create new persistent frontier \
                     database"
              | err ->
                  err ) )
      else return (Error `Persistent_frontier_malformed)
  | Ok snarked_ledger_hash -> (
      match%bind
        continue persistent_frontier_instance
          ~ignore_consensus_local_state:true ~snarked_ledger_hash
      with
      | Error (`Failure err) when retry_with_fresh_db ->
          [%log error]
            "Failed to initialize transition frontier: $err. Destroying old \
             persistent frontier database and retrying."
            ~metadata:[ ("err", `String err) ] ;
          reset_and_continue ~destroy_frontier_instance:false ()
      | res ->
          return res )
(** Public entry point: derive [max_length] from the genesis constants and
    delegate to {!load_with_max_length}.
    (Removed a duplicated [true] token left by a formatting pass.) *)
let load ?(retry_with_fresh_db = true) ~logger ~verifier ~consensus_local_state
    ~persistent_root ~persistent_frontier ~precomputed_values ~catchup_mode ()
    =
  let max_length =
    global_max_length (Precomputed_values.genesis_constants precomputed_values)
  in
  load_with_max_length ~max_length ~retry_with_fresh_db ~logger ~verifier
    ~consensus_local_state ~persistent_root ~persistent_frontier
    ~precomputed_values ~catchup_mode ()
(** Shut the frontier down: close the full frontier and extensions, destroy
    the persistent frontier instance, close the persistent root instance, and
    fill [closed]. The factories ([persistent_root]/[persistent_frontier])
    are deliberately left alone — only the open instances are released.
    (De-duplicated tokens left by a formatting pass.) *)
let close ~loc
    { logger
    ; verifier = _
    ; consensus_local_state = _
    ; catchup_tree = _
    ; full_frontier
    ; persistent_root = _safe_to_ignore_1
    ; persistent_root_instance
    ; persistent_frontier = _safe_to_ignore_2
    ; persistent_frontier_instance
    ; extensions
    ; closed
    ; genesis_state_hash = _
    } =
  [%log debug] "Closing transition frontier" ;
  Full_frontier.close ~loc full_frontier ;
  Extensions.close extensions ;
  let%map () =
    Persistent_frontier.Instance.destroy persistent_frontier_instance
  in
  Persistent_root.Instance.close persistent_root_instance ;
  Ivar.fill_if_empty closed ()
(* Deferred that becomes determined once {!close} has finished. *)
let closed { closed; _ } = Ivar.read closed
(* Accessor for the persistent-root factory handle. *)
let persistent_root t = t.persistent_root
(* Accessor for the persistent-frontier factory handle. *)
let persistent_frontier t = t.persistent_frontier
(* Accessor for the frontier extensions. *)
let extensions t = t.extensions
(* Accessor for the genesis state hash this frontier was built from. *)
let genesis_state_hash t = t.genesis_state_hash
(* Snarked ledger backing the frontier's persistent root instance. *)
let root_snarked_ledger t =
  Persistent_root.Instance.snarked_ledger t.persistent_root_instance
(* [add_breadcrumb_exn t breadcrumb]: compute the full-frontier diffs for the
   new breadcrumb, apply them to the catchup tree and full frontier (possibly
   moving the root, whose new identifier is persisted), log PRE/POST best-tip
   state, emit the Added_breadcrumb_user_commands structured event with the
   breadcrumb's user commands, forward lite diffs to the persistence sync job
   (raising if the sync job is not running), and finally notify extensions
   with the diffs-with-mutants. The diff application order (catchup tree ->
   full frontier -> persistence -> extensions) is load-bearing.
   The tail of this span defines [proxy1] and thin proxies delegating
   Full_frontier queries to [t.full_frontier].
   NOTE(review): this text appears token-corrupted (duplicated identifiers
   after closing parens, "~f(:" swaps, and a trailing "end" whose opener is
   not visible in this chunk). Left byte-identical; compare with upstream
   transition_frontier.ml before editing. *)
let add_breadcrumb_exn t breadcrumb = let open Deferred . Let_syntax in let diffs = Full_frontier . calculate_diffs t . full_frontier breadcrumb in [ % log ' trace t . logger ] logger ~ metadata : [ ( " state_hash " , State_hash . to_yojson ( Breadcrumb . state_hash ( Full_frontier . best_tip t . full_frontier ) full_frontier ) full_frontier ) ; ( " n " , ` Int ( List . length @@ Full_frontier . all_breadcrumbs t . full_frontier ) full_frontier ) ] " PRE : ( $ state_hash , $ n ) n " ; [ % str_log ' trace t . logger ] logger ( Applying_diffs { diffs = List . map ~ f : Diff . Full . E . to_yojson diffs } ) ; Catchup_tree . apply_diffs t . catchup_tree diffs ; let ( ` New_root_and_diffs_with_mutants ( new_root_identifier , diffs_with_mutants ) diffs_with_mutants ) = Full_frontier . apply_diffs t . full_frontier diffs ~ has_long_catchup_job : ( Catchup_tree . max_catchup_chain_length t . catchup_tree > 5 ) 5 ~ enable_epoch_ledger_sync ( ` : Enabled ( root_snarked_ledger t ) t ) t in Option . iter new_root_identifier ~ f ( : Persistent_root . Instance . set_root_identifier t . persistent_root_instance ) persistent_root_instance ; [ % log ' trace t . logger ] logger ~ metadata : [ ( " state_hash " , State_hash . to_yojson ( Breadcrumb . state_hash @@ Full_frontier . best_tip t . full_frontier ) full_frontier ) ; ( " n " , ` Int ( List . length @@ Full_frontier . all_breadcrumbs t . full_frontier ) full_frontier ) ] " POST : ( $ state_hash , $ n ) n " ; let user_cmds = Mina_block . Validated . valid_commands @@ Breadcrumb . validated_transition breadcrumb in [ % str_log ' trace t . logger ] logger Added_breadcrumb_user_commands ~ metadata : [ ( " user_commands " , ` List ( List . map user_cmds ~ f ( : With_status . to_yojson User_command . Valid . to_yojson ) to_yojson ) ) ; ( " state_hash " , State_hash . to_yojson ( Breadcrumb . state_hash breadcrumb ) breadcrumb ) breadcrumb ] ; let lite_diffs = List . map diffs ~ f : Diff ( . fun ( Full . E . E diff ) diff -> Lite . 
E . E ( to_lite diff ) diff ) diff in let % bind sync_result = Persistent_frontier . Instance . notify_sync t . persistent_frontier_instance ~ diffs : lite_diffs in sync_result |> Result . map_error ~ f ( : fun ` Sync_must_be_running -> Failure " Cannot add breadcrumb because persistent frontier sync job is not \ running , which indicates that transition frontier initialization \ has not been performed correctly " ) |> Result . ok_exn ; Extensions . notify t . extensions ~ frontier : t . full_frontier ~ diffs_with_mutants open Full_frontier let proxy1 f { full_frontier ; _ } = f full_frontier let max_length = proxy1 max_length let consensus_local_state = proxy1 consensus_local_state let all_breadcrumbs = proxy1 all_breadcrumbs let visualize ~ filename = proxy1 ( visualize ~ filename ) filename let visualize_to_string = proxy1 visualize_to_string let iter = proxy1 iter let common_ancestor = proxy1 common_ancestor let successors = proxy1 successors let successors_rec = proxy1 successors_rec let successor_hashes = proxy1 successor_hashes let successor_hashes_rec = proxy1 successor_hashes_rec let hash_path = proxy1 hash_path let best_tip = proxy1 best_tip let root = proxy1 root let find = proxy1 find let precomputed_values = proxy1 precomputed_values let genesis_constants = proxy1 genesis_constants let find_exn = proxy1 find_exn let root_length = proxy1 root_length let best_tip_path ? max_length = proxy1 ( best_tip_path ? max_length ) max_length let best_tip_path_length_exn = proxy1 best_tip_path_length_exn let find_protocol_state = proxy1 find_protocol_state let path_map ? max_length { full_frontier ; _ } breadcrumb ~ f = path_map ? max_length full_frontier breadcrumb ~ f end
(* Test-only helpers for building frontiers:
   - [gen_genesis_breadcrumb]: quickcheck generator producing the genesis
     breadcrumb by reconstructing the genesis staged ledger from an empty scan
     state and pending coinbases over the genesis ledger.
   - [gen_persistence]: generator allocating throw-away on-disk persistent
     root/frontier under /tmp, with GC finalizers that close any open
     database handles and `rm -rf` the temp directory exactly once.
   - [gen]: generator for a whole frontier of [size] breadcrumbs — builds a
     rose tree of breadcrumbs, seeds the persistent layers from the root
     data, loads via [load_with_max_length] (retry disabled), then adds each
     branch breadcrumb sequentially; also installs a finalizer closing the
     consensus local state's epoch-ledger snapshots.
   - [gen_with_branch]: a frontier plus a detached branch grown from
     [get_branch_root].
   NOTE(review): this text appears token-corrupted (duplicated identifiers
   after closing parens, "~f(:" and ":(" swaps). Restyling would risk
   changing behavior, so it is left byte-identical with this summary only;
   compare with upstream transition_frontier.ml before editing. *)
module For_tests = struct open Signature_lib module Ledger_transfer = Mina_ledger . Ledger_transfer . Make ( Mina_ledger . Ledger ) Ledger ( Mina_ledger . Ledger . Db ) Db open Full_frontier . For_tests let proxy2 f { full_frontier = x ; _ } { full_frontier = y ; _ } = f x y let equal = proxy2 equal let load_with_max_length = load_with_max_length let rec deferred_rose_tree_iter ( Rose_tree . T ( root , trees ) trees ) trees ~ f = let % bind ( ) = f root in Deferred . List . iter trees ~ f ( : deferred_rose_tree_iter ~ f ) f let gen_genesis_breadcrumb ( ? logger = Logger . null ( ) ) ~ verifier ( ~ precomputed_values : Precomputed_values . t ) t ( ) = let constraint_constants = precomputed_values . constraint_constants in Quickcheck . Generator . create ( fun ~ size : _ ~ random : _ -> let transition_receipt_time = Some ( Time . now ( ) ) in Protocol_version ( . set_current zero ) zero ; let genesis_transition = Mina_block . Validated . lift ( Mina_block . genesis ~ precomputed_values ) precomputed_values in let genesis_ledger = Lazy . force ( Precomputed_values . genesis_ledger precomputed_values ) precomputed_values in let get_state hash = Or_error . errorf " ! Protocol state ( for scan state transactions ) transactions for \ { % sexp : State_hash . t } t not found " hash in let genesis_staged_ledger = Or_error . ok_exn ( Async . Thread_safe . block_on_async_exn ( fun ( ) -> Staged_ledger . of_scan_state_pending_coinbases_and_snarked_ledger ~ logger ~ verifier ~ constraint_constants ~ scan_state : ( Staged_ledger . Scan_state . empty ~ constraint_constants ( ) ) ~ get_state ~ pending_coinbases : ( Or_error . ok_exn @@ Pending_coinbase . create ~ depth : constraint_constants . pending_coinbase_depth ( ) ) ~ snarked_ledger : genesis_ledger ~ snarked_local_state ( : Mina_state . Local_state . empty ( ) ) ~ expected_merkle_root ( : Ledger . merkle_root genesis_ledger ) genesis_ledger ) ) in Breadcrumb . 
create ~ validated_transition : genesis_transition ~ staged_ledger : genesis_staged_ledger ~ just_emitted_a_proof : false ~ transition_receipt_time ) let gen_persistence ( ? logger = Logger . null ( ) ) ~ verifier ( ~ precomputed_values : Precomputed_values . t ) t ( ) = let open Core in let root_dir = " / tmp / coda_unit_test " in Quickcheck . Generator . create ( fun ~ size : _ ~ random : _ -> let uuid = Uuid_unix . create ( ) in let temp_dir = root_dir ^/ Uuid . to_string uuid in let root_dir = temp_dir ^/ " root " in let frontier_dir = temp_dir ^/ " frontier " in let cleaned = ref false in let clean_temp_dirs _ = if not ! cleaned then ( let process_info = Unix . create_process ~ prog " : rm " ~ args [ : " - rf " ; temp_dir ] in Unix . waitpid process_info . pid |> Result . map_error ~ f ( : function | ` Exit_non_zero n -> Printf . sprintf " error ( exit code % d ) d " n | ` Signal _ -> " error ( received unexpected signal ) signal " ) |> Result . ok_or_failwith ; cleaned := true ) in Unix . mkdir_p temp_dir ; Unix . mkdir root_dir ; Unix . mkdir frontier_dir ; let persistent_root = Persistent_root . create ~ logger ~ directory : root_dir ~ ledger_depth : precomputed_values . constraint_constants . ledger_depth in let persistent_frontier = Persistent_frontier . create ~ logger ~ verifier ~ time_controller ( : Block_time . Controller . basic ~ logger ) logger ~ directory : frontier_dir in Gc . Expert . add_finalizer_exn persistent_root clean_temp_dirs ; Gc . Expert . add_finalizer_exn persistent_frontier ( fun x -> Option . iter persistent_frontier . Persistent_frontier . Factory_type . instance ~ f ( : fun instance -> Persistent_frontier . Database . close instance . db ) ; Option . iter persistent_root . Persistent_root . Factory_type . instance ~ f ( : fun instance -> Ledger . Db . close instance . 
snarked_ledger ) snarked_ledger ; clean_temp_dirs x ) ; ( persistent_root , persistent_frontier ) persistent_frontier ) let gen_genesis_breadcrumb_with_protocol_states ~ logger ~ verifier ~ precomputed_values ( ) = let open Quickcheck . Generator . Let_syntax in let % map root = gen_genesis_breadcrumb ~ logger ~ verifier ~ precomputed_values ( ) in let protocol_states = [ ] in ( root , protocol_states ) protocol_states let gen ( ? logger = Logger . null ( ) ) ~ verifier ? trust_system ? consensus_local_state ~ precomputed_values ( ? root_ledger_and_accounts = ( Lazy . force ( Precomputed_values . genesis_ledger precomputed_values ) precomputed_values , Lazy . force ( Precomputed_values . accounts precomputed_values ) precomputed_values ) ) ( ? gen_root_breadcrumb = gen_genesis_breadcrumb_with_protocol_states ~ logger ~ verifier ~ precomputed_values ( ) ) ~ max_length ~ size ( ? use_super_catchup : bool option ) option ( ) = let open Quickcheck . Generator . Let_syntax in let trust_system = Option . value trust_system ~ default ( : Trust_system . null ( ) ) in let epoch_ledger_location = Filename . temp_dir_name ^/ " epoch_ledger " ^ ( Uuid_unix . create ( ) |> Uuid . to_string ) to_string in let consensus_local_state = Option . value consensus_local_state ~ default : ( Consensus . Data . Local_state . create ~ genesis_ledger : ( Precomputed_values . genesis_ledger precomputed_values ) precomputed_values ~ genesis_epoch_data : precomputed_values . genesis_epoch_data ~ epoch_ledger_location Public_key . Compressed . Set . empty ~ ledger_depth : precomputed_values . constraint_constants . ledger_depth ~ genesis_state_hash : ( State_hash . With_state_hashes . state_hash precomputed_values . protocol_state_with_hashes ) ) in let root_snarked_ledger , root_ledger_accounts = root_ledger_and_accounts in let % bind root , branches , protocol_states = let % bind root , protocol_states = gen_root_breadcrumb in let % map ( Rose_tree . 
T ( root , branches ) branches ) branches = Quickcheck . Generator . with_size ~ size ( Quickcheck_lib . gen_imperative_rose_tree ( Quickcheck . Generator . return root ) root ( Breadcrumb . For_tests . gen_non_deferred ~ logger ~ precomputed_values ~ verifier ~ trust_system ~ accounts_with_secret_keys : root_ledger_accounts ) ) in ( root , branches , protocol_states ) protocol_states in let root_data = Root_data . Limited . create ~ transition : ( External_transition . Validated . lift @@ Breadcrumb . validated_transition root ) ~ scan_state ( : Breadcrumb . staged_ledger root |> Staged_ledger . scan_state ) scan_state ~ pending_coinbase : ( Breadcrumb . staged_ledger root |> Staged_ledger . pending_coinbase_collection ) ~ protocol_states in let % map persistent_root , persistent_frontier = gen_persistence ~ logger ~ precomputed_values ~ verifier ( ) in Async . Thread_safe . block_on_async_exn ( fun ( ) -> Persistent_frontier . reset_database_exn persistent_frontier ~ root_data ~ genesis_state_hash : ( State_hash . With_state_hashes . state_hash precomputed_values . protocol_state_with_hashes ) ) ; Persistent_root . with_instance_exn persistent_root ~ f ( : fun instance -> let transition = Root_data . Limited . transition root_data in Persistent_root . Instance . set_root_state_hash instance ( Mina_block . Validated . state_hash @@ External_transition . Validated . lower transition ) ; ignore @@ Ledger_transfer . transfer_accounts ~ src : root_snarked_ledger ~ dest ( : Persistent_root . Instance . snarked_ledger instance ) instance ) ; let frontier_result = Async . Thread_safe . 
block_on_async_exn ( fun ( ) -> load_with_max_length ~ max_length ~ retry_with_fresh_db : false ~ logger ~ verifier ~ consensus_local_state ~ persistent_root ~ catchup_mode : ( match use_super_catchup with | Some true -> ` Super | Some false -> ` Normal | None -> ` Normal ) ~ persistent_frontier ~ precomputed_values ( ) ) in let frontier = let fail msg = failwith ( " failed to load transition frontier : " ^ msg ) msg in match frontier_result with | Error ` Bootstrap_required -> fail " bootstrap required " | Error ` Persistent_frontier_malformed -> fail " persistent frontier malformed " | Error ` Snarked_ledger_mismatch -> fail " persistent frontier is out of sync with snarked ledger " | Error ( ` Failure msg ) msg -> fail msg | Ok frontier -> frontier in Async . Thread_safe . block_on_async_exn ( fun ( ) -> Deferred . List . iter ~ how ` : Sequential branches ~ f ( : deferred_rose_tree_iter ~ f ( : add_breadcrumb_exn frontier ) frontier ) frontier ) ; Core . Gc . Expert . add_finalizer_exn consensus_local_state ( fun consensus_local_state -> Consensus . Data . Local_state ( . Snapshot . Ledger_snapshot . close @@ staking_epoch_ledger consensus_local_state ) consensus_local_state ; Consensus . Data . Local_state ( . Snapshot . Ledger_snapshot . close @@ next_epoch_ledger consensus_local_state ) consensus_local_state ) ; frontier let gen_with_branch ? logger ~ verifier ? trust_system ? consensus_local_state ~ precomputed_values ( ? root_ledger_and_accounts = ( Lazy . force ( Precomputed_values . genesis_ledger precomputed_values ) precomputed_values , Lazy . force ( Precomputed_values . accounts precomputed_values ) precomputed_values ) ) ? gen_root_breadcrumb ( ? get_branch_root = root ) root ~ max_length ~ frontier_size ~ branch_size ( ? use_super_catchup : bool option ) option ( ) = let open Quickcheck . Generator . Let_syntax in let % bind frontier = gen ? logger ~ verifier ? trust_system ? use_super_catchup ? consensus_local_state ~ precomputed_values ? 
gen_root_breadcrumb ~ root_ledger_and_accounts ~ max_length ~ size : frontier_size ( ) in let % map make_branch = Breadcrumb . For_tests . gen_seq ? logger ~ precomputed_values ~ verifier ? trust_system ~ accounts_with_secret_keys ( : snd root_ledger_and_accounts ) root_ledger_and_accounts branch_size in let branch = Async . Thread_safe . block_on_async_exn ( fun ( ) -> make_branch ( get_branch_root frontier ) frontier ) in ( frontier , branch ) branch end
(** Validator stage of the transition handler: consumes incoming
    initial-valid blocks, registers them in the unprocessed-transition cache,
    and forwards cached valid transitions downstream. *)
module type Transition_handler_validator_intf = sig
  type unprocessed_transition_cache

  type transition_frontier

  val run :
       logger:Logger.t
    -> trust_system:Trust_system.t
    -> time_controller:Block_time.Controller.t
    -> frontier:transition_frontier
    -> transition_reader:
         Mina_block.initial_valid_block Envelope.Incoming.t
         Strict_pipe.Reader.t
    -> valid_transition_writer:
         ( ( Mina_block.initial_valid_block Envelope.Incoming.t
           , State_hash.t )
           Cached.t
         , Strict_pipe.drop_head Strict_pipe.buffered
         , unit )
         Strict_pipe.Writer.t
    -> unprocessed_transition_cache:unprocessed_transition_cache
    -> unit

  val validate_transition :
       logger:Logger.t
    -> frontier:transition_frontier
    -> unprocessed_transition_cache:unprocessed_transition_cache
    -> Mina_block.initial_valid_block Envelope.Incoming.t
    -> ( ( Mina_block.initial_valid_block Envelope.Incoming.t
         , State_hash.t )
         Cached.t
       , [> `In_frontier of State_hash.t
         | `In_process of State_hash.t Cache_lib.Intf.final_state
         | `Disconnected ] )
       Result.t
end
(** Builds breadcrumb subtrees from cached transitions rooted at
    [initial_hash] within the frontier.
    (Removed a spurious [t] token after the cached-pair type, left by a
    formatting/extraction pass.) *)
module type Breadcrumb_builder_intf = sig
  type transition_frontier

  type transition_frontier_breadcrumb

  val build_subtrees_of_breadcrumbs :
       logger:Logger.t
    -> verifier:Verifier.t
    -> trust_system:Trust_system.t
    -> frontier:transition_frontier
    -> initial_hash:State_hash.t
    -> ( Mina_block.initial_valid_block Envelope.Incoming.t
       , State_hash.t )
       Cached.t
       Rose_tree.t
       List.t
    -> ( transition_frontier_breadcrumb
       , State_hash.t )
       Cached.t
       Rose_tree.t
       List.t
       Deferred.Or_error.t
end
(** Processor stage of the transition handler: turns validated transitions
    into breadcrumbs, dispatches catchup jobs, consumes catchup results, and
    emits processed transitions tagged with their source.
    (Removed spurious [t] tokens after cached-pair types, left by a
    formatting/extraction pass.) *)
module type Transition_handler_processor_intf = sig
  type transition_frontier

  type transition_frontier_breadcrumb

  val run :
       logger:Logger.t
    -> verifier:Verifier.t
    -> trust_system:Trust_system.t
    -> time_controller:Block_time.Controller.t
    -> frontier:transition_frontier
    -> primary_transition_reader:
         ( Mina_block.initial_valid_block Envelope.Incoming.t
         , State_hash.t )
         Cached.t
         Strict_pipe.Reader.t
    -> producer_transition_reader:
         transition_frontier_breadcrumb Strict_pipe.Reader.t
    -> clean_up_catchup_scheduler:unit Ivar.t
    -> catchup_job_writer:
         ( State_hash.t
           * ( Mina_block.initial_valid_block Envelope.Incoming.t
             , State_hash.t )
             Cached.t
             Rose_tree.t
             list
         , Strict_pipe.crash Strict_pipe.buffered
         , unit )
         Strict_pipe.Writer.t
    -> catchup_breadcrumbs_reader:
         ( (transition_frontier_breadcrumb, State_hash.t) Cached.t Rose_tree.t
           list
           * [ `Ledger_catchup of unit Ivar.t | `Catchup_scheduler ] )
         Strict_pipe.Reader.t
    -> catchup_breadcrumbs_writer:
         ( (transition_frontier_breadcrumb, State_hash.t) Cached.t Rose_tree.t
           list
           * [ `Ledger_catchup of unit Ivar.t | `Catchup_scheduler ]
         , Strict_pipe.crash Strict_pipe.buffered
         , unit )
         Strict_pipe.Writer.t
    -> processed_transition_writer:
         ( [ `Transition of Mina_block.Validated.t ]
           * [ `Source of [ `Gossip | `Catchup | `Internal ] ]
         , Strict_pipe.crash Strict_pipe.buffered
         , unit )
         Strict_pipe.Writer.t
    -> unit
end
(** Cache of transitions that have been received but not yet processed.
    [register_exn] wraps an incoming block in a [Cached.t] keyed by its state
    hash. *)
module type Unprocessed_transition_cache_intf = sig
  type t

  val create : logger:Logger.t -> t

  val register_exn :
       t
    -> Mina_block.initial_valid_block Envelope.Incoming.t
    -> ( Mina_block.initial_valid_block Envelope.Incoming.t
       , State_hash.t )
       Cached.t
end
(** Aggregate interface bundling the transition handler's cache, breadcrumb
    builder, validator, and processor, all sharing the same frontier and
    breadcrumb types. *)
module type Transition_handler_intf = sig
  type transition_frontier

  type transition_frontier_breadcrumb

  module Unprocessed_transition_cache : Unprocessed_transition_cache_intf

  module Breadcrumb_builder :
    Breadcrumb_builder_intf
      with type transition_frontier := transition_frontier
       and type transition_frontier_breadcrumb := transition_frontier_breadcrumb

  module Validator :
    Transition_handler_validator_intf
      with type unprocessed_transition_cache := Unprocessed_transition_cache.t
       and type transition_frontier := transition_frontier

  module Processor :
    Transition_handler_processor_intf
      with type transition_frontier := transition_frontier
       and type transition_frontier_breadcrumb := transition_frontier_breadcrumb
end
(** Produces and checks merkle proofs that a best tip descends from a root:
    [prove] extracts a proof from the frontier; [verify] checks one received
    from a peer, yielding the initial-valid root and best tip. *)
module type Best_tip_prover_intf = sig
  type transition_frontier

  val prove :
       logger:Logger.t
    -> transition_frontier
    -> ( Mina_block.t State_hash.With_state_hashes.t
       , State_body_hash.t list * Mina_block.t )
       Proof_carrying_data.t
       option

  val verify :
       verifier:Verifier.t
    -> genesis_constants:Genesis_constants.t
    -> precomputed_values:Precomputed_values.t
    -> ( Mina_block.t
       , State_body_hash.t list * Mina_block.t )
       Proof_carrying_data.t
    -> ( [ `Root of Mina_block.initial_valid_block ]
       * [ `Best_tip of Mina_block.initial_valid_block ] )
       Deferred.Or_error.t
end
(** Best-tip prover variant driven by a consensus state (rather than a raw
    frontier reference), used when answering consensus queries from peers. *)
module type Consensus_best_tip_prover_intf = sig
  type transition_frontier

  val prove :
       logger:Logger.t
    -> consensus_constants:Consensus.Constants.t
    -> frontier:transition_frontier
    -> Consensus.Data.Consensus_state.Value.t State_hash.With_state_hashes.t
    -> ( Mina_block.t
       , State_body_hash.t list * Mina_block.t )
       Proof_carrying_data.t
       option

  val verify :
       logger:Logger.t
    -> verifier:Verifier.t
    -> consensus_constants:Consensus.Constants.t
    -> genesis_constants:Genesis_constants.t
    -> precomputed_values:Precomputed_values.t
    -> Consensus.Data.Consensus_state.Value.t State_hash.With_state_hashes.t
    -> ( Mina_block.t
       , State_body_hash.t list * Mina_block.t )
       Proof_carrying_data.t
    -> ( [ `Root of Mina_block.initial_valid_block ]
       * [ `Best_tip of Mina_block.initial_valid_block ] )
       Deferred.Or_error.t
end
(** Handlers for serving sync requests from peers out of the frontier:
    sync-ledger answers, staged-ledger aux data at a hash, transition chains,
    the best-tip path, and (via [Root]) consensus best-tip proofs. *)
module type Sync_handler_intf = sig
  type transition_frontier

  val answer_query :
       frontier:transition_frontier
    -> Ledger_hash.t
    -> Mina_ledger.Sync_ledger.Query.t Envelope.Incoming.t
    -> logger:Logger.t
    -> trust_system:Trust_system.t
    -> Mina_ledger.Sync_ledger.Answer.t option Deferred.t

  val get_staged_ledger_aux_and_pending_coinbases_at_hash :
       frontier:transition_frontier
    -> State_hash.t
    -> ( Staged_ledger.Scan_state.t
       * Ledger_hash.t
       * Pending_coinbase.t
       * Mina_state.Protocol_state.value list )
       Option.t

  val get_transition_chain :
       frontier:transition_frontier
    -> State_hash.t list
    -> Mina_block.t list option

  val best_tip_path : frontier:transition_frontier -> State_hash.t list

  module Root :
    Consensus_best_tip_prover_intf
      with type transition_frontier := transition_frontier
end
(** Proves a chain of transitions ending at a given state hash: returns the
    initial state hash paired with the state body hashes along the path, or
    [None] if the hash is unknown.
    (Removed a spurious [list] token after the tuple type — the result is a
    single pair, matching the [prove] implementation in this file — left by a
    formatting/extraction pass.) *)
module type Transition_chain_prover_intf = sig
  type transition_frontier

  val prove :
       ?length:int
    -> frontier:transition_frontier
    -> State_hash.t
    -> (State_hash.t * State_body_hash.t list) option
end
(** Bootstrap controller: syncs a node from scratch off the network and
    yields a fresh frontier plus the transitions collected while
    bootstrapping. *)
module type Bootstrap_controller_intf = sig
  type network

  type transition_frontier

  type persistent_root

  type persistent_frontier

  val run :
       logger:Logger.t
    -> trust_system:Trust_system.t
    -> verifier:Verifier.t
    -> network:network
    -> consensus_local_state:Consensus.Data.Local_state.t
    -> transition_reader:
         Mina_block.initial_valid_block Envelope.Incoming.t
         Strict_pipe.Reader.t
    -> persistent_root:persistent_root
    -> persistent_frontier:persistent_frontier
    -> initial_root_transition:Mina_block.Validated.t
    -> genesis_state_hash:State_hash.t
    -> genesis_ledger:Mina_ledger.Ledger.t Lazy.t
    -> genesis_constants:Genesis_constants.t
    -> ( transition_frontier
       * Mina_block.initial_valid_block Envelope.Incoming.t list )
       Deferred.t
end
(** Controller running the frontier's main loop: feeds collected and network
    transitions plus locally-produced breadcrumbs through the handler
    pipeline and emits validated blocks until [clear_reader] fires.
    (Repaired a [: `Clear] label mangled to "[ : ` Clear ]" by a
    formatting/extraction pass.) *)
module type Transition_frontier_controller_intf = sig
  type transition_frontier

  type breadcrumb

  type network

  val run :
       logger:Logger.t
    -> trust_system:Trust_system.t
    -> verifier:Verifier.t
    -> network:network
    -> time_controller:Block_time.Controller.t
    -> collected_transitions:
         Mina_block.initial_valid_block Envelope.Incoming.t list
    -> frontier:transition_frontier
    -> network_transition_reader:
         Mina_block.initial_valid_block Envelope.Incoming.t
         Strict_pipe.Reader.t
    -> producer_transition_reader:breadcrumb Strict_pipe.Reader.t
    -> clear_reader:[ `Clear ] Strict_pipe.Reader.t
    -> Mina_block.Validated.t Strict_pipe.Reader.t
end
(* Interface of the transition router: [run] orchestrates switching between
   bootstrap and frontier-controller phases, returning a reader of validated
   transitions tagged with their source (gossip / catchup / internal) plus an
   initialization-complete ivar.
   NOTE(review): `catchup_mode [ : ` Normal | ` Super ]` and
   `notify_online ( : unit -> unit Deferred . t ) t` are mangled label syntax
   with a duplicated trailing token — confirm against upstream. *)
module type Transition_router_intf = sig type transition_frontier type transition_frontier_persistent_root type transition_frontier_persistent_frontier type breadcrumb type network val run : logger : Logger . t -> trust_system : Trust_system . t -> verifier : Verifier . t -> network : network -> is_seed : bool -> is_demo_mode : bool -> time_controller : Block_time . Controller . t -> consensus_local_state : Consensus . Data . Local_state . t -> persistent_root_location : string -> persistent_frontier_location : string -> frontier_broadcast_pipe : transition_frontier option Pipe_lib . Broadcast_pipe . Reader . t * transition_frontier option Pipe_lib . Broadcast_pipe . Writer . t -> network_transition_reader : ( [ ` Transition of Mina_block . t Envelope . Incoming . t ] * [ ` Time_received of Block_time . t ] * [ ` Valid_cb of Mina_net2 . Validation_callback . t ] ) Strict_pipe . Reader . t -> producer_transition_reader : breadcrumb Strict_pipe . Reader . t -> most_recent_valid_block : Mina_block . initial_valid_block Broadcast_pipe . Reader . t * Mina_block . initial_valid_block Broadcast_pipe . Writer . t -> precomputed_values : Precomputed_values . t -> catchup_mode [ : ` Normal | ` Super ] -> notify_online ( : unit -> unit Deferred . t ) t -> ( [ ` Transition of Mina_block . Validated . t ] * [ ` Source of [ ` Gossip | ` Catchup | ` Internal ] ] * [ ` Valid_cb of Mina_net2 . Validation_callback . t option ] ) Strict_pipe . Reader . t * unit Ivar . t end
(* Transition frontier controller main loop. Wires together:
   - a bounded "valid transitions" pipe (Drop_head on overflow, with metrics
     and dropped-transition handling via [f_drop_head]);
   - a "primary transitions" pipe sized to absorb [collected_transitions]
     replayed from bootstrap, which are registered in the
     [unprocessed_transition_cache] and written in up front;
   - crash-on-overflow pipes for processed transitions, catchup jobs, and
     catchup breadcrumbs;
   - a watcher on the New_breadcrumbs extension that records the initial
     catchup time metric once any of the initially-collected state hashes
     lands in the frontier;
   - the Validator, Processor, and Ledger_catchup components;
   - a [clear_reader] handler that kills every writer and fills
     [clean_up_catchup_scheduler] (logging if already filled).
   Returns [processed_transition_reader].
   NOTE(review): these lines carry heavy extraction artifacts — duplicated
   tokens after closing parens (e.g. `vc ) vc`, `Crash ) Crash ) Crash`,
   `~ name " : processed transitions "`). Do not compile as-is; reconcile with
   the upstream source. *)
let run ~ logger ~ trust_system ~ verifier ~ network ~ time_controller ~ collected_transitions ~ frontier ~ network_transition_reader ~ producer_transition_reader ~ clear_reader ~ precomputed_values = let valid_transition_pipe_capacity = 50 in let start_time = Time . now ( ) in let f_drop_head name head valid_cb = let block : Mina_block . initial_valid_block = Network_peer . Envelope . Incoming . data @@ Cache_lib . Cached . peek head in Mina_block . handle_dropped_transition ( Validation . block_with_hash block |> With_hash . hash ) hash ? valid_cb ~ pipe_name : name ~ logger in let valid_transition_reader , valid_transition_writer = let name = " valid transitions " in Strict_pipe . create ~ name ( Buffered ( ` Capacity valid_transition_pipe_capacity , ` Overflow ( Drop_head ( fun ( ` Block head , ` Valid_cb vc ) vc -> Mina_metrics ( . Counter . inc_one Pipe . Drop_on_overflow . transition_frontier_valid_transitions ) transition_frontier_valid_transitions ; f_drop_head name head vc ) ) ) ) in let primary_transition_pipe_capacity = valid_transition_pipe_capacity + List . length collected_transitions in let primary_transition_reader , primary_transition_writer = let name = " primary transitions " in Strict_pipe . create ~ name ( Buffered ( ` Capacity primary_transition_pipe_capacity , ` Overflow ( Drop_head ( fun ( ` Block head , ` Valid_cb vc ) vc -> Mina_metrics ( . Counter . inc_one Pipe . Drop_on_overflow . transition_frontier_primary_transitions ) transition_frontier_primary_transitions ; f_drop_head name head vc ) ) ) ) in let processed_transition_reader , processed_transition_writer = Strict_pipe . create ~ name " : processed transitions " ( Buffered ( ` Capacity 30 , ` Overflow Crash ) Crash ) Crash in let catchup_job_reader , catchup_job_writer = Strict_pipe . create ~ name " : catchup jobs " ( Buffered ( ` Capacity 30 , ` Overflow Crash ) Crash ) Crash in let catchup_breadcrumbs_reader , catchup_breadcrumbs_writer = Strict_pipe . 
create ~ name " : catchup breadcrumbs " ( Buffered ( ` Capacity 30 , ` Overflow Crash ) Crash ) Crash in let unprocessed_transition_cache = Transition_handler . Unprocessed_transition_cache . create ~ logger in List . iter collected_transitions ~ f ( : fun t -> let block_cached = Transition_handler . Unprocessed_transition_cache . register_exn unprocessed_transition_cache t in Strict_pipe . Writer . write primary_transition_writer ( ` Block block_cached , ` Valid_cb None ) None ) ; let initial_state_hashes = List . map collected_transitions ~ f ( : fun envelope -> Network_peer . Envelope . Incoming . data envelope |> Validation . block_with_hash |> Mina_base . State_hash . With_state_hashes . state_hash ) |> Mina_base . State_hash . Set . of_list in let extensions = Transition_frontier . extensions frontier in don ' t_wait_for @@ Pipe_lib . Broadcast_pipe . Reader . iter_until ( Transition_frontier . Extensions . get_view_pipe extensions New_breadcrumbs ) New_breadcrumbs ~ f ( : fun new_breadcrumbs -> let open Mina_base . State_hash in let new_state_hashes = List . map new_breadcrumbs ~ f : Transition_frontier . Breadcrumb . state_hash |> Set . of_list in if Set . is_empty @@ Set . inter initial_state_hashes new_state_hashes then Deferred . return false else ( Mina_metrics ( . Gauge . set Catchup . initial_catchup_time Time ( . Span . to_min @@ diff ( now ( ) ) start_time ) start_time ) start_time ; Deferred . return true ) ) ; Transition_handler . Validator . run ~ consensus_constants : ( Precomputed_values . consensus_constants precomputed_values ) precomputed_values ~ logger ~ trust_system ~ time_controller ~ frontier ~ transition_reader : network_transition_reader ~ valid_transition_writer ~ unprocessed_transition_cache ; Strict_pipe . Reader . iter_without_pushback valid_transition_reader ~ f ( : fun ( ` Block b , ` Valid_cb vc ) vc -> Strict_pipe . Writer . 
write primary_transition_writer ( ` Block b , ` Valid_cb vc ) vc ) |> don ' t_wait_for ; let clean_up_catchup_scheduler = Ivar . create ( ) in Transition_handler . Processor . run ~ logger ~ precomputed_values ~ time_controller ~ trust_system ~ verifier ~ frontier ~ primary_transition_reader ~ producer_transition_reader ~ clean_up_catchup_scheduler ~ catchup_job_writer ~ catchup_breadcrumbs_reader ~ catchup_breadcrumbs_writer ~ processed_transition_writer ; Ledger_catchup . run ~ logger ~ precomputed_values ~ trust_system ~ verifier ~ network ~ frontier ~ catchup_job_reader ~ catchup_breadcrumbs_writer ~ unprocessed_transition_cache ; Strict_pipe . Reader . iter_without_pushback clear_reader ~ f ( : fun _ -> let open Strict_pipe . Writer in kill valid_transition_writer ; kill primary_transition_writer ; kill processed_transition_writer ; kill catchup_job_writer ; kill catchup_breadcrumbs_writer ; if Ivar . is_full clean_up_catchup_scheduler then [ % log error ] error " Ivar . fill bug is here " ! ; Ivar . fill clean_up_catchup_scheduler ( ) ) |> don ' t_wait_for ; processed_transition_reader
( module struct let max_length = 5 module Stubs = Stubs . Make ( struct let max_length = max_length end ) end open Stubs let breadcrumbs_path = Transition_frontier . root_history_path_map ~ f : Fn . id let accounts_with_secret_keys = Test_genesis_ledger . accounts let create_root_frontier = create_root_frontier accounts_with_secret_keys let create_breadcrumbs ~ logger ~ pids ~ trust_system ~ size root = Deferred . all @@ Quickcheck . random_value ( gen_linear_breadcrumbs ~ logger ~ pids ~ trust_system ~ size ~ accounts_with_secret_keys root ) root let breadcrumb_trail_equals = List . equal Transition_frontier . Breadcrumb . equal let logger = Logger . null ( ) let hb_logger = Logger . create ( ) let pids = Child_processes . Termination . create_pid_table ( ) let trust_system = Trust_system . null ( ) let % test " If a transition does not exists in the transition_frontier or \ in the root_history , then we should not get an answer " = heartbeat_flag := true ; Async . Thread_safe . block_on_async_exn ( fun ( ) -> print_heartbeat hb_logger |> don ' t_wait_for ; let % bind frontier = create_root_frontier ~ logger ~ pids in let root = Transition_frontier . root frontier in let % bind breadcrumbs = create_breadcrumbs ~ logger ~ pids ~ trust_system ~ size : max_length root in let last_breadcrumb , breadcrumbs_to_add = let rev_breadcrumbs = List . rev breadcrumbs in ( List . hd_exn rev_breadcrumbs , List . rev @@ List . tl_exn rev_breadcrumbs ) in let % map ( ) = Deferred . List . iter breadcrumbs_to_add ~ f ( : fun breadcrumb -> Transition_frontier . add_breadcrumb_exn frontier breadcrumb ) in let res = Option . is_none ( breadcrumbs_path frontier @@ Transition_frontier . Breadcrumb . state_hash last_breadcrumb ) in heartbeat_flag := false ; res ) let % test " Query transition only from transition_frontier if the \ root_history is empty " = Backtrace . elide := false ; Async . Scheduler . set_record_backtraces true ; Async . Thread_safe . 
block_on_async_exn ( fun ( ) -> print_heartbeat hb_logger |> don ' t_wait_for ; let % bind frontier = create_root_frontier ~ logger ~ pids in let root = Transition_frontier . root frontier in let % bind breadcrumbs = create_breadcrumbs ~ logger ~ pids ~ trust_system ~ size : max_length root in let % map ( ) = Deferred . List . iter breadcrumbs ~ f ( : Transition_frontier . add_breadcrumb_exn frontier ) frontier in let random_index = Quickcheck . random_value ( Int . gen_incl 0 ( max_length - 1 ) 1 ) 1 in let random_breadcrumb = List ( . nth_exn breadcrumbs random_index ) random_index in let queried_breadcrumbs = breadcrumbs_path frontier @@ Transition_frontier . Breadcrumb . state_hash random_breadcrumb |> Option . value_exn |> Non_empty_list . to_list in assert ( Transition_frontier . For_tests . root_history_is_empty frontier ) frontier ; let expected_breadcrumbs = Transition_frontier . root frontier :: List . take breadcrumbs ( random_index + 1 ) 1 in heartbeat_flag := false ; breadcrumb_trail_equals expected_breadcrumbs queried_breadcrumbs ) let % test " Query transitions only from root_history " = heartbeat_flag := true ; Async . Thread_safe . block_on_async_exn ( fun ( ) -> print_heartbeat hb_logger |> don ' t_wait_for ; let % bind frontier = create_root_frontier ~ logger ~ pids in let root = Transition_frontier . root frontier in let query_index = 1 in let size = max_length + query_index + 2 in let % bind breadcrumbs = create_breadcrumbs ~ logger ~ pids ~ trust_system ~ size root in let % map ( ) = Deferred . List . iter breadcrumbs ~ f ( : fun breadcrumb -> Transition_frontier . add_breadcrumb_exn frontier breadcrumb ) in let query_breadcrumb = List . nth_exn breadcrumbs query_index in let expected_breadcrumbs = root :: List . take breadcrumbs ( query_index + 1 ) 1 in let query_hash = Transition_frontier . Breadcrumb . state_hash query_breadcrumb in assert ( Transition_frontier . For_tests . 
root_history_mem frontier query_hash ) ; heartbeat_flag := false ; List . equal Transition_frontier . Breadcrumb . equal expected_breadcrumbs ( breadcrumbs_path frontier query_hash |> Option . value_exn |> Non_empty_list . to_list ) ) let % test " moving the root removes the old root ' s non - heir children as \ garbage " = heartbeat_flag := true ; Async . Thread_safe . block_on_async_exn ( fun ( ) -> print_heartbeat hb_logger |> don ' t_wait_for ; let % bind frontier = create_root_frontier ~ logger ~ pids in let % bind ( ) = add_linear_breadcrumbs ~ logger ~ pids ~ trust_system ~ size : max_length ~ accounts_with_secret_keys ~ frontier ~ parent ( : Transition_frontier . root frontier ) frontier in let add_child = add_child ~ logger ~ trust_system ~ accounts_with_secret_keys ~ frontier in let % bind soon_garbage = add_child ~ parent ( : Transition_frontier . root frontier ) frontier in let % map _ = add_child ~ parent ( : Transition_frontier . best_tip frontier ) frontier ~ pids in let res = Transition_frontier ( . find frontier @@ Breadcrumb . state_hash soon_garbage ) soon_garbage |> Option . is_none in heartbeat_flag := false ; res ) let % test " Transitions get popped off from root history " = heartbeat_flag := true ; Async . Thread_safe . block_on_async_exn ( fun ( ) -> print_heartbeat hb_logger |> don ' t_wait_for ; let % bind frontier = create_root_frontier ~ logger ~ pids in let root = Transition_frontier . root frontier in let root_hash = Transition_frontier . Breadcrumb . state_hash root in let size = ( 3 * max_length ) max_length + 1 in let % map ( ) = build_frontier_randomly frontier ~ gen_root_breadcrumb_builder : ( gen_linear_breadcrumbs ~ logger ~ pids ~ trust_system ~ size ~ accounts_with_secret_keys ) accounts_with_secret_keys in assert ( not @@ Transition_frontier . For_tests . root_history_mem frontier root_hash ) ; let res = Transition_frontier . find frontier root_hash |> Option . 
is_empty in heartbeat_flag := false ; res ) let % test " Get transitions from both transition frontier and root history " = heartbeat_flag := true ; Async . Thread_safe . block_on_async_exn ( fun ( ) -> print_heartbeat hb_logger |> don ' t_wait_for ; let % bind frontier = create_root_frontier ~ logger ~ pids in let root = Transition_frontier . root frontier in let num_root_history_breadcrumbs = Quickcheck . random_value ( Int . gen_incl 1 ( 2 * max_length ) max_length ) max_length in let % bind root_history_breadcrumbs = create_breadcrumbs ~ logger ~ pids ~ trust_system ~ size : num_root_history_breadcrumbs root in let most_recent_breadcrumb_in_root_history_breadcrumb = List . last_exn root_history_breadcrumbs in let % bind transition_frontier_breadcrumbs = create_breadcrumbs ~ logger ~ pids ~ trust_system ~ size : max_length most_recent_breadcrumb_in_root_history_breadcrumb in let random_breadcrumb_index = Quickcheck . random_value ( Int . gen_incl 0 ( max_length - 1 ) 1 ) 1 in let random_breadcrumb_hash = Transition_frontier . Breadcrumb . state_hash ( List . nth_exn transition_frontier_breadcrumbs random_breadcrumb_index ) random_breadcrumb_index in let expected_breadcrumb_trail = ( root :: root_history_breadcrumbs ) root_history_breadcrumbs @ List . take transition_frontier_breadcrumbs ( random_breadcrumb_index + 1 ) 1 in let % map ( ) = Deferred . List . iter ( root_history_breadcrumbs @ transition_frontier_breadcrumbs ) transition_frontier_breadcrumbs ~ f ( : fun breadcrumb -> Transition_frontier . add_breadcrumb_exn frontier breadcrumb ) in let result = breadcrumbs_path frontier random_breadcrumb_hash |> Option . value_exn |> Non_empty_list . to_list in heartbeat_flag := false ; List . equal Transition_frontier . Breadcrumb . equal expected_breadcrumb_trail result ) end ) ) *
(* Structured log event emitted when the node enters the transition frontier
   controller (full-participation) phase; registered for structured logging
   via [register_event]. *)
type Structured_log_events . t += Starting_transition_frontier_controller [ @@ deriving register_event { msg = " Starting transition frontier controller phase " } ]
(* Structured log event emitted when the node enters the bootstrap
   (initial-sync) phase; registered for structured logging. *)
type Structured_log_events . t += Starting_bootstrap_controller [ @@ deriving register_event { msg = " Starting bootstrap controller phase " } ]
(* Creates a Strict_pipe buffered at capacity 50 that drops the oldest element
   on overflow, invoking [f] on each dropped element (used for metrics and
   dropped-transition bookkeeping). [?name] labels the pipe for diagnostics.
   NOTE(review): `( Drop_head f ) f ) f ) f` shows the duplicated-token
   extraction artifact; intended form is `(Drop_head f)`. Also note the
   misspelling "bufferred" is part of the public name — callers use it. *)
let create_bufferred_pipe ? name ~ f ( ) = Strict_pipe . create ? name ( Buffered ( ` Capacity 50 , ` Overflow ( Drop_head f ) f ) f ) f
(* Decides whether an incoming transition should trigger bootstrap instead of
   catchup: selects between the frontier root's consensus state and the
   candidate; on `Take, forces bootstrap when the candidate's blockchain
   length exceeds the best tip's by more than 290 + slack(5) blocks
   (too far ahead to catch up), otherwise defers to
   [Consensus.Hooks.should_bootstrap].
   NOTE(review): the 290 constant presumably relates to the frontier length
   bound (k) — confirm its provenance upstream; line also carries extraction
   artifacts. *)
let is_transition_for_bootstrap ~ logger ( ~ precomputed_values : Precomputed_values . t ) t frontier new_transition = let root_consensus_state = Transition_frontier . root frontier |> Transition_frontier . Breadcrumb . consensus_state_with_hashes in let new_consensus_state = Validation . block_with_hash new_transition |> With_hash . map ~ f : Mina_block . consensus_state in let constants = precomputed_values . consensus_constants in match Consensus . Hooks . select ~ constants ~ existing : root_consensus_state ~ candidate : new_consensus_state ~ logger with | ` Keep -> false | ` Take -> let slack = 5 in if Length . to_int ( Transition_frontier . best_tip frontier |> Transition_frontier . Breadcrumb . consensus_state |> Consensus . Data . Consensus_state . blockchain_length ) + 290 + slack < Length . to_int ( Consensus . Data . Consensus_state . blockchain_length new_consensus_state . data ) then true else Consensus . Hooks . should_bootstrap ~ constants ~ existing : root_consensus_state ~ candidate : new_consensus_state ~ logger : ( Logger . extend logger [ ( " selection_context " , ` String " Transition_router . is_transition_for_bootstrap " ) ] )
(* Switches the router into the frontier-controller phase: creates fresh
   transition and producer pipes (rebinding the shared refs so in-flight
   writers target the new phase), publishes [frontier] on [frontier_w],
   launches [Transition_frontier_controller.run] with the transitions
   collected during bootstrap, and forwards its verified transitions to
   [verified_transition_writer].
   NOTE(review): duplicated-token extraction artifacts throughout (e.g.
   `valid_cb ) valid_cb`, `( Some frontier ) frontier`). *)
let start_transition_frontier_controller ~ logger ~ trust_system ~ verifier ~ network ~ time_controller ~ producer_transition_reader_ref ~ producer_transition_writer_ref ~ verified_transition_writer ~ clear_reader ~ collected_transitions ~ transition_reader_ref ~ transition_writer_ref ~ frontier_w ~ precomputed_values frontier = [ % str_log info ] info Starting_transition_frontier_controller ; let ( transition_frontier_controller_reader , transition_frontier_controller_writer ) = let name = " transition frontier controller pipe " in create_bufferred_pipe ~ name ~ f ( : fun ( ` Block block , ` Valid_cb valid_cb ) valid_cb -> Mina_metrics ( . Counter . inc_one Pipe . Drop_on_overflow . router_transition_frontier_controller ) router_transition_frontier_controller ; Mina_block . handle_dropped_transition ( With_hash . hash @@ Validation . block_with_hash @@ Network_peer . Envelope . Incoming . data block ) ? valid_cb ~ pipe_name : name ~ logger ) ( ) in transition_reader_ref := transition_frontier_controller_reader ; transition_writer_ref := transition_frontier_controller_writer ; let producer_transition_reader , producer_transition_writer = Strict_pipe . create ~ name " : transition frontier : producer transition " Synchronous in producer_transition_reader_ref := producer_transition_reader ; producer_transition_writer_ref := producer_transition_writer ; Broadcast_pipe . Writer . write frontier_w ( Some frontier ) frontier |> don ' t_wait_for ; let new_verified_transition_reader = Transition_frontier_controller . run ~ logger ~ trust_system ~ verifier ~ network ~ time_controller ~ collected_transitions ~ frontier ~ network_transition_reader :! transition_reader_ref ~ producer_transition_reader ~ clear_reader ~ precomputed_values in Strict_pipe . Reader . iter new_verified_transition_reader ~ f : ( Fn . compose Deferred . return ( Strict_pipe . Writer . write verified_transition_writer ) verified_transition_writer ) |> don ' t_wait_for
(* Switches the router into the bootstrap phase: creates fresh bootstrap
   pipes (rebinding the shared refs), optionally seeds the bootstrap pipe
   with [best_seen_transition], clears the frontier broadcast pipe (None),
   runs [Bootstrap_controller.run], and on completion kills the bootstrap
   writer and hands the new frontier plus collected transitions to
   [start_transition_frontier_controller].
   NOTE(review): extraction artifacts present (duplicated tokens such as
   `None ) None`, `( fun ( new_frontier , collected_transitions )
   collected_transitions`). *)
let start_bootstrap_controller ~ logger ~ trust_system ~ verifier ~ network ~ time_controller ~ producer_transition_reader_ref ~ producer_transition_writer_ref ~ verified_transition_writer ~ clear_reader ~ transition_reader_ref ~ transition_writer_ref ~ consensus_local_state ~ frontier_w ~ initial_root_transition ~ persistent_root ~ persistent_frontier ~ best_seen_transition ~ precomputed_values ~ catchup_mode = [ % str_log info ] info Starting_bootstrap_controller ; [ % log info ] info " Starting Bootstrap Controller phase " ; let bootstrap_controller_reader , bootstrap_controller_writer = let name = " bootstrap controller pipe " in create_bufferred_pipe ~ name ~ f ( : fun ( ` Block head , ` Valid_cb valid_cb ) valid_cb -> Mina_metrics ( . Counter . inc_one Pipe . Drop_on_overflow . router_bootstrap_controller ) router_bootstrap_controller ; Mina_block . handle_dropped_transition ( With_hash . hash @@ Validation . block_with_hash @@ Network_peer . Envelope . Incoming . data head ) ~ pipe_name : name ~ logger ? valid_cb ) ( ) in transition_reader_ref := bootstrap_controller_reader ; transition_writer_ref := bootstrap_controller_writer ; let producer_transition_reader , producer_transition_writer = Strict_pipe . create ~ name " : bootstrap controller : producer transition " Synchronous in producer_transition_reader_ref := producer_transition_reader ; producer_transition_writer_ref := producer_transition_writer ; Option . iter best_seen_transition ~ f ( : fun block -> Strict_pipe . Writer . write bootstrap_controller_writer ( ` Block block , ` Valid_cb None ) None ) ; don ' t_wait_for ( Broadcast_pipe . Writer . write frontier_w None ) None ; upon ( Bootstrap_controller . run ~ logger ~ trust_system ~ verifier ~ network ~ consensus_local_state ~ transition_reader :! 
transition_reader_ref ~ persistent_frontier ~ persistent_root ~ initial_root_transition ~ best_seen_transition ~ precomputed_values ~ catchup_mode ) ( fun ( new_frontier , collected_transitions ) collected_transitions -> Strict_pipe . Writer . kill ! transition_writer_ref ; start_transition_frontier_controller ~ logger ~ trust_system ~ verifier ~ network ~ time_controller ~ producer_transition_reader_ref ~ producer_transition_writer_ref ~ verified_transition_writer ~ clear_reader ~ collected_transitions ~ transition_reader_ref ~ transition_writer_ref ~ frontier_w ~ precomputed_values new_frontier )
(* Queries up to 16 random peers for their best tip, verifies each returned
   proof with [Best_tip_prover.verify] (penalizing peers that send bad proofs
   via the trust system), selects the strongest candidate with
   [Consensus.Hooks.select], publishes it on
   [most_recent_valid_block_writer], records the max observed block length
   metric, caches it in [Ledger_catchup.Best_tip_lru], and returns the
   winning enveloped tip (or None). Calls [notify_online] once requests
   complete.
   NOTE(review): extraction artifacts — e.g. `of_min 1 ) .` (misplaced `.`
   from `1.`), duplicated tokens after parens. Timeouts appear to be 1 minute
   each for heartbeat and overall — confirm. *)
let download_best_tip ~ notify_online ~ logger ~ network ~ verifier ~ trust_system ~ most_recent_valid_block_writer ~ genesis_constants ~ precomputed_values = let num_peers = 16 in let % bind peers = Mina_networking . random_peers network num_peers in [ % log info ] info " Requesting peers for their best tip to do initialization " ; let % bind tips = Deferred . List . filter_map ~ how ` : Parallel peers ~ f ( : fun peer -> let open Deferred . Let_syntax in match % bind Mina_networking . get_best_tip ~ heartbeat_timeout ( : Time_ns . Span . of_min 1 ) . ~ timeout ( : Time . Span . of_min 1 ) . network peer with | Error e -> [ % log debug ] debug ~ metadata : [ ( " peer " , Network_peer . Peer . to_yojson peer ) peer ; ( " error " , Error_json . error_to_yojson e ) e ] " Couldn ' t get best tip from peer : $ error " ; return None | Ok peer_best_tip -> ( [ % log debug ] debug ~ metadata : [ ( " peer " , Network_peer . Peer . to_yojson peer ) peer ; ( " length " , Length . to_yojson ( Mina_block . blockchain_length peer_best_tip . data ) data ) ] " Successfully downloaded best tip with $ length from $ peer " ; match % bind Best_tip_prover . verify ~ verifier peer_best_tip ~ genesis_constants ~ precomputed_values with | Error e -> [ % log warn ] warn ~ metadata : [ ( " peer " , Network_peer . Peer . to_yojson peer ) peer ; ( " error " , Error_json . error_to_yojson e ) e ] " Peer sent us bad proof for their best tip " ; let % map ( ) = Trust_system ( . record trust_system logger peer Actions . ( Violated_protocol , Some ( " Peer sent us bad proof for their best tip " , [ ] ) ) ) in None | Ok ( ` Root _ , ` Best_tip candidate_best_tip ) candidate_best_tip -> [ % log debug ] debug ~ metadata [ : ( " peer " , Network_peer . Peer . to_yojson peer ) peer ] " Successfully verified best tip from $ peer " ; return ( Some ( Envelope . Incoming . 
wrap_peer ~ data { : peer_best_tip with data = candidate_best_tip } ~ sender : peer ) ) ) ) in [ % log debug ] debug ~ metadata : [ ( " actual " , ` Int ( List . length tips ) tips ) tips ; ( " expected " , ` Int num_peers ) num_peers ] " Finished requesting tips . Got $ actual / $ expected " ; let % map ( ) = notify_online ( ) in let res = List . fold tips ~ init : None ~ f ( : fun acc enveloped_candidate_best_tip -> Option . merge acc ( Option . return enveloped_candidate_best_tip ) enveloped_candidate_best_tip ~ f ( : fun enveloped_existing_best_tip enveloped_candidate_best_tip -> let f x = Validation . block_with_hash x |> With_hash . map ~ f : Mina_block . consensus_state in match Consensus . Hooks . select ~ constants : precomputed_values . consensus_constants ~ existing ( : f enveloped_existing_best_tip . data . data ) data ~ candidate ( : f enveloped_candidate_best_tip . data . data ) data ~ logger with | ` Keep -> enveloped_existing_best_tip | ` Take -> enveloped_candidate_best_tip ) ) in Option . iter res ~ f ( : fun best -> let best_tip_length = Validation . block best . data . data |> Mina_block . blockchain_length |> Length . to_int in Mina_metrics . Transition_frontier . update_max_blocklength_observed best_tip_length ; don ' t_wait_for @@ Broadcast_pipe . Writer . write most_recent_valid_block_writer best . data . data ) ; Option . map res ~ f : ( Envelope . Incoming . map ~ f ( : fun ( x : _ Proof_carrying_data . t ) t -> Ledger_catchup . Best_tip_lru . add x ; x . data ) )
(* Attempts to load the persisted transition frontier. Returns [Some frontier]
   on success; [None] when bootstrap is required (fast-forward unimplemented)
   or when the persistent database disagrees with the snarked ledger; fails
   hard on a malformed persistent frontier or any other load failure. *)
let load_frontier ~ logger ~ verifier ~ persistent_frontier ~ persistent_root ~ consensus_local_state ~ precomputed_values ~ catchup_mode = match % map Transition_frontier . load ~ logger ~ verifier ~ consensus_local_state ~ persistent_root ~ persistent_frontier ~ precomputed_values ~ catchup_mode ( ) with | Ok frontier -> [ % log info ] info " Successfully loaded frontier " ; Some frontier | Error ` Persistent_frontier_malformed -> failwith " persistent frontier unexpectedly malformed -- this should not happen \ with retry enabled " | Error ` Bootstrap_required -> [ % log warn ] warn " Fast forward has not been implemented . Bootstrapping instead . " ; None | Error ( ` Failure e ) e -> failwith ( " failed to initialize transition frontier : " ^ e ) e | Error ` Snarked_ledger_mismatch -> [ % log warn ] warn " Persistent database is out of sync with snarked_ledger " ; None
(* Waits (up to 60s) for the network to report high connectivity before the
   node starts initialization. If the timeout elapses with zero peers: a seed
   node proceeds with a log message, a non-seed node logs an error and exits
   with code 1; with some peers it proceeds with an informational log.
   NOTE(review): `too many peers` in the final log message looks like it
   should read "to many peers" / "to enough peers" — artifact or upstream
   typo; message text kept byte-identical here. *)
let wait_for_high_connectivity ~ logger ~ network ~ is_seed = let connectivity_time_upperbound = 60 . 0 in let high_connectivity = Mina_networking . on_first_high_connectivity network ~ f : Fn . id in Deferred . any [ ( high_connectivity >>| fun ( ) -> [ % log info ] info " Already connected to enough peers , start initialization " ) ; ( after ( Time_ns . Span . of_sec connectivity_time_upperbound ) connectivity_time_upperbound >>= fun ( ) -> Mina_networking . peers network >>| fun peers -> if not @@ Deferred . is_determined high_connectivity then if List . length peers = 0 then if is_seed then [ % log info ] info ~ metadata : [ ( " max seconds to wait for high connectivity " , ` Float connectivity_time_upperbound ) ] " Will start initialization without connecting to any peers " else ( [ % log error ] error " Failed to find any peers during initialization ( crashing \ because this is not a seed node ) node " ; exit 1 ) else [ % log info ] info ~ metadata : [ ( " num peers " , ` Int ( List . length peers ) peers ) peers ; ( " max seconds to wait for high connectivity " , ` Float connectivity_time_upperbound ) ] " Will start initialization without connecting to too many peers " ) ]
(* Router initialization. After (optionally) waiting for connectivity,
   concurrently downloads the network best tip and loads the persisted
   frontier, then dispatches:
   - no frontier          -> bootstrap from the persisted root transition;
   - frontier + tip that is too far ahead ([is_transition_for_bootstrap])
                          -> close the frontier and bootstrap from it;
   - otherwise            -> sync consensus local state if required (via
     [Consensus.Hooks.required_local_state_sync] / [sync_local_state]) and
     start the frontier controller, seeding it with the downloaded tip.
   NOTE(review): heavy extraction artifacts across all three lines
   (duplicated tokens after closing parens); reconcile with upstream before
   compiling. *)
let initialize ~ logger ~ network ~ is_seed ~ is_demo_mode ~ verifier ~ trust_system ~ time_controller ~ frontier_w ~ producer_transition_reader_ref ~ producer_transition_writer_ref ~ clear_reader ~ verified_transition_writer ~ transition_reader_ref ~ transition_writer_ref ~ most_recent_valid_block_writer ~ persistent_root ~ persistent_frontier ~ consensus_local_state ~ precomputed_values ~ catchup_mode ~ notify_online = let % bind ( ) = if is_demo_mode then return ( ) else wait_for_high_connectivity ~ logger ~ network ~ is_seed in let genesis_constants = Precomputed_values . genesis_constants precomputed_values in match % bind Deferred . both ( download_best_tip ~ notify_online ~ logger ~ network ~ verifier ~ trust_system ~ most_recent_valid_block_writer ~ genesis_constants ~ precomputed_values ) ( load_frontier ~ logger ~ verifier ~ persistent_frontier ~ persistent_root ~ consensus_local_state ~ precomputed_values ~ catchup_mode ) with | best_tip , None -> [ % log info ] info " Unable to load frontier ; starting bootstrap " ; let % map initial_root_transition = Persistent_frontier ( . with_instance_exn persistent_frontier ~ f : Instance . get_root_transition ) get_root_transition >>| Result . ok_or_failwith in start_bootstrap_controller ~ logger ~ trust_system ~ verifier ~ network ~ time_controller ~ producer_transition_reader_ref ~ producer_transition_writer_ref ~ verified_transition_writer ~ clear_reader ~ transition_reader_ref ~ consensus_local_state ~ transition_writer_ref ~ frontier_w ~ persistent_root ~ persistent_frontier ~ initial_root_transition ~ catchup_mode ~ best_seen_transition : best_tip ~ precomputed_values | best_tip , Some frontier -> ( match best_tip with | Some best_tip when is_transition_for_bootstrap ~ logger frontier ( best_tip |> Envelope . Incoming . data ) data ~ precomputed_values -> [ % log info ] info ~ metadata : [ ( " length " , ` Int ( Unsigned . UInt32 . to_int ( Mina_block . blockchain_length @@ Validation . block best_tip . 
data ) ) ) ] " Network best tip is too new to catchup to ( best_tip with \ $ length ) length ; starting bootstrap " ; let initial_root_transition = Transition_frontier ( . Breadcrumb . validated_transition ( root frontier ) frontier ) frontier in let % map ( ) = Transition_frontier . close ~ loc : __LOC__ frontier in start_bootstrap_controller ~ logger ~ trust_system ~ verifier ~ network ~ time_controller ~ producer_transition_reader_ref ~ producer_transition_writer_ref ~ verified_transition_writer ~ clear_reader ~ transition_reader_ref ~ consensus_local_state ~ transition_writer_ref ~ frontier_w ~ persistent_root ~ persistent_frontier ~ initial_root_transition ~ catchup_mode ~ best_seen_transition ( : Some best_tip ) best_tip ~ precomputed_values | _ -> if Option . is_some best_tip then [ % log info ] info ~ metadata : [ ( " length " , ` Int ( Unsigned . UInt32 . to_int ( Mina_block . blockchain_length @@ Validation . block ( Option . value_exn best_tip ) best_tip . data ) ) ) ] " Network best tip is recent enough to catchup to ( best_tip with \ $ length ) length ; syncing local state and starting participation " else [ % log info ] info " Successfully loaded frontier , but failed downloaded best tip \ from network " ; let curr_best_tip = Transition_frontier . best_tip frontier in let % map ( ) = match Consensus . Hooks . required_local_state_sync ~ constants : precomputed_values . consensus_constants ~ consensus_state : ( Transition_frontier . Breadcrumb . consensus_state curr_best_tip ) curr_best_tip ~ local_state : consensus_local_state with | None -> [ % log info ] info " Local state already in sync " ; Deferred . unit | Some sync_jobs -> ( [ % log info ] info " Local state is out of sync ; " ; match % map Consensus . Hooks . sync_local_state ~ local_state : consensus_local_state ~ logger ~ trust_system ~ random_peers ( : Mina_networking . random_peers network ) network ~ query_peer : { Consensus . Hooks . Rpcs . 
query = ( fun peer rpc query -> Mina_networking ( . query_peer network peer . peer_id ( Rpcs . Consensus_rpc rpc ) rpc query ) query ) } ~ ledger_depth : precomputed_values . constraint_constants . ledger_depth sync_jobs with | Error e -> Error . tag e ~ tag " : Local state sync failed " |> Error . raise | Ok ( ) -> ( ) ) in let collected_transitions = Option . to_list best_tip in start_transition_frontier_controller ~ logger ~ trust_system ~ verifier ~ network ~ time_controller ~ producer_transition_reader_ref ~ producer_transition_writer_ref ~ verified_transition_writer ~ clear_reader ~ collected_transitions ~ transition_reader_ref ~ transition_writer_ref ~ frontier_w ~ precomputed_values frontier )
(* Blocks until the chain's genesis timestamp is reached. If
   [Consensus.Hooks.is_genesis_epoch] raises Invalid_argument (i.e. we are
   before genesis), logs the remaining time and polls every 30 seconds,
   bounded by a timeout of exactly the time remaining until genesis.
   NOTE(review): `of_sec 30 ) .` shows the displaced-decimal-point artifact
   (`30.`); relying on Invalid_argument for "before genesis" is an upstream
   design choice, preserved here. *)
let wait_till_genesis ~ logger ~ time_controller ( ~ precomputed_values : Precomputed_values . t ) t = let module Time = Block_time in let now = Time . now time_controller in let consensus_constants = precomputed_values . consensus_constants in let genesis_state_timestamp = consensus_constants . genesis_state_timestamp in try Consensus . Hooks . is_genesis_epoch ~ constants : consensus_constants now |> Fn . const Deferred . unit with Invalid_argument _ -> let time_till_genesis = Time . diff genesis_state_timestamp now in [ % log warn ] warn ~ metadata : [ ( " time_till_genesis " , ` Int ( Int64 . to_int_exn ( Time . Span . to_ms time_till_genesis ) time_till_genesis ) time_till_genesis ) ] " Node started before the chain start time : waiting $ time_till_genesis \ milliseconds before starting participation " ; let rec logger_loop ( ) = let % bind ( ) = after ( Time_ns . Span . of_sec 30 ) . in let now = Time . now time_controller in try Consensus . Hooks . is_genesis_epoch ~ constants : consensus_constants now |> Fn . const Deferred . unit with Invalid_argument _ -> let tm_remaining = Time . diff genesis_state_timestamp now in [ % log debug ] debug " Time before the chain start time . Waiting $ tm_remaining \ milliseconds before starting participation " ~ metadata : [ ( " tm_remaining " , ` Int ( Int64 . to_int_exn @@ Time . Span . to_ms tm_remaining ) tm_remaining ) ] ; logger_loop ( ) in Time . Timeout . await ~ timeout_duration : time_till_genesis time_controller ( logger_loop ( ) ) |> Deferred . ignore_m
(* [run] wires up the transition router: it creates the internal pipes,
   waits for genesis, starts the initial validator, initializes either the
   transition-frontier controller or the bootstrap controller, and then
   routes every validated block either onward to the current controller or
   — when the block indicates we are too far behind — tears the frontier
   down and switches to bootstrap.
   Returns [(verified_transition_reader, initialization_finish_signal)]. *)
let run ~logger ~trust_system ~verifier ~network ~is_seed ~is_demo_mode
    ~time_controller ~consensus_local_state ~persistent_root_location
    ~persistent_frontier_location
    ~frontier_broadcast_pipe:(frontier_r, frontier_w)
    ~network_transition_reader ~producer_transition_reader
    ~most_recent_valid_block:
      (most_recent_valid_block_reader, most_recent_valid_block_writer)
    ~precomputed_values ~catchup_mode ~notify_online =
  let initialization_finish_signal = Ivar.create () in
  let clear_reader, clear_writer =
    Strict_pipe.create ~name:"clear" Synchronous
  in
  (* Overflow handler: count the drop and notify the validation callback. *)
  let verified_transition_reader, verified_transition_writer =
    let name = "verified transitions" in
    create_bufferred_pipe ~name
      ~f:(fun
           (`Transition (head : Mina_block.Validated.t), _, `Valid_cb valid_cb)
         ->
        Mina_metrics.(
          Counter.inc_one Pipe.Drop_on_overflow.router_verified_transitions) ;
        Mina_block.handle_dropped_transition
          (Mina_block.Validated.forget head |> With_hash.hash)
          ~pipe_name:name ~logger ?valid_cb )
      ()
  in
  let transition_reader, transition_writer =
    let name = "transition pipe" in
    create_bufferred_pipe ~name
      ~f:(fun (`Block block, `Valid_cb valid_cb) ->
        Mina_metrics.(Counter.inc_one Pipe.Drop_on_overflow.router_transitions) ;
        Mina_block.handle_dropped_transition
          ( Network_peer.Envelope.Incoming.data block
          |> Validation.block_with_hash |> With_hash.hash )
          ?valid_cb ~pipe_name:name ~logger )
      ()
  in
  (* These refs are re-pointed whenever we switch between the frontier
     controller and the bootstrap controller. *)
  let transition_reader_ref = ref transition_reader in
  let transition_writer_ref = ref transition_writer in
  let producer_transition_reader_ref, producer_transition_writer_ref =
    let reader, writer =
      Strict_pipe.create ~name:"producer transition" Synchronous
    in
    (ref reader, ref writer)
  in
  O1trace.background_thread "transition_router" (fun () ->
      (* Forward locally-produced transitions into whichever controller
         currently owns the producer pipe. *)
      don't_wait_for
      @@ Strict_pipe.Reader.iter producer_transition_reader ~f:(fun x ->
             Strict_pipe.Writer.write !producer_transition_writer_ref x ) ;
      let%bind () =
        wait_till_genesis ~logger ~time_controller ~precomputed_values
      in
      let valid_transition_reader, valid_transition_writer =
        let name = "valid transitions" in
        create_bufferred_pipe ~name
          ~f:(fun head ->
            let `Block block, `Valid_cb valid_cb = head in
            Mina_metrics.(
              Counter.inc_one Pipe.Drop_on_overflow.router_valid_transitions) ;
            Mina_block.handle_dropped_transition
              ( Network_peer.Envelope.Incoming.data block
              |> Validation.block_with_hash |> With_hash.hash )
              ~valid_cb ~pipe_name:name ~logger )
          ()
      in
      Initial_validator.run ~logger ~trust_system ~verifier
        ~transition_reader:network_transition_reader ~valid_transition_writer
        ~initialization_finish_signal ~precomputed_values ;
      let persistent_frontier =
        Transition_frontier.Persistent_frontier.create ~logger ~verifier
          ~time_controller ~directory:persistent_frontier_location
      in
      let persistent_root =
        Transition_frontier.Persistent_root.create ~logger
          ~directory:persistent_root_location
          ~ledger_depth:(Precomputed_values.ledger_depth precomputed_values)
      in
      let%map () =
        initialize ~logger ~network ~is_seed ~is_demo_mode ~verifier
          ~trust_system ~persistent_frontier ~persistent_root ~time_controller
          ~frontier_w ~producer_transition_reader_ref ~catchup_mode
          ~producer_transition_writer_ref ~clear_reader
          ~verified_transition_writer ~transition_reader_ref
          ~transition_writer_ref ~most_recent_valid_block_writer
          ~consensus_local_state ~precomputed_values ~notify_online
      in
      Ivar.fill_if_empty initialization_finish_signal () ;
      let valid_transition_reader1, valid_transition_reader2 =
        Strict_pipe.Reader.Fork.two valid_transition_reader
      in
      (* Fork 1: keep [most_recent_valid_block] pointing at the best block
         seen so far according to consensus selection. *)
      don't_wait_for
      @@ Strict_pipe.Reader.iter valid_transition_reader1
           ~f:(fun (`Block enveloped_transition, _) ->
             let incoming_transition =
               Envelope.Incoming.data enveloped_transition
             in
             let current_transition =
               Broadcast_pipe.Reader.peek most_recent_valid_block_reader
             in
             if
               Consensus.Hooks.equal_select_status `Take
                 (Consensus.Hooks.select
                    ~constants:precomputed_values.consensus_constants
                    ~existing:
                      ( Validation.block_with_hash current_transition
                      |> With_hash.map ~f:Mina_block.consensus_state )
                    ~candidate:
                      ( Validation.block_with_hash incoming_transition
                      |> With_hash.map ~f:Mina_block.consensus_state )
                    ~logger )
             then
               Broadcast_pipe.Writer.write most_recent_valid_block_writer
                 incoming_transition
             else Deferred.unit ) ;
      (* Fork 2: route each block to the current controller, or switch to
         bootstrap when the block shows we have fallen too far behind. *)
      don't_wait_for
      @@ Strict_pipe.Reader.iter_without_pushback valid_transition_reader2
           ~f:(fun (`Block enveloped_transition, `Valid_cb vc) ->
             don't_wait_for
             @@ let%map () =
                  let incoming_transition =
                    Envelope.Incoming.data enveloped_transition
                  in
                  match Broadcast_pipe.Reader.peek frontier_r with
                  | Some frontier ->
                      if
                        is_transition_for_bootstrap ~logger frontier
                          incoming_transition ~precomputed_values
                      then (
                        (* Tear down the frontier controller's pipes before
                           handing control to bootstrap. *)
                        Strict_pipe.Writer.kill !transition_writer_ref ;
                        Strict_pipe.Writer.kill !producer_transition_writer_ref ;
                        let initial_root_transition =
                          Transition_frontier.(
                            Breadcrumb.validated_transition (root frontier))
                        in
                        let%bind () =
                          Strict_pipe.Writer.write clear_writer `Clear
                        in
                        let%map () =
                          Transition_frontier.close ~loc:__LOC__ frontier
                        in
                        start_bootstrap_controller ~logger ~trust_system
                          ~verifier ~network ~time_controller
                          ~producer_transition_reader_ref
                          ~producer_transition_writer_ref
                          ~verified_transition_writer ~clear_reader
                          ~transition_reader_ref ~transition_writer_ref
                          ~consensus_local_state ~frontier_w ~persistent_root
                          ~persistent_frontier ~initial_root_transition
                          ~best_seen_transition:(Some enveloped_transition)
                          ~precomputed_values ~catchup_mode )
                      else Deferred.unit
                  | None ->
                      Deferred.unit
                in
                Strict_pipe.Writer.write !transition_writer_ref
                  (`Block enveloped_transition, `Valid_cb (Some vc)) ) ) ;
  (verified_transition_reader, initialization_finish_signal)
(* [app e_fun e_list] builds an application expression: with no arguments the
   callee is returned as-is; otherwise an [Oapp] node is produced. *)
let app e_fun e_list =
  match e_list with [] -> e_fun | _ :: _ -> Oapp (e_fun, e_list)
(* [sequence inst1 inst2] composes two instructions in order, flattening
   nested [Osequence] nodes and dropping empty sequences. *)
let sequence inst1 inst2 =
  match inst1, inst2 with
  | Osequence [], only | only, Osequence [] ->
      (* An empty sequence is a no-op: keep the other instruction. *)
      only
  | Osequence before, Osequence after ->
      Osequence (before @ after)
  | _, Osequence after ->
      Osequence (inst1 :: after)
  | _ ->
      Osequence [ inst1; inst2 ]
(* Map a Zelus function kind onto the Obc target kind: combinatorial kinds
   become plain functions, stateful kinds become nodes. *)
let kind = function
  | Zelus.S | Zelus.A | Zelus.AD | Zelus.AS -> Ofun
  | Zelus.C | Zelus.D | Zelus.P -> Onode
(* Translation of Zelus type expressions into their Obc (O-prefixed)
   counterparts; [Etypevec] sizes are translated by [size] and function
   kinds by [kind].
   NOTE(review): this line appears garbled — after the [Etypefun] case,
   three further [match desc with ...] bodies follow with no
   [and <name> { Zelus.desc = desc } = ...] headers.  From their
   constructors they look like the bodies of mutually recursive
   translators for type declarations ([Eabstract_type]/[Eabbrev]/...),
   constructor declarations ([Econstr0decl]/[Econstr1decl]) and sizes
   ([Sconst]/[Sglobal]/...), but the binding names are not visible here;
   confirm against the original source before editing. *)
let rec type_expression { Zelus . desc = desc } = match desc with | Zelus . Etypevar ( s ) -> Otypevar ( s ) | Zelus . Etypeconstr ( ln , ty_list ) -> Otypeconstr ( ln , List . map type_expression ty_list ) | Zelus . Etypetuple ( ty_list ) -> Otypetuple ( List . map type_expression ty_list ) | Zelus . Etypevec ( ty , s ) -> Otypevec ( type_expression ty , size s ) | Zelus . Etypefun ( k , opt_name , ty_arg , ty_res ) -> Otypefun ( kind k , opt_name , type_expression ty_arg , type_expression ty_res ) match desc with | Zelus . Eabstract_type -> Oabstract_type | Zelus . Eabbrev ( ty ) -> Oabbrev ( type_expression ty ) | Zelus . Evariant_type ( constr_decl_list ) -> Ovariant_type ( List . map constr_decl constr_decl_list ) | Zelus . Erecord_type ( n_ty_list ) -> Orecord_type ( List . map ( fun ( n , ty ) -> ( n , type_expression ty ) ) n_ty_list ) match desc with | Econstr0decl ( n ) -> Oconstr0decl ( n ) | Econstr1decl ( n , ty_list ) -> Oconstr1decl ( n , List . map type_expression ty_list ) match desc with | Zelus . Sconst ( i ) -> Sconst ( i ) | Zelus . Sglobal ( ln ) -> Sglobal ( ln ) | Zelus . Sname ( n ) -> Sname ( n ) | Zelus . Sop ( op , s1 , s2 ) -> let operator = function Zelus . Splus -> Splus | Zelus . Sminus -> Sminus in Sop ( operator op , size s1 , size s2 )
(* [is_mutable ty] is [true] iff [ty] resolves (through [Tlink]
   indirections) to a vector type [Tvec _]. *)
let rec is_mutable ty =
  match ty.t_desc with
  | Tvec _ -> true
  | Tlink pointee -> is_mutable pointee
  | _ -> false
(* Convert an internal type [ty] into an Obc type expression by going
   through the interface representation first. *)
let type_expression_of_typ ty =
  type_expression (Interface.type_expression_of_typ ty)
(* Translation environment: maps identifiers to an [entry] carrying
   - [e_typ]: the identifier's type;
   - [e_sort]: [In] with an expression, or [Out] with an identifier and
     its [Deftypes.tsort] — presumably distinguishing values read from
     values written (TODO confirm against usage);
   - [e_size]: a [loop_path], i.e. a list of loop-index identifiers —
     presumably the nest of loops under which the entry is accessed. *)
type env = entry Env . t and entry = { e_typ : Deftypes . typ ; e_sort : sort ; e_size : loop_path ; } and sort = | In of exp | Out of Zident . t * Deftypes . tsort and loop_path = Zident . t list
(* Code being accumulated for a generated machine: memory entries [mem],
   instance entries [instances], and three instruction bodies — [init],
   [reset] and [step].
   NOTE(review): [step] is typed [inst] while [init]/[reset] use
   [Obc.inst]; harmless if [Obc] is opened in this file, otherwise an
   inconsistency — confirm. *)
type code = { mem : mentry State . t ; init : Obc . inst ; instances : ientry State . t ; reset : Obc . inst ; step : inst ; }
(* Debug printer for an [entry Env.t], printing each entry's type and
   size path via [Zident.Env.fprint_t].
   NOTE(review): this line is garbled — the [Format.fprintf] format
   string looks mangled and the [%a] printer arguments for [ty] and
   [size] appear to be missing; [sort] is bound but unused.  Reconstruct
   from the original source before relying on this definition. *)
let fprint ff ( env : entry Env . t ) = let fprint_entry ff { e_typ = ty ; e_sort = sort ; e_size = size } = Format . fprintf ff " [ { @ typ = % a ; , @ size = % a } ] " @ Zident . Env . fprint_t fprint_entry ff env