type bunch = {
  objs : objdata array;
  wp : block Weak.t;
};;

let data =
  Array.init size (fun i ->
    let n = 1 + random_int size in
    { objs = Array.make n (Absent 0);
      wp = Weak.create n; });;

let gccount () =
  let res = (Gc.quick_stat ()).Gc.major_collections in
  res

type change = No_change | Fill | Erase;;

let check_and_change data i j =
  let gc1 = gccount () in
  let change =
    match data.(i).objs.(j), Weak.check data.(i).wp j with
    | Present x, false -> assert false
    | Absent n, true -> assert (gc1 <= n + 2); No_change
    | Absent _, false -> Fill
    | Present _, true -> if random_int 10 = 0 then Erase else No_change
  in
  match change with
  | No_change -> ()
  | Fill ->
    let x = Array.make (1 + random_int 10) 42 in
    data.(i).objs.(j) <- Present x;
    Weak.set data.(i).wp j (Some x)
  | Erase ->
    data.(i).objs.(j) <- Absent gc1;
    let gc2 = gccount () in
    if gc1 <> gc2 then data.(i).objs.(j) <- Absent gc2
;;

let dummy = ref [];;

let run index () =
  let domain_data =
    Array.init 100 (fun i ->
      let n = 1 + random_int 100 in
      { objs = Array.make n (Absent 0);
        wp = Weak.create n; })
  in
  while gccount () < 5 do
    dummy := Array.make (random_int 300) 0;
    let i = (random_int (size / num_domains)) + index * size / num_domains in
    let j = random_int (Array.length data.(i).objs) in
    check_and_change data i j;
    let ix = random_int 100 in
    let jx = random_int (Array.length domain_data.(ix).objs) in
    check_and_change domain_data ix jx
  done

let _ =
  for index = 0 to 4 do
    let domains =
      Array.init (num_domains - 1)
        (fun i -> Domain.spawn (run ((i + index) mod 5)))
    in
    run ((num_domains - 1 + index) mod 5) ();
    Array.iter Domain.join domains
  done;
  print_endline "ok"
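(* Illustrative sketch, not part of the test above: the property the test
   relies on is that a weak-array slot stays set while the stored block is
   strongly reachable, and may be cleared by a major collection once the
   last strong reference is gone. *)
let () =
  let wp = Weak.create 1 in
  let strong = ref (Some (Array.make 10 0)) in
  (match !strong with Some a -> Weak.set wp 0 (Some a) | None -> ());
  Printf.printf "alive while referenced: %b\n" (Weak.check wp 0);
  strong := None;
  Gc.full_major ();
  Printf.printf "alive after full_major: %b\n" (Weak.check wp 0)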
module type S = sig
  type data
  type t
  val create : int -> t
  val clear : t -> unit
  val merge : t -> data -> data
  val fold : (data -> 'a -> 'a) -> t -> 'a -> 'a
  val actually_weak : bool
end
module MakeStrong (H : Hashtbl.HashedType) = struct
  module Set = Set.Make (struct
    type t = H.t
    let compare x y =
      if H.equal x y then 0
      else
        let hx = H.hash x in
        let hy = H.hash y in
        hx - hy
  end)

  type data = H.t
  type t = { mutable set : Set.t }

  let create n = { set = Set.empty }
  let clear s = s.set <- Set.empty

  let merge s data =
    if not (Set.mem data s.set) then (s.set <- Set.add data s.set; data)
    else Set.find data s.set

  let fold f s accum = Set.fold f s.set accum

  let actually_weak = false
end
module MakeWeak (H : Hashtbl.HashedType) = struct
  let actually_weak = true
  let limit = 8

  type t = {
    mutable extent : int;
    mutable array : H.t Weak.t;
  }

  type data = H.t

  let is_array xs = Weak.length xs.array <= limit

  let create n = { extent = 0; array = Weak.create (max 1 n) }

  let clear xs =
    if is_array xs then xs.extent <- 0
    else xs.array <- Weak.create (Weak.length xs.array)

  let filter _ _ = failwith "not implemented"

  let fold fn xs acc =
    let acc = ref acc in
    (if is_array xs then begin
       let j = ref 0 in
       for i = 0 to xs.extent - 1 do
         match Weak.get xs.array i with
         | Some x as x'opt ->
           acc := fn x !acc;
           if !j < i then Weak.set xs.array !j x'opt;
           incr j
         | None -> ()
       done;
       xs.extent <- !j
     end
     else
       for i = 0 to Weak.length xs.array - 1 do
         match Weak.get xs.array i with
         | Some x -> acc := fn x !acc
         | None -> ()
       done);
    !acc

  let rec merge xs x =
    let resize () =
      let old_size = Weak.length xs.array in
      let old_array = xs.array in
      xs.extent <- 0;
      xs.array <- Weak.create (old_size * 2);
      if is_array xs then
        for i = 0 to old_size - 1 do
          match Weak.get old_array i with
          | Some _ as x'opt ->
            Weak.set xs.array xs.extent x'opt;
            xs.extent <- xs.extent + 1
          | None -> ()
        done
      else
        for i = 0 to old_size - 1 do
          match Weak.get old_array i with
          | Some x -> ignore (merge xs x)
          | None -> ()
        done
    in
    if is_array xs then begin
      let x'opt =
        fold
          (fun x' x'opt ->
             match x'opt with
             | Some _ -> x'opt
             | None -> if H.equal x' x then Some x' else None)
          xs None
      in
      match x'opt with
      | Some x -> x
      | None ->
        if xs.extent >= Weak.length xs.array then resize ();
        if is_array xs then begin
          Weak.set xs.array xs.extent (Some x);
          xs.extent <- xs.extent + 1;
          x
        end
        else merge xs x
    end
    else
      let size = Weak.length xs.array in
      let i = H.hash x mod size in
      let window = max limit (size / 4) in
      let rec find j result =
        if j <= xs.extent then
          let k = (i + j) mod size in
          match Weak.get xs.array k with
          | Some x' when H.equal x x' -> x'
          | Some _ -> find (j + 1) result
          | None -> find (j + 1) (if result == None then Some (j, k) else result)
        else
          match result with
          | Some (j, k) ->
            xs.extent <- max j xs.extent;
            Weak.set xs.array k (Some x);
            x
          | None ->
            let rec find j =
              if j < window then
                let k = (i + j) mod size in
                match Weak.get xs.array k with
                | Some _ -> find (j + 1)
                | None ->
                  xs.extent <- max j xs.extent;
                  Weak.set xs.array k (Some x);
                  x
              else begin
                resize ();
                merge xs x
              end
            in
            find j
      in
      find 0 None
end
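(* A minimal usage sketch of the two functors above (not part of the original
   source): hash-consing strings through [MakeWeak]. [StringHash] and [Pool]
   are made-up names; any [Hashtbl.HashedType] argument works the same way
   with [MakeStrong]. *)
module StringHash = struct
  type t = string
  let equal = String.equal
  let hash = Hashtbl.hash
end

module Pool = MakeWeak (StringHash)

let () =
  let pool = Pool.create 16 in
  (* [merge] returns the canonical representative, inserting on first use. *)
  let a = Pool.merge pool "hello" in
  let b = Pool.merge pool ("hel" ^ "lo") in
  assert (a == b);  (* structurally equal strings share one representative *)
  (* Entries that are still strongly reachable can be folded over; entries
     whose values died may already have been dropped, since the table only
     holds them through weak pointers. *)
  let live = Pool.fold (fun _ n -> n + 1) pool 0 in
  Printf.printf "%d live entries\n" live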
module Hashed = struct
  type t = string list;;
  let equal x y =
    eprintf "equal: %s / %s\n" (List.hd x) (List.hd y);
    x = y;;
  let hash x = Hashtbl.hash (List.hd x);;
end;;

module HT = Weak.Make (Hashed);;

let tbl = HT.create 7;;

let r = ref [];;

let bunch = if Array.length Sys.argv < 2 then 10000 else int_of_string Sys.argv.(1);;

let random_string n = String.init n (fun _ -> Char.chr (32 + Random.int 95));;

let added = ref 0;;
let mistakes = ref 0;;
let print_status () =
  let (len, entries, sumbuck, buckmin, buckmed, buckmax) = HT.stats tbl in
  if entries > bunch * (!added + 1) then begin
    if debug then begin
      printf "\n===================\n";
      printf "len = %d\n" len;
      printf "entries = %d\n" entries;
      printf "sum of bucket sizes = %d\n" sumbuck;
      printf "min bucket = %d\n" buckmin;
      printf "med bucket = %d\n" buckmed;
      printf "max bucket = %d\n" buckmax;
      printf "GC count = %d\n" (Gc.quick_stat ()).Gc.major_collections;
      flush stdout;
    end;
    incr mistakes
  end;
  added := 0
;;

(* Body of the main loop: each iteration drops the previous strong references
   and adds [bunch] fresh entries to the weak table. The loop header itself is
   not part of this excerpt. *)
  r := [];
  incr added;
  for i = 1 to bunch do
    let c = random_string 7 in
    r := c :: !r;
    HT.add tbl !r
  done;
done;;
module type G = sig
  type t
  module V : Sig.COMPARABLE
  val iter_vertex : (V.t -> unit) -> t -> unit
  val iter_succ : (V.t -> unit) -> t -> V.t -> unit
end

type 'a element =
  | Vertex of 'a
  | Component of 'a * 'a t

module Make (G : G) = struct
  module HT = Hashtbl.Make (G.V)

  let recursive_scc g root_g =
    let stack = Stack.create () in
    let dfn = HT.create 1024 in
    let num = ref 0 in
    let partition = ref [] in
    G.iter_vertex (fun v -> HT.add dfn v 0) g;
    let rec visit vertex partition =
      let head = ref 0 in
      let loop = ref false in
      Stack.push vertex stack;
      incr num;
      HT.replace dfn vertex !num;
      head := !num;
      G.iter_succ
        (fun succ ->
           let dfn_succ = HT.find dfn succ in
           let min = if dfn_succ = 0 then visit succ partition else dfn_succ in
           if min <= !head then begin
             head := min;
             loop := true
           end)
        g vertex;
      if !head = HT.find dfn vertex then begin
        HT.replace dfn vertex max_int;
        let element = ref (Stack.pop stack) in
        if !loop then begin
          while G.V.compare !element vertex <> 0 do
            HT.replace dfn !element 0;
            element := Stack.pop stack
          done;
          partition := component vertex :: !partition
        end
        else partition := Vertex vertex :: !partition
      end;
      !head
    and component vertex =
      let partition = ref [] in
      G.iter_succ
        (fun succ ->
           if HT.find dfn succ = 0 then ignore (visit succ partition : int))
        g vertex;
      Component (vertex, !partition)
    in
    let (_ : int) = visit root_g partition in
    !partition
end
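(* A minimal usage sketch (not from the original source): instantiating the
   functor with a made-up adjacency-list graph. [IntGraph] only has to satisfy
   the [G] signature above; [Sig.COMPARABLE] is assumed to be the ocamlgraph
   signature requiring [compare], [hash] and [equal]. *)
module IntGraph = struct
  type t = (int * int list) list  (* each vertex paired with its successors *)

  module V = struct
    type t = int
    let compare = compare
    let hash = Hashtbl.hash
    let equal = ( = )
  end

  let iter_vertex f g = List.iter (fun (v, _) -> f v) g
  let iter_succ f g v = List.iter f (try List.assoc v g with Not_found -> [])
end

module SCC = Make (IntGraph)

(* Vertices 1 and 2 form a cycle headed by 1; the result is a weak topological
   ordering of everything reachable from vertex 0. *)
let graph = [ (0, [ 1 ]); (1, [ 2 ]); (2, [ 1; 3 ]); (3, []) ]
let _partition = SCC.recursive_scc graph 0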
module Component = struct
  type t = {
    id : int;
    kind : kind;
  }
  [@@deriving compare, sexp]

  and kind =
    | Node of Cfg.Node.t
    | Cycle of {
        head : Cfg.Node.t;
        components : t list;
      }
  [@@deriving compare, sexp]
end

type t = Component.t list [@@deriving compare, sexp]
let create ~cfg ~entry_index ~successors =
  let depth_first_numbers = Int.Table.create () in
  let stack = Stack.create () in
  let current_depth_first_number = ref 0 in
  let make_depth_first_number () =
    incr current_depth_first_number;
    !current_depth_first_number
  in
  let get_depth_first_number node_index =
    Hashtbl.find depth_first_numbers node_index |> Option.value ~default:0
  in
  let current_component_id = ref 0 in
  let make_component kind =
    let id = !current_component_id in
    incr current_component_id;
    { Component.id; kind }
  in
  let rec visit components node_index =
    Stack.push stack node_index;
    let head_dfn = make_depth_first_number () in
    Hashtbl.set depth_first_numbers ~key:node_index ~data:head_dfn;
    let visit_successors (components, minimum_dfn, loop) successor_id =
      let components, successor_dfn =
        match get_depth_first_number successor_id with
        | 0 -> visit components successor_id
        | successor_dfn -> components, successor_dfn
      in
      if successor_dfn <= minimum_dfn then
        components, successor_dfn, true
      else
        components, minimum_dfn, loop
    in
    let node = Cfg.node cfg ~id:node_index in
    let components, minimum_dfn, loop =
      node |> successors |> Set.fold ~init:(components, head_dfn, false) ~f:visit_successors
    in
    let components =
      if Int.equal minimum_dfn (get_depth_first_number node_index) then (
        Hashtbl.set depth_first_numbers ~key:node_index ~data:max_int;
        let element_id = Stack.pop_exn stack in
        if loop then (
          let rec reset_dfn element_id =
            if not (Int.equal element_id node_index) then (
              Hashtbl.set depth_first_numbers ~key:element_id ~data:0;
              reset_dfn (Stack.pop_exn stack))
          in
          reset_dfn element_id;
          let visit_successors components successor_id =
            match get_depth_first_number successor_id with
            | 0 ->
              let components, _ = visit components successor_id in
              components
            | _ -> components
          in
          let new_components = node |> successors |> Set.fold ~init:[] ~f:visit_successors in
          let component =
            make_component (Component.Cycle { head = node; components = new_components })
          in
          component :: components)
        else
          let component = make_component (Component.Node node) in
          component :: components)
      else
        components
    in
    components, minimum_dfn
  in
  let components, _ = visit [] entry_index in
  components
let assert_wto ?(entry_index = 0) cfg_nodes expected_wto =
  let cfg = Int.Table.create () in
  let insert_node node = Hashtbl.set cfg ~key:(Cfg.Node.id node) ~data:node in
  List.iter ~f:insert_node cfg_nodes;
  let actual_wto =
    WeakTopologicalOrder.create ~cfg ~entry_index ~successors:Cfg.Node.successors
  in
  let rec compare_component left right =
    let open WeakTopologicalOrder in
    match left.Component.kind, right.Component.kind with
    | Component.Node left, Component.Node right ->
      Int.equal (Cfg.Node.id left) (Cfg.Node.id right)
    | ( Component.Cycle { head = left_head; components = left_components },
        Component.Cycle { head = right_head; components = right_components } ) ->
      Int.equal (Cfg.Node.id left_head) (Cfg.Node.id right_head)
      && compare_components left_components right_components
    | _ -> false
  and compare_components left right = List.equal compare_component left right in
  let pp formatter weak_topological_order =
    String.pp
      formatter
      (weak_topological_order |> WeakTopologicalOrder.sexp_of_t |> Sexp.to_string_hum)
  in
  assert_equal
    ~cmp:compare_components
    ~printer:(Format.asprintf "%a" pp)
    ~pp_diff:(Test.diff ~print:pp)
    expected_wto
    actual_wto

let cfg_node ?(predecessors = []) ?(successors = []) id =
  Cfg.Node.create id Cfg.Node.Normal (Int.Set.of_list predecessors) (Int.Set.of_list successors)

let wto_node id = { WeakTopologicalOrder.Component.id = 0; kind = Node (cfg_node id) }

let wto_cycle ~head ~components =
  { WeakTopologicalOrder.Component.id = 0; kind = Cycle { head = cfg_node head; components } }

let test_empty _ = assert_wto [ cfg_node 0 ~predecessors:[] ~successors:[] ] [ wto_node 0 ]
let test_sequential _ = assert_wto [ cfg_node 0 ~ predecessors [ ] : ~ successors [ : 1 ] ; cfg_node 1 ~ predecessors [ : 0 ] ~ successors [ : 2 ] ; cfg_node 2 ~ predecessors [ : 1 ] ~ successors [ : 3 ] ; cfg_node 3 ~ predecessors [ : 2 ] ~ successors [ ] ; : ] [ wto_node 0 ; wto_node 1 ; wto_node 2 ; wto_node 3 ] |
let test_branch _ = assert_wto [ cfg_node 0 ~ predecessors [ ] : ~ successors [ : 1 ] ; cfg_node 1 ~ predecessors [ : 0 ] ~ successors [ : 2 ; 3 ] ; cfg_node 2 ~ predecessors [ : 1 ] ~ successors [ : 4 ] ; cfg_node 3 ~ predecessors [ : 1 ] ~ successors [ : 4 ] ; cfg_node 4 ~ predecessors [ : 2 ; 3 ] ~ successors [ ] ; : ] [ wto_node 0 ; wto_node 1 ; wto_node 3 ; wto_node 2 ; wto_node 4 ] |
let test_nested_branch _ = assert_wto [ cfg_node 0 ~ predecessors [ ] : ~ successors [ : 1 ] ; cfg_node 1 ~ predecessors [ : 0 ] ~ successors [ : 2 ; 3 ] ; cfg_node 2 ~ predecessors [ : 1 ] ~ successors [ : 4 ; 5 ] ; cfg_node 3 ~ predecessors [ : 1 ] ~ successors [ : 7 ] ; cfg_node 4 ~ predecessors [ : 2 ] ~ successors [ : 6 ] ; cfg_node 5 ~ predecessors [ : 2 ] ~ successors [ : 6 ] ; cfg_node 6 ~ predecessors [ : 4 ; 5 ] ~ successors [ : 7 ] ; cfg_node 7 ~ predecessors [ : 3 ; 6 ] ~ successors [ ] ; : ] [ wto_node 0 ; wto_node 1 ; wto_node 3 ; wto_node 2 ; wto_node 5 ; wto_node 4 ; wto_node 6 ; wto_node 7 ] ; assert_wto [ cfg_node 0 ~ predecessors [ ] : ~ successors [ : 1 ] ; cfg_node 1 ~ predecessors [ : 0 ] ~ successors [ : 2 ; 3 ] ; cfg_node 2 ~ predecessors [ : 1 ] ~ successors [ : 4 ; 5 ] ; cfg_node 3 ~ predecessors [ : 1 ] ~ successors [ : 6 ; 7 ] ; cfg_node 4 ~ predecessors [ : 2 ] ~ successors [ : 8 ] ; cfg_node 5 ~ predecessors [ : 2 ] ~ successors [ : 8 ] ; cfg_node 6 ~ predecessors [ : 3 ] ~ successors [ : 9 ] ; cfg_node 7 ~ predecessors [ : 3 ] ~ successors [ : 9 ] ; cfg_node 8 ~ predecessors [ : 4 ; 5 ] ~ successors [ : 10 ] ; cfg_node 9 ~ predecessors [ : 6 ; 7 ] ~ successors [ : 10 ] ; cfg_node 10 ~ predecessors [ : 8 ; 9 ] ~ successors [ ] ; : ] [ wto_node 0 ; wto_node 1 ; wto_node 3 ; wto_node 7 ; wto_node 6 ; wto_node 9 ; wto_node 2 ; wto_node 5 ; wto_node 4 ; wto_node 8 ; wto_node 10 ; ] ; assert_wto [ cfg_node 0 ~ predecessors [ ] : ~ successors [ : 10 ] ; cfg_node 10 ~ predecessors [ : 0 ] ~ successors [ : 9 ; 3 ] ; cfg_node 9 ~ predecessors [ : 10 ] ~ successors [ : 7 ; 5 ] ; cfg_node 3 ~ predecessors [ : 10 ] ~ successors [ : 6 ; 4 ] ; cfg_node 7 ~ predecessors [ : 9 ] ~ successors [ : 8 ] ; cfg_node 5 ~ predecessors [ : 9 ] ~ successors [ : 8 ] ; cfg_node 6 ~ predecessors [ : 3 ] ~ successors [ : 2 ] ; cfg_node 4 ~ predecessors [ : 3 ] ~ successors [ : 2 ] ; cfg_node 8 ~ predecessors [ : 7 ; 5 ] ~ successors [ : 1 ] ; cfg_node 2 ~ predecessors [ : 6 ; 4 ] ~ successors [ : 1 ] ; cfg_node 1 ~ predecessors [ : 8 ; 2 ] ~ successors [ ] ; : ] [ wto_node 0 ; wto_node 10 ; wto_node 9 ; wto_node 7 ; wto_node 5 ; wto_node 8 ; wto_node 3 ; wto_node 6 ; wto_node 4 ; wto_node 2 ; wto_node 1 ; ] |
let test_unreachable _ = assert_wto [ cfg_node 0 ~ predecessors [ ] : ~ successors [ : 2 ] ; cfg_node 1 ~ predecessors [ : 3 ] ~ successors [ : 2 ] ; cfg_node 2 ~ predecessors [ : 0 ; 1 ] ~ successors [ ] ; : cfg_node 3 ~ predecessors [ ] : ~ successors [ : 1 ] ; ] [ wto_node 0 ; wto_node 2 ] |
let test_loop _ = assert_wto [ cfg_node 0 ~ predecessors [ ] : ~ successors [ : 1 ] ; cfg_node 1 ~ predecessors [ : 0 ; 2 ] ~ successors [ : 2 ; 3 ] ; cfg_node 2 ~ predecessors [ : 1 ] ~ successors [ : 1 ] ; cfg_node 3 ~ predecessors [ : 1 ] ~ successors [ ] ; : ] [ wto_node 0 ; wto_cycle ~ head : 1 ~ components [ : wto_node 2 ] ; wto_node 3 ] ; assert_wto [ cfg_node 0 ~ predecessors [ ] : ~ successors [ : 1 ] ; cfg_node 1 ~ predecessors [ : 0 ; 3 ] ~ successors [ : 2 ; 4 ] ; cfg_node 2 ~ predecessors [ : 1 ] ~ successors [ : 3 ] ; cfg_node 3 ~ predecessors [ : 2 ] ~ successors [ : 1 ] ; cfg_node 4 ~ predecessors [ : 1 ] ~ successors [ ] ; : ] [ wto_node 0 ; wto_cycle ~ head : 1 ~ components [ : wto_node 2 ; wto_node 3 ] ; wto_node 4 ] |
let test_loop_with_branch _ = assert_wto [ cfg_node 0 ~ predecessors [ ] : ~ successors [ : 1 ] ; cfg_node 1 ~ predecessors [ : 0 ; 5 ] ~ successors [ : 2 ; 6 ] ; cfg_node 2 ~ predecessors [ : 1 ] ~ successors [ : 3 ; 4 ] ; cfg_node 3 ~ predecessors [ : 2 ] ~ successors [ : 5 ] ; cfg_node 4 ~ predecessors [ : 2 ] ~ successors [ : 5 ] ; cfg_node 5 ~ predecessors [ : 3 ; 4 ] ~ successors [ : 1 ] ; cfg_node 6 ~ predecessors [ : 1 ] ~ successors [ ] ; : ] [ wto_node 0 ; wto_cycle ~ head : 1 ~ components [ : wto_node 2 ; wto_node 4 ; wto_node 3 ; wto_node 5 ] ; wto_node 6 ; ] ; assert_wto [ cfg_node 0 ~ predecessors [ ] : ~ successors [ : 1 ] ; cfg_node 1 ~ predecessors [ : 0 ; 5 ] ~ successors [ : 4 ; 3 ] ; cfg_node 4 ~ predecessors [ : 1 ] ~ successors [ : 6 ; 2 ] ; cfg_node 6 ~ predecessors [ : 4 ] ~ successors [ : 5 ] ; cfg_node 2 ~ predecessors [ : 4 ] ~ successors [ : 5 ] ; cfg_node 5 ~ predecessors [ : 6 ; 2 ] ~ successors [ : 1 ] ; cfg_node 3 ~ predecessors [ : 1 ] ~ successors [ ] ; : ] [ wto_node 0 ; wto_cycle ~ head : 1 ~ components [ : wto_node 4 ; wto_node 6 ; wto_node 2 ; wto_node 5 ] ; wto_node 3 ; ] ; assert_wto [ cfg_node 0 ~ predecessors [ ] : ~ successors [ : 1 ] ; cfg_node 1 ~ predecessors [ : 0 ] ~ successors [ : 2 ; 3 ] ; cfg_node 2 ~ predecessors [ : 1 ; 4 ] ~ successors [ : 4 ; 5 ] ; cfg_node 3 ~ predecessors [ : 1 ] ~ successors [ : 5 ] ; cfg_node 4 ~ predecessors [ : 2 ] ~ successors [ : 2 ] ; cfg_node 5 ~ predecessors [ : 2 ; 3 ] ~ successors [ ] ; : ] [ wto_node 0 ; wto_node 1 ; wto_node 3 ; wto_cycle ~ head : 2 ~ components [ : wto_node 4 ] ; wto_node 5 ] |
let test_nested_loop _ = assert_wto [ cfg_node 0 ~ predecessors [ ] : ~ successors [ : 1 ] ; cfg_node 1 ~ predecessors [ : 0 ; 4 ] ~ successors [ : 2 ; 3 ] ; cfg_node 2 ~ predecessors [ : 1 ] ~ successors [ : 4 ; 5 ] ; cfg_node 3 ~ predecessors [ : 1 ] ~ successors [ ] ; : cfg_node 4 ~ predecessors [ : 2 ] ~ successors [ : 1 ] ; cfg_node 5 ~ predecessors [ : 2 ] ~ successors [ : 2 ] ; ] [ wto_node 0 ; wto_cycle ~ head : 1 ~ components [ : wto_cycle ~ head : 2 ~ components [ : wto_node 5 ] ; wto_node 4 ] ; wto_node 3 ; ] ; assert_wto [ cfg_node 0 ~ predecessors [ : 3 ] ~ successors [ : 1 ; 6 ] ; cfg_node 1 ~ predecessors [ : 0 ; 4 ] ~ successors [ : 2 ; 3 ] ; cfg_node 2 ~ predecessors [ : 1 ] ~ successors [ : 4 ; 5 ] ; cfg_node 3 ~ predecessors [ : 1 ] ~ successors [ : 0 ] ; cfg_node 4 ~ predecessors [ : 2 ] ~ successors [ : 1 ] ; cfg_node 5 ~ predecessors [ : 2 ] ~ successors [ : 2 ] ; cfg_node 6 ~ predecessors [ : 0 ] ~ successors [ ] ; : ] [ wto_cycle ~ head : 0 ~ components : [ wto_cycle ~ head : 1 ~ components [ : wto_cycle ~ head : 2 ~ components [ : wto_node 5 ] ; wto_node 4 ] ; wto_node 3 ; ] ; wto_node 6 ; ] ; assert_wto [ cfg_node 0 ~ predecessors [ : 7 ] ~ successors [ : 1 ; 2 ; 8 ] ; cfg_node 1 ~ predecessors [ : 0 ; 3 ] ~ successors [ : 3 ; 4 ] ; cfg_node 2 ~ predecessors [ : 0 ; 6 ] ~ successors [ : 5 ; 6 ] ; cfg_node 3 ~ predecessors [ : 1 ] ~ successors [ : 1 ] ; cfg_node 4 ~ predecessors [ : 1 ] ~ successors [ : 7 ] ; cfg_node 5 ~ predecessors [ : 2 ] ~ successors [ : 7 ] ; cfg_node 6 ~ predecessors [ : 2 ] ~ successors [ : 2 ] ; cfg_node 7 ~ predecessors [ : 4 ; 5 ] ~ successors [ : 0 ] ; cfg_node 8 ~ predecessors [ : 0 ] ~ successors [ ] ; : ] [ wto_cycle ~ head : 0 ~ components : [ wto_cycle ~ head : 2 ~ components [ : wto_node 6 ] ; wto_node 5 ; wto_cycle ~ head : 1 ~ components [ : wto_node 3 ] ; wto_node 4 ; wto_node 7 ; ] ; wto_node 8 ; ] |
let test_bourdon _ = assert_wto ~ entry_index : 1 [ cfg_node 1 ~ predecessors [ ] : ~ successors [ : 2 ] ; cfg_node 2 ~ predecessors [ : 1 ] ~ successors [ : 3 ; 8 ] ; cfg_node 3 ~ predecessors [ : 2 ; 7 ] ~ successors [ : 4 ] ; cfg_node 4 ~ predecessors [ : 3 ] ~ successors [ : 5 ; 7 ] ; cfg_node 5 ~ predecessors [ : 4 ; 6 ] ~ successors [ : 6 ] ; cfg_node 6 ~ predecessors [ : 5 ] ~ successors [ : 5 ; 7 ] ; cfg_node 7 ~ predecessors [ : 6 ; 4 ] ~ successors [ : 3 ; 8 ] ; cfg_node 8 ~ predecessors [ : 2 ; 7 ] ~ successors [ ] ; : ] [ wto_node 1 ; wto_node 2 ; wto_cycle ~ head : 3 ~ components [ : wto_node 4 ; wto_cycle ~ head : 5 ~ components [ : wto_node 6 ] ; wto_node 7 ] ; wto_node 8 ; ] |
let ( ) = " weakTopologicalOrder " >::: [ " empty " >:: test_empty ; " sequential " >:: test_sequential ; " branch " >:: test_branch ; " nested_branch " >:: test_nested_branch ; " unreachable " >:: test_unreachable ; " loop " >:: test_loop ; " loop_with_branch " >:: test_loop_with_branch ; " nested_loop " >:: test_nested_loop ; " bourdon " >:: test_bourdon ; ] |> Test . run |
type ('a, 'b) t =
  { entry_by_key : ('a, 'b Weak_pointer.t) Hashtbl.t
  ; keys_with_unused_data : 'a Thread_safe_queue.t
  ; mutable thread_safe_run_when_unused_data : unit -> unit
  }

module Using_hashable = struct
  let create ?growth_allowed ?size hashable =
    { entry_by_key = Hashtbl.Using_hashable.create ~hashable ?growth_allowed ?size ()
    ; keys_with_unused_data = Thread_safe_queue.create ()
    ; thread_safe_run_when_unused_data = ignore
    }
  ;;
end

let create ?growth_allowed ?size m =
  Using_hashable.create ?growth_allowed ?size (Hashtbl.Hashable.of_key m)
;;

let set_run_when_unused_data t ~thread_safe_f =
  t.thread_safe_run_when_unused_data <- thread_safe_f
;;

let remove t key = Hashtbl.remove t.entry_by_key key

let reclaim_space_for_keys_with_unused_data t =
  while Thread_safe_queue.length t.keys_with_unused_data > 0 do
    let key = Thread_safe_queue.dequeue_exn t.keys_with_unused_data in
    match Hashtbl.find t.entry_by_key key with
    | None -> ()
    | Some entry -> if Weak_pointer.is_none entry then remove t key
  done
;;

let get_entry t key =
  Hashtbl.find_or_add t.entry_by_key key ~default:(fun () -> Weak_pointer.create ())
;;

let mem t key =
  match Hashtbl.find t.entry_by_key key with
  | None -> false
  | Some entry -> Weak_pointer.is_some entry
;;

let key_is_using_space t key = Hashtbl.mem t.entry_by_key key

let set_data t key entry data =
  Weak_pointer.set entry data;
  Gc.Expert.add_finalizer_last data (fun () ->
    Thread_safe_queue.enqueue t.keys_with_unused_data key;
    t.thread_safe_run_when_unused_data ())
;;

let replace t ~key ~data = set_data t key (get_entry t key) data

let add_exn t ~key ~data =
  let entry = get_entry t key in
  if Weak_pointer.is_some entry
  then failwiths ~here:[%here] "Weak_hashtbl.add_exn of key in use" t [%sexp_of: (_, _) t];
  set_data t key entry data
;;

let find t key =
  match Hashtbl.find t.entry_by_key key with
  | None -> None
  | Some entry -> Weak_pointer.get entry
;;

let find_or_add t key ~default =
  let entry = get_entry t key in
  match Weak_pointer.get entry with
  | Some v -> v
  | None ->
    let data = default () in
    set_data t key entry data;
    data
;;

let create ?growth_allowed ?size hashable =
  let t = create ?growth_allowed ?size hashable in
  let reclaim_will_happen = ref false in
  let reclaim () =
    reclaim_will_happen := false;
    reclaim_space_for_keys_with_unused_data t
  in
  set_run_when_unused_data t ~thread_safe_f:(fun () ->
    if not !reclaim_will_happen
    then (
      reclaim_will_happen := true;
      Async_kernel_scheduler.thread_safe_enqueue_job Execution_context.main reclaim ()));
  t
;;

let reclaim_space_for_keys_with_unused_data `Do_not_use = assert false
let set_run_when_unused_data `Do_not_use = assert false
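(* A minimal usage sketch of the table above (not from the original source;
   [cache], [load_image] and [get_image] are made-up names). An entry is
   queued for reclamation once its value becomes unreachable, so [find_or_add]
   only recomputes after a collection. Some versions of this interface require
   the stored value to be wrapped in a [Heap_block.t]; the sketch assumes
   plain boxed values are accepted. *)
let cache = create (module Int)

let load_image id = Bytes.make 4096 (Char.of_int_exn (id land 0xff))

let get_image id =
  (* Returns the cached bytes if they are still alive, otherwise reloads. *)
  find_or_add cache id ~default:(fun () -> load_image id)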
type t = out_channel -> LP.doc -> unit

let ( @@ ) f x = f x

module Markdown = struct
  let output_code io =
    String.iter (function
      | '\n' -> output_string io "\n"
      | c -> output_char io c)

  let code io = function
    | LP.Str (_, str) -> output_code io str
    | LP.Ref str -> fprintf io "<<%s>>" str

  let chunk io = function
    | LP.Doc str -> output_string io str
    | LP.Code (name, src) ->
      (output_code io @@ Printf.sprintf "<<%s>>=\n" name;
       List.iter (code io) src;
       output_char io '\n')

  let weave io chunks = List.iter (chunk io) chunks
end

let formats =
  let add map (keys, value) = List.fold_left (fun m k -> SM.add k value m) map keys in
  List.fold_left add SM.empty [ [ "plain"; "markdown" ], Markdown.weave ]

let lookup fmt = try SM.find fmt formats with Not_found -> raise (NoSuchFormat fmt)

let formats = List.map fst @@ SM.bindings formats
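(* A minimal usage sketch (not from the original source): weaving a made-up
   chunk list to stdout through the format table. The chunk constructors
   ([LP.Doc], [LP.Code], [LP.Ref]) are the ones matched by [Markdown.chunk]
   above. *)
let () =
  let chunks =
    [ LP.Doc "A tiny literate program.\n"
    ; LP.Code ("main", [ LP.Ref "helper" ])
    ]
  in
  let weave = lookup "markdown" in
  weave stdout chunks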
let choose_routers_without_path_builds_paths _ ( ) = let router = Sihl . Web . ( choose [ get " " ( fun _ -> Lwt . return @@ Opium . Response . of_plain_text " " ) ; get " foo " ( fun _ -> Lwt . return @@ Opium . Response . of_plain_text " " ) ; get " bar " ( fun _ -> Lwt . return @@ Opium . Response . of_plain_text " " ) ] ) in let paths = router |> Sihl . Web . routes_of_router |> List . map ( fun ( _ , path , _ ) -> path ) in Alcotest . ( check ( list string ) " builds paths " [ " " ; " / foo " ; " / bar " ] paths ) ; Lwt . return ( ) ; ; |
let choose_routers_with_empty_scope_builds_paths _ ( ) = let router = Sihl . Web . ( choose ~ scope " " : [ get " " ( fun _ -> Lwt . return @@ Opium . Response . of_plain_text " " ) ; get " " / ( fun _ -> Lwt . return @@ Opium . Response . of_plain_text " " ) ; get " foo " ( fun _ -> Lwt . return @@ Opium . Response . of_plain_text " " ) ; get " bar " ( fun _ -> Lwt . return @@ Opium . Response . of_plain_text " " ) ] ) in let paths = router |> Sihl . Web . routes_of_router |> List . map ( fun ( _ , path , _ ) -> path ) in Alcotest . ( check ( list string ) " builds paths " [ " " ; " " ; / " / foo " ; " / bar " ] paths ) ; Lwt . return ( ) ; ; |
let choose_routers_with_slash_scope_builds_paths _ ( ) = let router = Sihl . Web . ( choose ~ scope " " :/ [ get " " ( fun _ -> Lwt . return @@ Opium . Response . of_plain_text " " ) ; get " foo " ( fun _ -> Lwt . return @@ Opium . Response . of_plain_text " " ) ; get " bar " ( fun _ -> Lwt . return @@ Opium . Response . of_plain_text " " ) ] ) in let paths = router |> Sihl . Web . routes_of_router |> List . map ( fun ( _ , path , _ ) -> path ) in Alcotest . ( check ( list string ) " builds paths " [ " " ; / " / foo " ; " / bar " ] paths ) ; Lwt . return ( ) ; ; |
let choose_routers_builds_paths _ ( ) = let router = Sihl . Web . ( choose ~ scope " :/ root " [ get " " ( fun _ -> Lwt . return @@ Opium . Response . of_plain_text " " ) ; get " " / ( fun _ -> Lwt . return @@ Opium . Response . of_plain_text " " ) ; get " foo " ( fun _ -> Lwt . return @@ Opium . Response . of_plain_text " " ) ; get " bar " ( fun _ -> Lwt . return @@ Opium . Response . of_plain_text " " ) ] ) in let paths = router |> Sihl . Web . routes_of_router |> List . map ( fun ( _ , path , _ ) -> path ) in Alcotest . ( check ( list string ) " builds paths " [ " / root " ; " / root " ; / " / root / foo " ; " / root / bar " ] paths ) ; Lwt . return ( ) ; ; |
let choose_nested_routers_builds_paths _ ( ) = let router = Sihl . Web . ( choose ~ scope " :/ root " [ choose ~ scope " : sub " [ get " " ( fun _ -> Lwt . return @@ Opium . Response . of_plain_text " " ) ; get " foo " ( fun _ -> Lwt . return @@ Opium . Response . of_plain_text " " ) ; get " fooz " ( fun _ -> Lwt . return @@ Opium . Response . of_plain_text " " ) ] ; get " bar " ( fun _ -> Lwt . return @@ Opium . Response . of_plain_text " bar " ) ] ) in let paths = router |> Sihl . Web . routes_of_router |> List . map ( fun ( _ , path , _ ) -> path ) in Alcotest . ( check ( list string ) " builds paths " [ " / root / sub " ; " / root / sub / foo " ; " / root / sub / fooz " ; " / root / bar " ] paths ) ; Lwt . return ( ) ; ; |
let externalize_link _ ( ) = let actual = Sihl . Web . externalize_path ~ prefix " : prefix " " foo / bar " in Alcotest . ( check string " prefixes path " " prefix / foo / bar " actual ) ; let actual = Sihl . Web . externalize_path ~ prefix " : prefix " " foo / bar " / in Alcotest . ( check string " preserve trailing " " prefix / foo / bar " / actual ) ; let actual = Sihl . Web . externalize_path ~ prefix " : prefix " " / foo / bar " / in Alcotest . ( check string " no duplicate slash " " prefix / foo / bar " / actual ) ; Lwt . return ( ) ; ; |
let find_bearer_token _ ( ) = let token_value = " tokenvalue123 " in let token_header = Format . sprintf " Bearer % s " token_value in let req = Opium . Request . get " / some / path / login " |> Opium . Request . add_header ( " authorization " , token_header ) in let handler req = let token = Sihl . Web . Request . bearer_token req in Alcotest . ( check ( option string ) " has token " ( Some token_value ) token ) ; Lwt . return @@ Opium . Response . of_plain_text " " in let % lwt _ = handler req in Lwt . return ( ) ; ; |
let find_bearer_token_with_space _ ( ) = let token_value = " tokenvalue123 and after space " in let token_header = Format . sprintf " Bearer % s " token_value in let req = Opium . Request . get " / some / path / login " |> Opium . Request . add_header ( " authorization " , token_header ) in let handler req = let token = Sihl . Web . Request . bearer_token req in Alcotest . ( check ( option string ) " has token " ( Some " tokenvalue123 " ) token ) ; Lwt . return @@ Opium . Response . of_plain_text " " in let % lwt _ = handler req in Lwt . return ( ) ; ; |
let suite = [ ( " router " , [ test_case " choose routers without path builds paths " ` Quick choose_routers_without_path_builds_paths ; test_case " choose routers with empty scope builds paths " ` Quick choose_routers_with_empty_scope_builds_paths ; test_case " choose routers with slash scope builds paths " ` Quick choose_routers_with_slash_scope_builds_paths ; test_case " choose routers builds paths " ` Quick choose_routers_builds_paths ; test_case " choose nested routers builds paths " ` Quick choose_nested_routers_builds_paths ] ) ; " path " , [ test_case " prefix " ` Quick externalize_link ] ; ( " bearer token " , [ test_case " find bearer token " ` Quick find_bearer_token ; test_case " find bearer token with space " ` Quick find_bearer_token_with_space ] ) ] ; ; |
let ( ) = Logs . set_level ( Sihl . Log . get_log_level ( ) ) ; Logs . set_reporter ( Sihl . Log . cli_reporter ( ) ) ; Lwt_main . run ( Alcotest_lwt . run " web " suite ) ; ; |
type context = {
  arguments : (string * string) list;
  connection : Cohttp_lwt_unix.Server.conn;
  request : Cohttp.Request.t;
  body : Cohttp_lwt.Body.t;
}

let headers =
  let h = Cohttp.Header.init_with "Access-Control-Allow-Origin" "*" in
  let h = Cohttp.Header.add h "content-type" "application/json" in
  h

let string_response ?(headers = headers) = Server.respond_string ~headers

let error_response ?(headers = headers) ?(status = `Internal_server_error)
    (errors : Result_util.message list) :
    (Cohttp.Response.t * Cohttp_lwt.Body.t) Lwt.t =
  let error_msg : string =
    JsonUtil.string_of_write (JsonUtil.write_list Result_util.write_message) errors
  in
  let () =
    Lwt.async (fun () -> Logs_lwt.debug (fun m -> m "+ error: %s" error_msg))
  in
  Server.respond_string ~headers ~status ~body:error_msg ()

let api_result_response ~(string_of_success : 'ok -> string) :
    'ok Api.result -> (Cohttp.Response.t * Cohttp_lwt.Body.t) Lwt.t = function
  | { Result_util.value = Result.Ok ok; Result_util.status; _ } ->
    let body : string = string_of_success ok in
    let status :> Cohttp.Code.status_code = status in
    Server.respond_string ~headers ~status ~body ()
  | { Result_util.value = Result.Error errors; Result_util.status; _ } ->
    let error_msg : string =
      JsonUtil.string_of_write (JsonUtil.write_list Result_util.write_message) errors
    in
    let status :> Cohttp.Code.status_code = status in
    Logs_lwt.err (fun m -> m "%s" error_msg) >>= fun () ->
    Server.respond_string ~headers ~status ~body:error_msg ()

let kasa_response ~string_of_success x =
  api_result_response ~string_of_success (Api_common.result_kasa x)

let method_not_allowed_respond meths =
  let headers =
    Cohttp.Header.add_multi (Cohttp.Header.init ()) "Allow"
      (List.map Cohttp.Code.string_of_method meths)
  in
  Server.respond ~headers ~status:`Method_not_allowed ~body:Cohttp_lwt.Body.empty ()

let options_respond methods =
  let meths_str = List.map Cohttp.Code.string_of_method methods in
  let headers = Cohttp.Header.init_with "Access-Control-Allow-Origin" "*" in
  let headers =
    Cohttp.Header.add headers "Access-Control-Allow-Headers" "Content-Type"
  in
  let headers = Cohttp.Header.add_multi headers "Allow" meths_str in
  let headers =
    Cohttp.Header.add_multi headers "Access-Control-Allow-Methods" meths_str
  in
  let headers =
    Cohttp.Header.add headers "Access-Control-Request-Headers" "X-Custom-Header"
  in
  Server.respond ~headers ~status:`OK ~body:Cohttp_lwt.Body.empty ()
type 'a route = { path : string; operation : 'a }

type route_handler =
  (context:context -> (Cohttp.Response.t * Cohttp_lwt.Body.t) Lwt.t) route

type route_filter =
  (context:context ->
   chain:(context:context -> (Cohttp.Response.t * Cohttp_lwt.Body.t) Lwt.t) ->
   (Cohttp.Response.t * Cohttp_lwt.Body.t) Lwt.t)
  route

type url_matcher = { re : Re.re; labels : string list; route : string }

let label_prepattern = Re.rep1 (Re.alt [ Re.alnum; Re.char '_' ])
let variable_pattern = Re.compile (Re.seq [ Re.char '{'; label_prepattern; Re.char '}' ])
let label_pattern = Re.compile label_prepattern

let create_url_matcher (url : string) : url_matcher =
  let labels = Re.matches variable_pattern url in
  let labels = List.flatten (List.map (Re.matches label_pattern) labels) in
  let pattern =
    Re.split_full variable_pattern url
    |> List.map (function
         | `Text s -> Re.str s
         | `Delim _ -> Re.group (Re.rep (Re.compl [ Re.char '/' ])))
    |> Re.seq
    |> Re.whole_string
  in
  let () =
    Lwt.async (fun () -> Logs_lwt.debug (fun m -> m "+ route: %a" Re.pp pattern))
  in
  let re = Re.compile pattern in
  { re; labels; route = url }

let rec match_url (url_matchers : ('a * url_matcher) list) (url : string) :
    ('a * (string * string) list) list =
  match url_matchers with
  | (arg, matcher) :: tail ->
    (try
       let matching =
         Re.exec matcher.re url |> Re.Group.all |> Array.to_list |> List.tl
       in
       let get_parameters : (string * string) list =
         List.combine matcher.labels matching
       in
       let () =
         Lwt.async (fun () ->
             Logs_lwt.debug (fun m ->
                 m "match_url:\n+ url: '%s'\n+ route: '%s'\n+ args: {[@%a]@}"
                   url matcher.route
                   (Pp.list Pp.comma (fun f (key, value) ->
                        Format.fprintf f "%s: %s" key value))
                   get_parameters))
       in
       [ arg, get_parameters ]
     with Not_found -> [])
    @ match_url tail url
  | [] -> []

let request_handler context = function
  | [] -> Server.respond_not_found ~uri:(Request.uri context.request) ()
  | [ (route, arguments) ] ->
    Lwt.catch
      (fun () ->
         let context = { context with arguments = arguments } in
         route.operation ~context)
      (fun exn ->
         api_result_response ~string_of_success:(fun x -> x)
           (match exn with
            | Yojson.Json_error e -> Api_common.result_error_msg e
            | exn -> Api_common.result_error_exception exn))
  | _ :: _ ->
    error_response ?headers:None ?status:None
      [ Api_common.error_msg "multiple routes match url" ]

let route_handler (routes : route_handler list) :
    context:context -> (Cohttp.Response.t * Cohttp_lwt.Body.t) Lwt.t =
  let url_matchers : (route_handler * url_matcher) list =
    List.map (fun route -> (route, create_url_matcher route.path)) routes
  in
  fun ~context ->
    let url : string =
      Uri.pct_decode (Uri.path (Cohttp.Request.uri context.request))
    in
    request_handler context (match_url url_matchers url)
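(* A minimal usage sketch (not from the original source; the route string and
   printed output are made up): "{id}"-style variables become capture groups,
   and [match_url] returns the bound arguments for each matching route. *)
let matcher = create_url_matcher "/project/{id}/file/{name}"

let () =
  match match_url [ ((), matcher) ] "/project/42/file/readme" with
  | [ ((), args) ] ->
    (* prints "id = 42" and "name = readme" *)
    List.iter (fun (k, v) -> Printf.printf "%s = %s\n" k v) args
  | _ -> print_endline "no match"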
let inputString ( m : input ) : Pla . t = match m with | IContext -> Pla . string " processor . context " | IReal name | IInt name | IBool name -> Pla . string name |
let inputName ( i , acc ) s = match s with | IContext -> i , Pla . string " processor . context " :: acc | _ -> i + 1 , [ % pla { | in_ <# i # i [ > n ] } ] | :: acc |
let performFunctionCall module_name ( config : config ) = let args = List . fold_left inputName ( 0 , [ ] ) config . process_inputs |> snd |> List . rev |> Pla . join_sep Pla . comma in let copy = match config . process_outputs with | [ ] -> Pla . unit | [ _ ] -> let value = Pla . string " ret " in [ % pla { | out_0 [ n ] = <# value ; #> } ] | | o -> List . mapi ( fun i _ -> let value = [ % pla { |<# module_name # s > _process_ret_ <# i # i ( > processor . context ) } ] | in [ % pla { | out_ <# i # i [ > n ] = <# value ; #> } ] | ) o |> Pla . join_sep_all Pla . newline in [ % pla { | for ( var n = 0 ; n < e . inputBuffer . length ; n ) ++ { var ret = processor . <# module_name # s > _process ( <# args ) ; #> <#><# copy #> } } ] <#>| |
let noteFunctions ( params : params ) = let module_name = params . module_name in let on_args = Pla . map_sep Pla . comma inputString params . config . noteon_inputs in let off_args = Pla . map_sep Pla . comma inputString params . config . noteoff_inputs in ( [ % pla { | node . noteOn = function ( note , velocity , channel ) { if ( velocity > 0 ) processor . <# module_name # s > _noteOn ( <# on_args ) ; #> else processor . <# module_name # s > _noteOff ( <# off_args ) ; #> } } ] | , [ % pla { | node . noteOff = function ( note , channel ) { processor . <# module_name # s > _noteOff ( <# off_args ) ; #> } } ] | ) |
let controlChangeFunction ( params : params ) = let module_name = params . module_name in let ctrl_args = Pla . map_sep Pla . comma inputString params . config . controlchange_inputs in [ % pla { | node . controlChange = function ( control , value , channel ) { processor . <# module_name # s > _controlChange ( <# ctrl_args ) ; #> } } ] | |
let rec removeContext inputs = match inputs with | IContext :: t -> removeContext t | _ -> inputs |
let get ( params : params ) runtime code : ( Pla . t * FileKind . t ) list = let config = params . config in let module_name = params . module_name in let inputs = removeContext config . process_inputs in let nprocess_inputs = List . length inputs in let nprocess_outputs = List . length config . process_outputs in let input_var = List . mapi ( fun i _ -> [ % pla { | var in_ <# i # i > = e . inputBuffer . getChannelData ( <# i # i ) ; > } ] ) | inputs |> Pla . join_sep Pla . newline in let output_var = List . mapi ( fun i _ -> [ % pla { | var out_ <# i # i > = e . outputBuffer . getChannelData ( <# i # i ) ; > } ] ) | config . process_outputs |> Pla . join_sep Pla . newline in let process_call = performFunctionCall params . module_name params . config in let note_on , note_off = noteFunctions params in let control_change = controlChangeFunction params in let text = [ % pla { | var code = function ( ) { <# runtime #> <# code #> this . context = this . <# module_name # s > _process_init ( ) ; } ; var processor = new code ( ) ; processor . <# module_name # s > _default ( processor . context ) ; var node = audioContext . createScriptProcessor ( 0 , <# nprocess_inputs # i , > <# nprocess_outputs # i ) ; > node . inputs = <# nprocess_inputs # i ; > node . outputs = <# nprocess_outputs # i ; > node . onaudioprocess = function ( e ) { <# input_var #+> <# output_var #+> <# process_call #+> } <# note_on #> <# note_off #> <# control_change #> return node ; } ) } ] | in [ text , FileKind . ExtOnly " js " ] |
let oslo_polygon = { { | " type " : " Feature " , " geometry " : { " type " : " Polygon " , " coordinates " : [ [ [ 10 . 489165172838884 , 60 . 017259872374645 ] , [ 10 . 580764868996987 , 60 . 0762384207017 ] , [ 10 . 592122568549627 , 60 . 09394183519897 ] , [ 10 . 572782530207661 , 60 . 11678480264957 ] , [ 10 . 600720249305056 , 60 . 13160981872188 ] , [ 10 . 68031961054535 , 60 . 13353032001292 ] , [ 10 . 73711867703991 , 60 . 125733600579316 ] , [ 10 . 78802079942288 , 60 . 06755422118711 ] , [ 10 . 819765048019693 , 60 . 064296771632726 ] , [ 10 . 811720634337512 , 60 . 02561911878851 ] , [ 10 . 876109308200913 , 59 . 98547372050647 ] , [ 10 . 933734244914053 , 59 . 97416166211912 ] , [ 10 . 951389441905969 , 59 . 94924298867558 ] , [ 10 . 914816194580183 , 59 . 91161920924281 ] , [ 10 . 907158498257449 , 59 . 869893465966655 ] , [ 10 . 933102370207493 , 59 . 83659145034232 ] , [ 10 . 936527591798225 , 59 . 831669697457514 ] , [ 10 . 88029688872709 , 59 . 81138930328435 ] , [ 10 . 770788935602035 , 59 . 82510863617183 ] , [ 10 . 744019668227386 , 59 . 83928320264522 ] , [ 10 . 73100663891497 , 59 . 877178566827084 ] , [ 10 . 658082484659966 , 59 . 884410483442366 ] , [ 10 . 632783389561938 , 59 . 915118906971855 ] , [ 10 . 63388386110467 , 59 . 95342058502221 ] , [ 10 . 610456248652959 , 59 . 97660952873646 ] , [ 10 . 55585521816055 , 59 . 99672657430896 ] , [ 10 . 518070354830757 , 59 . 999291170702094 ] , [ 10 . 489165172838884 , 60 . 017259872374645 ] ] ] } , " properties " : { " kommunenummer " : " 0301 " , " objtype " : " Kommune " , " lokalid " : " 173018 " , " oppdateringsdato " : null , " datauttaksdato " : " 20191220110355 " , " versjonid " : " 4 . 1 " , " opphav " : null , " samiskforvaltningsomrade " : false , " datafangstdato " : null , " navnerom " : " http :// skjema . geonorge . no / SOSI / produktspesifikasjon / AdmEnheter / 4 . 1 " , " navn " : [ { " rekkefolge " : " " , " sprak " : " nor " , " navn " : " Oslo " } ] } } } | h " leafy - map " [ ] || [ h " leafy - feature - group " Prop . [ | bool " zoom - to - fit " true ] | [ h " leafy - geojson " Prop . [ | string " data " oslo_polygon ; string " fill " " forestgreen " ; string " stroke " " # 006400 " ; int " stroke - wdith " 1 ] | [ ] ; h " leafy - marker " Prop . [ | string " lat " " 59 . 9147857 " ; string " lng " " 10 . 7470423 " ; string " tooltip " " Hello Oslo " ! ] | [ ] ] ] |