_id
stringlengths 64
64
| repository
stringlengths 6
84
| name
stringlengths 4
110
| content
stringlengths 0
248k
| license
null | download_url
stringlengths 89
454
| language
stringclasses 7
values | comments
stringlengths 0
74.6k
| code
stringlengths 0
248k
|
---|---|---|---|---|---|---|---|---|
01a73f07fa5330f6571c0b418e89c9b26b94a7defcc5441ae5709a27c257a3fe | Bogdanp/koyo | all.rkt | #lang racket/base
;; [reprovide mod ...] requires each listed module and re-exports everything
;; those modules provide, making this file a single aggregation point.
(define-syntax-rule (reprovide mod ...)
  (begin
    (require mod ...)
    (provide (all-from-out mod ...))))

;; Re-export all page modules under this one module.
(reprovide
 "auth.rkt"
 "common.rkt"
 "dashboard.rkt")
| null | https://raw.githubusercontent.com/Bogdanp/koyo/93f3fd06ee596a62bb0b286cb6290a800e911154/koyo-lib/blueprints/standard/app-name-here/pages/all.rkt | racket | #lang racket/base
(define-syntax-rule (reprovide mod ...)
(begin
(require mod ...)
(provide (all-from-out mod ...))))
(reprovide
"auth.rkt"
"common.rkt"
"dashboard.rkt")
|
|
948cca3207e664abcdadf5820f86cdce7dfd5c896e2b45c3b0bf7cafcf0f0e6b | bugczw/Introduction-to-Functional-Programming-in-OCaml | W2_S2_1.ml |
POINTS AND ( 30/30 points )
The given prelude defines three types , one for three dimensional points , another for velocity vectors in three dimensions , and another one representing moving objects in space .
Write a function move : point - > dpoint - > point such that move p dp is the point p whose coordinates have been updated according to dp .
( x is now x + . dx , y is now y + . dy , is now z + . dz .
Write a function next : physical_object - > physical_object such that next o is the physical object o at time t + dt .
The position of next o is the position of o moved according to its velocity vector .
Suppose that these objects are spheres whose radius is 1.0 .
Write a function will_collide_soon : physical_object - > physical_object - > bool that tells if at the next instant , the two spheres will intersect .
THE GIVEN PRELUDE
type point = { x : float ; y : float ; z : float }
type dpoint = { dx : float ; dy : float ; dz : float }
type physical_object = { position : point ; velocity : dpoint }
POINTS AND VECTORS (30/30 points)
The given prelude defines three types, one for three dimensional points, another for velocity vectors in three dimensions, and another one representing moving objects in space.
Write a function move : point -> dpoint -> point such that move p dp is the point p whose coordinates have been updated according to dp.
(x is now x +. dx, y is now y +. dy, z is now z +. dz.)
Write a function next : physical_object -> physical_object such that next o is the physical object o at time t + dt.
The position of next o is the position of o moved according to its velocity vector.
Suppose that these objects are spheres whose radius is 1.0.
Write a function will_collide_soon : physical_object -> physical_object -> bool that tells if at the next instant, the two spheres will intersect.
THE GIVEN PRELUDE
type point = { x : float; y : float; z : float }
type dpoint = { dx : float; dy : float; dz : float }
type physical_object = { position : point; velocity : dpoint }
*)
(* [move p dp] is point [p] translated by the displacement [dp]: each
   coordinate gets the corresponding delta added. *)
let move p dp =
  let x = p.x +. dp.dx in
  let y = p.y +. dp.dy in
  let z = p.z +. dp.dz in
  { x; y; z };;
(* [next o] is [o] one time step later: its position advanced by its
   (unchanged) velocity vector. *)
let next obj =
  let velocity = obj.velocity in
  { position = move obj.position velocity; velocity };;
(* [square v] is [v] multiplied by itself. *)
let square v = v *. v;;
(* [dist a b] is the Euclidean distance between points [a] and [b]. *)
let dist (a : point) (b : point) =
  let dx = b.x -. a.x
  and dy = b.y -. a.y
  and dz = b.z -. a.z in
  sqrt ((dx *. dx) +. (dy *. dy) +. (dz *. dz));;
(* [will_collide_soon o1 o2]: after one time step, do the two unit-radius
   spheres intersect, i.e. are their centres within 2.0 of each other? *)
let will_collide_soon o1 o2 =
  let d = dist (next o1).position (next o2).position in
  d <= 2.0;;
| null | https://raw.githubusercontent.com/bugczw/Introduction-to-Functional-Programming-in-OCaml/13c4d1f92e7479f8eb10ea5d4c43a598b6676d0f/OCaml_MOOC_W2_ALL/Exercise/W2_S2_1.ml | ocaml |
POINTS AND ( 30/30 points )
The given prelude defines three types , one for three dimensional points , another for velocity vectors in three dimensions , and another one representing moving objects in space .
Write a function move : point - > dpoint - > point such that move p dp is the point p whose coordinates have been updated according to dp .
( x is now x + . dx , y is now y + . dy , is now z + . dz .
Write a function next : physical_object - > physical_object such that next o is the physical object o at time t + dt .
The position of next o is the position of o moved according to its velocity vector .
Suppose that these objects are spheres whose radius is 1.0 .
Write a function will_collide_soon : physical_object - > physical_object - > bool that tells if at the next instant , the two spheres will intersect .
THE GIVEN PRELUDE
type point = { x : float ; y : float ; z : float }
type dpoint = { dx : float ; dy : float ; dz : float }
type physical_object = { position : point ; velocity : dpoint }
POINTS AND VECTORS (30/30 points)
The given prelude defines three types, one for three dimensional points, another for velocity vectors in three dimensions, and another one representing moving objects in space.
Write a function move : point -> dpoint -> point such that move p dp is the point p whose coordinates have been updated according to dp.
(x is now x +. dx, y is now y +. dy, z is now z +. dz.
Write a function next : physical_object -> physical_object such that next o is the physical object o at time t + dt.
The position of next o is the position of o moved according to its velocity vector.
Suppose that these objects are spheres whose radius is 1.0.
Write a function will_collide_soon : physical_object -> physical_object -> bool that tells if at the next instant, the two spheres will intersect.
THE GIVEN PRELUDE
type point = { x : float; y : float; z : float }
type dpoint = { dx : float; dy : float; dz : float }
type physical_object = { position : point; velocity : dpoint }
*)
let move p dp =
{x = p.x +. dp.dx; y= p.y +. dp.dy; z= p.z +. dp.dz};;
let next obj =
{position = move obj.position obj.velocity; velocity = obj.velocity};;
let square x = x *. x;;
let dist (p1:point) (p2:point) =
sqrt (square(p2.x-.p1.x) +. square(p2.y-.p1.y) +. square(p2.z-.p1.z));;
let will_collide_soon p1 p2 =
dist (next p1).position (next p2).position <= 2.0;;
|
|
92748a72825c331d99f0b43e98d426ba5af9efc1bf525db938a9c58a77b00548 | dschrempf/elynx | SubstitutionModel.hs | -- |
-- Module : ELynx.MarkovProcess.SubstitutionModel
-- Description : Data type describing substitution model
Copyright : 2021
License : GPL-3.0 - or - later
--
-- Maintainer :
-- Stability : unstable
-- Portability : portable
--
Creation date : Tue Jan 29 19:10:46 2019 .
--
-- To be imported qualified.
module ELynx.MarkovProcess.SubstitutionModel
( -- * Types
Name,
Params,
SubstitutionModel,
-- * Accessors
alphabet,
name,
params,
stationaryDistribution,
exchangeabilityMatrix,
rateMatrix,
totalRate,
-- * Building substitution models
substitutionModel,
-- * Transformations
scale,
normalize,
appendName,
)
where
import qualified Data.Vector.Storable as V
import ELynx.Alphabet.Alphabet
import qualified ELynx.MarkovProcess.RateMatrix as R
import qualified Numeric.LinearAlgebra as LinAlg
-- | Name of substitution model; abstracted and subject to change.
type Name = String
-- | Parameters of substitution model. May be the empty list.
type Params = [Double]
-- XXX: Use a proper data type. For example:
data SubstitutionModelAA = LG | WAG | LG - Custom dist | ...
data SubstitutionModelNuc = JC | HKY p1 p2 ... | GTR p1 p2 ...
--
-- I thought about this a lot, and it seems easier like it is at the moment.
-- Since the data types are abstracted anyways, not much harm can be done. Of
-- course, conflicting substitution models can be declared, or duplicate ones
-- with different names, but well...
-- | Complete definition of a substitution model. Create instances with
-- 'substitutionModel'. A substitution model has an alphabet, a name, and a list
-- of parameters (e.g., the kappa value for the HKY model). Further, the
-- transition rate matrix is defined by a stationary distribution and a set of
-- exchangeabilities.
data SubstitutionModel = SubstitutionModel
{ -- | Alphabet
alphabet :: Alphabet,
-- | Name
name :: Name,
-- | List of parameters
params :: Params,
-- | Stationary distribution
stationaryDistribution :: R.StationaryDistribution,
-- | Exchangeability matrix
exchangeabilityMatrix :: R.ExchangeabilityMatrix
}
deriving (Show, Read)
-- | Calculate the rate matrix of a substitution model from its
-- exchangeability matrix and stationary distribution.
rateMatrix :: SubstitutionModel -> R.RateMatrix
rateMatrix sm = R.fromExchangeabilityMatrix em d
  where
    em = exchangeabilityMatrix sm
    d = stationaryDistribution sm
-- | Get the scale (total rate) of a substitution model.
totalRate :: SubstitutionModel -> Double
totalRate = R.totalRate . rateMatrix
-- Rescale a vector so that its elements sum to 1.0.
normalizeSumVec :: V.Vector Double -> V.Vector Double
normalizeSumVec v =
  let s = V.sum v
   in V.map (/ s) v
{-# INLINE normalizeSumVec #-}
-- | Create a normalized 'SubstitutionModel'; see 'normalize'. The stationary
-- distribution is validated with 'R.isValid' and rescaled to sum to 1.0
-- before the model is built.
substitutionModel ::
  Alphabet ->
  Name ->
  Params ->
  R.StationaryDistribution ->
  R.ExchangeabilityMatrix ->
  SubstitutionModel
substitutionModel c n ps d e
  | R.isValid d = normalize $ SubstitutionModel c n ps (normalizeSumVec d) e
  | otherwise =
      error $
        -- NOTE: error message previously misspelled the function name
        -- ("substitionModel"); fixed.
        "substitutionModel: Stationary distribution does not sum to 1.0: "
          ++ show d
-- | Scale the rate of a substitution model by the given factor; the
-- exchangeability matrix is multiplied element-wise by the factor.
scale :: Double -> SubstitutionModel -> SubstitutionModel
scale r sm =
  sm {exchangeabilityMatrix = LinAlg.scale r (exchangeabilityMatrix sm)}
-- | Normalize a substitution model, so that, on average, one substitution
-- happens per unit time.
normalize :: SubstitutionModel -> SubstitutionModel
normalize sm = scale (1.0 / totalRate sm) sm
-- | Append a suffix to the model's name.
appendName :: Name -> SubstitutionModel -> SubstitutionModel
appendName n sm = sm {name = name sm <> n}
| null | https://raw.githubusercontent.com/dschrempf/elynx/f73f4474c61c22c6a9e54c56bdc34b37eff09687/elynx-markov/src/ELynx/MarkovProcess/SubstitutionModel.hs | haskell | |
Module : ELynx.MarkovProcess.SubstitutionModel
Description : Data type describing substitution model
Maintainer :
Stability : unstable
Portability : portable
To be imported qualified.
* Types
* Accessors
* Building substitution models
* Transformations
| Name of substitution model; abstracted and subject to change.
XXX: Use a proper data type. For example:
I thought about this a lot, and it seems easier like it is at the moment.
Since the data types are abstracted anyways, not much harm can be done. Of
course, conflicting substitution models can be declared, or duplicate ones
with different names, but well...
| Complete definition of a substitution model. Create instances with
of parameters (e.g., the kappa value for the HKY model). Further, the
transition rate matrix is defined by a stationary distribution and a set of
exchangeabilities.
| Alphabet
| Name
| List of parameters
| Stationary distribution
| Exchangeability matrix
| Calculate rate matrix from substitution model.
| Get scale of substitution model.
| Scale the rate of a substitution model by given factor.
happens per unit time.
| Abbend to name. | Copyright : 2021
License : GPL-3.0 - or - later
Creation date : Tue Jan 29 19:10:46 2019 .
module ELynx.MarkovProcess.SubstitutionModel
Name,
Params,
SubstitutionModel,
alphabet,
name,
params,
stationaryDistribution,
exchangeabilityMatrix,
rateMatrix,
totalRate,
substitutionModel,
scale,
normalize,
appendName,
)
where
import qualified Data.Vector.Storable as V
import ELynx.Alphabet.Alphabet
import qualified ELynx.MarkovProcess.RateMatrix as R
import qualified Numeric.LinearAlgebra as LinAlg
type Name = String
| Parameters of substitution model . May be the empty list .
type Params = [Double]
data SubstitutionModelAA = LG | WAG | LG - Custom dist | ...
data SubstitutionModelNuc = JC | HKY p1 p2 ... | GTR p1 p2 ...
' ' . A substitution model has an alphabet , a name , and a list
data SubstitutionModel = SubstitutionModel
alphabet :: Alphabet,
name :: Name,
params :: Params,
stationaryDistribution :: R.StationaryDistribution,
exchangeabilityMatrix :: R.ExchangeabilityMatrix
}
deriving (Show, Read)
rateMatrix :: SubstitutionModel -> R.RateMatrix
rateMatrix sm =
R.fromExchangeabilityMatrix
(exchangeabilityMatrix sm)
(stationaryDistribution sm)
totalRate :: SubstitutionModel -> Double
totalRate sm = R.totalRate (rateMatrix sm)
normalizeSumVec :: V.Vector Double -> V.Vector Double
normalizeSumVec v = V.map (/ s) v
where
s = V.sum v
# INLINE normalizeSumVec #
| Create normalized ' SubstitutionModel ' . See ' normalize ' .
substitutionModel ::
Alphabet ->
Name ->
Params ->
R.StationaryDistribution ->
R.ExchangeabilityMatrix ->
SubstitutionModel
substitutionModel c n ps d e =
if R.isValid d
then normalize $ SubstitutionModel c n ps (normalizeSumVec d) e
else
error $
"substitionModel: Stationary distribution does not sum to 1.0: "
++ show d
scale :: Double -> SubstitutionModel -> SubstitutionModel
scale r sm = sm {exchangeabilityMatrix = em'}
where
em' = LinAlg.scale r $ exchangeabilityMatrix sm
| Normalize a substitution model , so that , on average , one substitution
normalize :: SubstitutionModel -> SubstitutionModel
normalize sm = scale (1.0 / r) sm where r = totalRate sm
appendName :: Name -> SubstitutionModel -> SubstitutionModel
appendName n sm = sm {name = n'} where n' = name sm <> n
|
3c564f499e6ee868b5871e4a0ccec2e9120e3a458603983d266f091f83839aaf | icicle-lang/p-ambiata | Foldable.hs | # LANGUAGE NoImplicitPrelude #
module P.Foldable (
findMapM
, head
) where
import Control.Monad
import Data.Foldable
import Data.Function ((.))
import Data.Maybe
-- | Apply a monadic, 'Maybe'-producing function to each element in order and
-- return the first 'Just' result, or 'Nothing' if the function never
-- succeeds.
findMapM :: (Monad m, Foldable f) => (a -> m (Maybe b)) -> f a -> m (Maybe b)
findMapM f = foldr step (return Nothing)
  where
    step a rest = f a >>= maybe rest (return . Just)
-- | The first element of a container, or 'Nothing' when it is empty.
head :: (Foldable f) => f a -> Maybe a
head = foldr (\a _ -> Just a) Nothing
| null | https://raw.githubusercontent.com/icicle-lang/p-ambiata/3098e411c9d521321e866b2f9637113223ef41d1/src/P/Foldable.hs | haskell | # LANGUAGE NoImplicitPrelude #
module P.Foldable (
findMapM
, head
) where
import Control.Monad
import Data.Foldable
import Data.Function ((.))
import Data.Maybe
findMapM :: (Monad m, Foldable f) => (a -> m (Maybe b)) -> f a -> m (Maybe b)
findMapM f = foldr (\a a' -> f a >>= maybe a' (return . Just)) (return Nothing)
head :: (Foldable f) => f a -> Maybe a
head = foldr (\x _ -> return x) Nothing
|
|
79393658c3ed7d8369ad3dbdc78f00fa804a6aa2d534d276e8446af8ae5ca63d | janestreet/async_smtp | message.ml | module Stable = struct
open Core.Core_stable
open Email_message.Email_message_stable
open Async_smtp_types.Async_smtp_types_stable
module Time = Time_float_unix.Stable
module Unstable_mail_log = Mail_log
module Mail_log = Mail_log.Stable
module Retry_interval = Smtp_envelope.Retry_interval
module Quarantine_reason = Quarantine_reason.Stable
module Id = struct
module V1 = struct
include String.V1
let to_string t = t
let of_string t = t
let%expect_test _ =
print_endline [%bin_digest: t];
[%expect {| d9a8da25d5656b016fb4dbdc2e4197fb |}]
;;
end
end
module Status = struct
module V1 = struct
type t =
[ `Send_now
| `Send_at of Time.V1.t
| `Sending
| `Frozen
| `Removed
| `Quarantined of Quarantine_reason.V1.t
| `Delivered
]
[@@deriving sexp, bin_io]
let%expect_test _ =
print_endline [%bin_digest: t];
[%expect {| 424465fabd3656a7dfa206491ab934af |}]
;;
end
end
module V1 = struct
type t =
{ spool_dir : string
; id : Id.V1.t
; flows : Mail_log.Flows.V1.t [@default Unstable_mail_log.Flows.none]
; parent_id : Smtp_envelope.Id.V1.t
; spool_date : Time.V1.t
; next_hop_choices : [ `Inet of Host_and_port.V1.t ] list
; mutable retry_intervals : Retry_interval.V2.t list
; mutable remaining_recipients : Email_address.V1.t list
; mutable failed_recipients : Email_address.V1.t list
; mutable relay_attempts : (Time.V1.t * Error.V1.t) list
; mutable status : Status.V1.t
; mutable envelope_info : Smtp_envelope.Info.V1.t
}
[@@deriving sexp, bin_io]
let%expect_test _ =
print_endline [%bin_digest: t];
[%expect {| 49b13cef6568275307ed409f67857b25 |}]
;;
end
module V2 = struct
type t =
{ spool_dir : string
; id : Id.V1.t
; flows : Mail_log.Flows.V1.t [@default Unstable_mail_log.Flows.none]
; parent_id : Smtp_envelope.Id.V1.t
; spool_date : Time.V1.t
; next_hop_choices : [ `Inet of Host_and_port.V1.t ] list
; mutable retry_intervals : Retry_interval.V2.t list
; mutable remaining_recipients : Email_address.V1.t list
; mutable failed_recipients : Email_address.V1.t list
; mutable relay_attempts : (Time.V1.t * Error.V1.t) list
; mutable status : Status.V1.t
; mutable envelope_info : Smtp_envelope.Info.V2.t
}
[@@deriving sexp, bin_io]
let of_v1 (v1 : V1.t) =
{ spool_dir = v1.spool_dir
; id = v1.id
; flows = v1.flows
; parent_id = v1.parent_id
; spool_date = v1.spool_date
; next_hop_choices = v1.next_hop_choices
; retry_intervals = v1.retry_intervals
; remaining_recipients = v1.remaining_recipients
; failed_recipients = v1.failed_recipients
; relay_attempts = v1.relay_attempts
; status = v1.status
; envelope_info = Smtp_envelope.Info.V2.of_v1 v1.envelope_info
}
;;
let%expect_test _ =
print_endline [%bin_digest: t];
[%expect {| 586728abcc44ce512a1d7ef9abb4f35b |}]
;;
end
module V3 = struct
type t =
{ spool_dir : string
; id : Id.V1.t
; flows : Mail_log.Flows.V1.t [@default Unstable_mail_log.Flows.none]
; parent_id : Smtp_envelope.Id.V1.t
; spool_date : Time.V1.t
; next_hop_choices : Host_and_port.V1.t list
; mutable retry_intervals : Retry_interval.V2.t list
; mutable remaining_recipients : Email_address.V1.t list
; mutable failed_recipients : Email_address.V1.t list
; mutable relay_attempts : (Time.V1.t * Error.V1.t) list
; mutable status : Status.V1.t
; mutable envelope_info : Smtp_envelope.Info.V2.t
}
[@@deriving sexp, bin_io]
let of_v2 (v2 : V2.t) =
{ spool_dir = v2.spool_dir
; id = v2.id
; flows = v2.flows
; parent_id = v2.parent_id
; spool_date = v2.spool_date
; next_hop_choices = Core.List.map v2.next_hop_choices ~f:(fun (`Inet i) -> i)
; retry_intervals = v2.retry_intervals
; remaining_recipients = v2.remaining_recipients
; failed_recipients = v2.failed_recipients
; relay_attempts = v2.relay_attempts
; status = v2.status
; envelope_info = v2.envelope_info
}
;;
let of_v1 v1 = of_v2 (V2.of_v1 v1)
let%expect_test _ =
print_endline [%bin_digest: t];
[%expect {| 7c91581c5678eddbae053a4a79d731a0 |}]
;;
end
end
open Core
open Async
open Async_smtp_types
module Time = Time_float_unix
(* Includes parent id and an incrementing counter. *)
module Id = struct
  include String

  (* Process-local counter appended to every generated id.
     NOTE(review): uniqueness relies on this in-process counter plus the
     parent envelope id -- confirm that no two processes generate ids for
     the same spool concurrently. *)
  let counter = ref 0

  (* [create ~original_msg] is "<parent-envelope-id>-<encoded-counter>",
     where the counter is urlbase64-encoded to 6 characters. *)
  let create ~original_msg =
    let parent_id = Smtp_envelope.id original_msg in
    let t =
      sprintf
        !"%{Smtp_envelope.Id}-%s"
        parent_id
        (Smtp_envelope.Id.urlbase64_encode_float ~length:6 (!counter |> Int.to_float)
         |> Smtp_envelope.Id.to_string)
    in
    incr counter;
    t
  ;;
end
(* Re-export the stable status type as the current one. *)
module Status = Stable.Status.V1

(* The queues a message can live in; each maps to a subdirectory of the
   spool (see [to_dirname]). *)
module Queue = struct
  type t =
    | Active
    | Frozen
    | Removed
    | Quarantine
  [@@deriving sexp, enumerate, compare]

  (* On-disk subdirectory name for each queue. *)
  let to_dirname = function
    | Active -> "active"
    | Frozen -> "frozen"
    | Removed -> "removed"
    | Quarantine -> "quarantine"
  ;;

  (* Queue in which a message with the given status lives. [`Delivered]
     messages live in no queue, hence [None]. *)
  let of_status status =
    match status with
    | `Frozen -> Some Frozen
    | `Send_now | `Send_at _ | `Sending -> Some Active
    | `Removed -> Some Removed
    | `Quarantined _ -> Some Quarantine
    | `Delivered -> None
  ;;

  (* Like [of_status], but returns an explicit error instead of [None] for
     statuses with no associated queue. *)
  let of_status' status =
    match of_status status with
    | Some queue -> Ok queue
    | None ->
      Or_error.error_s
        [%message "Specified status not associated with a queue" (status : Status.t)]
  ;;
end
(* On-disk encoding of the message body: an email whose header names and body
   lines are passed through [Dot_escaping] on the way out ([`Encode]) and
   back ([`Decode]). NOTE(review): the exact transparency guarantees are
   defined by [Dot_escaping] -- confirm there. *)
module Data = struct
  type t = Email.t

  (* Encode/decode every header *name*; header values pass through unchanged
     and no normalization is applied ([~normalize:`None]). *)
  let map_headers headers ~encode_or_decode =
    let f =
      match encode_or_decode with
      | `Encode -> fun s -> Dot_escaping.encode_line_string s |> String_monoid.to_string
      | `Decode -> Dot_escaping.decode_line_string
    in
    Email_headers.map' ~normalize:`None headers ~f:(fun ~name ~value -> f name, value)
  ;;

  (* Transform the raw body line-by-line. Encoding joins lines with CRLF;
     decoding joins them with LF. *)
  let map_raw_content_bstr body ~encode_or_decode =
    let eol, f =
      match encode_or_decode with
      | `Encode -> "\r\n", Dot_escaping.encode_line_bigstring
      | `Decode ->
        "\n", fun s -> Dot_escaping.decode_line_bigstring s |> String_monoid.of_bigstring
    in
    (* Most likely, the output buffer will be the same length as the input buffer. Give
       ourselves some leeway to avoid having to resize. *)
    let buffer = Bigbuffer.create (Bigstring_shared.length body + 100) in
    let add_transformed_line line =
      String_monoid.output_bigbuffer (f (Bigstring_shared.to_bigstring line)) buffer
    in
    let rec loop seq =
      match Sequence.hd seq with
      | None -> ()
      | Some line ->
        add_transformed_line line;
        (match Sequence.tl seq with
         | None -> ()
         | Some tail ->
           (* Peek the sequence so we don't add an eol marker for the last line. *)
           if Option.is_some (Sequence.hd tail) then Bigbuffer.add_string buffer eol;
           loop tail)
    in
    loop (Bigstring_shared.lines_seq ~include_empty_last_line:() body);
    Bigstring_shared.of_bigbuffer_volatile buffer
  ;;

  (* Lift [map_raw_content_bstr] over an optional raw-content payload. *)
  let map_raw_content raw_content ~encode_or_decode =
    Option.map
      (Email.Raw_content.Expert.to_bigstring_shared_option raw_content)
      ~f:(map_raw_content_bstr ~encode_or_decode)
    |> Email.Raw_content.Expert.of_bigstring_shared_option
  ;;

  (* Apply the header and body transformations to a whole email. *)
  let map_email t ~encode_or_decode =
    Email.create
      ~headers:(map_headers (Email.headers t) ~encode_or_decode)
      ~raw_content:(map_raw_content (Email.raw_content t) ~encode_or_decode)
  ;;

  (* Conversions between the on-disk (encoded) and in-memory (decoded) forms. *)
  let to_email = map_email ~encode_or_decode:`Decode
  let of_email = map_email ~encode_or_decode:`Encode

  (* Read an encoded email from [path]. *)
  let load path =
    Deferred.Or_error.try_with
      ~run:`Schedule
      ~rest:`Log
      (fun () ->
        let%bind contents = Reader.file_contents path in
        return (Email.of_string contents))
  ;;

  (* Write [t] to [path] with CRLF line endings (raw content excepted) and
     fsync, so the spool survives a crash. *)
  let save ?temp_file t path =
    Deferred.Or_error.try_with
      ~run:`Schedule
      ~rest:`Log
      (fun () -> Email.save ?temp_file ~fsync:true ~eol_except_raw_content:`CRLF t path)
  ;;
end
(* A value of type t should only be modified via [On_disk_spool]. This guarantees
   that all changes are properly flushed to disk. *)
(* [t] re-exports [Stable.V3.t] so the in-memory and serialized (sexp)
   representations stay in lockstep. *)
type t = Stable.V3.t =
  { spool_dir : string
  ; id : Id.t
  ; flows : Mail_log.Flows.t
  ; parent_id : Smtp_envelope.Id.t
  ; spool_date : Time.t
  ; next_hop_choices : Host_and_port.t list
  ; mutable retry_intervals : Smtp_envelope.Retry_interval.t list
  ; mutable remaining_recipients : Email_address.Stable.V1.t list
  ; mutable failed_recipients : Email_address.Stable.V1.t list
  ; mutable relay_attempts : (Time.t * Error.t) list
  ; mutable status : Status.t
  ; mutable envelope_info : Smtp_envelope.Info.t
  }
[@@deriving fields, sexp_of]

(* type alias to make code more readable below *)
type meta = t [@@deriving sexp_of]

(* Structural comparison via the sexp representation (no derived [compare]
   for [t]). *)
let compare t1 t2 = Sexp.compare (sexp_of_t t1) (sexp_of_t t2)
(* The externally visible status: a [`Send_at] time that has already passed
   is reported as [`Send_now]; everything else is reported as stored. *)
let status t =
  match t.status with
  | `Send_at time -> if Time.(time < now ()) then `Send_now else `Send_at time
  | other -> other
;;
(* How long the message has been sitting on the spool. *)
let time_on_spool t = Time.diff (Time.now ()) t.spool_date

(* Most recent relay attempt, if any (attempts are consed on, newest first). *)
let last_relay_attempt t = List.hd t.relay_attempts

(* In-memory mutators. Per the comment on [t], persisting these changes to
   disk is [On_disk_spool]'s responsibility. *)
let set_status t x = t.status <- x
let set_remaining_recipients t x = t.remaining_recipients <- x
let set_failed_recipients t x = t.failed_recipients <- x
let set_retry_intervals t x = t.retry_intervals <- x

(* Prepend [x] to the existing retry intervals. *)
let add_retry_intervals t x = t.retry_intervals <- x @ t.retry_intervals

(* Record a (time, error) relay attempt, newest first. *)
let add_relay_attempt t x = t.relay_attempts <- x :: t.relay_attempts

(* Requeue every failed recipient for another delivery attempt. *)
let move_failed_recipients_to_remaining_recipients t =
  t.remaining_recipients <- t.remaining_recipients @ t.failed_recipients;
  t.failed_recipients <- []
;;
(* Build the spooled representation of every envelope in [envelope_batch]:
   one (metadata, on-disk data, routed envelope) triple per envelope. The
   email body is dot-escape-encoded once and shared by all resulting
   messages. [gen_id] supplies each message's unique id and may fail, in
   which case the whole batch fails. *)
let of_envelope_batch
  envelope_batch
  ~gen_id
  ~spool_dir
  ~spool_date
  ~failed_recipients
  ~relay_attempts
  ~parent_id
  ~status
  ~flows
  =
  let email_body = Smtp_envelope.Routed.Batch.email_body envelope_batch in
  (* We make sure to only map the email body once. *)
  let data_raw_content = Data.map_raw_content email_body ~encode_or_decode:`Encode in
  Deferred.Or_error.List.map
    ~how:`Sequential
    (Smtp_envelope.Routed.Batch.envelopes envelope_batch)
    ~f:(fun envelope ->
      (* Header names must be encoded the same way as the body lines. *)
      let headers =
        Smtp_envelope.Bodiless.Routed.headers envelope
        |> Data.map_headers ~encode_or_decode:`Encode
      in
      let envelope_info = Smtp_envelope.Bodiless.Routed.envelope_info envelope in
      let data = Email.create ~headers ~raw_content:data_raw_content in
      let next_hop_choices = Smtp_envelope.Bodiless.Routed.next_hop_choices envelope in
      let retry_intervals = Smtp_envelope.Bodiless.Routed.retry_intervals envelope in
      let remaining_recipients = Smtp_envelope.Bodiless.Routed.recipients envelope in
      gen_id ()
      >>|? fun id ->
      ( { spool_dir
        ; id
        ; flows
        ; parent_id
        ; spool_date
        ; next_hop_choices
        ; retry_intervals
        ; remaining_recipients
        ; failed_recipients
        ; relay_attempts
        ; status
        ; envelope_info
        }
      , data
      , Smtp_envelope.Routed.of_bodiless envelope email_body ))
;;
(* Instantiation of the [Multispool] interface: how spooled messages are
   named, serialized, and throttled on disk. *)
module On_disk = struct
  module Metadata = struct
    module T = struct
      include Stable.V3

      (* Read metadata written by any stable version: try V3 first, then
         fall back to V2 and V1 (upgrading on the fly). If all three fail,
         raise with every parse error for debugging. *)
      let t_of_sexp sexp =
        try t_of_sexp sexp with
        | error_from_v3 ->
          (try Stable.V2.t_of_sexp sexp |> Stable.V3.of_v2 with
           | error_from_v2 ->
             (try Stable.V1.t_of_sexp sexp |> Stable.V3.of_v1 with
              | error_from_v1 ->
                raise_s
                  [%message
                    "[On_disk.Metadata.t_of_sexp]"
                      (error_from_v3 : exn)
                      (error_from_v2 : exn)
                      (error_from_v1 : exn)]))
      ;;
    end

    include T
    include Sexpable.To_stringable (T)
  end

  module Data = Data
  module Queue = Queue

  (* On-disk file names are the spooled-message ids derived from the
     original envelope; the retry [~attempt] is ignored because [Id.create]
     already produces a fresh id each call. *)
  module Name_generator = struct
    module Unique_name = Id

    type t = Smtp_envelope.t

    let next original_msg ~attempt:_ = Id.create ~original_msg
  end

  module Throttle = struct
    (* Don't hit the open-files system limit *)
    let t = Throttle.create ~continue_on_error:true ~max_concurrent_jobs:400
    let enqueue f = Throttle.enqueue t f
  end
end

(* The concrete spool over the definitions above. *)
module On_disk_spool = Multispool.Make (On_disk)
| null | https://raw.githubusercontent.com/janestreet/async_smtp/ffe145b51d7c3d49705deb51ce33a53a21083403/src/message.ml | ocaml | Includes parent id and an incrementing counter.
Most likely, the output buffer will be the same length as the input buffer. Give
ourselves some leeway to avoid having to resize.
Peek the sequence so we don't add an eol marker for the last line.
A value of type t should only be modified via [On_disk_spool]. This guarantees
that all changes are properly flushed to disk.
type alias to make code more readable below
We make sure to only map the email body once. | module Stable = struct
open Core.Core_stable
open Email_message.Email_message_stable
open Async_smtp_types.Async_smtp_types_stable
module Time = Time_float_unix.Stable
module Unstable_mail_log = Mail_log
module Mail_log = Mail_log.Stable
module Retry_interval = Smtp_envelope.Retry_interval
module Quarantine_reason = Quarantine_reason.Stable
module Id = struct
module V1 = struct
include String.V1
let to_string t = t
let of_string t = t
let%expect_test _ =
print_endline [%bin_digest: t];
[%expect {| d9a8da25d5656b016fb4dbdc2e4197fb |}]
;;
end
end
module Status = struct
module V1 = struct
type t =
[ `Send_now
| `Send_at of Time.V1.t
| `Sending
| `Frozen
| `Removed
| `Quarantined of Quarantine_reason.V1.t
| `Delivered
]
[@@deriving sexp, bin_io]
let%expect_test _ =
print_endline [%bin_digest: t];
[%expect {| 424465fabd3656a7dfa206491ab934af |}]
;;
end
end
module V1 = struct
type t =
{ spool_dir : string
; id : Id.V1.t
; flows : Mail_log.Flows.V1.t [@default Unstable_mail_log.Flows.none]
; parent_id : Smtp_envelope.Id.V1.t
; spool_date : Time.V1.t
; next_hop_choices : [ `Inet of Host_and_port.V1.t ] list
; mutable retry_intervals : Retry_interval.V2.t list
; mutable remaining_recipients : Email_address.V1.t list
; mutable failed_recipients : Email_address.V1.t list
; mutable relay_attempts : (Time.V1.t * Error.V1.t) list
; mutable status : Status.V1.t
; mutable envelope_info : Smtp_envelope.Info.V1.t
}
[@@deriving sexp, bin_io]
let%expect_test _ =
print_endline [%bin_digest: t];
[%expect {| 49b13cef6568275307ed409f67857b25 |}]
;;
end
module V2 = struct
type t =
{ spool_dir : string
; id : Id.V1.t
; flows : Mail_log.Flows.V1.t [@default Unstable_mail_log.Flows.none]
; parent_id : Smtp_envelope.Id.V1.t
; spool_date : Time.V1.t
; next_hop_choices : [ `Inet of Host_and_port.V1.t ] list
; mutable retry_intervals : Retry_interval.V2.t list
; mutable remaining_recipients : Email_address.V1.t list
; mutable failed_recipients : Email_address.V1.t list
; mutable relay_attempts : (Time.V1.t * Error.V1.t) list
; mutable status : Status.V1.t
; mutable envelope_info : Smtp_envelope.Info.V2.t
}
[@@deriving sexp, bin_io]
let of_v1 (v1 : V1.t) =
{ spool_dir = v1.spool_dir
; id = v1.id
; flows = v1.flows
; parent_id = v1.parent_id
; spool_date = v1.spool_date
; next_hop_choices = v1.next_hop_choices
; retry_intervals = v1.retry_intervals
; remaining_recipients = v1.remaining_recipients
; failed_recipients = v1.failed_recipients
; relay_attempts = v1.relay_attempts
; status = v1.status
; envelope_info = Smtp_envelope.Info.V2.of_v1 v1.envelope_info
}
;;
let%expect_test _ =
print_endline [%bin_digest: t];
[%expect {| 586728abcc44ce512a1d7ef9abb4f35b |}]
;;
end
module V3 = struct
type t =
{ spool_dir : string
; id : Id.V1.t
; flows : Mail_log.Flows.V1.t [@default Unstable_mail_log.Flows.none]
; parent_id : Smtp_envelope.Id.V1.t
; spool_date : Time.V1.t
; next_hop_choices : Host_and_port.V1.t list
; mutable retry_intervals : Retry_interval.V2.t list
; mutable remaining_recipients : Email_address.V1.t list
; mutable failed_recipients : Email_address.V1.t list
; mutable relay_attempts : (Time.V1.t * Error.V1.t) list
; mutable status : Status.V1.t
; mutable envelope_info : Smtp_envelope.Info.V2.t
}
[@@deriving sexp, bin_io]
let of_v2 (v2 : V2.t) =
{ spool_dir = v2.spool_dir
; id = v2.id
; flows = v2.flows
; parent_id = v2.parent_id
; spool_date = v2.spool_date
; next_hop_choices = Core.List.map v2.next_hop_choices ~f:(fun (`Inet i) -> i)
; retry_intervals = v2.retry_intervals
; remaining_recipients = v2.remaining_recipients
; failed_recipients = v2.failed_recipients
; relay_attempts = v2.relay_attempts
; status = v2.status
; envelope_info = v2.envelope_info
}
;;
let of_v1 v1 = of_v2 (V2.of_v1 v1)
let%expect_test _ =
print_endline [%bin_digest: t];
[%expect {| 7c91581c5678eddbae053a4a79d731a0 |}]
;;
end
end
open Core
open Async
open Async_smtp_types
module Time = Time_float_unix
module Id = struct
include String
let counter = ref 0
let create ~original_msg =
let parent_id = Smtp_envelope.id original_msg in
let t =
sprintf
!"%{Smtp_envelope.Id}-%s"
parent_id
(Smtp_envelope.Id.urlbase64_encode_float ~length:6 (!counter |> Int.to_float)
|> Smtp_envelope.Id.to_string)
in
incr counter;
t
;;
end
module Status = Stable.Status.V1
module Queue = struct
type t =
| Active
| Frozen
| Removed
| Quarantine
[@@deriving sexp, enumerate, compare]
let to_dirname = function
| Active -> "active"
| Frozen -> "frozen"
| Removed -> "removed"
| Quarantine -> "quarantine"
;;
let of_status status =
match status with
| `Frozen -> Some Frozen
| `Send_now | `Send_at _ | `Sending -> Some Active
| `Removed -> Some Removed
| `Quarantined _ -> Some Quarantine
| `Delivered -> None
;;
let of_status' status =
match of_status status with
| Some queue -> Ok queue
| None ->
Or_error.error_s
[%message "Specified status not associated with a queue" (status : Status.t)]
;;
end
(* On-disk representation of message contents.  SMTP "dot escaping" is applied
   when writing ([`Encode]) and stripped when reading ([`Decode]), and line
   endings differ per direction (CRLF on disk, LF in memory). *)
module Data = struct
  type t = Email.t

  (* Transforms header names line by line for dot escaping/unescaping.
     [~normalize:`None] preserves header whitespace exactly. *)
  let map_headers headers ~encode_or_decode =
    let f =
      match encode_or_decode with
      | `Encode -> fun s -> Dot_escaping.encode_line_string s |> String_monoid.to_string
      | `Decode -> Dot_escaping.decode_line_string
    in
    Email_headers.map' ~normalize:`None headers ~f:(fun ~name ~value -> f name, value)
  ;;

  (* Dot-escapes/unescapes the raw body line by line, joining lines with CRLF
     when encoding and LF when decoding.  The [+ 100] is slack for the few
     extra bytes escaping may add. *)
  let map_raw_content_bstr body ~encode_or_decode =
    let eol, f =
      match encode_or_decode with
      | `Encode -> "\r\n", Dot_escaping.encode_line_bigstring
      | `Decode ->
        "\n", fun s -> Dot_escaping.decode_line_bigstring s |> String_monoid.of_bigstring
    in
    let buffer = Bigbuffer.create (Bigstring_shared.length body + 100) in
    let add_transformed_line line =
      String_monoid.output_bigbuffer (f (Bigstring_shared.to_bigstring line)) buffer
    in
    (* Emits a separator only between lines, never after the last one. *)
    let rec loop seq =
      match Sequence.hd seq with
      | None -> ()
      | Some line ->
        add_transformed_line line;
        (match Sequence.tl seq with
         | None -> ()
         | Some tail ->
           if Option.is_some (Sequence.hd tail) then Bigbuffer.add_string buffer eol;
           loop tail)
    in
    loop (Bigstring_shared.lines_seq ~include_empty_last_line:() body);
    Bigstring_shared.of_bigbuffer_volatile buffer
  ;;

  let map_raw_content raw_content ~encode_or_decode =
    Option.map
      (Email.Raw_content.Expert.to_bigstring_shared_option raw_content)
      ~f:(map_raw_content_bstr ~encode_or_decode)
    |> Email.Raw_content.Expert.of_bigstring_shared_option
  ;;

  let map_email t ~encode_or_decode =
    Email.create
      ~headers:(map_headers (Email.headers t) ~encode_or_decode)
      ~raw_content:(map_raw_content (Email.raw_content t) ~encode_or_decode)
  ;;

  let to_email = map_email ~encode_or_decode:`Decode
  let of_email = map_email ~encode_or_decode:`Encode

  let load path =
    Deferred.Or_error.try_with
      ~run:`Schedule
      ~rest:`Log
      (fun () ->
        let%bind contents = Reader.file_contents path in
        return (Email.of_string contents))
  ;;

  (* [~fsync:true]: flush to disk before reporting success, so a spooled
     message survives a crash. *)
  let save ?temp_file t path =
    Deferred.Or_error.try_with
      ~run:`Schedule
      ~rest:`Log
      (fun () -> Email.save ?temp_file ~fsync:true ~eol_except_raw_content:`CRLF t path)
  ;;
end
(* Spooled-message metadata.  Mutable fields are updated in place as delivery
   attempts are made; the record is persisted via its sexp representation. *)
type t = Stable.V3.t =
  { spool_dir : string
  ; id : Id.t
  ; flows : Mail_log.Flows.t
  ; parent_id : Smtp_envelope.Id.t
  ; spool_date : Time.t
  ; next_hop_choices : Host_and_port.t list
  ; mutable retry_intervals : Smtp_envelope.Retry_interval.t list
  ; mutable remaining_recipients : Email_address.Stable.V1.t list
  ; mutable failed_recipients : Email_address.Stable.V1.t list
  ; mutable relay_attempts : (Time.t * Error.t) list
  ; mutable status : Status.t
  ; mutable envelope_info : Smtp_envelope.Info.t
  }
[@@deriving fields, sexp_of]

type meta = t [@@deriving sexp_of]

(* Structural comparison via the sexp representation; convenient but not
   cheap — fine for tests and ordering, not hot paths. *)
let compare t1 t2 = Sexp.compare (sexp_of_t t1) (sexp_of_t t2)

(* [`Send_at time] is presented as [`Send_now] once [time] has passed. *)
let status t =
  match t.status with
  | `Send_at time when Time.(time < now ()) -> `Send_now
  | status -> status
;;

let time_on_spool t = Time.diff (Time.now ()) t.spool_date
let last_relay_attempt t = List.hd t.relay_attempts
let set_status t x = t.status <- x
let set_remaining_recipients t x = t.remaining_recipients <- x
let set_failed_recipients t x = t.failed_recipients <- x
let set_retry_intervals t x = t.retry_intervals <- x
let add_retry_intervals t x = t.retry_intervals <- x @ t.retry_intervals
let add_relay_attempt t x = t.relay_attempts <- x :: t.relay_attempts

(* Requeues previously failed recipients for another delivery attempt. *)
let move_failed_recipients_to_remaining_recipients t =
  t.remaining_recipients <- t.remaining_recipients @ t.failed_recipients;
  t.failed_recipients <- []
;;
(* Builds one metadata record [t] (plus its on-disk [Data.t] and the full
   routed envelope) per envelope in [envelope_batch].  The shared email body
   is dot-encoded once and reused across all envelopes of the batch.  IDs are
   generated sequentially ([~how:`Sequential]), so a failure aborts the whole
   batch. *)
let of_envelope_batch
  envelope_batch
  ~gen_id
  ~spool_dir
  ~spool_date
  ~failed_recipients
  ~relay_attempts
  ~parent_id
  ~status
  ~flows
  =
  let email_body = Smtp_envelope.Routed.Batch.email_body envelope_batch in
  let data_raw_content = Data.map_raw_content email_body ~encode_or_decode:`Encode in
  Deferred.Or_error.List.map
    ~how:`Sequential
    (Smtp_envelope.Routed.Batch.envelopes envelope_batch)
    ~f:(fun envelope ->
      let headers =
        Smtp_envelope.Bodiless.Routed.headers envelope
        |> Data.map_headers ~encode_or_decode:`Encode
      in
      let envelope_info = Smtp_envelope.Bodiless.Routed.envelope_info envelope in
      let data = Email.create ~headers ~raw_content:data_raw_content in
      let next_hop_choices = Smtp_envelope.Bodiless.Routed.next_hop_choices envelope in
      let retry_intervals = Smtp_envelope.Bodiless.Routed.retry_intervals envelope in
      let remaining_recipients = Smtp_envelope.Bodiless.Routed.recipients envelope in
      gen_id ()
      >>|? fun id ->
      ( { spool_dir
        ; id
        ; flows
        ; parent_id
        ; spool_date
        ; next_hop_choices
        ; retry_intervals
        ; remaining_recipients
        ; failed_recipients
        ; relay_attempts
        ; status
        ; envelope_info
        }
      , data
      , Smtp_envelope.Routed.of_bodiless envelope email_body ))
;;
(* Wiring for [Multispool]: how metadata, message data, queue names and
   unique filenames are represented on disk. *)
module On_disk = struct
  module Metadata = struct
    module T = struct
      include Stable.V3

      (* Reads current-version sexps, falling back to V2 then V1 for spools
         written by older binaries.  If all three fail, reports every error. *)
      let t_of_sexp sexp =
        try t_of_sexp sexp with
        | error_from_v3 ->
          (try Stable.V2.t_of_sexp sexp |> Stable.V3.of_v2 with
           | error_from_v2 ->
             (try Stable.V1.t_of_sexp sexp |> Stable.V3.of_v1 with
              | error_from_v1 ->
                raise_s
                  [%message
                    "[On_disk.Metadata.t_of_sexp]"
                      (error_from_v3 : exn)
                      (error_from_v2 : exn)
                      (error_from_v1 : exn)]))
      ;;
    end

    include T
    include Sexpable.To_stringable (T)
  end

  module Data = Data
  module Queue = Queue

  module Name_generator = struct
    module Unique_name = Id

    type t = Smtp_envelope.t

    let next original_msg ~attempt:_ = Id.create ~original_msg
  end

  module Throttle = struct
    (* Don't hit the open files system limit. *)
    let t = Throttle.create ~continue_on_error:true ~max_concurrent_jobs:400
    let enqueue f = Throttle.enqueue t f
  end
end

module On_disk_spool = Multispool.Make (On_disk)
|
d6b43243b09e90b6aa224f1d04e684d634ca0a345cb408d0893f957e7a4fa42b | uncomplicate/deep-diamond | tensor.clj | Copyright ( c ) . All rights reserved .
;; The use and distribution terms for this software are covered by the
;; Eclipse Public License 1.0 (-1.0.php) or later
;; which can be found in the file LICENSE at the root of this distribution.
;; By using this software in any fashion, you are agreeing to be bound by
;; the terms of this license.
;; You must not remove this notice, or any other, from this software.
(ns uncomplicate.diamond.internal.cudnn.tensor
(:require [uncomplicate.commons
[core :refer [Releaseable release let-release with-release Info info Viewable view]]
[utils :refer [dragan-says-ex]]]
[uncomplicate.clojurecuda.core :refer [memcpy-host! mem-alloc]]
[uncomplicate.clojurecuda.internal.protocols :as cuda]
[uncomplicate.neanderthal
[core :refer [transfer! dim vctr copy! native]]
[block :refer [entry-width buffer data-accessor count-entries create-data-source
offset cast-prim]]
[cuda :refer [factory-by-type]]]
[uncomplicate.neanderthal.internal.api
:refer [flow equals-block compatible? set-all MemoryContext
EngineProvider Container DataAccessorProvider FactoryProvider
native-factory zero raw host factory fits? DenseContainer view-vctr]]
[uncomplicate.neanderthal.internal.device.cublock
:refer [cu-block-vector set-vector! get-vector!]]
[uncomplicate.diamond.tensor
:as diamond
:refer [TensorDescriptor shape layout data-type TensorContainer Transfer
input output Revert ConnectorCreator connector view-tz batch-size]]
[uncomplicate.diamond.internal
[protocols
:refer [TensorFactory DiamondFactoryProvider create-tensor create-tensor-desc
diamond-factory neanderthal-factory tensor-engine native-diamond-factory
Offset DiffTransfer diff-input diff-output BatchDescriptor
batch-index]]
[utils :refer [check-contiguous default-strides]]]
[uncomplicate.diamond.internal.dnnl
[tensor :as dnnl-tensor]
[protocols :as dnnl :refer [data]]
[core :as dnnl-core :refer [memory-desc]]]
[uncomplicate.diamond.internal.cudnn
[core :refer [tensor-descriptor equal-desc? size dims strides transform-tensor]]
[protocols :refer [DescProvider desc handle]]
[constants :refer [cudnn-format]]])
(:import [clojure.lang IFn ExceptionInfo AFn]
[uncomplicate.neanderthal.internal.api Block Changeable DataAccessor VectorSpace]
uncomplicate.diamond.tensor.TensorDescriptorImpl
uncomplicate.diamond.internal.dnnl.tensor.DnnlTensor
[uncomplicate.diamond.internal.cudnn.impl CUTensorDescriptor CUFilterDescriptor]))
;; Error messages for operations that would silently degrade into slow
;; host<->device memory transfers.
(def ^{:private true :const true} INEFFICIENT_OPERATION_MSG
  "This operation would be inefficient because it uses memory transfer.
Please use transfer! to be reminded of that.")

(def ^{:private true :const true} DOES_NOT_FIT_MSG
  "Source and destination shapes have to fit.")

;; Guard for operations that require host (CPU) memory access.
(defn ^:private not-available []
  (throw (UnsupportedOperationException. "Not available in CUDA. Please use a host instance.")))

;; Forward declarations for the mutually referring types/constructors below.
(declare ->CUDnnTensor cudnn-transformer cudnn-tensor ->CUDnnShuffler
         cudnn-batcher cudnn-tensor-desc)
(defn cudnn-shape-padding
  "Pads `shape` with 1s up to rank 4 (appended when `shape` is a vector),
  since cuDNN descriptors are at least 4-D. Rank-4+ shapes pass through
  unchanged."
  [shape]
  (let [missing (- 4 (count shape))]
    (into shape (repeat missing 1))))
;; DescProvider coercions: build a cuDNN tensor descriptor from common
;; shape-like values (collections, numbers, maps, descriptor records).
;; :float is the default data type when none is specified.
(extend-type java.util.Collection
  DescProvider
  (desc [this]
    (cudnn-tensor-desc (shape this) :float nil)))

(extend-type java.lang.Number
  DescProvider
  (desc [this]
    (cudnn-tensor-desc [this] :float nil)))

(extend-type java.util.Map
  DescProvider
  (desc [this]
    (cudnn-tensor-desc (shape this) (or (:data-type this) :float) (layout this))))

(extend-type TensorDescriptorImpl
  DescProvider
  (desc [this]
    (cudnn-tensor-desc (.shape this) (or (.data-type this) :float) (layout this))))

;; Catch-all: anything else that satisfies shape/data-type/layout.
(extend-type Object
  DescProvider
  (desc [this]
    (cudnn-tensor-desc (shape this) (or (data-type this) :float) (layout this))))
;; TensorDescriptor and connector support for plain cuDNN tensor descriptors.
;; connector returns a view when the descriptors already match; otherwise it
;; allocates a fresh input tensor and wires up a transformer into the output.
(extend-type CUTensorDescriptor
  TensorDescriptor
  (shape [this]
    (.dims this))
  (data-type [this]
    (.data-type this))
  (layout [this]
    (.strides this))
  ConnectorCreator
  (connector [in-desc out]
    (if (equal-desc? in-desc (input out))
      (view out)
      (let [out-tz (output out)]
        (if (equal-desc? in-desc out-tz)
          (view out-tz)
          (let [fact (diamond-factory out-tz)]
            (let-release [in-tz (cudnn-tensor fact (view in-desc) (batch-index out-tz))]
              (cudnn-transformer (handle fact) in-tz (view out-tz)))))))))

(defmethod print-method CUTensorDescriptor
  [^CUTensorDescriptor d ^java.io.Writer w]
  (.write w (pr-str {:shape (.dims d) :data-type (.data-type d) :layout (.strides d)})))
;; Creates a cuDNN tensor descriptor. `format` may be a cuDNN layout keyword
;; (e.g. :nchw) or an explicit stride vector; when absent, default (dense)
;; strides are derived from the shape. Shapes of rank < 4 are padded with 1s,
;; going through a DNNL memory descriptor to compute the matching strides.
(defn cudnn-tensor-desc [shape dtype format]
  (let [format (or format (default-strides shape))]
    (if (or (cudnn-format format) (and (sequential? format) (<= 4 (count format))))
      (tensor-descriptor shape dtype format)
      (with-release [md (memory-desc shape dtype format)]
        (let [padding-4 (repeat (- 4 (count shape)) 1)]
          (tensor-descriptor (into shape padding-4) dtype (into (layout md) padding-4)))))))
;; TensorDescriptor and connector support for cuDNN filter descriptors
;; (convolution weights); their layout is a cuDNN format keyword, not strides.
(extend-type CUFilterDescriptor
  TensorDescriptor
  (shape [this]
    (.dims this))
  (data-type [this]
    (.data-type this))
  (layout [this]
    (.format this))
  ConnectorCreator
  (connector [in-desc out]
    (if (equal-desc? in-desc (input out))
      (view out)
      (let [out-tz (output out)]
        (if (equal-desc? in-desc out-tz)
          (view out-tz)
          (let [fact (diamond-factory out-tz)]
            (let-release [in-tz (cudnn-tensor fact (view in-desc) (batch-index out-tz))]
              (cudnn-transformer (handle fact) in-tz (view out-tz)))))))))

(defmethod print-method CUFilterDescriptor
  [^CUFilterDescriptor d ^java.io.Writer w]
  (.write w (pr-str {:shape (.dims d) :data-type (.data-type d)
                     :format (.format d)})))
;; =================== Transformer ==============================================
;; Connector that copies/converts data between two cuDNN tensors with
;; (possibly) different descriptors via cudnnTransformTensor.
(deftype CUDnnTransformer [cudnn-hdl in-tz out-tz in-da out-da]
  Releaseable
  (release [_]
    (release in-tz)
    (release out-tz))
  Object
  (hashCode [_]
    (-> (hash :transformer)
        (hash-combine (shape in-tz))
        (hash-combine (shape out-tz))))
  (equals [_ other]
    (and (instance? CUDnnTransformer other)
         (= (shape in-tz) (shape (.in-tz ^CUDnnTransformer other)))
         (= out-tz (.out-tz ^CUDnnTransformer other))))
  (toString [this]
    (str {:input in-tz
          :output out-tz}))
  Revert
  (revert [_]
    ;; Transformer in the opposite direction, over views of the same tensors.
    (cudnn-transformer cudnn-hdl (view out-tz) (view in-tz)))
  Viewable
  (view [_]
    (cudnn-transformer cudnn-hdl (view in-tz) (view out-tz)))
  Transfer
  (input [_]
    in-tz)
  (output [_]
    out-tz)
  IFn
  (invoke [this]
    (.invoke this cudnn-hdl)
    out-tz)
  (invoke [_ cudnn-hdl2]
    ;; out = 1.0 * in + 0.0 * out; entry offsets are converted to bytes.
    (transform-tensor cudnn-hdl2
                      (cast-prim in-da 1.0)
                      in-tz (buffer in-tz) (* (offset in-tz) (entry-width in-da))
                      (cast-prim out-da 0.0)
                      out-tz (buffer out-tz) (* (offset out-tz) (entry-width out-da)))
    out-tz)
  (applyTo [this xs]
    (AFn/applyToHelper this xs))
  ConnectorCreator
  (connector [this out-desc]
    (if (equal-desc? out-tz out-desc)
      this
      (connector in-tz out-desc))))
(defn cudnn-transformer
  "Builds a CUDnnTransformer between `in-tz` and `out-tz`, caching both
  tensors' data accessors (used for the scaling factors on invocation)."
  [cudnn-hdl in-tz out-tz]
  (let [in-da (data-accessor in-tz)
        out-da (data-accessor out-tz)]
    (->CUDnnTransformer cudnn-hdl in-tz out-tz in-da out-da)))
;; =================== Batcher ==================================================
;; Copies mb-size-sized sub-batches between two tensors at arbitrary batch
;; offsets (e.g. assembling mini-batches from a larger data tensor).
;; src-sub/dst-sub are mb-size views used as the actual transform operands.
(deftype CUDnnBatcher [cudnn-hdl src-sub dst-sub src-tz dst-tz ^long mb-size
                       ^long src-cnt ^long src-stride-n ^long src-entry-width
                       ^long dst-cnt ^long dst-stride-n ^long dst-entry-width]
  Releaseable
  (release [_]
    (release src-tz)
    (release dst-tz)
    (release src-sub)
    (release dst-sub))
  Object
  (hashCode [_]
    (-> (hash :batcher)
        (hash-combine (shape src-sub))
        (hash-combine (shape dst-sub))))
  (equals [_ other]
    (and (instance? CUDnnBatcher other)
         (= (shape dst-tz) (shape (.dst-tz ^CUDnnBatcher other)))
         (= src-tz (.src-tz ^CUDnnBatcher other))))
  (toString [_]
    (str {:input src-tz
          :output dst-tz
          :mb-size mb-size}))
  Viewable
  (view [_]
    (cudnn-batcher cudnn-hdl (view src-tz) (view dst-tz) mb-size))
  Transfer
  (input [_]
    src-tz)
  (output [_]
    dst-tz)
  IFn
  (invoke [this]
    (.invoke this cudnn-hdl 0 0))
  (invoke [this src-n]
    (.invoke this cudnn-hdl src-n 0))
  (invoke [this src-n dst-n]
    (.invoke this cudnn-hdl src-n dst-n))
  (invoke [_ cudnn-hdl2 src-n dst-n]
    ;; Bounds-checked copy of one mb-size chunk from source batch position
    ;; src-n into destination batch position dst-n; offsets are in bytes.
    (let [src-n (long src-n)
          dst-n (long dst-n)]
      (if (and (<= 0 src-n (- src-cnt mb-size)) (<= 0 dst-n (- dst-cnt mb-size)))
        (transform-tensor cudnn-hdl2
                          (cast-prim (data-accessor src-sub) 1.0) src-sub (buffer src-sub)
                          (+ (offset src-sub) (* src-entry-width src-stride-n src-n))
                          (cast-prim (data-accessor dst-sub) 0.0) dst-sub (buffer dst-sub)
                          (+ (offset dst-sub) (* dst-entry-width dst-stride-n dst-n)))
        (dragan-says-ex "Requested subtensor is outside of bounds."
                        {:src-index src-n :src-cnt src-cnt :dst-index dst-n :dst-cnt dst-cnt
                         :mb-size mb-size})))
    dst-tz)
  (applyTo [this xs]
    (AFn/applyToHelper this xs))
  ConnectorCreator
  (connector [this dst-desc]
    (if (equal-desc? dst-tz dst-desc)
      this
      (connector src-tz dst-desc))))
;; Builds a CUDnnBatcher, pre-computing the batch-dimension counts, strides
;; and entry widths for both tensors. mb-size is clamped to at least 1.
(defn cudnn-batcher [cudnn-hdl src-tz dst-tz mb-size]
  (let [mb-size (max 1 (long mb-size))]
    (let-release [src-sub (view-tz src-tz mb-size)
                  dst-sub (view-tz dst-tz mb-size)]
      (->CUDnnBatcher cudnn-hdl src-sub dst-sub
                      src-tz dst-tz mb-size
                      ((dims src-tz) (batch-index src-tz)) ((strides src-sub) (batch-index src-tz))
                      (entry-width (data-accessor src-sub))
                      ((dims dst-tz) (batch-index dst-tz)) ((strides dst-sub) (batch-index dst-tz))
                      (entry-width (data-accessor dst-sub))))))
;; Copies randomly (or explicitly) selected batch entries from the source
;; tensor into consecutive positions of the destination, via an internal
;; batcher of mini-batch size 1.
(deftype CUDnnShuffler [cudnn-hdl batcher batch-size mb-size]
  Releaseable
  (release [_]
    (release batcher))
  ;; FIX: hashCode/equals/toString belong under Object; the marker was
  ;; missing, which leaves them (invalidly) attached to Releaseable.
  Object
  (hashCode [_]
    (hash-combine (hash :shuffler) (hash batcher)))
  (equals [_ other]
    (and (instance? CUDnnShuffler other)
         (= batch-size (.batch-size ^CUDnnShuffler other))
         (= mb-size (.mb-size ^CUDnnShuffler other))
         (= batcher (.batcher ^CUDnnShuffler other))))
  (toString [this]
    (str {:input (input this)
          :output (output this)
          :mb-size mb-size}))
  Viewable
  (view [_]
    (->CUDnnShuffler cudnn-hdl (view batcher) batch-size mb-size))
  Transfer
  (input [_]
    (input batcher))
  (output [_]
    (output batcher))
  IFn
  (invoke [_]
    ;; Fills each destination slot with a randomly chosen source entry.
    (dotimes [i mb-size]
      (batcher cudnn-hdl (rand-int batch-size) i))
    (output batcher))
  (invoke [this cols]
    (.invoke this cudnn-hdl cols))
  (invoke [_ cudnn-hdl2 cols]
    ;; Copies the explicitly listed source entries (cols) into destination
    ;; slots 0..n-1. FIX: use the caller-supplied handle cudnn-hdl2, which
    ;; was previously ignored in favor of the captured cudnn-hdl.
    (loop [src-n (first cols) cols (rest cols) dst-n 0]
      (when src-n
        (batcher cudnn-hdl2 src-n dst-n)
        (recur (first cols) (rest cols) (inc dst-n))))
    (output batcher))
  (applyTo [this xs]
    (AFn/applyToHelper this xs))
  ConnectorCreator
  (connector [this dst-desc]
    (if (equal-desc? (output batcher) dst-desc)
      this
      (connector batcher dst-desc))))
(defn cudnn-shuffler
  "Creates a shuffler over `src-tz`/`dst-tz`, backed by a batcher that moves
  one batch entry at a time."
  [cudnn-hdl src-tz dst-tz]
  (let [one-batcher (cudnn-batcher cudnn-hdl src-tz dst-tz 1)]
    (->CUDnnShuffler cudnn-hdl one-batcher (batch-size src-tz) (batch-size dst-tz))))
;; ================================ Tensor ======================================
;; The CUDA/cuDNN tensor type. Wraps a CUDA device buffer with a cuDNN tensor
;; descriptor and a Neanderthal vector view (vect-view) used for BLAS-level
;; operations and transfers. `ofst` is in entries; `n-index` marks the batch
;; dimension; `master` marks buffer ownership.
(deftype CUDnnTensor [diamond-fact eng vect-view master buf ^long ofst
                      ^CUTensorDescriptor cu-desc ^long n-index]
  Object
  (hashCode [x]
    (-> (hash :CUDnnTensor) (hash-combine (hash cu-desc))))
  (equals [x y]
    ;; Equal only when both are contiguous tensors with equal descriptors
    ;; and element-wise equal contents.
    (or (identical? x y)
        (and (instance? CUDnnTensor y) (equal-desc? cu-desc (desc y))
             (.isContiguous x) (.isContiguous ^CUDnnTensor y)
             (= (view-vctr x) (view-vctr y)))))
  (toString [this]
    (pr-str {:shape (.dims cu-desc) :data-type (.data-type cu-desc)
             :layout (.strides cu-desc)}))
  Info
  (info [x]
    {:data-type (.data-type cu-desc)
     :class (class x)
     :device :cuda
     :shape (.dims cu-desc)
     :offset ofst
     :strides (.strides cu-desc)
     :master master
     :engine eng})
  (info [x info-type]
    (case info-type
      :data-type (.data-type cu-desc)
      :class (class x)
      :device :cuda
      :shape (.dims cu-desc)
      :offset ofst
      :strides (.strides cu-desc)
      :master master
      :engine eng
      nil))
  Releaseable
  (release [_]
    ;; Only the owner releases the underlying CUDA buffer; views share it.
    (when master
      (release buf))
    (release cu-desc)
    true)
  EngineProvider
  (engine [_]
    eng)
  DiamondFactoryProvider
  (diamond-factory [_]
    diamond-fact)
  (native-diamond-factory [this]
    (native-diamond-factory diamond-fact))
  FactoryProvider
  (factory [_]
    (factory vect-view))
  (native-factory [_]
    (native-factory vect-view))
  DataAccessorProvider
  (data-accessor [_]
    (data-accessor vect-view))
  Container
  (raw [x]
    (raw x diamond-fact))
  (raw [_ fact]
    (let [df (diamond-factory fact)]
      (create-tensor df (create-tensor-desc df cu-desc) n-index false)))
  (zero [x]
    (zero x diamond-fact))
  (zero [_ fact]
    (let [df (diamond-factory fact)]
      (create-tensor df (create-tensor-desc df cu-desc) n-index true)))
  (host [x]
    ;; Downloads device data into a freshly created native (CPU) tensor.
    (let-release [res (raw x (native-diamond-factory diamond-fact))]
      (get-vector! vect-view (view-vctr res))
      res))
  (native [x]
    (host x))
  MemoryContext
  (compatible? [_ y]
    (compatible? (factory vect-view) (factory y)))
  (fits? [_ y]
    (= (.dims cu-desc) (cudnn-shape-padding (shape y))))
  (device [_]
    :cuda)
  VectorSpace
  (dim [_]
    (apply * (.dims cu-desc)))
  Block
  (buffer [_]
    buf)
  (offset [_]
    ofst)
  (stride [_]
    (dragan-says-ex "Tensors do not have a single stride. You're doing something wrong."))
  (isContiguous [_]
    ;; Contiguous iff the descriptor's byte size equals the dense size.
    (= (size cu-desc)
       (apply * (entry-width (data-accessor vect-view)) (.dims cu-desc))))
  Changeable
  (setBoxed [x val]
    (set-all eng val x)
    x)
  (setBoxed [x i val]
    (dragan-says-ex INEFFICIENT_OPERATION_MSG))
  (alter [_ _]
    (dragan-says-ex INEFFICIENT_OPERATION_MSG))
  (alter [_ _ _]
    (dragan-says-ex INEFFICIENT_OPERATION_MSG))
  Revert
  (revert [this]
    this)
  Transfer
  (input [this]
    this)
  (output [this]
    this)
  DiffTransfer
  (diff-input [this]
    this)
  (diff-output [this]
    this)
  IFn
  (invoke [this]
    this)
  (applyTo [this xs]
    (AFn/applyToHelper this xs))
  DescProvider
  (desc [_]
    cu-desc)
  TensorDescriptor
  (shape [_]
    (.dims cu-desc))
  (data-type [_]
    (.data-type cu-desc))
  (layout [_]
    (.strides cu-desc))
  BatchDescriptor
  (batch-index [_]
    n-index)
  Viewable
  (view [_]
    ;; A non-master view over the same buffer and a copied descriptor.
    (->CUDnnTensor diamond-fact eng vect-view false buf ofst (view cu-desc) n-index))
  DenseContainer
  (view-vctr [_]
    vect-view)
  TensorContainer
  (view-tz [this]
    this)
  (view-tz [_ sub]
    ;; A number narrows the batch dimension to `sub` entries; a descriptor
    ;; reinterprets shape/type/layout over the same buffer.
    (let-release [sub-desc (if (number? sub)
                             (cudnn-tensor-desc (assoc (dims cu-desc) n-index sub)
                                                (.data-type cu-desc)
                                                (.strides cu-desc))
                             (cudnn-tensor-desc (shape sub)
                                                (or (data-type sub) (.data-type cu-desc))
                                                (or (layout sub) (.strides cu-desc))))]
      (cudnn-tensor diamond-fact false buf ofst sub-desc n-index)))
  Offset
  (offset [this new-ofst]
    ;; A shifted, non-owning view at ofst + new-ofst entries.
    (cudnn-tensor diamond-fact false buf
                  (+ ofst (long new-ofst))
                  (view cu-desc) n-index))
  ConnectorCreator
  (connector [in-tz out-desc]
    (if (equal-desc? cu-desc out-desc)
      (view in-tz)
      (let-release [out-tz (cudnn-tensor diamond-fact out-desc (batch-index in-tz))]
        (cudnn-transformer (handle diamond-fact) (view in-tz) out-tz)))))
;; Tensor constructors. The 6-arity wraps an existing CUDA buffer (`master`
;; controls ownership); the 2/3-arity allocates a fresh device buffer sized
;; by the descriptor. Throws when the buffer is too small past the offset.
(defn cudnn-tensor
  ([diamond-fact master buf ofst tdesc n-index]
   (let [tdesc (desc tdesc)
         neand-fact (neanderthal-factory diamond-fact (data-type tdesc))
         tz-cnt (apply * (dims tdesc))]
     (if (<= 0 (size tdesc) (- (long (cuda/size buf)) (long ofst)))
       (let-release [vect-view (cu-block-vector neand-fact false buf tz-cnt ofst 1)]
         (->CUDnnTensor diamond-fact
                        (tensor-engine diamond-fact (data-type tdesc))
                        vect-view master buf ofst tdesc n-index))
       (throw (ex-info "Insufficient buffer size."
                       {:size (size tdesc) :buffer-size (cuda/size buf)})))))
  ([diamond-fact master buf ofst tdesc]
   (cudnn-tensor diamond-fact master buf ofst tdesc 0))
  ([diamond-fact tdesc n-index]
   (let [tdesc (desc tdesc)]
     ;; (max 1 ...) guards against zero-sized allocations.
     (let-release [buf (mem-alloc (max 1 (size tdesc)))]
       (cudnn-tensor diamond-fact true buf 0 tdesc n-index))))
  ([diamond-fact tdesc]
   (cudnn-tensor diamond-fact tdesc 0)))
;; Prints the descriptor summary followed by a bounded sample of entries
;; (fetched via a temporary host copy).
(defmethod print-method CUDnnTensor
  [^CUDnnTensor x ^java.io.Writer w]
  (.write w (str x))
  (.write w "\n")
  (with-release [native-x (native (view-vctr x))]
    ;; FIX: *print-length* is nil by default, and (take nil ...) throws NPE;
    ;; fall back to a small fixed number of entries in that case.
    (print-method (doall (take (or *print-length* 16) (seq native-x))) w)))
(defmethod transfer! [CUDnnTensor CUDnnTensor]
  [source destination]
  (copy! source destination))

;; Host (DNNL) -> device (cuDNN). Fast path when data type and (4-padded)
;; strides match; otherwise stage through an intermediate DNNL tensor in the
;; destination's layout before uploading.
(defmethod transfer! [DnnlTensor CUDnnTensor]
  [src dest]
  (check-contiguous src dest)
  (if (fits? dest src)
    (if (and (= (data-type src) (data-type dest))
             (= (cudnn-shape-padding (layout src)) (strides dest)))
      (set-vector! (view-vctr src) (view-vctr dest))
      (with-release [dnnl-mid (raw dest src)
                     dnnl-view (view-tz src (diamond/desc (cudnn-shape-padding (shape src))
                                                          (data-type src)
                                                          (cudnn-shape-padding (layout src))))]
        (dnnl-core/offset! (buffer dnnl-view) (dnnl-core/offset (buffer src)))
        (transfer! dnnl-view dnnl-mid)
        (set-vector! (view-vctr dnnl-mid) (view-vctr dest))))
    (dragan-says-ex DOES_NOT_FIT_MSG
                    {:source (dnnl/desc src) :destination (desc dest)
                     :compatible? (compatible? src dest)}))
  dest)
;; Device (cuDNN) -> host (DNNL). Mirror of the upload path: direct download
;; when data type and (4-padded) strides match; otherwise download into an
;; intermediate DNNL tensor, then reorder on the host.
(defmethod transfer! [CUDnnTensor DnnlTensor]
  [src dest]
  (check-contiguous src dest)
  (if (fits? src dest)
    (if (and (= (data-type src) (data-type dest))
             (= (strides src) (cudnn-shape-padding (layout dest))))
      (get-vector! (view-vctr src) (view-vctr dest))
      (with-release [dnnl-mid (raw src dest)
                     dnnl-view (view-tz dest (diamond/desc (cudnn-shape-padding (shape dest))
                                                           (data-type dest)
                                                           (cudnn-shape-padding (layout dest))))]
        (dnnl-core/offset! (buffer dnnl-view) (dnnl-core/offset (buffer dest)))
        (get-vector! (view-vctr src) (view-vctr dnnl-mid))
        (transfer! dnnl-mid dnnl-view)))
    (dragan-says-ex DOES_NOT_FIT_MSG
                    {:source (desc src) :destination (dnnl/desc dest)
                     :compatible? (compatible? src dest)}))
  dest)
;; Fallbacks: route all other transfers through a host-side copy.
(defmethod transfer! [CUDnnTensor Object]
  [source destination]
  (with-release [src (host source)]
    (transfer! src destination)))

(defmethod transfer! [Object CUDnnTensor]
  [source cuda]
  (check-contiguous cuda)
  (with-release [dest (raw cuda (native-diamond-factory cuda))]
    (transfer! source dest)
    (set-vector! (view-vctr dest) (view-vctr cuda))
    cuda))

;; Transformers transfer into their input and out of their output.
(defmethod transfer! [Object CUDnnTransformer]
  [source destination]
  (transfer! source (input destination))
  destination)

(defmethod transfer! [CUDnnTransformer Object]
  [source destination]
  (transfer! (output source) destination))
| null | https://raw.githubusercontent.com/uncomplicate/deep-diamond/2fd8a3c1919b80aec481974b2aceaa0a1d26d007/src/clojure/uncomplicate/diamond/internal/cudnn/tensor.clj | clojure | The use and distribution terms for this software are covered by the
Eclipse Public License 1.0 (-1.0.php) or later
which can be found in the file LICENSE at the root of this distribution.
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
You must not remove this notice, or any other, from this software.
=================== Transformer ==============================================
=================== Batcher ==================================================
================================ Tensor ====================================== | Copyright ( c ) . All rights reserved .
(ns uncomplicate.diamond.internal.cudnn.tensor
(:require [uncomplicate.commons
[core :refer [Releaseable release let-release with-release Info info Viewable view]]
[utils :refer [dragan-says-ex]]]
[uncomplicate.clojurecuda.core :refer [memcpy-host! mem-alloc]]
[uncomplicate.clojurecuda.internal.protocols :as cuda]
[uncomplicate.neanderthal
[core :refer [transfer! dim vctr copy! native]]
[block :refer [entry-width buffer data-accessor count-entries create-data-source
offset cast-prim]]
[cuda :refer [factory-by-type]]]
[uncomplicate.neanderthal.internal.api
:refer [flow equals-block compatible? set-all MemoryContext
EngineProvider Container DataAccessorProvider FactoryProvider
native-factory zero raw host factory fits? DenseContainer view-vctr]]
[uncomplicate.neanderthal.internal.device.cublock
:refer [cu-block-vector set-vector! get-vector!]]
[uncomplicate.diamond.tensor
:as diamond
:refer [TensorDescriptor shape layout data-type TensorContainer Transfer
input output Revert ConnectorCreator connector view-tz batch-size]]
[uncomplicate.diamond.internal
[protocols
:refer [TensorFactory DiamondFactoryProvider create-tensor create-tensor-desc
diamond-factory neanderthal-factory tensor-engine native-diamond-factory
Offset DiffTransfer diff-input diff-output BatchDescriptor
batch-index]]
[utils :refer [check-contiguous default-strides]]]
[uncomplicate.diamond.internal.dnnl
[tensor :as dnnl-tensor]
[protocols :as dnnl :refer [data]]
[core :as dnnl-core :refer [memory-desc]]]
[uncomplicate.diamond.internal.cudnn
[core :refer [tensor-descriptor equal-desc? size dims strides transform-tensor]]
[protocols :refer [DescProvider desc handle]]
[constants :refer [cudnn-format]]])
(:import [clojure.lang IFn ExceptionInfo AFn]
[uncomplicate.neanderthal.internal.api Block Changeable DataAccessor VectorSpace]
uncomplicate.diamond.tensor.TensorDescriptorImpl
uncomplicate.diamond.internal.dnnl.tensor.DnnlTensor
[uncomplicate.diamond.internal.cudnn.impl CUTensorDescriptor CUFilterDescriptor]))
(def ^{:private true :const true} INEFFICIENT_OPERATION_MSG
"This operation would be inefficient because it uses memory transfer.
Please use transfer! to be reminded of that.")
(def ^{:private true :const true} DOES_NOT_FIT_MSG
"Source and destination shapes have to fit.")
(defn ^:private not-available []
(throw (UnsupportedOperationException. "Not available in CUDA. Please use a host instance.")))
(declare ->CUDnnTensor cudnn-transformer cudnn-tensor ->CUDnnShuffler
cudnn-batcher cudnn-tensor-desc)
(defn cudnn-shape-padding [shape]
(into shape (repeat (- 4 (count shape)) 1)))
(extend-type java.util.Collection
DescProvider
(desc [this]
(cudnn-tensor-desc (shape this) :float nil)))
(extend-type java.lang.Number
DescProvider
(desc [this]
(cudnn-tensor-desc [this] :float nil)))
(extend-type java.util.Map
DescProvider
(desc [this]
(cudnn-tensor-desc (shape this) (or (:data-type this) :float) (layout this))))
(extend-type TensorDescriptorImpl
DescProvider
(desc [this]
(cudnn-tensor-desc (.shape this) (or (.data-type this) :float) (layout this))))
(extend-type Object
DescProvider
(desc [this]
(cudnn-tensor-desc (shape this) (or (data-type this) :float) (layout this))))
(extend-type CUTensorDescriptor
TensorDescriptor
(shape [this]
(.dims this))
(data-type [this]
(.data-type this))
(layout [this]
(.strides this))
ConnectorCreator
(connector [in-desc out]
(if (equal-desc? in-desc (input out))
(view out)
(let [out-tz (output out)]
(if (equal-desc? in-desc out-tz)
(view out-tz)
(let [fact (diamond-factory out-tz)]
(let-release [in-tz (cudnn-tensor fact (view in-desc) (batch-index out-tz))]
(cudnn-transformer (handle fact) in-tz (view out-tz)))))))))
(defmethod print-method CUTensorDescriptor
[^CUTensorDescriptor d ^java.io.Writer w]
(.write w (pr-str {:shape (.dims d) :data-type (.data-type d) :layout (.strides d)})))
(defn cudnn-tensor-desc [shape dtype format]
(let [format (or format (default-strides shape))]
(if (or (cudnn-format format) (and (sequential? format) (<= 4 (count format))))
(tensor-descriptor shape dtype format)
(with-release [md (memory-desc shape dtype format)]
(let [padding-4 (repeat (- 4 (count shape)) 1)]
(tensor-descriptor (into shape padding-4) dtype (into (layout md) padding-4)))))))
(extend-type CUFilterDescriptor
TensorDescriptor
(shape [this]
(.dims this))
(data-type [this]
(.data-type this))
(layout [this]
(.format this))
ConnectorCreator
(connector [in-desc out]
(if (equal-desc? in-desc (input out))
(view out)
(let [out-tz (output out)]
(if (equal-desc? in-desc out-tz)
(view out-tz)
(let [fact (diamond-factory out-tz)]
(let-release [in-tz (cudnn-tensor fact (view in-desc) (batch-index out-tz))]
(cudnn-transformer (handle fact) in-tz (view out-tz)))))))))
(defmethod print-method CUFilterDescriptor
[^CUFilterDescriptor d ^java.io.Writer w]
(.write w (pr-str {:shape (.dims d) :data-type (.data-type d)
:format (.format d)})))
(deftype CUDnnTransformer [cudnn-hdl in-tz out-tz in-da out-da]
Releaseable
(release [_]
(release in-tz)
(release out-tz))
Object
(hashCode [_]
(-> (hash :transformer)
(hash-combine (shape in-tz))
(hash-combine (shape out-tz))))
(equals [_ other]
(and (instance? CUDnnTransformer other)
(= (shape in-tz) (shape (.in-tz ^CUDnnTransformer other)))
(= out-tz (.out-tz ^CUDnnTransformer other))))
(toString [this]
(str {:input in-tz
:output out-tz}))
Revert
(revert [_]
(cudnn-transformer cudnn-hdl (view out-tz) (view in-tz)))
Viewable
(view [_]
(cudnn-transformer cudnn-hdl (view in-tz) (view out-tz)))
Transfer
(input [_]
in-tz)
(output [_]
out-tz)
IFn
(invoke [this]
(.invoke this cudnn-hdl)
out-tz)
(invoke [_ cudnn-hdl2]
(transform-tensor cudnn-hdl2
(cast-prim in-da 1.0)
in-tz (buffer in-tz) (* (offset in-tz) (entry-width in-da))
(cast-prim out-da 0.0)
out-tz (buffer out-tz) (* (offset out-tz) (entry-width out-da)))
out-tz)
(applyTo [this xs]
(AFn/applyToHelper this xs))
ConnectorCreator
(connector [this out-desc]
(if (equal-desc? out-tz out-desc)
this
(connector in-tz out-desc))))
(defn cudnn-transformer [cudnn-hdl in-tz out-tz]
(->CUDnnTransformer cudnn-hdl in-tz out-tz (data-accessor in-tz) (data-accessor out-tz)))
(deftype CUDnnBatcher [cudnn-hdl src-sub dst-sub src-tz dst-tz ^long mb-size
^long src-cnt ^long src-stride-n ^long src-entry-width
^long dst-cnt ^long dst-stride-n ^long dst-entry-width]
Releaseable
(release [_]
(release src-tz)
(release dst-tz)
(release src-sub)
(release dst-sub))
Object
(hashCode [_]
(-> (hash :batcher)
(hash-combine (shape src-sub))
(hash-combine (shape dst-sub))))
(equals [_ other]
(and (instance? CUDnnBatcher other)
(= (shape dst-tz) (shape (.dst-tz ^CUDnnBatcher other)))
(= src-tz (.src-tz ^CUDnnBatcher other))))
(toString [_]
(str {:input src-tz
:output dst-tz
:mb-size mb-size}))
Viewable
(view [_]
(cudnn-batcher cudnn-hdl (view src-tz) (view dst-tz) mb-size))
Transfer
(input [_]
src-tz)
(output [_]
dst-tz)
IFn
(invoke [this]
(.invoke this cudnn-hdl 0 0))
(invoke [this src-n]
(.invoke this cudnn-hdl src-n 0))
(invoke [this src-n dst-n]
(.invoke this cudnn-hdl src-n dst-n))
(invoke [_ cudnn-hdl2 src-n dst-n]
(let [src-n (long src-n)
dst-n (long dst-n)]
(if (and (<= 0 src-n (- src-cnt mb-size)) (<= 0 dst-n (- dst-cnt mb-size)))
(transform-tensor cudnn-hdl2
(cast-prim (data-accessor src-sub) 1.0) src-sub (buffer src-sub)
(+ (offset src-sub) (* src-entry-width src-stride-n src-n))
(cast-prim (data-accessor dst-sub) 0.0) dst-sub (buffer dst-sub)
(+ (offset dst-sub) (* dst-entry-width dst-stride-n dst-n)))
(dragan-says-ex "Requested subtensor is outside of bounds."
{:src-index src-n :src-cnt src-cnt :dst-index dst-n :dst-cnt dst-cnt
:mb-size mb-size})))
dst-tz)
(applyTo [this xs]
(AFn/applyToHelper this xs))
ConnectorCreator
(connector [this dst-desc]
(if (equal-desc? dst-tz dst-desc)
this
(connector src-tz dst-desc))))
(defn cudnn-batcher [cudnn-hdl src-tz dst-tz mb-size]
(let [mb-size (max 1 (long mb-size))]
(let-release [src-sub (view-tz src-tz mb-size)
dst-sub (view-tz dst-tz mb-size)]
(->CUDnnBatcher cudnn-hdl src-sub dst-sub
src-tz dst-tz mb-size
((dims src-tz) (batch-index src-tz)) ((strides src-sub) (batch-index src-tz))
(entry-width (data-accessor src-sub))
((dims dst-tz) (batch-index dst-tz)) ((strides dst-sub) (batch-index dst-tz))
(entry-width (data-accessor dst-sub))))))
;; Copies randomly (or explicitly) selected batch entries from the source
;; tensor into consecutive positions of the destination, via an internal
;; batcher of mini-batch size 1.
(deftype CUDnnShuffler [cudnn-hdl batcher batch-size mb-size]
  Releaseable
  (release [_]
    (release batcher))
  ;; FIX: hashCode/equals/toString belong under Object; the marker was
  ;; missing, which leaves them (invalidly) attached to Releaseable.
  Object
  (hashCode [_]
    (hash-combine (hash :shuffler) (hash batcher)))
  (equals [_ other]
    (and (instance? CUDnnShuffler other)
         (= batch-size (.batch-size ^CUDnnShuffler other))
         (= mb-size (.mb-size ^CUDnnShuffler other))
         (= batcher (.batcher ^CUDnnShuffler other))))
  (toString [this]
    (str {:input (input this)
          :output (output this)
          :mb-size mb-size}))
  Viewable
  (view [_]
    (->CUDnnShuffler cudnn-hdl (view batcher) batch-size mb-size))
  Transfer
  (input [_]
    (input batcher))
  (output [_]
    (output batcher))
  IFn
  (invoke [_]
    ;; Fills each destination slot with a randomly chosen source entry.
    (dotimes [i mb-size]
      (batcher cudnn-hdl (rand-int batch-size) i))
    (output batcher))
  (invoke [this cols]
    (.invoke this cudnn-hdl cols))
  (invoke [_ cudnn-hdl2 cols]
    ;; Copies the explicitly listed source entries (cols) into destination
    ;; slots 0..n-1. FIX: use the caller-supplied handle cudnn-hdl2, which
    ;; was previously ignored in favor of the captured cudnn-hdl.
    (loop [src-n (first cols) cols (rest cols) dst-n 0]
      (when src-n
        (batcher cudnn-hdl2 src-n dst-n)
        (recur (first cols) (rest cols) (inc dst-n))))
    (output batcher))
  (applyTo [this xs]
    (AFn/applyToHelper this xs))
  ConnectorCreator
  (connector [this dst-desc]
    (if (equal-desc? (output batcher) dst-desc)
      this
      (connector batcher dst-desc))))
(defn cudnn-shuffler [cudnn-hdl src-tz dst-tz]
(->CUDnnShuffler cudnn-hdl (cudnn-batcher cudnn-hdl src-tz dst-tz 1)
(batch-size src-tz) (batch-size dst-tz)))
;; cuDNN-backed tensor. Wraps a CUDA buffer `buf` (starting at offset
;; `ofst`) described by the cuDNN tensor descriptor `cu-desc`; `vect-view`
;; is a Neanderthal vector view over the same memory, and `master` records
;; whether this instance owns (and must release) the buffer. `n-index` is
;; the index of the batch dimension within the shape.
(deftype CUDnnTensor [diamond-fact eng vect-view master buf ^long ofst
                      ^CUTensorDescriptor cu-desc ^long n-index]
  Object
  (hashCode [x]
    (-> (hash :CUDnnTensor) (hash-combine (hash cu-desc))))
  (equals [x y]
    ;; Equal when descriptors match, both tensors are contiguous, and the
    ;; underlying vector contents compare equal.
    (or (identical? x y)
        (and (instance? CUDnnTensor y) (equal-desc? cu-desc (desc y))
             (.isContiguous x) (.isContiguous ^CUDnnTensor y)
             (= (view-vctr x) (view-vctr y)))))
  (toString [this]
    (pr-str {:shape (.dims cu-desc) :data-type (.data-type cu-desc)
             :layout (.strides cu-desc)}))
  Info
  (info [x]
    {:data-type (.data-type cu-desc)
     :class (class x)
     :device :cuda
     :shape (.dims cu-desc)
     :offset ofst
     :strides (.strides cu-desc)
     :master master
     :engine eng})
  (info [x info-type]
    ;; Single-key variant of the map above; nil for unknown keys.
    (case info-type
      :data-type (.data-type cu-desc)
      :class (class x)
      :device :cuda
      :shape (.dims cu-desc)
      :offset ofst
      :strides (.strides cu-desc)
      :master master
      :engine eng
      nil))
  Releaseable
  (release [_]
    ;; The buffer is freed only by the owning (master) tensor; the
    ;; descriptor is always released.
    (when master
      (release buf))
    (release cu-desc)
    true)
  EngineProvider
  (engine [_]
    eng)
  DiamondFactoryProvider
  (diamond-factory [_]
    diamond-fact)
  (native-diamond-factory [this]
    (native-diamond-factory diamond-fact))
  FactoryProvider
  (factory [_]
    (factory vect-view))
  (native-factory [_]
    (native-factory vect-view))
  DataAccessorProvider
  (data-accessor [_]
    (data-accessor vect-view))
  Container
  (raw [x]
    (raw x diamond-fact))
  (raw [_ fact]
    ;; Fresh uninitialized tensor with an equivalent descriptor in fact.
    (let [df (diamond-factory fact)]
      (create-tensor df (create-tensor-desc df cu-desc) n-index false)))
  (zero [x]
    (zero x diamond-fact))
  (zero [_ fact]
    ;; As raw, but zero-initialized.
    (let [df (diamond-factory fact)]
      (create-tensor df (create-tensor-desc df cu-desc) n-index true)))
  (host [x]
    ;; Download device memory into a freshly allocated native tensor.
    (let-release [res (raw x (native-diamond-factory diamond-fact))]
      (get-vector! vect-view (view-vctr res))
      res))
  (native [x]
    (host x))
  MemoryContext
  (compatible? [_ y]
    (compatible? (factory vect-view) (factory y)))
  (fits? [_ y]
    ;; Shapes are compared after padding y's shape to cuDNN's layout.
    (= (.dims cu-desc) (cudnn-shape-padding (shape y))))
  (device [_]
    :cuda)
  VectorSpace
  (dim [_]
    (apply * (.dims cu-desc)))
  Block
  (buffer [_]
    buf)
  (offset [_]
    ofst)
  (stride [_]
    (dragan-says-ex "Tensors do not have a single stride. You're doing something wrong."))
  (isContiguous [_]
    ;; Contiguous when the descriptor's size equals entry-width times the
    ;; element count, i.e. no padding/stride gaps.
    (= (size cu-desc)
       (apply * (entry-width (data-accessor vect-view)) (.dims cu-desc))))
  Changeable
  (setBoxed [x val]
    (set-all eng val x)
    x)
  (setBoxed [x i val]
    ;; Element-wise access on device memory is deliberately unsupported.
    (dragan-says-ex INEFFICIENT_OPERATION_MSG))
  (alter [_ _]
    (dragan-says-ex INEFFICIENT_OPERATION_MSG))
  (alter [_ _ _]
    (dragan-says-ex INEFFICIENT_OPERATION_MSG))
  Revert
  (revert [this]
    this)
  Transfer
  ;; A tensor is its own input and output.
  (input [this]
    this)
  (output [this]
    this)
  DiffTransfer
  (diff-input [this]
    this)
  (diff-output [this]
    this)
  IFn
  (invoke [this]
    this)
  (applyTo [this xs]
    (AFn/applyToHelper this xs))
  DescProvider
  (desc [_]
    cu-desc)
  TensorDescriptor
  (shape [_]
    (.dims cu-desc))
  (data-type [_]
    (.data-type cu-desc))
  (layout [_]
    (.strides cu-desc))
  BatchDescriptor
  (batch-index [_]
    n-index)
  Viewable
  (view [_]
    ;; Non-master view: shares buf and never frees it.
    (->CUDnnTensor diamond-fact eng vect-view false buf ofst (view cu-desc) n-index))
  DenseContainer
  (view-vctr [_]
    vect-view)
  TensorContainer
  (view-tz [this]
    this)
  (view-tz [_ sub]
    ;; `sub` is either a batch count (number; replaces the batch dimension)
    ;; or a full sub-descriptor whose missing fields default to this tensor's.
    (let-release [sub-desc (if (number? sub)
                             (cudnn-tensor-desc (assoc (dims cu-desc) n-index sub)
                                                (.data-type cu-desc)
                                                (.strides cu-desc))
                             (cudnn-tensor-desc (shape sub)
                                                (or (data-type sub) (.data-type cu-desc))
                                                (or (layout sub) (.strides cu-desc))))]
      (cudnn-tensor diamond-fact false buf ofst sub-desc n-index)))
  Offset
  (offset [this new-ofst]
    ;; Non-owning tensor shifted by new-ofst relative to this one.
    (cudnn-tensor diamond-fact false buf
                  (+ ofst (long new-ofst))
                  (view cu-desc) n-index))
  ConnectorCreator
  (connector [in-tz out-desc]
    ;; Identity connector (a view) when descriptors already match; otherwise
    ;; a transformer into a freshly allocated destination tensor.
    (if (equal-desc? cu-desc out-desc)
      (view in-tz)
      (let-release [out-tz (cudnn-tensor diamond-fact out-desc (batch-index in-tz))]
        (cudnn-transformer (handle diamond-fact) (view in-tz) out-tz)))))
;; Constructs a CUDnnTensor.
;; - 6/5-arity: wrap an existing CUDA buffer `buf` at offset `ofst`;
;;   `master` controls whether the tensor owns (frees) the buffer. Throws
;;   ex-info when the buffer is too small for the descriptor.
;; - 3/2-arity: allocate a fresh owning buffer of (size tdesc) bytes
;;   (at least 1, so zero-sized descriptors still get valid memory).
(defn cudnn-tensor
  ([diamond-fact master buf ofst tdesc n-index]
   (let [tdesc (desc tdesc)
         neand-fact (neanderthal-factory diamond-fact (data-type tdesc))
         tz-cnt (apply * (dims tdesc))]
     ;; The descriptor must fit in the buffer beyond the requested offset.
     (if (<= 0 (size tdesc) (- (long (cuda/size buf)) (long ofst)))
       (let-release [vect-view (cu-block-vector neand-fact false buf tz-cnt ofst 1)]
         (->CUDnnTensor diamond-fact
                        (tensor-engine diamond-fact (data-type tdesc))
                        vect-view master buf ofst tdesc n-index))
       (throw (ex-info "Insufficient buffer size."
                       {:size (size tdesc) :buffer-size (cuda/size buf)})))))
  ([diamond-fact master buf ofst tdesc]
   ;; Default batch index 0.
   (cudnn-tensor diamond-fact master buf ofst tdesc 0))
  ([diamond-fact tdesc n-index]
   (let [tdesc (desc tdesc)]
     (let-release [buf (mem-alloc (max 1 (size tdesc)))]
       (cudnn-tensor diamond-fact true buf 0 tdesc n-index))))
  ([diamond-fact tdesc]
   (cudnn-tensor diamond-fact tdesc 0)))
;; Prints the descriptor summary, then up to *print-length* entries after
;; copying the data to host memory (device memory cannot be read directly).
(defmethod print-method CUDnnTensor
  [^CUDnnTensor x ^java.io.Writer w]
  (.write w (str x))
  (.write w "\n")
  (with-release [native-x (native (view-vctr x))]
    (print-method (doall (take *print-length* (seq native-x))) w)))
;; Device-to-device transfer between two cuDNN tensors.
(defmethod transfer! [CUDnnTensor CUDnnTensor]
  [source destination]
  (copy! source destination))
;; Host (DNNL) -> device (cuDNN) transfer. Fast path uploads the host data
;; directly when data type and padded layout already match; otherwise the
;; data is first reordered on the host into an intermediate DNNL tensor
;; shaped like the destination, then uploaded. Returns dest.
(defmethod transfer! [DnnlTensor CUDnnTensor]
  [src dest]
  (check-contiguous src dest)
  (if (fits? dest src)
    (if (and (= (data-type src) (data-type dest))
             (= (cudnn-shape-padding (layout src)) (strides dest)))
      ;; Layouts agree: straight upload.
      (set-vector! (view-vctr src) (view-vctr dest))
      ;; Reorder on the host via a padded view and an intermediate tensor.
      (with-release [dnnl-mid (raw dest src)
                     dnnl-view (view-tz src (diamond/desc (cudnn-shape-padding (shape src))
                                                          (data-type src)
                                                          (cudnn-shape-padding (layout src))))]
        ;; Keep the view's buffer offset in sync with the source buffer.
        (dnnl-core/offset! (buffer dnnl-view) (dnnl-core/offset (buffer src)))
        (transfer! dnnl-view dnnl-mid)
        (set-vector! (view-vctr dnnl-mid) (view-vctr dest))))
    (dragan-says-ex DOES_NOT_FIT_MSG
                    {:source (dnnl/desc src) :destination (desc dest)
                     :compatible? (compatible? src dest)}))
  dest)
;; Device (cuDNN) -> host (DNNL) transfer; mirror image of the upload case.
;; Direct download when data type and padded layout match; otherwise
;; download into an intermediate tensor and reorder on the host. Returns dest.
(defmethod transfer! [CUDnnTensor DnnlTensor]
  [src dest]
  (check-contiguous src dest)
  (if (fits? src dest)
    (if (and (= (data-type src) (data-type dest))
             (= (strides src) (cudnn-shape-padding (layout dest))))
      ;; Layouts agree: straight download.
      (get-vector! (view-vctr src) (view-vctr dest))
      ;; Download, then reorder into a padded view of the destination.
      (with-release [dnnl-mid (raw src dest)
                     dnnl-view (view-tz dest (diamond/desc (cudnn-shape-padding (shape dest))
                                                           (data-type dest)
                                                           (cudnn-shape-padding (layout dest))))]
        (dnnl-core/offset! (buffer dnnl-view) (dnnl-core/offset (buffer dest)))
        (get-vector! (view-vctr src) (view-vctr dnnl-mid))
        (transfer! dnnl-mid dnnl-view)))
    (dragan-says-ex DOES_NOT_FIT_MSG
                    {:source (desc src) :destination (dnnl/desc dest)
                     :compatible? (compatible? src dest)}))
  dest)
;; Device -> arbitrary destination: stage through a host copy of the source.
(defmethod transfer! [CUDnnTensor Object]
  [source destination]
  (with-release [src (host source)]
    (transfer! src destination)))
;; Arbitrary source -> device: fill a native staging tensor shaped like the
;; destination, then upload it. Returns the cuda tensor.
(defmethod transfer! [Object CUDnnTensor]
  [source cuda]
  (check-contiguous cuda)
  (with-release [dest (raw cuda (native-diamond-factory cuda))]
    (transfer! source dest)
    (set-vector! (view-vctr dest) (view-vctr cuda))
    cuda))
;; Transferring into a transformer means filling its input tensor.
(defmethod transfer! [Object CUDnnTransformer]
  [source destination]
  (transfer! source (input destination))
  destination)
;; Transferring from a transformer reads its output tensor.
(defmethod transfer! [CUDnnTransformer Object]
  [source destination]
  (transfer! (output source) destination))
|
a46975ce01d5c947649ca0d44ac66ce89433db6950aafb47a7450a552f6413e9 | botsunit/bucs | bucs_app.erl | % @hidden
%% OTP `application' behaviour callback module for the bucs application.
-module(bucs_app).
-behaviour(application).
-export([start/2]).
-export([stop/1]).

%% application start/2 callback: delegate to the top-level supervisor.
start(_Type, _Args) ->
  bucs_sup:start_link().

%% application stop/1 callback: no cleanup needed.
stop(_State) ->
  ok.
| null | https://raw.githubusercontent.com/botsunit/bucs/792437befd259042efaf95e301dec019a5dd6ea4/src/bucs_app.erl | erlang | @hidden | -module(bucs_app).
-behaviour(application).
-export([start/2]).
-export([stop/1]).
start(_Type, _Args) ->
bucs_sup:start_link().
stop(_State) ->
ok.
|
b43dd3eb41e2cb699426c9ba1eff2d629f28df2f1ca18a7d7e35ffec72f443f7 | sjl/coding-math | fps.lisp | (in-package #:coding-math.fps)
;;;; FPS
;; Frame-timing state. Times are in internal time units unless noted.
(defvar *last-draw* 0)    ; not referenced in this file -- presumably used elsewhere; verify
(defvar *fps* 0.0)        ; latest frames-per-second estimate
(defvar *mspf* 0.0)       ; latest milliseconds-per-frame estimate
(defvar *frame* 0)        ; number of frames rendered so far
(defparameter *rolling-average* 0.0)      ; rolling mean of frame durations
(defparameter *rolling-average-count* 10) ; weight of the old average in the rolling mean
;; Fold FRAME-TIME into *rolling-average*: a weighted mean of the previous
;; average (weight *rolling-average-count*) and the new sample (weight 1).
(defun update-average (frame-time)
  (setf *rolling-average*
        (/ (+ frame-time
              (* *rolling-average* *rolling-average-count*))
           (1+ *rolling-average-count*))))
;; Recompute *mspf* (milliseconds per frame) by converting the rolling
;; average from internal time units, and *fps* as its reciprocal frequency.
(defun update-fps ()
  (setf *mspf* (* 1000.0
                  (/ *rolling-average*
                     internal-time-units-per-second))
        *fps* (/ 1000.0 *mspf*)))
;; Draw the current timing stats on screen; TEXT is assumed to be the
;; sketch library's text-drawing function taking x/y coordinates -- verify.
(defun draw-fps ()
  (text (format nil "MSPF: ~,2F" *mspf*) 0 0)
  (text (format nil "PFPS: ~,2F" *fps*) 0 20))
;; Wrap one frame's drawing code: time BODY with GET-INTERNAL-REAL-TIME,
;; feed the elapsed duration into the rolling average, draw the stats, bump
;; the frame counter, and refresh the displayed numbers every 15th frame.
;; A gensym is used for the start time so BODY cannot capture it.
(defmacro with-fps (&body body)
  (let ((start (gensym "start")))
    `(let ((,start (get-internal-real-time)))
       ,@body
       (update-average (- (get-internal-real-time) ,start))
       (draw-fps)
       (incf *frame*)
       (when (losh:dividesp *frame* 15)
         (update-fps)))))
| null | https://raw.githubusercontent.com/sjl/coding-math/8e2add14d033da41cb3ac0aac63ad67edb4dd66a/src/fps.lisp | lisp | FPS | (in-package #:coding-math.fps)
(defvar *last-draw* 0)
(defvar *fps* 0.0)
(defvar *mspf* 0.0)
(defvar *frame* 0)
(defparameter *rolling-average* 0.0)
(defparameter *rolling-average-count* 10)
(defun update-average (frame-time)
(setf *rolling-average*
(/ (+ frame-time
(* *rolling-average* *rolling-average-count*))
(1+ *rolling-average-count*))))
(defun update-fps ()
(setf *mspf* (* 1000.0
(/ *rolling-average*
internal-time-units-per-second))
*fps* (/ 1000.0 *mspf*)))
(defun draw-fps ()
(text (format nil "MSPF: ~,2F" *mspf*) 0 0)
(text (format nil "PFPS: ~,2F" *fps*) 0 20))
(defmacro with-fps (&body body)
(let ((start (gensym "start")))
`(let ((,start (get-internal-real-time)))
,@body
(update-average (- (get-internal-real-time) ,start))
(draw-fps)
(incf *frame*)
(when (losh:dividesp *frame* 15)
(update-fps)))))
|
c36a1bc783a8cea21fed8dd440307eaf9ac7d55c708ab9521b940484151b9b9b | ghc/testsuite | tc164.hs | # LANGUAGE ImplicitParams #
module ShouldCompile where
data UniqueSupply = US Integer
newUnique :: (?uniqueSupply :: UniqueSupply) => Integer
newUnique = r
where US r = ?uniqueSupply
The lazy pattern match in the where clause killed GHC 5.04
-- because the type {?uniqueSupply::UniqueSupply} of the RHS
of the ' where ' did n't look like a UniqueSupply
| null | https://raw.githubusercontent.com/ghc/testsuite/998a816ae89c4fd573f4abd7c6abb346cf7ee9af/tests/typecheck/should_compile/tc164.hs | haskell | because the type {?uniqueSupply::UniqueSupply} of the RHS | # LANGUAGE ImplicitParams #
module ShouldCompile where
data UniqueSupply = US Integer
newUnique :: (?uniqueSupply :: UniqueSupply) => Integer
newUnique = r
where US r = ?uniqueSupply
The lazy pattern match in the where clause killed GHC 5.04
of the ' where ' did n't look like a UniqueSupply
|
f042988bb17e41cc44de4f5eb086c6e95d3f7e0d526e68d63e45e36f34f960d7 | mon-key/unicly | unicly-integers.lisp | : FILE - CREATED < Timestamp : # { 2011 - 08 - 17T15:28:02 - 04:00Z}#{11333 } - by MON >
;;; :FILE unicly/unicly-integers.lisp
;;; ==============================
;;; ==============================
;; :NOTE ironclad utility functions
;;
ironclad : ub16ref / le buffer index = > value
;; ironclad:ub32ref/le buffer index => value
ironclad : ub64ref / le buffer index = > value
;;
This family of functions accesses an unsigned 16 - bit , 32 - bit or 64 - bit value
;; stored in little-endian order starting at index in array.
array must be a ( SIMPLE - ARRAY ( UNSIGNED - BYTE 8) ( * ) ) . These functions are SETFable .
;;
;; ironclad:ub16ref/be buffer index => value
;; ironclad:ub32ref/be buffer index => value
ironclad : / be buffer index = > value
;;
;; As the above, only the value is stored in big-endian order.
;;; ==============================
(in-package #:unicly)
;;; ==============================
;;; :PASTE-AUTHOR nyef -- Alistair Bridgewater
;;; :PASTE-TITLE Informing loop of integer size -- how to do it idiomatically?
: PASTE 120426 : PASTE - URL ( URL ` /+2KX6/1 ' )
(defun uuid-request-integer (array offset length &key little-endian sign-extend)
(let ((value (loop
for i from 0 below length
for octet = (aref array (+ offset
(if little-endian
i
(- length i 1))))
sum (ash octet (* i 8)))))
(if (and sign-extend
(logbitp (1- (* length 8)) value))
(logior (lognot (1- (ash 1 (1- (* length 8))))) value)
value)))
;;
(define-compiler-macro uuid-request-integer (&whole form array offset length &key little-endian sign-extend)
: NOTE the 4 is an ( unsigned - byte 32 ) which is n't a fixnum on x86 - 32
(if (and (member length '(1 2 4))
(member little-endian '(t nil))
(member sign-extend '(t nil)))
`(let* (,@(loop
for i from 0 below length
for var in '(byte-0 byte-1 byte-2 byte-3)
collect `(,var (aref ,array (+ ,offset
,(if little-endian
i
(- length i 1))))))
(value ,(elt '(#1=byte-0
#2=(dpb byte-1 (byte 8 8) #1#)
#3=(dpb byte-2 (byte 8 16) #2#)
(dpb byte-3 (byte 8 24) #3#))
(1- length))))
,(if sign-extend
`(if (logbitp ,(1- (* length 8)) value)
(logior ,(lognot (1- (ash 1 (1- (* length 8))))) value)
value)
'value))
form))
(declaim (inline uuid-disassemble-ub48))
(defun uuid-disassemble-ub48 (u48)
(declare (uuid-ub48 u48)
(optimize (speed 3)))
(let ((b1 nil) (b2 nil) (b3 nil) (b4 nil) (b5 nil) (b6 nil))
: NOTE The setf / the junk may be ugly , but its certainly faster .
(setf b1 (ldb (byte 8 40) u48))
(setf b2 (ldb (byte 8 32) u48))
(setf u48 (mask-field (byte 32 0) (the uuid-ub48 u48)))
(setf b3 (ldb (byte 8 24) (the uuid-ub32 u48)))
(setf u48 (mask-field (byte 24 0) (the uuid-ub32 u48)))
(setf b4 (ldb (byte 8 16) (the uuid-ub24 u48)))
(setf b5 (ldb (byte 8 8) (the uuid-ub24 u48)))
(setf b6 (ldb (byte 8 0) (the uuid-ub24 u48)))
(locally
(declare (uuid-ub8 b1 b2 b3 b4 b5 b6))
(values b1 b2 b3 b4 b5 b6))))
;;; ==============================
: SOURCE usenet - legend / io.lisp
;; `uuid-disassemble-ub32' :WAS `disassemble-u32'
;; `uuid-assemble-ub32' :WAS `assemble-u32'
(declaim (inline uuid-disassemble-ub32))
(defun uuid-disassemble-ub32 (u32)
(declare (type uuid-ub32 u32)
(optimize (speed 3)))
(let ((b1 (ldb (byte 8 24) u32))
(b2 (ldb (byte 8 16) u32))
(b3 (ldb (byte 8 8) u32))
(b4 (ldb (byte 8 0) u32)))
(declare (uuid-ub8 b1 b2 b3 b4))
(values b1 b2 b3 b4)))
(declaim (inline uuid-disassemble-ub16))
(defun uuid-disassemble-ub16 (u16)
(declare (type uuid-ub16 u16)
(optimize (speed 3)))
(let ((b1 (ldb (byte 8 8) u16))
(b2 (ldb (byte 8 0) u16)))
(declare (uuid-ub8 b1 b2))
(values b1 b2)))
(declaim (inline %uuid_byte-array-16-ub8-reqeust))
(defun %uuid_byte-array-16-ub8-reqeust (byte-array offset)
;; Only intended to be used in requests for octet values of
` uuid - byte - 's e.g. ` uuid - from - byte - array '
(declare (uuid-byte-array-16 byte-array)
((integer 8 9) offset)
(optimize (speed 3)))
(the uuid-ub8 (uuid-request-integer byte-array offset 1)))
(declaim (inline uuid-assemble-ub48))
(defun uuid-assemble-ub48 (b1 b2 b3 b4 b5 b6)
(declare (type uuid-ub8 b1 b2 b3 b4 b5 b6)
(optimize (speed 3)))
(logand #xFFFFFFFFFFFF
(logior (ash b1 40)
(ash b2 32)
(ash b3 24)
(ash b4 16)
(ash b5 8)
(ash b6 0))))
(declaim (inline uuid-assemble-ub32))
(defun uuid-assemble-ub32 (b1 b2 b3 b4)
(declare (type uuid-ub8 b1 b2 b3 b4)
(optimize speed))
(logand #xFFFFFFFF
(logior (ash b1 24)
(ash b2 16)
(ash b3 8)
(ash b4 0))))
;; (uuid-disassemble-ub32 #xFFFFFFFF)
255 , 255 , 255 , 255
;(declare (inline uuid-assemble-ub16))
(defun uuid-assemble-ub16 (b1 b2)
(declare (type uuid-ub8 b1 b2)
(optimize (speed 3)))
(logand #xFFFF
(logior (ash b1 8)
(ash b2 0))))
;;; ==============================
;; Local Variables:
;; indent-tabs-mode: nil
;; show-trailing-whitespace: t
;; mode: lisp-interaction
;; package: unicly
;; End:
;;; ==============================
EOF
| null | https://raw.githubusercontent.com/mon-key/unicly/f9bd21446f35e28766d2f1ada2741399b14d93cb/unicly-integers.lisp | lisp | :FILE unicly/unicly-integers.lisp
==============================
==============================
:NOTE ironclad utility functions
ironclad:ub32ref/le buffer index => value
stored in little-endian order starting at index in array.
ironclad:ub16ref/be buffer index => value
ironclad:ub32ref/be buffer index => value
As the above, only the value is stored in big-endian order.
==============================
==============================
:PASTE-AUTHOR nyef -- Alistair Bridgewater
:PASTE-TITLE Informing loop of integer size -- how to do it idiomatically?
==============================
`uuid-disassemble-ub32' :WAS `disassemble-u32'
`uuid-assemble-ub32' :WAS `assemble-u32'
Only intended to be used in requests for octet values of
(uuid-disassemble-ub32 #xFFFFFFFF)
(declare (inline uuid-assemble-ub16))
==============================
Local Variables:
indent-tabs-mode: nil
show-trailing-whitespace: t
mode: lisp-interaction
package: unicly
End:
============================== | : FILE - CREATED < Timestamp : # { 2011 - 08 - 17T15:28:02 - 04:00Z}#{11333 } - by MON >
ironclad : ub16ref / le buffer index = > value
ironclad : ub64ref / le buffer index = > value
This family of functions accesses an unsigned 16 - bit , 32 - bit or 64 - bit value
array must be a ( SIMPLE - ARRAY ( UNSIGNED - BYTE 8) ( * ) ) . These functions are SETFable .
ironclad : / be buffer index = > value
(in-package #:unicly)
: PASTE 120426 : PASTE - URL ( URL ` /+2KX6/1 ' )
(defun uuid-request-integer (array offset length &key little-endian sign-extend)
(let ((value (loop
for i from 0 below length
for octet = (aref array (+ offset
(if little-endian
i
(- length i 1))))
sum (ash octet (* i 8)))))
(if (and sign-extend
(logbitp (1- (* length 8)) value))
(logior (lognot (1- (ash 1 (1- (* length 8))))) value)
value)))
(define-compiler-macro uuid-request-integer (&whole form array offset length &key little-endian sign-extend)
: NOTE the 4 is an ( unsigned - byte 32 ) which is n't a fixnum on x86 - 32
(if (and (member length '(1 2 4))
(member little-endian '(t nil))
(member sign-extend '(t nil)))
`(let* (,@(loop
for i from 0 below length
for var in '(byte-0 byte-1 byte-2 byte-3)
collect `(,var (aref ,array (+ ,offset
,(if little-endian
i
(- length i 1))))))
(value ,(elt '(#1=byte-0
#2=(dpb byte-1 (byte 8 8) #1#)
#3=(dpb byte-2 (byte 8 16) #2#)
(dpb byte-3 (byte 8 24) #3#))
(1- length))))
,(if sign-extend
`(if (logbitp ,(1- (* length 8)) value)
(logior ,(lognot (1- (ash 1 (1- (* length 8))))) value)
value)
'value))
form))
(declaim (inline uuid-disassemble-ub48))
(defun uuid-disassemble-ub48 (u48)
(declare (uuid-ub48 u48)
(optimize (speed 3)))
(let ((b1 nil) (b2 nil) (b3 nil) (b4 nil) (b5 nil) (b6 nil))
: NOTE The setf / the junk may be ugly , but its certainly faster .
(setf b1 (ldb (byte 8 40) u48))
(setf b2 (ldb (byte 8 32) u48))
(setf u48 (mask-field (byte 32 0) (the uuid-ub48 u48)))
(setf b3 (ldb (byte 8 24) (the uuid-ub32 u48)))
(setf u48 (mask-field (byte 24 0) (the uuid-ub32 u48)))
(setf b4 (ldb (byte 8 16) (the uuid-ub24 u48)))
(setf b5 (ldb (byte 8 8) (the uuid-ub24 u48)))
(setf b6 (ldb (byte 8 0) (the uuid-ub24 u48)))
(locally
(declare (uuid-ub8 b1 b2 b3 b4 b5 b6))
(values b1 b2 b3 b4 b5 b6))))
: SOURCE usenet - legend / io.lisp
(declaim (inline uuid-disassemble-ub32))
(defun uuid-disassemble-ub32 (u32)
(declare (type uuid-ub32 u32)
(optimize (speed 3)))
(let ((b1 (ldb (byte 8 24) u32))
(b2 (ldb (byte 8 16) u32))
(b3 (ldb (byte 8 8) u32))
(b4 (ldb (byte 8 0) u32)))
(declare (uuid-ub8 b1 b2 b3 b4))
(values b1 b2 b3 b4)))
(declaim (inline uuid-disassemble-ub16))
(defun uuid-disassemble-ub16 (u16)
(declare (type uuid-ub16 u16)
(optimize (speed 3)))
(let ((b1 (ldb (byte 8 8) u16))
(b2 (ldb (byte 8 0) u16)))
(declare (uuid-ub8 b1 b2))
(values b1 b2)))
(declaim (inline %uuid_byte-array-16-ub8-reqeust))
(defun %uuid_byte-array-16-ub8-reqeust (byte-array offset)
` uuid - byte - 's e.g. ` uuid - from - byte - array '
(declare (uuid-byte-array-16 byte-array)
((integer 8 9) offset)
(optimize (speed 3)))
(the uuid-ub8 (uuid-request-integer byte-array offset 1)))
(declaim (inline uuid-assemble-ub48))
(defun uuid-assemble-ub48 (b1 b2 b3 b4 b5 b6)
(declare (type uuid-ub8 b1 b2 b3 b4 b5 b6)
(optimize (speed 3)))
(logand #xFFFFFFFFFFFF
(logior (ash b1 40)
(ash b2 32)
(ash b3 24)
(ash b4 16)
(ash b5 8)
(ash b6 0))))
(declaim (inline uuid-assemble-ub32))
(defun uuid-assemble-ub32 (b1 b2 b3 b4)
(declare (type uuid-ub8 b1 b2 b3 b4)
(optimize speed))
(logand #xFFFFFFFF
(logior (ash b1 24)
(ash b2 16)
(ash b3 8)
(ash b4 0))))
255 , 255 , 255 , 255
(defun uuid-assemble-ub16 (b1 b2)
(declare (type uuid-ub8 b1 b2)
(optimize (speed 3)))
(logand #xFFFF
(logior (ash b1 8)
(ash b2 0))))
EOF
|
30ac2c6abaf65d09cd1a2b559b412c35789da3bb40777ca91edec973b25823a8 | pink-gorilla/notebook | events.cljs | (ns pinkgorilla.notebook-ui.completion.events
(:require
[taoensso.timbre :refer-macros [info]]
[re-frame.core :refer [reg-event-fx reg-event-db dispatch]]
[pinkgorilla.nrepl.client.core :refer [op-resolve-symbol op-docstring op-completions]]))
(reg-event-db
:completion/init
(fn [db [_]]
(let [db (or db {})]
(assoc db
:completion
{:word nil
:candidates []
:active nil
:show-all false
:docstring ""
:resolve nil}))))
; map cider operations:
(reg-event-db
:completion/save-result
(fn [db [_ result]]
(let [c (:completions result)]
(info "rcvd completion candidates: " result)
(-> db
(assoc-in [:completion :candidates] c)
(assoc-in [:completion :active] (first c))))))
(reg-event-fx
:nrepl/completion
(fn [cofx [_ q namespace context]]
(dispatch [:nrepl/op-dispatch (op-completions q namespace context) [:completion/save-result]])))
(reg-event-fx
:nrepl/docstring
(fn [cofx [_ symbol namespace]] ; "pprint-table" "clojure.pprint"
(dispatch [:nrepl/op-db (op-docstring symbol namespace) [:completion]])))
(reg-event-fx
:nrepl/resolve-symbol
(fn [cofx [_ symbol namespace]] ; "doseq" "clojure.core"
(dispatch [:nrepl/op-db (op-resolve-symbol symbol namespace) [:completion]])))
| null | https://raw.githubusercontent.com/pink-gorilla/notebook/b01c806535f204c1c6e24c75a6619d747aba5655/src/pinkgorilla/notebook_ui/completion/events.cljs | clojure | map cider operations:
"pprint-table" "clojure.pprint"
"doseq" "clojure.core" | (ns pinkgorilla.notebook-ui.completion.events
(:require
[taoensso.timbre :refer-macros [info]]
[re-frame.core :refer [reg-event-fx reg-event-db dispatch]]
[pinkgorilla.nrepl.client.core :refer [op-resolve-symbol op-docstring op-completions]]))
(reg-event-db
:completion/init
(fn [db [_]]
(let [db (or db {})]
(assoc db
:completion
{:word nil
:candidates []
:active nil
:show-all false
:docstring ""
:resolve nil}))))
(reg-event-db
:completion/save-result
(fn [db [_ result]]
(let [c (:completions result)]
(info "rcvd completion candidates: " result)
(-> db
(assoc-in [:completion :candidates] c)
(assoc-in [:completion :active] (first c))))))
(reg-event-fx
:nrepl/completion
(fn [cofx [_ q namespace context]]
(dispatch [:nrepl/op-dispatch (op-completions q namespace context) [:completion/save-result]])))
(reg-event-fx
:nrepl/docstring
(dispatch [:nrepl/op-db (op-docstring symbol namespace) [:completion]])))
(reg-event-fx
:nrepl/resolve-symbol
(dispatch [:nrepl/op-db (op-resolve-symbol symbol namespace) [:completion]])))
|
76b15caaa977c95b52832fd8aa67b07c5820fc4774ad08f7cd5240650b19692f | v-kolesnikov/sicp | wirelang.clj | (ns sicp.chapter03.wirelang)
; Wire
(defn call-each
[procs]
(if-not (empty? procs)
(do ((first procs))
(call-each (rest procs)))
:done))
(defn make-wire []
(let [signal-value (atom 0)
action-procs (atom [])]
(letfn [(set-signal! [value]
{:pre [(contains? [0 1] value)]}
(if-not (= @signal-value value)
(do (reset! signal-value value)
(call-each @action-procs))
:done))
(accept-action-proc! [proc]
(swap! action-procs conj proc)
(proc))]
{:get-signal (fn [] @signal-value)
:set-signal! set-signal!
:add-action! accept-action-proc!
:actions (fn [] @action-procs)})))
(defn get-signal [wire] ((wire :get-signal)))
(defn set-signal! [wire signal] ((wire :set-signal!) signal))
(defn add-action! [wire action] ((wire :add-action!) action))
; Queue
(defn make-queue
([] (atom clojure.lang.PersistentQueue/EMPTY))
([items] (atom (reduce conj clojure.lang.PersistentQueue/EMPTY items))))
(defn delete-queue! [queue] (swap! queue pop))
(defn insert-queue! [queue item] (swap! queue conj item))
(defn empty-queue? [queue] (empty? @queue))
(defn front-queue [queue] (peek @queue))
; Segments
(defn make-time-segment [t queue] {:time t :queue queue})
(defn segment-time [segment] (:time segment))
(defn segment-queue [segment] (:queue segment))
; Agenda
(defn make-agenda [] (atom {:time 0, :segments []}))
(defn segments [agenda] (:segments @agenda))
(defn current-time [agenda] (:time @agenda))
(defn empty-agenda? [agenda] (empty? (segments agenda)))
(defn set-current-time! [agenda t]
(swap! agenda assoc :time t))
(defn set-segments! [agenda segments]
(swap! agenda assoc :segments segments))
(defn first-segment [agenda] (first (segments agenda)))
(defn rest-segments [agenda] (rest (segments agenda)))
(defn first-agenda-item [agenda]
(if (empty-agenda? agenda)
(throw (Exception. "Agenda is empty -- FIRST-AGENDA-ITEM"))
(let [first-seg (first-segment agenda)]
(set-current-time! agenda (segment-time first-seg))
(front-queue (segment-queue first-seg)))))
(defn remove-first-agenda-item! [agenda]
(if (empty-agenda? agenda)
(throw (Exception. "Agenda is empty -- FIRST-AGENDA-ITEM"))
(let [queue (segment-queue (first-segment agenda))]
(delete-queue! queue)
(if (empty-queue? queue)
(set-segments! agenda (rest-segments agenda))))))
(defn add-to-agenda!
[agenda timestamp action]
(if-let [same-time-segment (some #(when (= timestamp (:time %)) %)
(segments agenda))]
(insert-queue! (:queue same-time-segment) action)
(set-segments! agenda
(concat (take-while #(> timestamp (:time %)) (segments agenda))
(list (make-time-segment timestamp (make-queue [action])))
(drop-while #(> timestamp (:time %)) (segments agenda))))))
(def the-agenda (make-agenda))
(def inverter-delay 2)
(def and-gate-delay 3)
(def or-gate-delay 5)
(defn after-delay
[delay-time action]
(add-to-agenda! the-agenda
(-> the-agenda current-time (+ delay-time))
action))
(defn propagate []
(if (empty-agenda? the-agenda)
:done
(let [first-item (first-agenda-item the-agenda)]
(first-item)
(remove-first-agenda-item! the-agenda)
(propagate))))
; Binary logic
(defn logical-not
[signal]
(cond (= signal 0) 1
(= signal 1) 0
:else (throw (Exception. (str "Unknown signal" signal)))))
(defn logical-and
[s1 s2]
(if (= 1 s1 s2) 1 0))
(defn logical-or
[s1 s2]
(if (or (= 1 s1) (= 1 s2)) 1 0))
; Elements
(defn inverter
[input output]
(let [invert-input
(fn []
(let [value (logical-not (get-signal input))
proc (after-delay inverter-delay
(fn [] (set-signal! output value)))]
(after-delay inverter-delay
(fn [] (set-signal! output value)))))]
(add-action! input invert-input)
:ok))
(defn and-gate
[a1 a2 output]
(let [and-proc
(fn []
(let [value (logical-and (get-signal a1)
(get-signal a2))]
(after-delay and-gate-delay
(fn [] (set-signal! output value)))))]
(add-action! a1 and-proc)
(add-action! a2 and-proc)
:ok))
(defn or-gate
[a1 a2 output]
(let [or-proc
(fn []
(let [value (logical-or (get-signal a1)
(get-signal a2))]
(after-delay or-gate-delay
(fn [] (set-signal! output value)))))]
(add-action! a1 or-proc)
(add-action! a2 or-proc)
:ok))
(defn half-adder
[a b s c]
(let [d (make-wire)
e (make-wire)]
(or-gate a b d)
(and-gate a b c)
(inverter c e)
(and-gate d e s)
:ok))
(defn full-adder
[a b c-in c-out sum]
(let [s (make-wire)
c1 (make-wire)
c2 (make-wire)]
(half-adder b c-in s c1)
(half-adder a s sum c2)
(or-gate c1 c2 c-out)
:ok))
(defn probe [name wire]
(add-action! wire
(fn []
(println name
(current-time the-agenda)
"New-value ="
(get-signal wire)))))
(comment
(let []
(def the-agenda (make-agenda))
(def input-1 (make-wire))
(def input-2 (make-wire))
(def sum (make-wire))
(def carry (make-wire))
(probe 'sum sum)
(probe 'carry carry)
(half-adder input-1 input-2 sum carry)
(set-signal! input-1 1)
(propagate)
(assert (= 1 (get-signal sum)))
(assert (= 0 (get-signal carry)))
(assert (= 8 (current-time the-agenda)))
(set-signal! input-2 1)
(propagate)
(assert (= 0 (get-signal sum)))
(assert (= 1 (get-signal carry)))
(assert (= 16 (current-time the-agenda)))))
| null | https://raw.githubusercontent.com/v-kolesnikov/sicp/4298de6083440a75898e97aad658025a8cecb631/src/sicp/chapter03/wirelang.clj | clojure | Wire
Queue
Segments
Agenda
Binary logic
Elements | (ns sicp.chapter03.wirelang)
(defn call-each
[procs]
(if-not (empty? procs)
(do ((first procs))
(call-each (rest procs)))
:done))
(defn make-wire []
(let [signal-value (atom 0)
action-procs (atom [])]
(letfn [(set-signal! [value]
{:pre [(contains? [0 1] value)]}
(if-not (= @signal-value value)
(do (reset! signal-value value)
(call-each @action-procs))
:done))
(accept-action-proc! [proc]
(swap! action-procs conj proc)
(proc))]
{:get-signal (fn [] @signal-value)
:set-signal! set-signal!
:add-action! accept-action-proc!
:actions (fn [] @action-procs)})))
(defn get-signal [wire] ((wire :get-signal)))
(defn set-signal! [wire signal] ((wire :set-signal!) signal))
(defn add-action! [wire action] ((wire :add-action!) action))
(defn make-queue
([] (atom clojure.lang.PersistentQueue/EMPTY))
([items] (atom (reduce conj clojure.lang.PersistentQueue/EMPTY items))))
(defn delete-queue! [queue] (swap! queue pop))
(defn insert-queue! [queue item] (swap! queue conj item))
(defn empty-queue? [queue] (empty? @queue))
(defn front-queue [queue] (peek @queue))
(defn make-time-segment [t queue] {:time t :queue queue})
(defn segment-time [segment] (:time segment))
(defn segment-queue [segment] (:queue segment))
(defn make-agenda [] (atom {:time 0, :segments []}))
(defn segments [agenda] (:segments @agenda))
(defn current-time [agenda] (:time @agenda))
(defn empty-agenda? [agenda] (empty? (segments agenda)))
(defn set-current-time! [agenda t]
(swap! agenda assoc :time t))
(defn set-segments! [agenda segments]
(swap! agenda assoc :segments segments))
(defn first-segment [agenda] (first (segments agenda)))
(defn rest-segments [agenda] (rest (segments agenda)))
(defn first-agenda-item [agenda]
(if (empty-agenda? agenda)
(throw (Exception. "Agenda is empty -- FIRST-AGENDA-ITEM"))
(let [first-seg (first-segment agenda)]
(set-current-time! agenda (segment-time first-seg))
(front-queue (segment-queue first-seg)))))
(defn remove-first-agenda-item! [agenda]
(if (empty-agenda? agenda)
(throw (Exception. "Agenda is empty -- FIRST-AGENDA-ITEM"))
(let [queue (segment-queue (first-segment agenda))]
(delete-queue! queue)
(if (empty-queue? queue)
(set-segments! agenda (rest-segments agenda))))))
(defn add-to-agenda!
[agenda timestamp action]
(if-let [same-time-segment (some #(when (= timestamp (:time %)) %)
(segments agenda))]
(insert-queue! (:queue same-time-segment) action)
(set-segments! agenda
(concat (take-while #(> timestamp (:time %)) (segments agenda))
(list (make-time-segment timestamp (make-queue [action])))
(drop-while #(> timestamp (:time %)) (segments agenda))))))
(def the-agenda (make-agenda))
(def inverter-delay 2)
(def and-gate-delay 3)
(def or-gate-delay 5)
(defn after-delay
[delay-time action]
(add-to-agenda! the-agenda
(-> the-agenda current-time (+ delay-time))
action))
(defn propagate []
(if (empty-agenda? the-agenda)
:done
(let [first-item (first-agenda-item the-agenda)]
(first-item)
(remove-first-agenda-item! the-agenda)
(propagate))))
(defn logical-not
[signal]
(cond (= signal 0) 1
(= signal 1) 0
:else (throw (Exception. (str "Unknown signal" signal)))))
(defn logical-and
[s1 s2]
(if (= 1 s1 s2) 1 0))
(defn logical-or
[s1 s2]
(if (or (= 1 s1) (= 1 s2)) 1 0))
(defn inverter
[input output]
(let [invert-input
(fn []
(let [value (logical-not (get-signal input))
proc (after-delay inverter-delay
(fn [] (set-signal! output value)))]
(after-delay inverter-delay
(fn [] (set-signal! output value)))))]
(add-action! input invert-input)
:ok))
(defn and-gate
[a1 a2 output]
(let [and-proc
(fn []
(let [value (logical-and (get-signal a1)
(get-signal a2))]
(after-delay and-gate-delay
(fn [] (set-signal! output value)))))]
(add-action! a1 and-proc)
(add-action! a2 and-proc)
:ok))
(defn or-gate
[a1 a2 output]
(let [or-proc
(fn []
(let [value (logical-or (get-signal a1)
(get-signal a2))]
(after-delay or-gate-delay
(fn [] (set-signal! output value)))))]
(add-action! a1 or-proc)
(add-action! a2 or-proc)
:ok))
(defn half-adder
[a b s c]
(let [d (make-wire)
e (make-wire)]
(or-gate a b d)
(and-gate a b c)
(inverter c e)
(and-gate d e s)
:ok))
(defn full-adder
[a b c-in c-out sum]
(let [s (make-wire)
c1 (make-wire)
c2 (make-wire)]
(half-adder b c-in s c1)
(half-adder a s sum c2)
(or-gate c1 c2 c-out)
:ok))
(defn probe [name wire]
(add-action! wire
(fn []
(println name
(current-time the-agenda)
"New-value ="
(get-signal wire)))))
(comment
(let []
(def the-agenda (make-agenda))
(def input-1 (make-wire))
(def input-2 (make-wire))
(def sum (make-wire))
(def carry (make-wire))
(probe 'sum sum)
(probe 'carry carry)
(half-adder input-1 input-2 sum carry)
(set-signal! input-1 1)
(propagate)
(assert (= 1 (get-signal sum)))
(assert (= 0 (get-signal carry)))
(assert (= 8 (current-time the-agenda)))
(set-signal! input-2 1)
(propagate)
(assert (= 0 (get-signal sum)))
(assert (= 1 (get-signal carry)))
(assert (= 16 (current-time the-agenda)))))
|
dd5f7734aa3319becf1a9d3dbe5764eed1bdc4d88839d593678ba34e73e6f9ea | flipstone/orville | TableDefinition.hs | |
Module : Database . Orville . PostgreSQL.Internal . TableDefinition
Copyright : Flipstone Technology Partners 2016 - 2018
License : MIT
Module : Database.Orville.PostgreSQL.Internal.TableDefinition
Copyright : Flipstone Technology Partners 2016-2018
License : MIT
-}
module Database.Orville.PostgreSQL.Internal.TableDefinition where
import Database.Orville.PostgreSQL.Internal.FieldDefinition
import Database.Orville.PostgreSQL.Internal.Types
tableColumnNames :: TableDefinition readEntity writeEntity key -> [String]
tableColumnNames = map someFieldName . tableFields
where
someFieldName (SomeField f) = fieldName f
tableAssignableFields ::
TableDefinition readEntity writeEntity key -> [SomeField]
tableAssignableFields =
filter (not . isSomeAssignedByDatabaseField) . tableFields
where
isSomeAssignedByDatabaseField (SomeField f) = isAssignedByDatabaseField f
tableAssignableColumnNames ::
TableDefinition readEntity writeEntity key -> [String]
tableAssignableColumnNames = map someFieldName . tableAssignableFields
where
someFieldName (SomeField f) = fieldName f
| null | https://raw.githubusercontent.com/flipstone/orville/aee8d7a47ab3a7b442fdb274dbb5a95d687a23ce/orville-postgresql/src/Database/Orville/PostgreSQL/Internal/TableDefinition.hs | haskell | |
Module : Database . Orville . PostgreSQL.Internal . TableDefinition
Copyright : Flipstone Technology Partners 2016 - 2018
License : MIT
Module : Database.Orville.PostgreSQL.Internal.TableDefinition
Copyright : Flipstone Technology Partners 2016-2018
License : MIT
-}
module Database.Orville.PostgreSQL.Internal.TableDefinition where
import Database.Orville.PostgreSQL.Internal.FieldDefinition
import Database.Orville.PostgreSQL.Internal.Types
tableColumnNames :: TableDefinition readEntity writeEntity key -> [String]
tableColumnNames = map someFieldName . tableFields
where
someFieldName (SomeField f) = fieldName f
tableAssignableFields ::
TableDefinition readEntity writeEntity key -> [SomeField]
tableAssignableFields =
filter (not . isSomeAssignedByDatabaseField) . tableFields
where
isSomeAssignedByDatabaseField (SomeField f) = isAssignedByDatabaseField f
tableAssignableColumnNames ::
TableDefinition readEntity writeEntity key -> [String]
tableAssignableColumnNames = map someFieldName . tableAssignableFields
where
someFieldName (SomeField f) = fieldName f
|
|
2cafe90dd3cff010d39bea6f24579d883b4626a1e1615d17f022aaf5b216284c | metabase/metabase | substitution.clj | (ns metabase.driver.sql.parameters.substitution
"These functions take the info for a param fetched by the functions above and add additional info about how that param
should be represented as SQL. (Specifically, they return information in this format:
{;; appropriate SQL that should be used to replace the param snippet, e.g. {{x}}
:replacement-snippet \"= ?\"
;; ; any prepared statement args (values for `?` placeholders) needed for the replacement snippet
:prepared-statement-args [#t \"2017-01-01\"]}"
(:require
[clojure.string :as str]
[metabase.driver :as driver]
[metabase.driver.common.parameters :as params]
[metabase.driver.common.parameters.dates :as params.dates]
[metabase.driver.common.parameters.operators :as params.ops]
[metabase.driver.sql.query-processor :as sql.qp]
[metabase.mbql.schema :as mbql.s]
[metabase.mbql.util :as mbql.u]
[metabase.query-processor.error-type :as qp.error-type]
[metabase.query-processor.middleware.wrap-value-literals
:as qp.wrap-value-literals]
[metabase.query-processor.store :as qp.store]
[metabase.query-processor.timezone :as qp.timezone]
[metabase.query-processor.util.add-alias-info :as add]
[metabase.util :as u]
[metabase.util.date-2 :as u.date]
[metabase.util.honeysql-extensions :as hx]
[metabase.util.i18n :refer [tru]]
[metabase.util.schema :as su]
[schema.core :as s])
(:import
(clojure.lang Keyword)
(honeysql.types SqlCall)
(java.time.temporal Temporal)
(java.util UUID)
(metabase.driver.common.parameters CommaSeparatedNumbers Date DateRange FieldFilter MultipleValues ReferencedCardQuery ReferencedQuerySnippet)))
;;; ------------------------------------ ->prepared-substitution & default impls -------------------------------------
(defmulti ->prepared-substitution
"Returns a `PreparedStatementSubstitution` (see schema below) for `x` and the given driver. This allows driver
specific parameters and SQL replacement text (usually just ?). The param value is already prepared and ready for
inlcusion in the query, such as what's needed for SQLite and timestamps."
{:arglists '([driver x])}
(fn [driver x] [(driver/dispatch-on-initialized-driver driver) (class x)])
:hierarchy #'driver/hierarchy)
(def PreparedStatementSubstitution
"Represents the SQL string replace value (usually ?) and the typed parameter value"
{:sql-string s/Str
:param-values [s/Any]})
(s/defn make-stmt-subs :- PreparedStatementSubstitution
"Create a `PreparedStatementSubstitution` map for `sql-string` and the `param-seq`"
[sql-string param-seq]
{:sql-string sql-string
:param-values param-seq})
(s/defn ^:private honeysql->prepared-stmt-subs
"Convert X to a replacement snippet info map by passing it to HoneySQL's `format` function."
[driver x]
(let [[snippet & args] (sql.qp/format-honeysql driver x)]
(make-stmt-subs snippet args)))
(s/defmethod ->prepared-substitution [:sql nil] :- PreparedStatementSubstitution
[driver _]
(honeysql->prepared-stmt-subs driver nil))
(s/defmethod ->prepared-substitution [:sql Object] :- PreparedStatementSubstitution
[driver obj]
(honeysql->prepared-stmt-subs driver (str obj)))
(s/defmethod ->prepared-substitution [:sql Number] :- PreparedStatementSubstitution
[driver num]
(honeysql->prepared-stmt-subs driver (sql.qp/inline-num num)))
(s/defmethod ->prepared-substitution [:sql Boolean] :- PreparedStatementSubstitution
[driver b]
(honeysql->prepared-stmt-subs driver b))
(s/defmethod ->prepared-substitution [:sql Keyword] :- PreparedStatementSubstitution
[driver kwd]
(honeysql->prepared-stmt-subs driver kwd))
(s/defmethod ->prepared-substitution [:sql SqlCall] :- PreparedStatementSubstitution
[driver sql-call]
(honeysql->prepared-stmt-subs driver sql-call))
;; TIMEZONE FIXME - remove this since we aren't using `Date` anymore
(s/defmethod ->prepared-substitution [:sql Date] :- PreparedStatementSubstitution
[_driver date]
(make-stmt-subs "?" [date]))
(s/defmethod ->prepared-substitution [:sql Temporal] :- PreparedStatementSubstitution
[_driver t]
(make-stmt-subs "?" [t]))
;;; ------------------------------------------- ->replacement-snippet-info -------------------------------------------
(def ^:private ParamSnippetInfo
{(s/optional-key :replacement-snippet) s/Str ; allowed to be blank if this is an optional param
(s/optional-key :prepared-statement-args) [s/Any]})
(defmulti ->replacement-snippet-info
"Return information about how `value` should be converted to SQL, as a map with keys `:replacement-snippet` and
`:prepared-statement-args`.
(->replacement-snippet-info :h2 \"ABC\") -> {:replacement-snippet \"?\", :prepared-statement-args \"ABC\"}"
{:arglists '([driver value])}
(fn [driver v] [(driver/the-initialized-driver driver) (class v)])
:hierarchy #'driver/hierarchy)
(defn- create-replacement-snippet
[driver nil-or-obj]
(let [{:keys [sql-string param-values]} (->prepared-substitution driver nil-or-obj)]
{:replacement-snippet sql-string
:prepared-statement-args param-values}))
(defmethod ->replacement-snippet-info [:sql nil]
[driver this]
(create-replacement-snippet driver this))
(defmethod ->replacement-snippet-info [:sql Object]
[driver this]
(create-replacement-snippet driver (str this)))
(defmethod ->replacement-snippet-info [:sql Number]
[driver this]
(create-replacement-snippet driver this))
(defmethod ->replacement-snippet-info [:sql Boolean]
[driver this]
(create-replacement-snippet driver this))
(defmethod ->replacement-snippet-info [:sql Keyword]
[driver this]
(if (= this params/no-value)
{:replacement-snippet ""}
(create-replacement-snippet driver this)))
(defmethod ->replacement-snippet-info [:sql SqlCall]
[driver this]
(create-replacement-snippet driver this))
(defmethod ->replacement-snippet-info [:sql UUID]
[_driver this]
{:replacement-snippet (format "CAST('%s' AS uuid)" (str this))})
(defmethod ->replacement-snippet-info [:sql CommaSeparatedNumbers]
[_driver {:keys [numbers]}]
{:replacement-snippet (str/join ", " numbers)})
(defmethod ->replacement-snippet-info [:sql MultipleValues]
[driver {:keys [values]}]
(let [values (map (partial ->replacement-snippet-info driver) values)]
{:replacement-snippet (str/join ", " (map :replacement-snippet values))
:prepared-statement-args (apply concat (map :prepared-statement-args values))}))
(defn- maybe-parse-temporal-literal [x]
(condp instance? x
String (u.date/parse x (qp.timezone/report-timezone-id-if-supported))
Temporal x
(throw (ex-info (tru "Don''t know how to parse {0} {1} as a temporal literal" (class x) (pr-str x))
{:type qp.error-type/invalid-parameter
:parameter x}))))
(defmethod ->replacement-snippet-info [:sql Date]
[driver {:keys [s]}]
(create-replacement-snippet driver (maybe-parse-temporal-literal s)))
(defn- prepared-ts-subs [driver operator date-str]
(let [{:keys [sql-string param-values]} (->prepared-substitution driver (maybe-parse-temporal-literal date-str))]
{:replacement-snippet (str operator " " sql-string)
:prepared-statement-args param-values}))
(defmethod ->replacement-snippet-info [:sql DateRange]
[driver {:keys [start end]}]
(cond
(= start end)
(prepared-ts-subs driver \= start)
(nil? start)
(prepared-ts-subs driver \< end)
(nil? end)
(prepared-ts-subs driver \> start)
:else
;; TIMEZONE FIXME - this is WRONG WRONG WRONG because date ranges should be inclusive for start and *exclusive*
;; for end
(let [[start end] (map (fn [s]
(->prepared-substitution driver (maybe-parse-temporal-literal s)))
[start end])]
{:replacement-snippet (format "BETWEEN %s AND %s" (:sql-string start) (:sql-string end))
:prepared-statement-args (concat (:param-values start) (:param-values end))})))
------------------------------------- Field Filter replacement snippet info --------------------------------------
(s/defn ^:private combine-replacement-snippet-maps :- ParamSnippetInfo
"Combine multiple `replacement-snippet-maps` into a single map using a SQL `AND` clause."
[replacement-snippet-maps :- [ParamSnippetInfo]]
{:replacement-snippet (str \( (str/join " AND " (map :replacement-snippet replacement-snippet-maps)) \))
:prepared-statement-args (reduce concat (map :prepared-statement-args replacement-snippet-maps))})
for relative dates convert the param to a ` DateRange ` record type and call ` ->replacement - snippet - info ` on it
(s/defn ^:private date-range-field-filter->replacement-snippet-info :- ParamSnippetInfo
[driver value]
(->> (params.dates/date-string->range value)
params/map->DateRange
(->replacement-snippet-info driver)))
(s/defn ^:private field-filter->equals-clause-sql :- ParamSnippetInfo
[driver value]
(-> (->replacement-snippet-info driver value)
(update :replacement-snippet (partial str "= "))))
(s/defn ^:private field-filter-multiple-values->in-clause-sql :- ParamSnippetInfo
[driver values]
(-> (->replacement-snippet-info driver (params/map->MultipleValues {:values values}))
(update :replacement-snippet (partial format "IN (%s)"))))
(s/defn ^:private honeysql->replacement-snippet-info :- ParamSnippetInfo
"Convert `hsql-form` to a replacement snippet info map by passing it to HoneySQL's `format` function."
[driver hsql-form]
(let [[snippet & args] (sql.qp/format-honeysql driver hsql-form)]
{:replacement-snippet snippet
:prepared-statement-args args}))
(s/defn ^:private field->clause :- mbql.s/field
[_driver {table-id :table_id, field-id :id, :as field} param-type]
;; The [[metabase.query-processor.middleware.parameters/substitute-parameters]] QP middleware actually happens before
;; the [[metabase.query-processor.middleware.resolve-fields/resolve-fields]] middleware that would normally fetch all
the we need in a single pass , so this is actually necessary here . I do n't think switching the order of the
middleware would work either because we do n't know what Field this parameter actually refers to until we resolve
;; the parameter. There's probably _some_ way to structure things that would make this "duplicate" call unneeded, but
;; I haven't figured out what that is yet
(qp.store/fetch-and-store-fields! #{field-id})
(qp.store/fetch-and-store-tables! #{table-id})
[:field
(u/the-id field)
{:base-type (:base_type field)
:temporal-unit (when (params.dates/date-type? param-type)
:day)
TODO -- are we sure we want to qualify this ?
in case anyone needs to know we 're compiling a Field filter .
::compiling-field-filter? true}])
(s/defn ^:private field->identifier :- su/NonBlankString
"Return an approprate snippet to represent this `field` in SQL given its param type.
For non-date Fields, this is just a quoted identifier; for dates, the SQL includes appropriately bucketing based on
the `param-type`."
[driver field param-type]
(binding [hx/*honey-sql-version* (sql.qp/honey-sql-version driver)]
(->> (field->clause driver field param-type)
(sql.qp/->honeysql driver)
(honeysql->replacement-snippet-info driver)
:replacement-snippet)))
(s/defn ^:private field-filter->replacement-snippet-info :- ParamSnippetInfo
"Return `[replacement-snippet & prepared-statement-args]` appropriate for a field filter parameter."
[driver {{param-type :type, value :value, :as params} :value, field :field, :as _field-filter}]
(assert (:id field) (format "Why doesn't Field have an ID?\n%s" (u/pprint-to-str field)))
(letfn [(prepend-field [x]
(update x :replacement-snippet
(partial str (field->identifier driver field param-type) " ")))]
(cond
(params.ops/operator? param-type)
(let [[snippet & args]
(binding [hx/*honey-sql-version* (sql.qp/honey-sql-version driver)]
(as-> (assoc params :target [:template-tag (field->clause driver field param-type)]) form
(params.ops/to-clause form)
(mbql.u/desugar-filter-clause form)
(qp.wrap-value-literals/wrap-value-literals-in-mbql form)
(sql.qp/->honeysql driver form)
(sql.qp/format-honeysql driver form)))]
{:replacement-snippet snippet, :prepared-statement-args (vec args)})
convert date ranges to DateRange record types
(params.dates/date-range-type? param-type) (prepend-field
(date-range-field-filter->replacement-snippet-info driver value))
;; convert all other dates to `= <date>`
(params.dates/date-type? param-type) (prepend-field
(field-filter->equals-clause-sql driver (params/map->Date {:s value})))
;; for sequences of multiple values we want to generate an `IN (...)` clause
(sequential? value) (prepend-field
(field-filter-multiple-values->in-clause-sql driver value))
;; convert everything else to `= <value>`
:else (prepend-field
(field-filter->equals-clause-sql driver value)))))
(defmethod ->replacement-snippet-info [:sql FieldFilter]
[driver {:keys [value], :as field-filter}]
(cond
otherwise if the value is n't present just put in something that will always be true , such as ` 1 ` ( e.g. ` WHERE 1
;; = 1`). This is only used for field filters outside of optional clauses
(= value params/no-value) {:replacement-snippet "1 = 1"}
;; if we have a vector of multiple values recursively convert them to SQL and combine into an `AND` clause
;; (This is multiple values in the sense that the frontend provided multiple maps with value values for the same
FieldFilter , not in the sense that we have a single map with multiple values for ` : value ` . )
(sequential? value)
(combine-replacement-snippet-maps (for [v value]
(->replacement-snippet-info driver (assoc field-filter :value v))))
;; otherwise convert single value to SQL.
;; Convert the value to a replacement snippet info map and then tack on the field identifier to the front
:else
(field-filter->replacement-snippet-info driver field-filter)))
;;; ------------------------------------ Referenced Card replacement snippet info ------------------------------------
(defmethod ->replacement-snippet-info [:sql ReferencedCardQuery]
[_ {:keys [query params]}]
{:prepared-statement-args (not-empty params)
:replacement-snippet (sql.qp/make-nestable-sql query)})
---------------------------------- Native Query Snippet replacement snippet info ---------------------------------
(defmethod ->replacement-snippet-info [:sql ReferencedQuerySnippet]
[_ {:keys [content]}]
{:prepared-statement-args nil
:replacement-snippet content})
| null | https://raw.githubusercontent.com/metabase/metabase/f8b048c8fa332636feef7083a8ad67e914942298/src/metabase/driver/sql/parameters/substitution.clj | clojure | appropriate SQL that should be used to replace the param snippet, e.g. {{x}}
; any prepared statement args (values for `?` placeholders) needed for the replacement snippet
------------------------------------ ->prepared-substitution & default impls -------------------------------------
TIMEZONE FIXME - remove this since we aren't using `Date` anymore
------------------------------------------- ->replacement-snippet-info -------------------------------------------
allowed to be blank if this is an optional param
TIMEZONE FIXME - this is WRONG WRONG WRONG because date ranges should be inclusive for start and *exclusive*
for end
The [[metabase.query-processor.middleware.parameters/substitute-parameters]] QP middleware actually happens before
the [[metabase.query-processor.middleware.resolve-fields/resolve-fields]] middleware that would normally fetch all
the parameter. There's probably _some_ way to structure things that would make this "duplicate" call unneeded, but
I haven't figured out what that is yet
for dates, the SQL includes appropriately bucketing based on
convert all other dates to `= <date>`
for sequences of multiple values we want to generate an `IN (...)` clause
convert everything else to `= <value>`
= 1`). This is only used for field filters outside of optional clauses
if we have a vector of multiple values recursively convert them to SQL and combine into an `AND` clause
(This is multiple values in the sense that the frontend provided multiple maps with value values for the same
otherwise convert single value to SQL.
Convert the value to a replacement snippet info map and then tack on the field identifier to the front
------------------------------------ Referenced Card replacement snippet info ------------------------------------ | (ns metabase.driver.sql.parameters.substitution
"These functions take the info for a param fetched by the functions above and add additional info about how that param
should be represented as SQL. (Specifically, they return information in this format:
:replacement-snippet \"= ?\"
:prepared-statement-args [#t \"2017-01-01\"]}"
(:require
[clojure.string :as str]
[metabase.driver :as driver]
[metabase.driver.common.parameters :as params]
[metabase.driver.common.parameters.dates :as params.dates]
[metabase.driver.common.parameters.operators :as params.ops]
[metabase.driver.sql.query-processor :as sql.qp]
[metabase.mbql.schema :as mbql.s]
[metabase.mbql.util :as mbql.u]
[metabase.query-processor.error-type :as qp.error-type]
[metabase.query-processor.middleware.wrap-value-literals
:as qp.wrap-value-literals]
[metabase.query-processor.store :as qp.store]
[metabase.query-processor.timezone :as qp.timezone]
[metabase.query-processor.util.add-alias-info :as add]
[metabase.util :as u]
[metabase.util.date-2 :as u.date]
[metabase.util.honeysql-extensions :as hx]
[metabase.util.i18n :refer [tru]]
[metabase.util.schema :as su]
[schema.core :as s])
(:import
(clojure.lang Keyword)
(honeysql.types SqlCall)
(java.time.temporal Temporal)
(java.util UUID)
(metabase.driver.common.parameters CommaSeparatedNumbers Date DateRange FieldFilter MultipleValues ReferencedCardQuery ReferencedQuerySnippet)))
(defmulti ->prepared-substitution
"Returns a `PreparedStatementSubstitution` (see schema below) for `x` and the given driver. This allows driver
specific parameters and SQL replacement text (usually just ?). The param value is already prepared and ready for
inlcusion in the query, such as what's needed for SQLite and timestamps."
{:arglists '([driver x])}
(fn [driver x] [(driver/dispatch-on-initialized-driver driver) (class x)])
:hierarchy #'driver/hierarchy)
(def PreparedStatementSubstitution
"Represents the SQL string replace value (usually ?) and the typed parameter value"
{:sql-string s/Str
:param-values [s/Any]})
(s/defn make-stmt-subs :- PreparedStatementSubstitution
"Create a `PreparedStatementSubstitution` map for `sql-string` and the `param-seq`"
[sql-string param-seq]
{:sql-string sql-string
:param-values param-seq})
(s/defn ^:private honeysql->prepared-stmt-subs
"Convert X to a replacement snippet info map by passing it to HoneySQL's `format` function."
[driver x]
(let [[snippet & args] (sql.qp/format-honeysql driver x)]
(make-stmt-subs snippet args)))
(s/defmethod ->prepared-substitution [:sql nil] :- PreparedStatementSubstitution
[driver _]
(honeysql->prepared-stmt-subs driver nil))
(s/defmethod ->prepared-substitution [:sql Object] :- PreparedStatementSubstitution
[driver obj]
(honeysql->prepared-stmt-subs driver (str obj)))
(s/defmethod ->prepared-substitution [:sql Number] :- PreparedStatementSubstitution
[driver num]
(honeysql->prepared-stmt-subs driver (sql.qp/inline-num num)))
(s/defmethod ->prepared-substitution [:sql Boolean] :- PreparedStatementSubstitution
[driver b]
(honeysql->prepared-stmt-subs driver b))
(s/defmethod ->prepared-substitution [:sql Keyword] :- PreparedStatementSubstitution
[driver kwd]
(honeysql->prepared-stmt-subs driver kwd))
(s/defmethod ->prepared-substitution [:sql SqlCall] :- PreparedStatementSubstitution
[driver sql-call]
(honeysql->prepared-stmt-subs driver sql-call))
(s/defmethod ->prepared-substitution [:sql Date] :- PreparedStatementSubstitution
[_driver date]
(make-stmt-subs "?" [date]))
(s/defmethod ->prepared-substitution [:sql Temporal] :- PreparedStatementSubstitution
[_driver t]
(make-stmt-subs "?" [t]))
(def ^:private ParamSnippetInfo
(s/optional-key :prepared-statement-args) [s/Any]})
(defmulti ->replacement-snippet-info
"Return information about how `value` should be converted to SQL, as a map with keys `:replacement-snippet` and
`:prepared-statement-args`.
(->replacement-snippet-info :h2 \"ABC\") -> {:replacement-snippet \"?\", :prepared-statement-args \"ABC\"}"
{:arglists '([driver value])}
(fn [driver v] [(driver/the-initialized-driver driver) (class v)])
:hierarchy #'driver/hierarchy)
(defn- create-replacement-snippet
[driver nil-or-obj]
(let [{:keys [sql-string param-values]} (->prepared-substitution driver nil-or-obj)]
{:replacement-snippet sql-string
:prepared-statement-args param-values}))
(defmethod ->replacement-snippet-info [:sql nil]
[driver this]
(create-replacement-snippet driver this))
(defmethod ->replacement-snippet-info [:sql Object]
[driver this]
(create-replacement-snippet driver (str this)))
(defmethod ->replacement-snippet-info [:sql Number]
[driver this]
(create-replacement-snippet driver this))
(defmethod ->replacement-snippet-info [:sql Boolean]
[driver this]
(create-replacement-snippet driver this))
(defmethod ->replacement-snippet-info [:sql Keyword]
[driver this]
(if (= this params/no-value)
{:replacement-snippet ""}
(create-replacement-snippet driver this)))
(defmethod ->replacement-snippet-info [:sql SqlCall]
[driver this]
(create-replacement-snippet driver this))
(defmethod ->replacement-snippet-info [:sql UUID]
[_driver this]
{:replacement-snippet (format "CAST('%s' AS uuid)" (str this))})
(defmethod ->replacement-snippet-info [:sql CommaSeparatedNumbers]
[_driver {:keys [numbers]}]
{:replacement-snippet (str/join ", " numbers)})
(defmethod ->replacement-snippet-info [:sql MultipleValues]
[driver {:keys [values]}]
(let [values (map (partial ->replacement-snippet-info driver) values)]
{:replacement-snippet (str/join ", " (map :replacement-snippet values))
:prepared-statement-args (apply concat (map :prepared-statement-args values))}))
(defn- maybe-parse-temporal-literal [x]
(condp instance? x
String (u.date/parse x (qp.timezone/report-timezone-id-if-supported))
Temporal x
(throw (ex-info (tru "Don''t know how to parse {0} {1} as a temporal literal" (class x) (pr-str x))
{:type qp.error-type/invalid-parameter
:parameter x}))))
(defmethod ->replacement-snippet-info [:sql Date]
[driver {:keys [s]}]
(create-replacement-snippet driver (maybe-parse-temporal-literal s)))
(defn- prepared-ts-subs [driver operator date-str]
(let [{:keys [sql-string param-values]} (->prepared-substitution driver (maybe-parse-temporal-literal date-str))]
{:replacement-snippet (str operator " " sql-string)
:prepared-statement-args param-values}))
(defmethod ->replacement-snippet-info [:sql DateRange]
[driver {:keys [start end]}]
(cond
(= start end)
(prepared-ts-subs driver \= start)
(nil? start)
(prepared-ts-subs driver \< end)
(nil? end)
(prepared-ts-subs driver \> start)
:else
(let [[start end] (map (fn [s]
(->prepared-substitution driver (maybe-parse-temporal-literal s)))
[start end])]
{:replacement-snippet (format "BETWEEN %s AND %s" (:sql-string start) (:sql-string end))
:prepared-statement-args (concat (:param-values start) (:param-values end))})))
------------------------------------- Field Filter replacement snippet info --------------------------------------
(s/defn ^:private combine-replacement-snippet-maps :- ParamSnippetInfo
"Combine multiple `replacement-snippet-maps` into a single map using a SQL `AND` clause."
[replacement-snippet-maps :- [ParamSnippetInfo]]
{:replacement-snippet (str \( (str/join " AND " (map :replacement-snippet replacement-snippet-maps)) \))
:prepared-statement-args (reduce concat (map :prepared-statement-args replacement-snippet-maps))})
for relative dates convert the param to a ` DateRange ` record type and call ` ->replacement - snippet - info ` on it
(s/defn ^:private date-range-field-filter->replacement-snippet-info :- ParamSnippetInfo
[driver value]
(->> (params.dates/date-string->range value)
params/map->DateRange
(->replacement-snippet-info driver)))
(s/defn ^:private field-filter->equals-clause-sql :- ParamSnippetInfo
[driver value]
(-> (->replacement-snippet-info driver value)
(update :replacement-snippet (partial str "= "))))
(s/defn ^:private field-filter-multiple-values->in-clause-sql :- ParamSnippetInfo
[driver values]
(-> (->replacement-snippet-info driver (params/map->MultipleValues {:values values}))
(update :replacement-snippet (partial format "IN (%s)"))))
(s/defn ^:private honeysql->replacement-snippet-info :- ParamSnippetInfo
"Convert `hsql-form` to a replacement snippet info map by passing it to HoneySQL's `format` function."
[driver hsql-form]
(let [[snippet & args] (sql.qp/format-honeysql driver hsql-form)]
{:replacement-snippet snippet
:prepared-statement-args args}))
(s/defn ^:private field->clause :- mbql.s/field
[_driver {table-id :table_id, field-id :id, :as field} param-type]
the we need in a single pass , so this is actually necessary here . I do n't think switching the order of the
middleware would work either because we do n't know what Field this parameter actually refers to until we resolve
(qp.store/fetch-and-store-fields! #{field-id})
(qp.store/fetch-and-store-tables! #{table-id})
[:field
(u/the-id field)
{:base-type (:base_type field)
:temporal-unit (when (params.dates/date-type? param-type)
:day)
TODO -- are we sure we want to qualify this ?
in case anyone needs to know we 're compiling a Field filter .
::compiling-field-filter? true}])
(s/defn ^:private field->identifier :- su/NonBlankString
"Return an approprate snippet to represent this `field` in SQL given its param type.
the `param-type`."
[driver field param-type]
(binding [hx/*honey-sql-version* (sql.qp/honey-sql-version driver)]
(->> (field->clause driver field param-type)
(sql.qp/->honeysql driver)
(honeysql->replacement-snippet-info driver)
:replacement-snippet)))
(s/defn ^:private field-filter->replacement-snippet-info :- ParamSnippetInfo
"Return `[replacement-snippet & prepared-statement-args]` appropriate for a field filter parameter."
[driver {{param-type :type, value :value, :as params} :value, field :field, :as _field-filter}]
(assert (:id field) (format "Why doesn't Field have an ID?\n%s" (u/pprint-to-str field)))
(letfn [(prepend-field [x]
(update x :replacement-snippet
(partial str (field->identifier driver field param-type) " ")))]
(cond
(params.ops/operator? param-type)
(let [[snippet & args]
(binding [hx/*honey-sql-version* (sql.qp/honey-sql-version driver)]
(as-> (assoc params :target [:template-tag (field->clause driver field param-type)]) form
(params.ops/to-clause form)
(mbql.u/desugar-filter-clause form)
(qp.wrap-value-literals/wrap-value-literals-in-mbql form)
(sql.qp/->honeysql driver form)
(sql.qp/format-honeysql driver form)))]
{:replacement-snippet snippet, :prepared-statement-args (vec args)})
convert date ranges to DateRange record types
(params.dates/date-range-type? param-type) (prepend-field
(date-range-field-filter->replacement-snippet-info driver value))
(params.dates/date-type? param-type) (prepend-field
(field-filter->equals-clause-sql driver (params/map->Date {:s value})))
(sequential? value) (prepend-field
(field-filter-multiple-values->in-clause-sql driver value))
:else (prepend-field
(field-filter->equals-clause-sql driver value)))))
(defmethod ->replacement-snippet-info [:sql FieldFilter]
[driver {:keys [value], :as field-filter}]
(cond
otherwise if the value is n't present just put in something that will always be true , such as ` 1 ` ( e.g. ` WHERE 1
(= value params/no-value) {:replacement-snippet "1 = 1"}
FieldFilter , not in the sense that we have a single map with multiple values for ` : value ` . )
(sequential? value)
(combine-replacement-snippet-maps (for [v value]
(->replacement-snippet-info driver (assoc field-filter :value v))))
:else
(field-filter->replacement-snippet-info driver field-filter)))
(defmethod ->replacement-snippet-info [:sql ReferencedCardQuery]
[_ {:keys [query params]}]
{:prepared-statement-args (not-empty params)
:replacement-snippet (sql.qp/make-nestable-sql query)})
---------------------------------- Native Query Snippet replacement snippet info ---------------------------------
(defmethod ->replacement-snippet-info [:sql ReferencedQuerySnippet]
[_ {:keys [content]}]
{:prepared-statement-args nil
:replacement-snippet content})
|
502b4b908eeeb6319fc134e95384dde37edb532fc7d3d9b25776ed96d8a49c8a | slyrus/abcl | late-setf.lisp | late-setf.lisp
;;;
Copyright ( C ) 2003 - 2005
$ Id$
;;;
;;; This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation ; either version 2
of the License , or ( at your option ) any later version .
;;;
;;; This program is distributed in the hope that it will be useful,
;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU General Public License for more details.
;;;
You should have received a copy of the GNU General Public License
;;; along with this program; if not, write to the Free Software
Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
;;;
;;; As a special exception, the copyright holders of this library give you
;;; permission to link this library with independent modules to produce an
;;; executable, regardless of the license terms of these independent
;;; modules, and to copy and distribute the resulting executable under
;;; terms of your choice, provided that you also meet, for each linked
;;; independent module, the terms and conditions of the license of that
;;; module. An independent module is a module which is not derived from
;;; or based on this library. If you modify this library, you may extend
;;; this exception to your version of the library, but you are not
;;; obligated to do so. If you do not wish to do so, delete this
;;; exception statement from your version.
From CMUCL / SBCL .
(in-package #:system)
(defmacro define-setf-expander (access-fn lambda-list &body body)
(require-type access-fn 'symbol)
(let ((whole (gensym "WHOLE-"))
(environment (gensym "ENV-")))
(multiple-value-bind (body local-decs doc)
(parse-defmacro lambda-list whole body access-fn
'define-setf-expander
:environment environment)
`(progn
(record-source-information-for-type ',access-fn :setf-expander)
(eval-when (:compile-toplevel :load-toplevel :execute)
,@(when doc
`((%set-documentation ',access-fn 'setf ,doc)))
(setf (get ',access-fn 'setf-expander)
#'(lambda (,whole ,environment)
,@local-decs
(block ,access-fn ,body)))
',access-fn)))))
(define-setf-expander values (&rest places &environment env)
(let ((setters ())
(getters ())
(all-dummies ())
(all-vals ())
(newvals ()))
(dolist (place places)
(multiple-value-bind (dummies vals newval setter getter)
(get-setf-expansion place env)
(setf all-dummies (append all-dummies dummies (cdr newval))
all-vals (append all-vals vals
(mapcar (constantly nil) (cdr newval)))
newvals (append newvals (list (car newval))))
(push setter setters)
(push getter getters)))
(values all-dummies all-vals newvals
`(values ,@(reverse setters)) `(values ,@(reverse getters)))))
(defun make-gensym-list (n)
(let ((list ()))
(dotimes (i n list)
(push (gensym) list))))
(define-setf-expander getf (place prop &optional default &environment env)
(multiple-value-bind (temps values stores set get)
(get-setf-expansion place env)
(let ((newval (gensym))
(ptemp (gensym))
(def-temp (if default (gensym))))
(values `(,@temps ,ptemp ,@(if default `(,def-temp)))
`(,@values ,prop ,@(if default `(,default)))
`(,newval)
`(let ((,(car stores) (%putf ,get ,ptemp ,newval)))
,set
,newval)
`(getf ,get ,ptemp ,@(if default `(,def-temp)))))))
(define-setf-expander apply (functionoid &rest args)
(let ((function (second functionoid))
(new-var (gensym))
(vars (make-gensym-list (length args))))
(values vars args (list new-var)
`(apply #'(setf ,function) ,new-var ,@vars)
`(apply #',function ,@vars))))
(define-setf-expander the (type place &environment env)
(multiple-value-bind (temps subforms store-vars setter getter)
(get-setf-expansion place env)
(values temps subforms store-vars
`(multiple-value-bind ,store-vars
(the ,type (values ,@store-vars))
,setter)
`(the ,type ,getter))))
(defun (setf macro-function) (new-function symbol &optional environment)
(declare (ignore environment))
(let ((macro (make-macro symbol (or (precompile nil new-function)
new-function))))
(fset symbol macro)
macro))
| null | https://raw.githubusercontent.com/slyrus/abcl/881f733fdbf4b722865318a7d2abe2ff8fdad96e/src/org/armedbear/lisp/late-setf.lisp | lisp |
This program is free software; you can redistribute it and/or
either version 2
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with this program; if not, write to the Free Software
As a special exception, the copyright holders of this library give you
permission to link this library with independent modules to produce an
executable, regardless of the license terms of these independent
modules, and to copy and distribute the resulting executable under
terms of your choice, provided that you also meet, for each linked
independent module, the terms and conditions of the license of that
module. An independent module is a module which is not derived from
or based on this library. If you modify this library, you may extend
this exception to your version of the library, but you are not
obligated to do so. If you do not wish to do so, delete this
exception statement from your version. | late-setf.lisp
Copyright ( C ) 2003 - 2005
$ Id$
modify it under the terms of the GNU General Public License
of the License , or ( at your option ) any later version .
You should have received a copy of the GNU General Public License
Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
From CMUCL / SBCL .
(in-package #:system)
(defmacro define-setf-expander (access-fn lambda-list &body body)
(require-type access-fn 'symbol)
(let ((whole (gensym "WHOLE-"))
(environment (gensym "ENV-")))
(multiple-value-bind (body local-decs doc)
(parse-defmacro lambda-list whole body access-fn
'define-setf-expander
:environment environment)
`(progn
(record-source-information-for-type ',access-fn :setf-expander)
(eval-when (:compile-toplevel :load-toplevel :execute)
,@(when doc
`((%set-documentation ',access-fn 'setf ,doc)))
(setf (get ',access-fn 'setf-expander)
#'(lambda (,whole ,environment)
,@local-decs
(block ,access-fn ,body)))
',access-fn)))))
(define-setf-expander values (&rest places &environment env)
(let ((setters ())
(getters ())
(all-dummies ())
(all-vals ())
(newvals ()))
(dolist (place places)
(multiple-value-bind (dummies vals newval setter getter)
(get-setf-expansion place env)
(setf all-dummies (append all-dummies dummies (cdr newval))
all-vals (append all-vals vals
(mapcar (constantly nil) (cdr newval)))
newvals (append newvals (list (car newval))))
(push setter setters)
(push getter getters)))
(values all-dummies all-vals newvals
`(values ,@(reverse setters)) `(values ,@(reverse getters)))))
(defun make-gensym-list (n)
(let ((list ()))
(dotimes (i n list)
(push (gensym) list))))
(define-setf-expander getf (place prop &optional default &environment env)
(multiple-value-bind (temps values stores set get)
(get-setf-expansion place env)
(let ((newval (gensym))
(ptemp (gensym))
(def-temp (if default (gensym))))
(values `(,@temps ,ptemp ,@(if default `(,def-temp)))
`(,@values ,prop ,@(if default `(,default)))
`(,newval)
`(let ((,(car stores) (%putf ,get ,ptemp ,newval)))
,set
,newval)
`(getf ,get ,ptemp ,@(if default `(,def-temp)))))))
(define-setf-expander apply (functionoid &rest args)
(let ((function (second functionoid))
(new-var (gensym))
(vars (make-gensym-list (length args))))
(values vars args (list new-var)
`(apply #'(setf ,function) ,new-var ,@vars)
`(apply #',function ,@vars))))
(define-setf-expander the (type place &environment env)
(multiple-value-bind (temps subforms store-vars setter getter)
(get-setf-expansion place env)
(values temps subforms store-vars
`(multiple-value-bind ,store-vars
(the ,type (values ,@store-vars))
,setter)
`(the ,type ,getter))))
(defun (setf macro-function) (new-function symbol &optional environment)
(declare (ignore environment))
(let ((macro (make-macro symbol (or (precompile nil new-function)
new-function))))
(fset symbol macro)
macro))
|
085756b26c6cfacafe7a05a6ccedb9d1c28ae0a2284e71f4557175c499f5299b | ucsd-progsys/mist | unit.hs | unit :: Unit
unit = Unit
| null | https://raw.githubusercontent.com/ucsd-progsys/mist/0a9345e73dc53ff8e8adb8bed78d0e3e0cdc6af0/tests/Tests/Integration/pos/unit.hs | haskell | unit :: Unit
unit = Unit
|
|
276c966e2b378a371bb5efc264e9afa61f9f7d3a220f8ce088d3ed60aa806a8b | berberman/arch-hs | Main.hs | # LANGUAGE DerivingStrategies #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE RecordWildCards #
# LANGUAGE TupleSections #
# LANGUAGE ViewPatterns #
module Main (main) where
import qualified Algebra.Graph.AdjacencyMap.Algorithm as G
import qualified Algebra.Graph.Labelled.AdjacencyMap as GL
import Args
import Control.Monad (filterM, forM_, unless)
import qualified Data.Aeson as A
import qualified Data.ByteString.Lazy as LBS
import Data.Conduit.Process (system)
import Data.Containers.ListUtils (nubOrd)
import Data.IORef (IORef, modifyIORef', newIORef, readIORef)
import Data.List.NonEmpty (toList)
import qualified Data.Map.Strict as Map
import Data.Maybe (catMaybes, fromMaybe, mapMaybe)
import qualified Data.Set as Set
import qualified Data.Text as T
import qualified Data.Text.IO as T
import Distribution.ArchHs.Aur (Aur, aurToIO, isInAur)
import Distribution.ArchHs.CommunityDB
import Distribution.ArchHs.Core
import Distribution.ArchHs.Exception
import Distribution.ArchHs.FilesDB
import Distribution.ArchHs.Hackage
import Distribution.ArchHs.Internal.Prelude
import Distribution.ArchHs.Local
import Distribution.ArchHs.Name
import Distribution.ArchHs.Options
import Distribution.ArchHs.PP
import qualified Distribution.ArchHs.PkgBuild as N
import Distribution.ArchHs.Types
import Distribution.ArchHs.Utils
import GHC.IO.Encoding (setLocaleEncoding, utf8)
import Json
import Network.HTTP.Client (Manager)
import Network.HTTP.Client.TLS (newTlsManager)
import System.Directory (createDirectoryIfMissing)
import System.Exit (ExitCode (ExitFailure, ExitSuccess))
import System.FilePath (takeFileName)
app ::
Members '[Embed IO, State (Set.Set PackageName), KnownGHCVersion, CommunityEnv, HackageEnv, FlagAssignmentsEnv, DependencyRecord, Trace, Aur, WithMyErr] r =>
PackageName ->
FilePath ->
Bool ->
[String] ->
Bool ->
Bool ->
Bool ->
FilePath ->
Bool ->
(DBKind -> IO FilesDB) ->
Sem r ()
app target path aurSupport skip uusi force installDeps jsonPath noSkipMissing loadFilesDB' = do
(deps, sublibs, sysDeps) <- getDependencies (fmap mkUnqualComponentName skip) Nothing target
inCommunity <- isInCommunity target
when inCommunity $
if force
then printWarn $ "Target has been provided by" <+> ppCommunity <> comma <+> "but you specified --force"
else throw $ TargetExist target ByCommunity
when aurSupport $ do
inAur <- isInAur target
when inAur $
if force
then printWarn $ "Target has been provided by" <+> ppAur <> comma <+> "but you specified --force"
else throw $ TargetExist target ByAur
let removeSublibs pkgs =
pkgs ^.. each . filtered (\x -> x ^. pkgName `notElem` sublibs) & each %~ (\x -> x & pkgDeps %~ filter (\d -> d ^. depName `notElem` sublibs))
grouped = removeSublibs $ groupDeps deps
namesFromSolved x = x ^.. each . pkgName <> x ^.. each . pkgDeps . each . depName
allNames = nubOrd $ namesFromSolved grouped
communityProvideList <- (<> ghcLibList) <$> filterM (\x -> if x == target && force then return False else isInCommunity x) allNames
let providedPackages = filter (\x -> x ^. pkgName `elem` communityProvideList) grouped
abnormalDependencies =
mapMaybe
( \x -> case filter (`notElem` communityProvideList) (x ^. pkgDeps ^.. each . depName) of
[] -> Nothing
pkgs -> Just (x ^. pkgName, pkgs)
)
providedPackages
-- all missing transitive dependencies, excluding direct dependencies of the target
missingChildren = mconcat $ snd <$> filter (\x -> fst x /= target) abnormalDependencies
embed $
forM_ abnormalDependencies $ \(T.pack . unPackageName -> parent, children) -> do
printWarn $ "Package" <+> dquotes (pretty parent) <+> "is provided without" <> colon
forM_ children $ putStrLn . unPackageName
unless (null abnormalDependencies || noSkipMissing) $
printWarn "Above package(s) are ignored unless you specify --no-skip-missing"
let fillProvidedPkgs provideList provider = map (\x -> if (x ^. pkgName) `elem` provideList then ProvidedPackage (x ^. pkgName) provider else x)
fillProvidedDeps provideList provider = map (pkgDeps %~ each %~ (\y -> if y ^. depName `elem` provideList then y & depProvider ?~ provider else y))
filledByCommunity = fillProvidedPkgs communityProvideList ByCommunity . fillProvidedDeps communityProvideList ByCommunity $ grouped
-- after filling community
toBePacked1 = filledByCommunity ^.. each . filtered (not . isProvided)
(filledByBoth, toBePacked2) <- do
when aurSupport $ printInfo "Start searching AUR..."
aurProvideList <-
if aurSupport
then -- after filling aur. toBePacked1 should not appear after the next line
filterM (\n -> do printInfo ("Searching" <+> viaPretty n); isInAur n) $ filter (\x -> not $ x == target && force) $ toBePacked1 ^.. each . pkgName
else return []
let a = fillProvidedPkgs aurProvideList ByAur . fillProvidedDeps aurProvideList ByAur $ filledByCommunity
b = a ^.. each . filtered (not . isProvided)
return (a, b)
when (null filledByBoth) $
throw $ TargetDisappearException target
printInfo "Solved:"
embed $ T.putStrLn . prettySolvedPkgs $ filledByBoth
printInfo "Recommended package order:"
remove from the graph iff noSkipMissing is not enabled
let vertexesToBeRemoved = (if noSkipMissing then [] else missingChildren) <> filledByBoth ^.. each . filtered isProvided ^.. each . pkgName
removeSelfCycle g = foldr (\n acc -> GL.removeEdge n n acc) g $ toBePacked2 ^.. each . pkgName
newGraph = GL.induce (`notElem` vertexesToBeRemoved) deps
flattened <- case G.topSort . GL.skeleton $ removeSelfCycle newGraph of
Left c -> throw . CyclicExist $ toList c
Right x -> return $ filter (`notElem` sublibs) x
-- after removing missing children
-- toBePacked1 and toBePacked2 should appear after the next line
let toBePacked3 = filter (\x -> x ^. pkgName `elem` flattened) toBePacked2
-- add sign for missing children if we have
embed . putDoc $ (prettyDeps . reverse $ map (\x -> (x, x `elem` missingChildren)) flattened) <> line <> line
unless (null missingChildren || not noSkipMissing) $
embed . putDoc $ annotate italicized $ yellowStarInParens <+> "indicates a missing package" <> line <> line
let sysDepsToBePacked = Map.filterWithKey (\k _ -> k `elem` flattened) sysDeps
unless (null sysDepsToBePacked) $ do
printInfo "Detected pkgconfig or extraLib from target(s):"
embed $ T.putStrLn $ ppSysDependencies sysDepsToBePacked
sysDepsRef <- embed . newIORef $ toUnsolved <$> nubOrd (Map.foldMapWithKey (\_ x -> x) sysDepsToBePacked)
embed $
isAllSolvedM sysDepsRef >>= \b -> unless b $ do
printInfo "Now finding corresponding system package(s) using files db:"
coreFiles <- loadFilesDB' Core
modifyIORef' sysDepsRef $ fmap (trySolve coreFiles)
b' <- isAllSolvedM sysDepsRef
unless b' $ do
extraFiles <- loadFilesDB' Extra
modifyIORef' sysDepsRef $ fmap (trySolve extraFiles)
b'' <- isAllSolvedM sysDepsRef
unless b'' $ do
communityFiles <- loadFilesDB' Community
modifyIORef' sysDepsRef $ fmap (trySolve communityFiles)
sysDepsResult <- embed $ readIORef sysDepsRef
embed . unless (null sysDepsToBePacked) $ do
printInfo "Done:"
T.putStrLn . align2col $ ppEmergedSysDep <$> sysDepsResult
unless (isAllSolved sysDepsResult) $ printWarn "Unable to obtain all required system packages"
let sysDepsMapping = collectAllSolved sysDepsResult
getSysDeps name = nubOrd $ catMaybes [sysDepsMapping Map.!? file | (SystemDependency file) <- fromMaybe [] $ sysDeps Map.!? name]
flags <- filter (\(_, l) -> not $ null l) <$> mapM (\n -> (n,) <$> getPackageFlag n) flattened
embed $
unless (null flags) $ do
printInfo "Detected flag(s) from targets:"
putDoc $ prettyFlags flags <> line <> line
let jsonOutput =
ArchHSOutput
(fromAbnormalDependency <$> abnormalDependencies)
(fromSolvedPackage <$> filledByBoth)
(reverse flattened)
(fromEmergedSysDep <$> sysDepsResult)
(fromFlag <$> flags)
embed $
unless (null jsonPath) $ do
LBS.writeFile jsonPath $ A.encode jsonOutput
printInfo $ "Write file" <> colon <+> pretty jsonPath
unless (null path) $
mapM_
( \solved -> do
pkgBuild <- cabalToPkgBuild solved uusi $ getSysDeps (solved ^. pkgName)
let pName = N._pkgName pkgBuild
dir = path </> pName
fileName = dir </> "PKGBUILD"
txt = N.applyTemplate pkgBuild
embed $ do
createDirectoryIfMissing True dir
writeFile fileName txt
printInfo $ "Write file" <> colon <+> pretty fileName
)
toBePacked3
when installDeps $ do
let providedDepends pkg =
pkg ^. pkgDeps
^.. each
. filtered (\x -> depNotMyself (pkg ^. pkgName) x && depNotInGHCLib x && x ^. depProvider == Just ByCommunity)
toStr = unArchLinuxName . toArchLinuxName . _depName
depends = unwords . nubOrd . fmap toStr . mconcat $ providedDepends <$> toBePacked3
flattened' = filter (/= target) flattened
case flattened' of
[] -> pure ()
[x] -> printWarn $ "The following dependency is missing in" <+> ppCommunity <> colon <+> pretty (unPackageName x)
xs -> printWarn $ "Following dependencies are missing in" <+> ppCommunity <> colon <+> hsep (punctuate comma (pretty . unPackageName <$> xs))
embed $ putDoc line
case depends of
[] -> printInfo "No extra dependency to install"
xs ->
embed (system $ "sudo pacman --needed -S " <> xs) >>= \case
ExitSuccess -> printSuccess "Installed successfully"
ExitFailure c -> printError $ "pacman exited with" <+> pretty c
-----------------------------------------------------------------------------
data EmergedSysDep = Solved File ArchLinuxName | Unsolved File
deriving stock (Eq, Ord, Show)
toUnsolved :: SystemDependency -> EmergedSysDep
toUnsolved (SystemDependency x) = Unsolved x
trySolve :: FilesDB -> EmergedSysDep -> EmergedSysDep
trySolve db dep
| (Unsolved x) <- dep,
(pkg : _) <- lookupPkg x db =
Solved x pkg
| otherwise = dep
isAllSolved :: [EmergedSysDep] -> Bool
isAllSolved xs = null [() | (Unsolved _) <- xs]
isAllSolvedM :: IORef [EmergedSysDep] -> IO Bool
isAllSolvedM ref = isAllSolved <$> readIORef ref
collectAllSolved :: [EmergedSysDep] -> Map.Map File ArchLinuxName
collectAllSolved xs = Map.fromList [(file, name) | (Solved file name) <- xs]
ppEmergedSysDep :: EmergedSysDep -> (Doc AnsiStyle, Doc AnsiStyle)
ppEmergedSysDep (Solved file (ArchLinuxName name)) = (annGreen . pretty $ file, " ⇒ " <> (annCyan . pretty $ name))
ppEmergedSysDep (Unsolved file) = (annYellow . annBold . pretty $ file, indent 19 cuo)
fromEmergedSysDep :: EmergedSysDep -> SysDepsS
fromEmergedSysDep (Unsolved file) = SysDepsS file Nothing
fromEmergedSysDep (Solved file pkg) = SysDepsS file (Just pkg)
-----------------------------------------------------------------------------
runApp ::
HackageDB ->
CommunityDB ->
Map.Map PackageName FlagAssignment ->
Bool ->
FilePath ->
IORef (Set.Set PackageName) ->
Manager ->
Sem '[CommunityEnv, HackageEnv, FlagAssignmentsEnv, DependencyRecord, Trace, State (Set.Set PackageName), Aur, WithMyErr, Embed IO, Final IO] a ->
IO (Either MyException a)
runApp hackage community flags traceStdout tracePath ref manager =
runFinal
. embedToFinal
. errorToIOFinal
. aurToIO manager
. runStateIORef ref
. runTrace traceStdout tracePath
. evalState Map.empty
. runReader flags
. runReader hackage
. runReader community
runTrace :: Member (Embed IO) r => Bool -> FilePath -> Sem (Trace ': r) a -> Sem r a
runTrace stdout path = interpret $ \case
Trace m -> do
when stdout (embed $ putStrLn m)
unless (null path) (embed $ appendFile path (m ++ "\n"))
-----------------------------------------------------------------------------
main :: IO ()
main = printHandledIOException $
do
setLocaleEncoding utf8
Options {..} <- runArgsParser
unless (null optFileTrace) $ do
printInfo $ "Trace will be dumped to" <+> pretty optFileTrace
writeFile optFileTrace ""
unless (null optJson) $ do
printInfo $ "Output will be dumped to" <+> pretty optJson <+> "as json"
writeFile optJson ""
let isFlagEmpty = Map.null optFlags
isSkipEmpty = null optSkip
unless isFlagEmpty $ do
printInfo "You assigned flags:"
putDoc $ prettyFlagAssignments optFlags <> line <> line
unless isSkipEmpty $ do
printInfo "You chose to skip:"
putDoc $ prettySkip optSkip <> line <> line
when optAur $ printInfo "You specified -a, searching AUR may takes a long time"
when optUusi $ printInfo "You specified --uusi, uusi will become makedepends of each package"
hackage <- loadHackageDBFromOptions optHackage
let isExtraEmpty = null optExtraCabalDirs
optExtraCabal <- mapM findCabalFile optExtraCabalDirs
unless isExtraEmpty $
printInfo $ "You added" <+> hsep (punctuate comma $ pretty . takeFileName <$> optExtraCabal) <+> "as extra cabal file(s), starting parsing right now"
parsedExtra <- mapM parseCabalFile optExtraCabal
let newHackage = foldr insertDB hackage parsedExtra
community <- loadCommunityDBFromOptions optCommunityDB
printInfo "Start running..."
ref <- newIORef Set.empty
manager <- newTlsManager
runApp
newHackage
community
optFlags
optStdoutTrace
optFileTrace
ref
manager
(subsumeGHCVersion $ app optTarget optOutputDir optAur optSkip optUusi optForce optInstallDeps optJson optNoSkipMissing (loadFilesDBFromOptions optFilesDB))
& printAppResult
-----------------------------------------------------------------------------
groupDeps :: GL.AdjacencyMap (Set.Set DependencyType) PackageName -> [SolvedPackage]
groupDeps graph =
fmap
( \(name, deps) ->
SolvedPackage name $ fmap (uncurry . flip $ SolvedDependency Nothing) deps
)
$ result <> aloneChildren
where
result =
fmap ((\(a, b, c) -> (head b, zip a c)) . unzip3)
. groupBy (\x y -> uncurry (==) (getTwo _2 x y))
. fmap (_1 %~ Set.toList)
. GL.edgeList
$ graph
parents = fmap fst result
children = mconcat $ fmap (\(_, ds) -> fmap snd ds) result
-- Maybe 'G.vertexSet' is a better choice
aloneChildren = nubOrd $ zip (filter (`notElem` parents) children) (repeat [])
| null | https://raw.githubusercontent.com/berberman/arch-hs/f1f1972004235428576ed0d20ad1beb220250ed3/app/Main.hs | haskell | # LANGUAGE OverloadedStrings #
all missing transitive dependencies, excluding direct dependencies of the target
after filling community
after filling aur. toBePacked1 should not appear after the next line
after removing missing children
toBePacked1 and toBePacked2 should appear after the next line
add sign for missing children if we have
---------------------------------------------------------------------------
---------------------------------------------------------------------------
---------------------------------------------------------------------------
---------------------------------------------------------------------------
Maybe 'G.vertexSet' is a better choice | # LANGUAGE DerivingStrategies #
# LANGUAGE RecordWildCards #
# LANGUAGE TupleSections #
# LANGUAGE ViewPatterns #
module Main (main) where
import qualified Algebra.Graph.AdjacencyMap.Algorithm as G
import qualified Algebra.Graph.Labelled.AdjacencyMap as GL
import Args
import Control.Monad (filterM, forM_, unless)
import qualified Data.Aeson as A
import qualified Data.ByteString.Lazy as LBS
import Data.Conduit.Process (system)
import Data.Containers.ListUtils (nubOrd)
import Data.IORef (IORef, modifyIORef', newIORef, readIORef)
import Data.List.NonEmpty (toList)
import qualified Data.Map.Strict as Map
import Data.Maybe (catMaybes, fromMaybe, mapMaybe)
import qualified Data.Set as Set
import qualified Data.Text as T
import qualified Data.Text.IO as T
import Distribution.ArchHs.Aur (Aur, aurToIO, isInAur)
import Distribution.ArchHs.CommunityDB
import Distribution.ArchHs.Core
import Distribution.ArchHs.Exception
import Distribution.ArchHs.FilesDB
import Distribution.ArchHs.Hackage
import Distribution.ArchHs.Internal.Prelude
import Distribution.ArchHs.Local
import Distribution.ArchHs.Name
import Distribution.ArchHs.Options
import Distribution.ArchHs.PP
import qualified Distribution.ArchHs.PkgBuild as N
import Distribution.ArchHs.Types
import Distribution.ArchHs.Utils
import GHC.IO.Encoding (setLocaleEncoding, utf8)
import Json
import Network.HTTP.Client (Manager)
import Network.HTTP.Client.TLS (newTlsManager)
import System.Directory (createDirectoryIfMissing)
import System.Exit (ExitCode (ExitFailure, ExitSuccess))
import System.FilePath (takeFileName)
app ::
Members '[Embed IO, State (Set.Set PackageName), KnownGHCVersion, CommunityEnv, HackageEnv, FlagAssignmentsEnv, DependencyRecord, Trace, Aur, WithMyErr] r =>
PackageName ->
FilePath ->
Bool ->
[String] ->
Bool ->
Bool ->
Bool ->
FilePath ->
Bool ->
(DBKind -> IO FilesDB) ->
Sem r ()
app target path aurSupport skip uusi force installDeps jsonPath noSkipMissing loadFilesDB' = do
(deps, sublibs, sysDeps) <- getDependencies (fmap mkUnqualComponentName skip) Nothing target
inCommunity <- isInCommunity target
when inCommunity $
if force
then printWarn $ "Target has been provided by" <+> ppCommunity <> comma <+> "but you specified --force"
else throw $ TargetExist target ByCommunity
when aurSupport $ do
inAur <- isInAur target
when inAur $
if force
then printWarn $ "Target has been provided by" <+> ppAur <> comma <+> "but you specified --force"
else throw $ TargetExist target ByAur
let removeSublibs pkgs =
pkgs ^.. each . filtered (\x -> x ^. pkgName `notElem` sublibs) & each %~ (\x -> x & pkgDeps %~ filter (\d -> d ^. depName `notElem` sublibs))
grouped = removeSublibs $ groupDeps deps
namesFromSolved x = x ^.. each . pkgName <> x ^.. each . pkgDeps . each . depName
allNames = nubOrd $ namesFromSolved grouped
communityProvideList <- (<> ghcLibList) <$> filterM (\x -> if x == target && force then return False else isInCommunity x) allNames
let providedPackages = filter (\x -> x ^. pkgName `elem` communityProvideList) grouped
abnormalDependencies =
mapMaybe
( \x -> case filter (`notElem` communityProvideList) (x ^. pkgDeps ^.. each . depName) of
[] -> Nothing
pkgs -> Just (x ^. pkgName, pkgs)
)
providedPackages
missingChildren = mconcat $ snd <$> filter (\x -> fst x /= target) abnormalDependencies
embed $
forM_ abnormalDependencies $ \(T.pack . unPackageName -> parent, children) -> do
printWarn $ "Package" <+> dquotes (pretty parent) <+> "is provided without" <> colon
forM_ children $ putStrLn . unPackageName
unless (null abnormalDependencies || noSkipMissing) $
printWarn "Above package(s) are ignored unless you specify --no-skip-missing"
let fillProvidedPkgs provideList provider = map (\x -> if (x ^. pkgName) `elem` provideList then ProvidedPackage (x ^. pkgName) provider else x)
fillProvidedDeps provideList provider = map (pkgDeps %~ each %~ (\y -> if y ^. depName `elem` provideList then y & depProvider ?~ provider else y))
filledByCommunity = fillProvidedPkgs communityProvideList ByCommunity . fillProvidedDeps communityProvideList ByCommunity $ grouped
toBePacked1 = filledByCommunity ^.. each . filtered (not . isProvided)
(filledByBoth, toBePacked2) <- do
when aurSupport $ printInfo "Start searching AUR..."
aurProvideList <-
if aurSupport
filterM (\n -> do printInfo ("Searching" <+> viaPretty n); isInAur n) $ filter (\x -> not $ x == target && force) $ toBePacked1 ^.. each . pkgName
else return []
let a = fillProvidedPkgs aurProvideList ByAur . fillProvidedDeps aurProvideList ByAur $ filledByCommunity
b = a ^.. each . filtered (not . isProvided)
return (a, b)
when (null filledByBoth) $
throw $ TargetDisappearException target
printInfo "Solved:"
embed $ T.putStrLn . prettySolvedPkgs $ filledByBoth
printInfo "Recommended package order:"
remove from the graph iff noSkipMissing is not enabled
let vertexesToBeRemoved = (if noSkipMissing then [] else missingChildren) <> filledByBoth ^.. each . filtered isProvided ^.. each . pkgName
removeSelfCycle g = foldr (\n acc -> GL.removeEdge n n acc) g $ toBePacked2 ^.. each . pkgName
newGraph = GL.induce (`notElem` vertexesToBeRemoved) deps
flattened <- case G.topSort . GL.skeleton $ removeSelfCycle newGraph of
Left c -> throw . CyclicExist $ toList c
Right x -> return $ filter (`notElem` sublibs) x
let toBePacked3 = filter (\x -> x ^. pkgName `elem` flattened) toBePacked2
embed . putDoc $ (prettyDeps . reverse $ map (\x -> (x, x `elem` missingChildren)) flattened) <> line <> line
unless (null missingChildren || not noSkipMissing) $
embed . putDoc $ annotate italicized $ yellowStarInParens <+> "indicates a missing package" <> line <> line
let sysDepsToBePacked = Map.filterWithKey (\k _ -> k `elem` flattened) sysDeps
unless (null sysDepsToBePacked) $ do
printInfo "Detected pkgconfig or extraLib from target(s):"
embed $ T.putStrLn $ ppSysDependencies sysDepsToBePacked
sysDepsRef <- embed . newIORef $ toUnsolved <$> nubOrd (Map.foldMapWithKey (\_ x -> x) sysDepsToBePacked)
embed $
isAllSolvedM sysDepsRef >>= \b -> unless b $ do
printInfo "Now finding corresponding system package(s) using files db:"
coreFiles <- loadFilesDB' Core
modifyIORef' sysDepsRef $ fmap (trySolve coreFiles)
b' <- isAllSolvedM sysDepsRef
unless b' $ do
extraFiles <- loadFilesDB' Extra
modifyIORef' sysDepsRef $ fmap (trySolve extraFiles)
b'' <- isAllSolvedM sysDepsRef
unless b'' $ do
communityFiles <- loadFilesDB' Community
modifyIORef' sysDepsRef $ fmap (trySolve communityFiles)
sysDepsResult <- embed $ readIORef sysDepsRef
embed . unless (null sysDepsToBePacked) $ do
printInfo "Done:"
T.putStrLn . align2col $ ppEmergedSysDep <$> sysDepsResult
unless (isAllSolved sysDepsResult) $ printWarn "Unable to obtain all required system packages"
let sysDepsMapping = collectAllSolved sysDepsResult
getSysDeps name = nubOrd $ catMaybes [sysDepsMapping Map.!? file | (SystemDependency file) <- fromMaybe [] $ sysDeps Map.!? name]
flags <- filter (\(_, l) -> not $ null l) <$> mapM (\n -> (n,) <$> getPackageFlag n) flattened
embed $
unless (null flags) $ do
printInfo "Detected flag(s) from targets:"
putDoc $ prettyFlags flags <> line <> line
let jsonOutput =
ArchHSOutput
(fromAbnormalDependency <$> abnormalDependencies)
(fromSolvedPackage <$> filledByBoth)
(reverse flattened)
(fromEmergedSysDep <$> sysDepsResult)
(fromFlag <$> flags)
embed $
unless (null jsonPath) $ do
LBS.writeFile jsonPath $ A.encode jsonOutput
printInfo $ "Write file" <> colon <+> pretty jsonPath
unless (null path) $
mapM_
( \solved -> do
pkgBuild <- cabalToPkgBuild solved uusi $ getSysDeps (solved ^. pkgName)
let pName = N._pkgName pkgBuild
dir = path </> pName
fileName = dir </> "PKGBUILD"
txt = N.applyTemplate pkgBuild
embed $ do
createDirectoryIfMissing True dir
writeFile fileName txt
printInfo $ "Write file" <> colon <+> pretty fileName
)
toBePacked3
when installDeps $ do
let providedDepends pkg =
pkg ^. pkgDeps
^.. each
. filtered (\x -> depNotMyself (pkg ^. pkgName) x && depNotInGHCLib x && x ^. depProvider == Just ByCommunity)
toStr = unArchLinuxName . toArchLinuxName . _depName
depends = unwords . nubOrd . fmap toStr . mconcat $ providedDepends <$> toBePacked3
flattened' = filter (/= target) flattened
case flattened' of
[] -> pure ()
[x] -> printWarn $ "The following dependency is missing in" <+> ppCommunity <> colon <+> pretty (unPackageName x)
xs -> printWarn $ "Following dependencies are missing in" <+> ppCommunity <> colon <+> hsep (punctuate comma (pretty . unPackageName <$> xs))
embed $ putDoc line
case depends of
[] -> printInfo "No extra dependency to install"
xs ->
embed (system $ "sudo pacman --needed -S " <> xs) >>= \case
ExitSuccess -> printSuccess "Installed successfully"
ExitFailure c -> printError $ "pacman exited with" <+> pretty c
data EmergedSysDep = Solved File ArchLinuxName | Unsolved File
deriving stock (Eq, Ord, Show)
toUnsolved :: SystemDependency -> EmergedSysDep
toUnsolved (SystemDependency x) = Unsolved x
trySolve :: FilesDB -> EmergedSysDep -> EmergedSysDep
trySolve db dep
| (Unsolved x) <- dep,
(pkg : _) <- lookupPkg x db =
Solved x pkg
| otherwise = dep
isAllSolved :: [EmergedSysDep] -> Bool
isAllSolved xs = null [() | (Unsolved _) <- xs]
isAllSolvedM :: IORef [EmergedSysDep] -> IO Bool
isAllSolvedM ref = isAllSolved <$> readIORef ref
collectAllSolved :: [EmergedSysDep] -> Map.Map File ArchLinuxName
collectAllSolved xs = Map.fromList [(file, name) | (Solved file name) <- xs]
ppEmergedSysDep :: EmergedSysDep -> (Doc AnsiStyle, Doc AnsiStyle)
ppEmergedSysDep (Solved file (ArchLinuxName name)) = (annGreen . pretty $ file, " ⇒ " <> (annCyan . pretty $ name))
ppEmergedSysDep (Unsolved file) = (annYellow . annBold . pretty $ file, indent 19 cuo)
fromEmergedSysDep :: EmergedSysDep -> SysDepsS
fromEmergedSysDep (Unsolved file) = SysDepsS file Nothing
fromEmergedSysDep (Solved file pkg) = SysDepsS file (Just pkg)
runApp ::
HackageDB ->
CommunityDB ->
Map.Map PackageName FlagAssignment ->
Bool ->
FilePath ->
IORef (Set.Set PackageName) ->
Manager ->
Sem '[CommunityEnv, HackageEnv, FlagAssignmentsEnv, DependencyRecord, Trace, State (Set.Set PackageName), Aur, WithMyErr, Embed IO, Final IO] a ->
IO (Either MyException a)
runApp hackage community flags traceStdout tracePath ref manager =
runFinal
. embedToFinal
. errorToIOFinal
. aurToIO manager
. runStateIORef ref
. runTrace traceStdout tracePath
. evalState Map.empty
. runReader flags
. runReader hackage
. runReader community
runTrace :: Member (Embed IO) r => Bool -> FilePath -> Sem (Trace ': r) a -> Sem r a
runTrace stdout path = interpret $ \case
Trace m -> do
when stdout (embed $ putStrLn m)
unless (null path) (embed $ appendFile path (m ++ "\n"))
main :: IO ()
main = printHandledIOException $
do
setLocaleEncoding utf8
Options {..} <- runArgsParser
unless (null optFileTrace) $ do
printInfo $ "Trace will be dumped to" <+> pretty optFileTrace
writeFile optFileTrace ""
unless (null optJson) $ do
printInfo $ "Output will be dumped to" <+> pretty optJson <+> "as json"
writeFile optJson ""
let isFlagEmpty = Map.null optFlags
isSkipEmpty = null optSkip
unless isFlagEmpty $ do
printInfo "You assigned flags:"
putDoc $ prettyFlagAssignments optFlags <> line <> line
unless isSkipEmpty $ do
printInfo "You chose to skip:"
putDoc $ prettySkip optSkip <> line <> line
when optAur $ printInfo "You specified -a, searching AUR may takes a long time"
when optUusi $ printInfo "You specified --uusi, uusi will become makedepends of each package"
hackage <- loadHackageDBFromOptions optHackage
let isExtraEmpty = null optExtraCabalDirs
optExtraCabal <- mapM findCabalFile optExtraCabalDirs
unless isExtraEmpty $
printInfo $ "You added" <+> hsep (punctuate comma $ pretty . takeFileName <$> optExtraCabal) <+> "as extra cabal file(s), starting parsing right now"
parsedExtra <- mapM parseCabalFile optExtraCabal
let newHackage = foldr insertDB hackage parsedExtra
community <- loadCommunityDBFromOptions optCommunityDB
printInfo "Start running..."
ref <- newIORef Set.empty
manager <- newTlsManager
runApp
newHackage
community
optFlags
optStdoutTrace
optFileTrace
ref
manager
(subsumeGHCVersion $ app optTarget optOutputDir optAur optSkip optUusi optForce optInstallDeps optJson optNoSkipMissing (loadFilesDBFromOptions optFilesDB))
& printAppResult
groupDeps :: GL.AdjacencyMap (Set.Set DependencyType) PackageName -> [SolvedPackage]
groupDeps graph =
fmap
( \(name, deps) ->
SolvedPackage name $ fmap (uncurry . flip $ SolvedDependency Nothing) deps
)
$ result <> aloneChildren
where
result =
fmap ((\(a, b, c) -> (head b, zip a c)) . unzip3)
. groupBy (\x y -> uncurry (==) (getTwo _2 x y))
. fmap (_1 %~ Set.toList)
. GL.edgeList
$ graph
parents = fmap fst result
children = mconcat $ fmap (\(_, ds) -> fmap snd ds) result
aloneChildren = nubOrd $ zip (filter (`notElem` parents) children) (repeat [])
|
937af996fa3d6a845dbe970064c2fe6c0e34770ce9e08c5a11696e096526c55a | patrikja/AFPcourse | Main.hs | module Problem3.Main where
import qualified Problem3.QuestionCode as P3Q
main = P3Q.main
-- (Right 0,Right 1738)
Motivation :
main runs the same program in the two monads Eval1 and Eval2
From 3(a ) ( Problem3.Eval_expanded ) we know that gives either an
error or a new store . Thus , even if the failure is handled , the
modified State will be thrown away . The program will thus return the
emptyStore , which is 0 . Eval2 , on the other hand , always returns the
new store , even in the case of error . Thus , the state modification
( CMS.put 1738 ) done before failing propagates through catch and is
visible in the result , which is 1738 .
Motivation:
main runs the same program in the two monads Eval1 and Eval2
From 3(a) (Problem3.Eval_expanded) we know that Eval1 gives either an
error or a new store. Thus, even if the failure is handled, the
modified State will be thrown away. The program will thus return the
emptyStore, which is 0. Eval2, on the other hand, always returns the
new store, even in the case of error. Thus, the state modification
(CMS.put 1738) done before failing propagates through catch and is
visible in the result, which is 1738.
-}
| null | https://raw.githubusercontent.com/patrikja/AFPcourse/1a079ae80ba2dbb36f3f79f0fc96a502c0f670b6/exam/2010-03/Problem3/Main.hs | haskell | (Right 0,Right 1738) | module Problem3.Main where
import qualified Problem3.QuestionCode as P3Q
main = P3Q.main
Motivation :
main runs the same program in the two monads Eval1 and Eval2
From 3(a ) ( Problem3.Eval_expanded ) we know that gives either an
error or a new store . Thus , even if the failure is handled , the
modified State will be thrown away . The program will thus return the
emptyStore , which is 0 . Eval2 , on the other hand , always returns the
new store , even in the case of error . Thus , the state modification
( CMS.put 1738 ) done before failing propagates through catch and is
visible in the result , which is 1738 .
Motivation:
main runs the same program in the two monads Eval1 and Eval2
From 3(a) (Problem3.Eval_expanded) we know that Eval1 gives either an
error or a new store. Thus, even if the failure is handled, the
modified State will be thrown away. The program will thus return the
emptyStore, which is 0. Eval2, on the other hand, always returns the
new store, even in the case of error. Thus, the state modification
(CMS.put 1738) done before failing propagates through catch and is
visible in the result, which is 1738.
-}
|
53a3db0a4c1870d537d5f9ad5f2ddde83cc8f184db686a47767d8ce2d6e81b22 | CatalaLang/catala | ninja_utils.ml | This file is part of the Catala build system , a specification language for
tax and social benefits computation rules . Copyright ( C ) 2020 ,
contributor : < >
Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not
use this file except in compliance with the License . You may obtain a copy of
the License at
-2.0
Unless required by applicable law or agreed to in writing , software
distributed under the License is distributed on an " AS IS " BASIS , WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied . See the
License for the specific language governing permissions and limitations under
the License .
tax and social benefits computation rules. Copyright (C) 2020 Inria,
contributor: Emile Rolley <>
Licensed under the Apache License, Version 2.0 (the "License"); you may not
use this file except in compliance with the License. You may obtain a copy of
the License at
-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations under
the License. *)
module Expr = struct
type t = Lit of string | Var of string | Seq of t list
let rec format fmt = function
| Lit s -> Format.pp_print_string fmt s
| Var s -> Format.fprintf fmt "$%s" s
| Seq ls -> format_list fmt ls
and format_list fmt ls =
Format.pp_print_list
~pp_sep:(fun fmt () -> Format.pp_print_char fmt ' ')
format fmt ls
end
module Rule = struct
type t = { name : string; command : Expr.t; description : Expr.t option }
let make name ~command ~description =
{ name; command; description = Option.some description }
let format fmt rule =
let format_description fmt = function
| Some e -> Format.fprintf fmt " description = %a\n" Expr.format e
| None -> Format.fprintf fmt "\n"
in
Format.fprintf fmt "rule %s\n command = %a\n%a" rule.name Expr.format
rule.command format_description rule.description
end
module Build = struct
type t = {
outputs : Expr.t list;
rule : string;
inputs : Expr.t list option;
vars : (string * Expr.t) list;
}
let make ~outputs ~rule = { outputs; rule; inputs = Option.none; vars = [] }
let make_with_vars ~outputs ~rule ~vars =
{ outputs; rule; inputs = Option.none; vars }
let make_with_inputs ~outputs ~rule ~inputs =
{ outputs; rule; inputs = Option.some inputs; vars = [] }
let make_with_vars_and_inputs ~outputs ~rule ~inputs ~vars =
{ outputs; rule; inputs = Option.some inputs; vars }
let empty = make ~outputs:[Expr.Lit "empty"] ~rule:"phony"
let unpath ?(sep = "-") path =
Re.Pcre.(substitute ~rex:(regexp "/") ~subst:(fun _ -> sep)) path
let format fmt build =
let format_inputs fmt = function
| Some exs -> Format.fprintf fmt " %a" Expr.format_list exs
| None -> ()
and format_vars fmt vars =
List.iter
(fun (name, exp) ->
Format.fprintf fmt " %s = %a\n" name Expr.format exp)
vars
in
Format.fprintf fmt "build %a: %s%a\n%a" Expr.format_list build.outputs
build.rule format_inputs build.inputs format_vars build.vars
end
module RuleMap : Map.S with type key = String.t = Map.Make (String)
module BuildMap : Map.S with type key = String.t = Map.Make (String)
type ninja = { rules : Rule.t RuleMap.t; builds : Build.t BuildMap.t }
let empty = { rules = RuleMap.empty; builds = BuildMap.empty }
let format fmt ninja =
let format_for_all iter format =
iter (fun _name rule -> Format.fprintf fmt "%a\n" format rule)
in
format_for_all RuleMap.iter Rule.format ninja.rules;
format_for_all BuildMap.iter Build.format ninja.builds
| null | https://raw.githubusercontent.com/CatalaLang/catala/d23fcf719496defb069468d6bc47e7f437d8bede/build_system/ninja_utils.ml | ocaml | This file is part of the Catala build system , a specification language for
tax and social benefits computation rules . Copyright ( C ) 2020 ,
contributor : < >
Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not
use this file except in compliance with the License . You may obtain a copy of
the License at
-2.0
Unless required by applicable law or agreed to in writing , software
distributed under the License is distributed on an " AS IS " BASIS , WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied . See the
License for the specific language governing permissions and limitations under
the License .
tax and social benefits computation rules. Copyright (C) 2020 Inria,
contributor: Emile Rolley <>
Licensed under the Apache License, Version 2.0 (the "License"); you may not
use this file except in compliance with the License. You may obtain a copy of
the License at
-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations under
the License. *)
module Expr = struct
type t = Lit of string | Var of string | Seq of t list
let rec format fmt = function
| Lit s -> Format.pp_print_string fmt s
| Var s -> Format.fprintf fmt "$%s" s
| Seq ls -> format_list fmt ls
and format_list fmt ls =
Format.pp_print_list
~pp_sep:(fun fmt () -> Format.pp_print_char fmt ' ')
format fmt ls
end
module Rule = struct
type t = { name : string; command : Expr.t; description : Expr.t option }
let make name ~command ~description =
{ name; command; description = Option.some description }
let format fmt rule =
let format_description fmt = function
| Some e -> Format.fprintf fmt " description = %a\n" Expr.format e
| None -> Format.fprintf fmt "\n"
in
Format.fprintf fmt "rule %s\n command = %a\n%a" rule.name Expr.format
rule.command format_description rule.description
end
module Build = struct
type t = {
outputs : Expr.t list;
rule : string;
inputs : Expr.t list option;
vars : (string * Expr.t) list;
}
let make ~outputs ~rule = { outputs; rule; inputs = Option.none; vars = [] }
let make_with_vars ~outputs ~rule ~vars =
{ outputs; rule; inputs = Option.none; vars }
let make_with_inputs ~outputs ~rule ~inputs =
{ outputs; rule; inputs = Option.some inputs; vars = [] }
let make_with_vars_and_inputs ~outputs ~rule ~inputs ~vars =
{ outputs; rule; inputs = Option.some inputs; vars }
let empty = make ~outputs:[Expr.Lit "empty"] ~rule:"phony"
let unpath ?(sep = "-") path =
Re.Pcre.(substitute ~rex:(regexp "/") ~subst:(fun _ -> sep)) path
let format fmt build =
let format_inputs fmt = function
| Some exs -> Format.fprintf fmt " %a" Expr.format_list exs
| None -> ()
and format_vars fmt vars =
List.iter
(fun (name, exp) ->
Format.fprintf fmt " %s = %a\n" name Expr.format exp)
vars
in
Format.fprintf fmt "build %a: %s%a\n%a" Expr.format_list build.outputs
build.rule format_inputs build.inputs format_vars build.vars
end
module RuleMap : Map.S with type key = String.t = Map.Make (String)
module BuildMap : Map.S with type key = String.t = Map.Make (String)
type ninja = { rules : Rule.t RuleMap.t; builds : Build.t BuildMap.t }
let empty = { rules = RuleMap.empty; builds = BuildMap.empty }
let format fmt ninja =
let format_for_all iter format =
iter (fun _name rule -> Format.fprintf fmt "%a\n" format rule)
in
format_for_all RuleMap.iter Rule.format ninja.rules;
format_for_all BuildMap.iter Build.format ninja.builds
|
|
d51721c8353e6d3ee8131a6c3391abf3c2ec1d3e13a8a7d4d701c88dfae3f9db | cbaggers/rtg-math | types-docs.lisp | (in-package :rtg-math.types)
;;----------------------------------------------------------------
(docs:define-docs
(deftype vec2
"
A `simple-array` of 2 `single-float`s")
(deftype vec3
"
A `simple-array` of 3 `single-float`s")
(deftype vec4
"
A `simple-array` of 4 `single-float`s")
(deftype ivec2
"
A `simple-array` of 2 `(signed-byte 32)`s")
(deftype ivec3
"
A `simple-array` of 3 `(signed-byte 32)`s")
(deftype ivec4
"
A `simple-array` of 4 `(signed-byte 32)`s")
(deftype uvec2
"
A `simple-array` of 2 `(unsigned-byte 32)`s")
(deftype uvec3
"
A `simple-array` of 3 `(unsigned-byte 32)`s")
(deftype uvec4
"
A `simple-array` of 4 `(unsigned-byte 32)`s")
(deftype int8-vec2
"
A `simple-array` of 2 `(signed-byte 8)`s")
(deftype int8-vec3
"
A `simple-array` of 3 `(signed-byte 8)`s")
(deftype int8-vec4
"
A `simple-array` of 4 `(signed-byte 8)`s")
(deftype uint8-vec2
"
A `simple-array` of 2 `(unsigned-byte 8)`s")
(deftype uint8-vec3
"
A `simple-array` of 3 `(unsigned-byte 8)`s")
(deftype uint8-vec4
"
A `simple-array` of 4 `(unsigned-byte 8)`s")
(deftype mat2
"
A `simple-array` of 4 `single-float`s")
(deftype mat3
"
A `simple-array` of 9 `single-float`s")
(deftype mat4
"
A `simple-array` of 16 `single-float`s"))
| null | https://raw.githubusercontent.com/cbaggers/rtg-math/29fc5b3d0028a4a11a82355ecc8cca62662c69e0/types-docs.lisp | lisp | ---------------------------------------------------------------- | (in-package :rtg-math.types)
(docs:define-docs
(deftype vec2
"
A `simple-array` of 2 `single-float`s")
(deftype vec3
"
A `simple-array` of 3 `single-float`s")
(deftype vec4
"
A `simple-array` of 4 `single-float`s")
(deftype ivec2
"
A `simple-array` of 2 `(signed-byte 32)`s")
(deftype ivec3
"
A `simple-array` of 3 `(signed-byte 32)`s")
(deftype ivec4
"
A `simple-array` of 4 `(signed-byte 32)`s")
(deftype uvec2
"
A `simple-array` of 2 `(unsigned-byte 32)`s")
(deftype uvec3
"
A `simple-array` of 3 `(unsigned-byte 32)`s")
(deftype uvec4
"
A `simple-array` of 4 `(unsigned-byte 32)`s")
(deftype int8-vec2
"
A `simple-array` of 2 `(signed-byte 8)`s")
(deftype int8-vec3
"
A `simple-array` of 3 `(signed-byte 8)`s")
(deftype int8-vec4
"
A `simple-array` of 4 `(signed-byte 8)`s")
(deftype uint8-vec2
"
A `simple-array` of 2 `(unsigned-byte 8)`s")
(deftype uint8-vec3
"
A `simple-array` of 3 `(unsigned-byte 8)`s")
(deftype uint8-vec4
"
A `simple-array` of 4 `(unsigned-byte 8)`s")
(deftype mat2
"
A `simple-array` of 4 `single-float`s")
(deftype mat3
"
A `simple-array` of 9 `single-float`s")
(deftype mat4
"
A `simple-array` of 16 `single-float`s"))
|
42864b3fc09600ff38c23fe9ca9c726f6e816f4fbbca25e94bfb5fd0e1809735 | OCamlPro/digodoc | dtd.ml |
* Xml Light , an small Xml parser / printer with DTD support .
* Copyright ( C ) 2003 ( )
*
* This library is free software ; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation ; either
* version 2.1 of the License , or ( at your option ) any later version .
*
* This library has the special exception on linking described in file
* README .
*
* This library is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
* Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library ; if not , write to the Free Software
* Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston ,
* MA 02110 - 1301 USA
* Xml Light, an small Xml parser/printer with DTD support.
* Copyright (C) 2003 Nicolas Cannasse ()
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library has the special exception on linking described in file
* README.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301 USA
*)
open EzCompat
open Xml_types
open Printf
type parse_error_msg =
| InvalidDTDDecl
| InvalidDTDElement
| InvalidDTDAttribute
| InvalidDTDTag
| DTDItemExpected
type check_error =
| ElementDefinedTwice of string
| AttributeDefinedTwice of string * string
| ElementEmptyContructor of string
| ElementReferenced of string * string
| ElementNotDeclared of string
| WrongImplicitValueForID of string * string
type prove_error =
| UnexpectedPCData
| UnexpectedTag of string
| UnexpectedAttribute of string
| InvalidAttributeValue of string
| RequiredAttribute of string
| ChildExpected of string
| EmptyExpected
| DuplicateID of string
| MissingID of string
type dtd_child =
| DTDTag of string
| DTDPCData
| DTDOptional of dtd_child
| DTDZeroOrMore of dtd_child
| DTDOneOrMore of dtd_child
| DTDChoice of dtd_child list
| DTDChildren of dtd_child list
type dtd_element_type =
| DTDEmpty
| DTDAny
| DTDChild of dtd_child
type dtd_attr_default =
| DTDDefault of string
| DTDRequired
| DTDImplied
| DTDFixed of string
type dtd_attr_type =
| DTDCData
| DTDNMToken
| DTDEnum of string list
| DTDID
| DTDIDRef
type dtd_item =
| DTDAttribute of string * string * dtd_attr_type * dtd_attr_default
| DTDElement of string * dtd_element_type
type dtd = dtd_item list
type dtd_child =
| DTDTag of string
| DTDPCData
| DTDOptional of dtd_child
| DTDZeroOrMore of dtd_child
| DTDOneOrMore of dtd_child
| DTDChoice of dtd_child list
| DTDChildren of dtd_child list
type dtd_element_type =
| DTDEmpty
| DTDAny
| DTDChild of dtd_child
type dtd_attr_default =
| DTDDefault of string
| DTDRequired
| DTDImplied
| DTDFixed of string
type dtd_attr_type =
| DTDCData
| DTDNMToken
| DTDEnum of string list
| DTDID
| DTDIDRef
type dtd_item =
| DTDAttribute of string * string * dtd_attr_type * dtd_attr_default
| DTDElement of string * dtd_element_type
type dtd = dtd_item list
*)
type dtd_result =
| DTDNext
| DTDNotMatched
| DTDMatched
| DTDMatchedResult of dtd_child
type error_pos = {
eline : int;
eline_start : int;
emin : int;
emax : int;
}
type parse_error = parse_error_msg * Xml_types.error_pos
exception Parse_error of parse_error
exception Check_error of check_error
exception Prove_error of prove_error
module StringMap = Map.Make(String)
type 'a map = 'a StringMap.t ref
type checked = {
c_elements : dtd_element_type map;
c_attribs : (dtd_attr_type * dtd_attr_default) map map;
}
type dtd_state = {
elements : dtd_element_type map;
attribs : (dtd_attr_type * dtd_attr_default) map map;
mutable current : dtd_element_type;
mutable curtag : string;
state : (string * dtd_element_type) Stack.t;
}
let file_not_found = ref (fun _ -> assert false)
let _raises e =
file_not_found := e
let create_map() = ref StringMap.empty
let empty_map = create_map()
let find_map m k = StringMap.find k (!m)
let set_map m k v = m := StringMap.add k v (!m)
let unset_map m k = m := StringMap.remove k (!m)
let iter_map f m = StringMap.iter f (!m)
let fold_map f m = StringMap.fold f (!m)
let mem_map m k = StringMap.mem k (!m)
let pos source =
let line, lstart, min, max = Xml_lexer.pos source in
({
eline = line;
eline_start = lstart;
emin = min;
emax = max;
} : Xml_types.error_pos)
let convert = function
| Xml_lexer.EInvalidDTDDecl -> InvalidDTDDecl
| Xml_lexer.EInvalidDTDElement -> InvalidDTDElement
| Xml_lexer.EInvalidDTDTag -> InvalidDTDTag
| Xml_lexer.EDTDItemExpected -> DTDItemExpected
| Xml_lexer.EInvalidDTDAttribute -> InvalidDTDAttribute
let parse source =
try
Xml_lexer.init source;
(* local cast Dtd.dtd -> dtd *)
let dtd = (Obj.magic Xml_lexer.dtd source : dtd) in
Xml_lexer.close source;
dtd
with
| Xml_lexer.DTDError e ->
Xml_lexer.close source;
raise (Parse_error (convert e,pos source))
let parse_string s = parse (Lexing.from_string s)
let parse_in ch = parse (Lexing.from_channel ch)
let parse_file fname =
let ch = (try open_in fname with Sys_error _ -> raise (!file_not_found fname)) in
try
let x = parse (Lexing.from_channel ch) in
close_in ch;
x
with
e ->
close_in ch;
raise e
let check dtd =
let attribs = create_map() in
let hdone = create_map() in
let htodo = create_map() in
let ftodo tag from =
try
ignore(find_map hdone tag);
with
Not_found ->
try
match find_map htodo tag with
| None -> set_map htodo tag from
| Some _ -> ()
with
Not_found ->
set_map htodo tag from
in
let fdone tag edata =
try
ignore(find_map hdone tag);
raise (Check_error (ElementDefinedTwice tag));
with
Not_found ->
unset_map htodo tag;
set_map hdone tag edata
in
let fattrib tag aname adata =
(match adata with
| DTDID,DTDImplied -> ()
| DTDID,DTDRequired -> ()
| DTDID,_ -> raise (Check_error (WrongImplicitValueForID (tag,aname)))
| _ -> ());
let h = (try
find_map attribs tag
with
Not_found ->
let h = create_map() in
set_map attribs tag h;
h) in
try
ignore(find_map h aname);
raise (Check_error (AttributeDefinedTwice (tag,aname)));
with
Not_found ->
set_map h aname adata
in
let check_item = function
| DTDAttribute (tag,aname,atype,adef) ->
let utag = String.uppercase tag in
ftodo utag None;
fattrib utag (String.uppercase aname) (atype,adef)
| DTDElement (tag,etype) ->
let utag = String.uppercase tag in
fdone utag etype;
let check_type = function
| DTDEmpty -> ()
| DTDAny -> ()
| DTDChild x ->
let rec check_child = function
| DTDTag s -> ftodo (String.uppercase s) (Some utag)
| DTDPCData -> ()
| DTDOptional c
| DTDZeroOrMore c
| DTDOneOrMore c ->
check_child c
| DTDChoice []
| DTDChildren [] ->
raise (Check_error (ElementEmptyContructor tag))
| DTDChoice l
| DTDChildren l ->
List.iter check_child l
in
check_child x
in
check_type etype
in
List.iter check_item dtd;
iter_map (fun t from ->
match from with
| None -> raise (Check_error (ElementNotDeclared t))
| Some tag -> raise (Check_error (ElementReferenced (t,tag)))
) htodo;
{
c_elements = hdone;
c_attribs = attribs;
}
let start_prove dtd root =
let d = {
elements = dtd.c_elements;
attribs = dtd.c_attribs;
state = Stack.create();
current = DTDChild (DTDTag root);
curtag = "_root";
} in
try
ignore(find_map d.elements (String.uppercase root));
d
with
Not_found -> raise (Check_error (ElementNotDeclared root))
(* - for debug only - *)
let to_string_ref = ref (fun _ -> assert false)
let _trace dtd tag =
let item = DTDElement ("current",dtd.current) in
printf "%s : %s\n"
(match tag with None -> "#PCDATA" | Some t -> t)
(!to_string_ref item)
exception TmpResult of dtd_result
let prove_child dtd tag =
match dtd.current with
| DTDEmpty -> raise (Prove_error EmptyExpected)
| DTDAny -> ()
| DTDChild elt ->
let rec update = function
| DTDTag s ->
(match tag with
| None -> DTDNotMatched
| Some t when t = String.uppercase s -> DTDMatched
| Some _ -> DTDNotMatched)
| DTDPCData ->
(match tag with
| None -> DTDMatched
| Some _ -> DTDNotMatched)
| DTDOptional x ->
(match update x with
| DTDNotMatched
| DTDNext -> DTDNext
| DTDMatched
| DTDMatchedResult _ -> DTDMatched)
| DTDZeroOrMore x ->
(match update x with
| DTDNotMatched
| DTDNext -> DTDNext
| DTDMatched
| DTDMatchedResult _ -> DTDMatchedResult (DTDZeroOrMore x))
| DTDOneOrMore x ->
(match update x with
| DTDNotMatched
| DTDNext -> DTDNotMatched
| DTDMatched
| DTDMatchedResult _ -> DTDMatchedResult (DTDZeroOrMore x))
| DTDChoice l ->
(try
(match List.exists (fun x ->
match update x with
| DTDMatched -> true
| DTDMatchedResult _ as r -> raise (TmpResult r)
| DTDNext | DTDNotMatched -> false) l with
| true -> DTDMatched
| false -> DTDNotMatched)
with
TmpResult r -> r)
DTD is checked !
| DTDChildren (h :: t) ->
(match update h with
| DTDNext ->
(match t with
| [] -> DTDNotMatched
| _ -> update (DTDChildren t))
| DTDNotMatched -> DTDNotMatched
| DTDMatchedResult r ->
DTDMatchedResult (DTDChildren (r::t))
| DTDMatched ->
match t with
| [] -> DTDMatched
| _ -> DTDMatchedResult (DTDChildren t))
in
match update elt with
| DTDNext | DTDNotMatched ->
(match tag with
| None -> raise (Prove_error UnexpectedPCData)
| Some t -> raise (Prove_error (UnexpectedTag t)))
| DTDMatched ->
dtd.current <- DTDEmpty
| DTDMatchedResult r ->
dtd.current <- DTDChild r
let is_nmtoken_char = function
| 'A'..'Z' | 'a'..'z' | '0'..'9' | '.' | '-' | '_' | ':' -> true
| _ -> false
let prove_attrib _dtd hid hidref attr aname (atype,adef) accu =
let aval = (try Some (List.assoc aname attr) with Not_found -> None) in
(match atype, aval with
| DTDCData, _ -> ()
| DTDNMToken, None -> ()
| DTDNMToken, Some v ->
for i = 0 to String.length v - 1 do
if not (is_nmtoken_char v.[i]) then raise (Prove_error (InvalidAttributeValue aname));
done
| DTDEnum _l, None -> ()
| DTDEnum l, Some v ->
if not (List.exists ((=) v) l) then raise (Prove_error (InvalidAttributeValue aname))
| DTDID, None -> ()
| DTDID, Some id ->
if mem_map hid id then raise (Prove_error (DuplicateID id));
set_map hid id ()
| DTDIDRef, None -> ()
| DTDIDRef, Some idref ->
set_map hidref idref ());
match adef, aval with
| DTDRequired, None -> raise (Prove_error (RequiredAttribute aname))
| DTDFixed v, Some av when v <> av -> raise (Prove_error (InvalidAttributeValue aname))
| DTDImplied, None -> accu
| DTDFixed v , None
| DTDDefault _, Some v
| DTDDefault v, None
| DTDRequired, Some v
| DTDImplied, Some v
| DTDFixed _, Some v -> (aname,v) :: accu
let check_attrib ahash (aname,_) =
try
ignore(find_map ahash aname);
with
Not_found -> raise (Prove_error (UnexpectedAttribute aname))
let rec do_prove hid hidref dtd = function
| PCData s ->
prove_child dtd None;
PCData s
| Element (tag,attr,childs) ->
let utag = String.uppercase tag in
let uattr = List.map (fun (aname,aval) -> String.uppercase aname , aval) attr in
prove_child dtd (Some utag);
Stack.push (dtd.curtag,dtd.current) dtd.state;
let elt = (try find_map dtd.elements utag with Not_found -> raise (Prove_error (UnexpectedTag tag))) in
let ahash = (try find_map dtd.attribs utag with Not_found -> empty_map) in
dtd.curtag <- tag;
dtd.current <- elt;
List.iter (check_attrib ahash) uattr;
let attr = fold_map (prove_attrib dtd hid hidref uattr) ahash [] in
let childs = ref (List.map (do_prove hid hidref dtd) childs) in
(match dtd.current with
| DTDAny
| DTDEmpty -> ()
| DTDChild elt ->
let name = ref "" in
let rec check = function
| DTDTag t ->
name := t;
false
| DTDPCData when !childs = [] ->
childs := [PCData ""];
true
| DTDPCData ->
name := "#PCDATA";
false
| DTDOptional _ -> true
| DTDZeroOrMore _ -> true
| DTDOneOrMore e ->
ignore(check e);
false
| DTDChoice l -> List.exists check l
| DTDChildren l -> List.for_all check l
in
match check elt with
| true -> ()
| false -> raise (Prove_error (ChildExpected !name)));
let _ctag, cur = Stack.pop dtd.state in
dtd.curtag <- tag;
dtd.current <- cur;
Element (tag,attr,!childs)
let prove dtd root xml =
let hid = create_map() in
let hidref = create_map() in
let x = do_prove hid hidref (start_prove dtd root) xml in
iter_map (fun id () ->
if not (mem_map hid id) then raise (Prove_error (MissingID id))
) hidref;
x
let parse_error_msg = function
| InvalidDTDDecl -> "Invalid DOCTYPE declaration"
| InvalidDTDElement -> "Invalid DTD element declaration"
| InvalidDTDAttribute -> "Invalid DTD attribute declaration"
| InvalidDTDTag -> "Invalid DTD tag"
| DTDItemExpected -> "DTD item expected"
let parse_error (msg,pos) =
let pos = (Obj.magic pos : error_pos) in
if pos.emin = pos.emax then
sprintf "%s line %d character %d" (parse_error_msg msg) pos.eline (pos.emin - pos.eline_start)
else
sprintf "%s line %d characters %d-%d" (parse_error_msg msg) pos.eline (pos.emin - pos.eline_start) (pos.emax - pos.eline_start)
let check_error = function
| ElementDefinedTwice tag -> sprintf "Element '%s' defined twice" tag
| AttributeDefinedTwice (tag,aname) -> sprintf "Attribute '%s' of element '%s' defined twice" aname tag
| ElementEmptyContructor tag -> sprintf "Element '%s' has empty constructor" tag
| ElementReferenced (tag,from) -> sprintf "Element '%s' referenced by '%s' is not declared" tag from
| ElementNotDeclared tag -> sprintf "Element '%s' needed but is not declared" tag
| WrongImplicitValueForID (tag,idname) -> sprintf "Attribute '%s' of type ID of element '%s' not defined with implicit value #REQUIRED or #IMPLIED" idname tag
let prove_error = function
| UnexpectedPCData -> "Unexpected PCData"
| UnexpectedTag tag -> sprintf "Unexpected tag : '%s'" tag
| UnexpectedAttribute att -> sprintf "Unexpected attribute : '%s'" att
| InvalidAttributeValue att -> sprintf "Invalid attribute value for '%s'" att
| RequiredAttribute att -> sprintf "Required attribute not found : '%s'" att
| ChildExpected cname -> sprintf "Child expected : '%s'" cname
| EmptyExpected -> "No more children expected"
| DuplicateID id -> sprintf "ID '%s' used several times" id
| MissingID idref -> sprintf "missing ID value for IDREF '%s'" idref
let to_string = function
| DTDAttribute (tag,aname,atype,adef) ->
let atype_to_string = function
| DTDCData -> "CDATA"
| DTDNMToken -> "NMTOKEN"
| DTDEnum l -> sprintf "(%s)" (String.concat "|" l)
| DTDID -> "ID"
| DTDIDRef -> "IDREF"
in
let adefault_to_string = function
| DTDDefault s -> sprintf "\"%s\"" s
| DTDRequired -> "#REQUIRED"
| DTDImplied -> "#IMPLIED"
| DTDFixed s -> sprintf "#FIXED \"%s\"" s
in
sprintf "<!ATTLIST %s %s %s %s>" tag aname (atype_to_string atype) (adefault_to_string adef)
| DTDElement (tag,etype) ->
let rec echild_to_string = function
| DTDTag s -> s
| DTDPCData -> "#PCDATA"
| DTDOptional c -> sprintf "%s?" (echild_to_string c)
| DTDZeroOrMore c -> sprintf "%s*" (echild_to_string c)
| DTDOneOrMore c -> sprintf "%s+" (echild_to_string c)
| DTDChoice [c] -> echild_to_string c
| DTDChoice l -> sprintf "(%s)" (String.concat "|" (List.map echild_to_string l))
| DTDChildren [c] -> echild_to_string c
| DTDChildren l -> sprintf "(%s)" (String.concat "," (List.map echild_to_string l))
in
let etype_to_string = function
| DTDEmpty -> "EMPTY"
| DTDAny -> "ANY"
| DTDChild x ->
let rec op_to_string = function
| DTDOptional c -> sprintf "%s?" (op_to_string c)
| DTDZeroOrMore c -> sprintf "%s*" (op_to_string c)
| DTDOneOrMore c -> sprintf "%s+" (op_to_string c)
| _ -> ""
in
let rec root = function
| DTDOptional c
| DTDZeroOrMore c
| DTDOneOrMore c ->
root c
| DTDChoice [_]
| DTDChildren [_] as x ->
x, false
| DTDChoice _
| DTDChildren _ as x ->
x, true
| x -> x, false
in
match root x with
| r, true -> sprintf "%s%s" (echild_to_string r) (op_to_string x)
| r, false -> sprintf "(%s%s)" (echild_to_string r) (op_to_string x)
in
sprintf "<!ELEMENT %s %s>" tag (etype_to_string etype)
;;
to_string_ref := to_string
| null | https://raw.githubusercontent.com/OCamlPro/digodoc/2856aff0a9b3a9bc2d3414eea780a3a39315e40c/src/ez_html/dtd.ml | ocaml | local cast Dtd.dtd -> dtd
- for debug only - |
* Xml Light , an small Xml parser / printer with DTD support .
* Copyright ( C ) 2003 ( )
*
* This library is free software ; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation ; either
* version 2.1 of the License , or ( at your option ) any later version .
*
* This library has the special exception on linking described in file
* README .
*
* This library is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
* Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library ; if not , write to the Free Software
* Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston ,
* MA 02110 - 1301 USA
* Xml Light, an small Xml parser/printer with DTD support.
* Copyright (C) 2003 Nicolas Cannasse ()
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library has the special exception on linking described in file
* README.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301 USA
*)
open EzCompat
open Xml_types
open Printf
type parse_error_msg =
| InvalidDTDDecl
| InvalidDTDElement
| InvalidDTDAttribute
| InvalidDTDTag
| DTDItemExpected
type check_error =
| ElementDefinedTwice of string
| AttributeDefinedTwice of string * string
| ElementEmptyContructor of string
| ElementReferenced of string * string
| ElementNotDeclared of string
| WrongImplicitValueForID of string * string
type prove_error =
| UnexpectedPCData
| UnexpectedTag of string
| UnexpectedAttribute of string
| InvalidAttributeValue of string
| RequiredAttribute of string
| ChildExpected of string
| EmptyExpected
| DuplicateID of string
| MissingID of string
type dtd_child =
| DTDTag of string
| DTDPCData
| DTDOptional of dtd_child
| DTDZeroOrMore of dtd_child
| DTDOneOrMore of dtd_child
| DTDChoice of dtd_child list
| DTDChildren of dtd_child list
type dtd_element_type =
| DTDEmpty
| DTDAny
| DTDChild of dtd_child
type dtd_attr_default =
| DTDDefault of string
| DTDRequired
| DTDImplied
| DTDFixed of string
type dtd_attr_type =
| DTDCData
| DTDNMToken
| DTDEnum of string list
| DTDID
| DTDIDRef
type dtd_item =
| DTDAttribute of string * string * dtd_attr_type * dtd_attr_default
| DTDElement of string * dtd_element_type
type dtd = dtd_item list
type dtd_child =
| DTDTag of string
| DTDPCData
| DTDOptional of dtd_child
| DTDZeroOrMore of dtd_child
| DTDOneOrMore of dtd_child
| DTDChoice of dtd_child list
| DTDChildren of dtd_child list
type dtd_element_type =
| DTDEmpty
| DTDAny
| DTDChild of dtd_child
type dtd_attr_default =
| DTDDefault of string
| DTDRequired
| DTDImplied
| DTDFixed of string
type dtd_attr_type =
| DTDCData
| DTDNMToken
| DTDEnum of string list
| DTDID
| DTDIDRef
type dtd_item =
| DTDAttribute of string * string * dtd_attr_type * dtd_attr_default
| DTDElement of string * dtd_element_type
type dtd = dtd_item list
*)
(* Outcome of matching a single child against one node of a DTD
   content model (see [prove_child]). *)
type dtd_result =
  | DTDNext                        (* node consumed nothing; try the next sibling *)
  | DTDNotMatched                  (* node cannot accept this child *)
  | DTDMatched                     (* child consumed; node fully satisfied *)
  | DTDMatchedResult of dtd_child  (* child consumed; continue with this residual model *)

(* Position of an error in the lexed source; mirrors
   [Xml_types.error_pos] field for field. *)
type error_pos = {
  eline : int;        (* line number *)
  eline_start : int;  (* offset of the start of that line *)
  emin : int;         (* start offset of the error *)
  emax : int;         (* end offset of the error *)
}

type parse_error = parse_error_msg * Xml_types.error_pos

exception Parse_error of parse_error
exception Check_error of check_error
exception Prove_error of prove_error

module StringMap = Map.Make(String)

(* Mutable string-keyed map; the checker mutates these in place. *)
type 'a map = 'a StringMap.t ref

(* A validated DTD produced by [check]: element declarations plus, per
   element, its attribute declarations. *)
type checked = {
  c_elements : dtd_element_type map;
  c_attribs : (dtd_attr_type * dtd_attr_default) map map;
}

(* Mutable validation state threaded through [do_prove]. *)
type dtd_state = {
  elements : dtd_element_type map;
  attribs : (dtd_attr_type * dtd_attr_default) map map;
  mutable current : dtd_element_type;  (* residual content model of the open element *)
  mutable curtag : string;             (* tag of the open element *)
  state : (string * dtd_element_type) Stack.t;  (* saved (tag, model) of ancestors *)
}
(* Exception constructor used by [parse_file] when the input file does
   not exist.  The default is a placeholder; clients install the real
   constructor through [_raises] before file errors can occur. *)
let file_not_found = ref (fun _ -> assert false)
(* [_raises e] installs [e] as the "file not found" exception builder. *)
let _raises e =
  file_not_found := e
(* Convenience wrappers over the mutable ['a map] type. *)
let create_map() = ref StringMap.empty
(* Shared map intended as a read-only empty default (see [do_prove]). *)
let empty_map = create_map()
(* [find_map m k] looks up [k]; raises [Not_found] when absent. *)
let find_map m k = StringMap.find k (!m)
(* [set_map m k v] adds or replaces the binding of [k] in place. *)
let set_map m k v = m := StringMap.add k v (!m)
(* [unset_map m k] removes any binding of [k] in place. *)
let unset_map m k = m := StringMap.remove k (!m)
let iter_map f m = StringMap.iter f (!m)
let fold_map f m = StringMap.fold f (!m)
let mem_map m k = StringMap.mem k (!m)
(* [pos source] snapshots the lexer's current position as an
   [Xml_types.error_pos] record, for attaching to parse errors. *)
let pos source =
  let line, lstart, min, max = Xml_lexer.pos source in
  ({
    eline = line;
    eline_start = lstart;
    emin = min;
    emax = max;
  } : Xml_types.error_pos)
(* Map lexer-level DTD error codes to this module's [parse_error_msg]. *)
let convert = function
  | Xml_lexer.EInvalidDTDDecl -> InvalidDTDDecl
  | Xml_lexer.EInvalidDTDElement -> InvalidDTDElement
  | Xml_lexer.EInvalidDTDTag -> InvalidDTDTag
  | Xml_lexer.EDTDItemExpected -> DTDItemExpected
  | Xml_lexer.EInvalidDTDAttribute -> InvalidDTDAttribute
(* [parse source] runs the DTD lexer over [source] and returns the
   parsed DTD.  The lexer is always closed, whether parsing succeeds or
   fails.  Lexer-level DTD errors are re-raised as [Parse_error] with
   position information.
   @raise Parse_error on a malformed DTD. *)
let parse source =
  try
    Xml_lexer.init source;
    (* The lexer produces a structurally equal but privately typed DTD
       representation; the cast keeps the two definitions in sync. *)
    let dtd = (Obj.magic Xml_lexer.dtd source : dtd) in
    Xml_lexer.close source;
    dtd
  with
  | Xml_lexer.DTDError e ->
    Xml_lexer.close source;
    raise (Parse_error (convert e,pos source))
  | e ->
    (* Previously any other exception escaped without closing the
       lexer; release it before propagating, as [parse_file] does
       with its channel. *)
    Xml_lexer.close source;
    raise e
(* [parse_string s] parses a DTD held in an in-memory string. *)
let parse_string s = parse (Lexing.from_string s)
(* [parse_in ch] parses a DTD from an already-open input channel;
   the channel itself is not closed here. *)
let parse_in ch = parse (Lexing.from_channel ch)
(* [parse_file fname] opens [fname], parses its DTD and always closes
   the channel, re-raising any parse failure.  A missing file is
   reported through the configurable [file_not_found] exception. *)
let parse_file fname =
  let ch =
    try open_in fname
    with Sys_error _ -> raise (!file_not_found fname)
  in
  match parse (Lexing.from_channel ch) with
  | dtd ->
      close_in ch;
      dtd
  | exception e ->
      close_in ch;
      raise e
(* [check dtd] validates a freshly parsed DTD and compiles it into a
   [checked] value: the element declarations plus, per element, a map
   of its attribute declarations.  Element and attribute names are
   uppercased so later lookups are case-insensitive.
   @raise Check_error on a duplicate element or attribute declaration,
   an empty choice/sequence, a reference to an undeclared element, or
   an ID attribute whose default is not #REQUIRED/#IMPLIED. *)
let check dtd =
  (* per-element attribute maps, keyed by uppercased element name *)
  let attribs = create_map() in
  (* elements already declared *)
  let hdone = create_map() in
  (* elements referenced but not yet declared; the value records who
     referenced them (None = referenced only by an ATTLIST) *)
  let htodo = create_map() in
  (* Record that [tag] is needed, unless it is already declared.  An
     existing element reference (Some _) is never downgraded. *)
  let ftodo tag from =
    try
      ignore(find_map hdone tag);
    with
      Not_found ->
        try
          match find_map htodo tag with
          | None -> set_map htodo tag from
          | Some _ -> ()
        with
          Not_found ->
            set_map htodo tag from
  in
  (* Record the declaration of [tag]; a second declaration is an error. *)
  let fdone tag edata =
    try
      ignore(find_map hdone tag);
      raise (Check_error (ElementDefinedTwice tag));
    with
      Not_found ->
        unset_map htodo tag;
        set_map hdone tag edata
  in
  (* Record one attribute declaration for [tag]; ID attributes may only
     default to #REQUIRED or #IMPLIED per the XML spec. *)
  let fattrib tag aname adata =
    (match adata with
    | DTDID,DTDImplied -> ()
    | DTDID,DTDRequired -> ()
    | DTDID,_ -> raise (Check_error (WrongImplicitValueForID (tag,aname)))
    | _ -> ());
    (* fetch (or lazily create) the attribute map of this element *)
    let h = (try
      find_map attribs tag
    with
      Not_found ->
        let h = create_map() in
        set_map attribs tag h;
        h) in
    try
      ignore(find_map h aname);
      raise (Check_error (AttributeDefinedTwice (tag,aname)));
    with
      Not_found ->
        set_map h aname adata
  in
  let check_item = function
    | DTDAttribute (tag,aname,atype,adef) ->
      let utag = String.uppercase tag in
      ftodo utag None;
      fattrib utag (String.uppercase aname) (atype,adef)
    | DTDElement (tag,etype) ->
      let utag = String.uppercase tag in
      fdone utag etype;
      let check_type = function
        | DTDEmpty -> ()
        | DTDAny -> ()
        | DTDChild x ->
          (* walk the content model: register referenced tags and
             reject empty choices/sequences *)
          let rec check_child = function
            | DTDTag s -> ftodo (String.uppercase s) (Some utag)
            | DTDPCData -> ()
            | DTDOptional c
            | DTDZeroOrMore c
            | DTDOneOrMore c ->
              check_child c
            | DTDChoice []
            | DTDChildren [] ->
              raise (Check_error (ElementEmptyContructor tag))
            | DTDChoice l
            | DTDChildren l ->
              List.iter check_child l
          in
          check_child x
      in
      check_type etype
  in
  List.iter check_item dtd;
  (* anything still pending was referenced but never declared *)
  iter_map (fun t from ->
    match from with
    | None -> raise (Check_error (ElementNotDeclared t))
    | Some tag -> raise (Check_error (ElementReferenced (t,tag)))
  ) htodo;
  {
    c_elements = hdone;
    c_attribs = attribs;
  }
(* [start_prove dtd root] builds the initial validation state for a
   document whose root element must be [root].
   @raise Check_error if [root] is not declared in [dtd]. *)
let start_prove dtd root =
  let d = {
    elements = dtd.c_elements;
    attribs = dtd.c_attribs;
    state = Stack.create();
    (* the pseudo content model "_root" expects exactly one [root] tag *)
    current = DTDChild (DTDTag root);
    curtag = "_root";
  } in
  try
    ignore(find_map d.elements (String.uppercase root));
    d
  with
    Not_found -> raise (Check_error (ElementNotDeclared root))
(* Forward reference to [to_string], installed at the end of this file
   (breaks the definition-order cycle with [_trace]). *)
let to_string_ref = ref (fun _ -> assert false)
(* Debug helper: print the tag being matched and the residual content
   model of the currently open element. *)
let _trace dtd tag =
  let item = DTDElement ("current",dtd.current) in
  printf "%s : %s\n"
    (match tag with None -> "#PCDATA" | Some t -> t)
    (!to_string_ref item)
(* Used to short-circuit the [DTDChoice] search in [prove_child]. *)
exception TmpResult of dtd_result

(* [prove_child dtd tag] feeds one child ([Some tag] for an element,
   [None] for PCData) into the residual content model [dtd.current],
   updating it in place.
   @raise Prove_error if the child is not allowed at this point. *)
let prove_child dtd tag =
  match dtd.current with
  | DTDEmpty -> raise (Prove_error EmptyExpected)
  | DTDAny -> ()
  | DTDChild elt ->
    (* [update] attempts to consume the child with one model node and
       reports how the node evolves (see [dtd_result]). *)
    let rec update = function
      | DTDTag s ->
        (match tag with
        | None -> DTDNotMatched
        | Some t when t = String.uppercase s -> DTDMatched
        | Some _ -> DTDNotMatched)
      | DTDPCData ->
        (match tag with
        | None -> DTDMatched
        | Some _ -> DTDNotMatched)
      | DTDOptional x ->
        (match update x with
        | DTDNotMatched
        | DTDNext -> DTDNext
        | DTDMatched
        | DTDMatchedResult _ -> DTDMatched)
      | DTDZeroOrMore x ->
        (match update x with
        | DTDNotMatched
        | DTDNext -> DTDNext
        | DTDMatched
        | DTDMatchedResult _ -> DTDMatchedResult (DTDZeroOrMore x))
      | DTDOneOrMore x ->
        (* once one occurrence matched, the rest behaves like x* *)
        (match update x with
        | DTDNotMatched
        | DTDNext -> DTDNotMatched
        | DTDMatched
        | DTDMatchedResult _ -> DTDMatchedResult (DTDZeroOrMore x))
      | DTDChoice l ->
        (* first alternative that consumes the child wins; a residual
           model escapes through TmpResult *)
        (try
          (match List.exists (fun x ->
            match update x with
            | DTDMatched -> true
            | DTDMatchedResult _ as r -> raise (TmpResult r)
            | DTDNext | DTDNotMatched -> false) l with
          | true -> DTDMatched
          | false -> DTDNotMatched)
        with
          TmpResult r -> r)
      (* DTD is checked ! -- [check] rejects empty DTDChildren, so only
         the cons case needs handling here *)
      | DTDChildren (h :: t) ->
        (match update h with
        | DTDNext ->
          (match t with
          | [] -> DTDNotMatched
          | _ -> update (DTDChildren t))
        | DTDNotMatched -> DTDNotMatched
        | DTDMatchedResult r ->
          DTDMatchedResult (DTDChildren (r::t))
        | DTDMatched ->
          match t with
          | [] -> DTDMatched
          | _ -> DTDMatchedResult (DTDChildren t))
    in
    match update elt with
    | DTDNext | DTDNotMatched ->
      (match tag with
      | None -> raise (Prove_error UnexpectedPCData)
      | Some t -> raise (Prove_error (UnexpectedTag t)))
    | DTDMatched ->
      dtd.current <- DTDEmpty
    | DTDMatchedResult r ->
      dtd.current <- DTDChild r
(* [is_nmtoken_char c] tells whether [c] may appear in an XML NMTOKEN
   attribute value: ASCII letters, digits, '.', '-', '_' and ':'. *)
let is_nmtoken_char c =
  match c with
  | 'A'..'Z' | 'a'..'z' | '0'..'9' -> true
  | '.' | '-' | '_' | ':' -> true
  | _ -> false
(* [prove_attrib _dtd hid hidref attr aname (atype,adef) accu] checks
   one declared attribute [aname] against the actual attributes [attr]
   of the current element, records ID/IDREF values in [hid]/[hidref],
   and prepends the resulting (name, value) pair — actual or default —
   to [accu].  Implied attributes that are absent are dropped.
   @raise Prove_error on an invalid/missing/duplicate-ID value. *)
let prove_attrib _dtd hid hidref attr aname (atype,adef) accu =
  let aval = (try Some (List.assoc aname attr) with Not_found -> None) in
  (* value check according to the declared attribute type *)
  (match atype, aval with
  | DTDCData, _ -> ()
  | DTDNMToken, None -> ()
  | DTDNMToken, Some v ->
    for i = 0 to String.length v - 1 do
      if not (is_nmtoken_char v.[i]) then raise (Prove_error (InvalidAttributeValue aname));
    done
  | DTDEnum _l, None -> ()
  | DTDEnum l, Some v ->
    if not (List.exists ((=) v) l) then raise (Prove_error (InvalidAttributeValue aname))
  | DTDID, None -> ()
  | DTDID, Some id ->
    (* each ID value must be unique in the whole document *)
    if mem_map hid id then raise (Prove_error (DuplicateID id));
    set_map hid id ()
  | DTDIDRef, None -> ()
  | DTDIDRef, Some idref ->
    (* referenced IDs are checked against [hid] at the end, in [prove] *)
    set_map hidref idref ());
  (* default handling: decide which value (if any) ends up in output *)
  match adef, aval with
  | DTDRequired, None -> raise (Prove_error (RequiredAttribute aname))
  | DTDFixed v, Some av when v <> av -> raise (Prove_error (InvalidAttributeValue aname))
  | DTDImplied, None -> accu
  | DTDFixed v , None
  | DTDDefault _, Some v
  | DTDDefault v, None
  | DTDRequired, Some v
  | DTDImplied, Some v
  | DTDFixed _, Some v -> (aname,v) :: accu
(* [check_attrib ahash (aname, _)] ensures the attribute [aname] is
   declared for the current element.
   @raise Prove_error otherwise. *)
let check_attrib ahash (aname, _) =
  if not (mem_map ahash aname) then
    raise (Prove_error (UnexpectedAttribute aname))
(* [do_prove hid hidref dtd xml] validates [xml] against the DTD state
   [dtd], collecting ID values in [hid] and IDREF values in [hidref].
   Returns the document with declared default attribute values filled
   in and, where the model allows #PCDATA, an empty PCData node added
   to otherwise childless elements.
   @raise Prove_error on any structural or attribute violation. *)
let rec do_prove hid hidref dtd = function
  | PCData s ->
    prove_child dtd None;
    PCData s
  | Element (tag,attr,childs) ->
    (* lookups are case-insensitive: work on uppercased names *)
    let utag = String.uppercase tag in
    let uattr = List.map (fun (aname,aval) -> String.uppercase aname , aval) attr in
    (* consume this element in the parent's content model *)
    prove_child dtd (Some utag);
    (* save parent state, switch to this element's own model *)
    Stack.push (dtd.curtag,dtd.current) dtd.state;
    let elt = (try find_map dtd.elements utag with Not_found -> raise (Prove_error (UnexpectedTag tag))) in
    let ahash = (try find_map dtd.attribs utag with Not_found -> empty_map) in
    dtd.curtag <- tag;
    dtd.current <- elt;
    List.iter (check_attrib ahash) uattr;
    let attr = fold_map (prove_attrib dtd hid hidref uattr) ahash [] in
    let childs = ref (List.map (do_prove hid hidref dtd) childs) in
    (* after the children, the residual model must be satisfiable
       without consuming anything more *)
    (match dtd.current with
    | DTDAny
    | DTDEmpty -> ()
    | DTDChild elt ->
      let name = ref "" in
      (* [check] returns true iff the residual model accepts the empty
         sequence; [name] remembers the first missing thing for the
         error message *)
      let rec check = function
        | DTDTag t ->
          name := t;
          false
        | DTDPCData when !childs = [] ->
          (* #PCDATA is satisfied by injecting an empty text node *)
          childs := [PCData ""];
          true
        | DTDPCData ->
          name := "#PCDATA";
          false
        | DTDOptional _ -> true
        | DTDZeroOrMore _ -> true
        | DTDOneOrMore e ->
          ignore(check e);
          false
        | DTDChoice l -> List.exists check l
        | DTDChildren l -> List.for_all check l
      in
      match check elt with
      | true -> ()
      | false -> raise (Prove_error (ChildExpected !name)));
    (* restore the parent's residual model *)
    let _ctag, cur = Stack.pop dtd.state in
    (* NOTE(review): curtag is set to this element's [tag] rather than
       the popped parent tag [_ctag]; verify this is intended (curtag
       appears to be used only for tracing). *)
    dtd.curtag <- tag;
    dtd.current <- cur;
    Element (tag,attr,!childs)
(* [prove dtd root xml] validates [xml] against the checked DTD [dtd]
   with expected root element [root], then verifies that every IDREF
   value points at an existing ID.  Returns the normalized document.
   @raise Prove_error on any validation failure. *)
let prove dtd root xml =
  let hid = create_map() in
  let hidref = create_map() in
  let x = do_prove hid hidref (start_prove dtd root) xml in
  iter_map (fun id () ->
    if not (mem_map hid id) then raise (Prove_error (MissingID id))
  ) hidref;
  x
(* Human-readable message for each DTD parse-error code. *)
let parse_error_msg e =
  match e with
  | InvalidDTDDecl -> "Invalid DOCTYPE declaration"
  | InvalidDTDElement -> "Invalid DTD element declaration"
  | InvalidDTDAttribute -> "Invalid DTD attribute declaration"
  | InvalidDTDTag -> "Invalid DTD tag"
  | DTDItemExpected -> "DTD item expected"
(* [parse_error (msg, pos)] formats a parse error with its location.
   The cast converts [Xml_types.error_pos] to this module's
   structurally identical [error_pos] record. *)
let parse_error (msg,pos) =
  let pos = (Obj.magic pos : error_pos) in
  if pos.emin = pos.emax then
    sprintf "%s line %d character %d" (parse_error_msg msg) pos.eline (pos.emin - pos.eline_start)
  else
    sprintf "%s line %d characters %d-%d" (parse_error_msg msg) pos.eline (pos.emin - pos.eline_start) (pos.emax - pos.eline_start)
(* Human-readable message for each DTD consistency error. *)
let check_error = function
  | ElementDefinedTwice tag -> sprintf "Element '%s' defined twice" tag
  | AttributeDefinedTwice (tag,aname) -> sprintf "Attribute '%s' of element '%s' defined twice" aname tag
  | ElementEmptyContructor tag -> sprintf "Element '%s' has empty constructor" tag
  | ElementReferenced (tag,from) -> sprintf "Element '%s' referenced by '%s' is not declared" tag from
  | ElementNotDeclared tag -> sprintf "Element '%s' needed but is not declared" tag
  | WrongImplicitValueForID (tag,idname) -> sprintf "Attribute '%s' of type ID of element '%s' not defined with implicit value #REQUIRED or #IMPLIED" idname tag
(* Human-readable message for each document validation error. *)
let prove_error = function
  | UnexpectedPCData -> "Unexpected PCData"
  | UnexpectedTag tag -> sprintf "Unexpected tag : '%s'" tag
  | UnexpectedAttribute att -> sprintf "Unexpected attribute : '%s'" att
  | InvalidAttributeValue att -> sprintf "Invalid attribute value for '%s'" att
  | RequiredAttribute att -> sprintf "Required attribute not found : '%s'" att
  | ChildExpected cname -> sprintf "Child expected : '%s'" cname
  | EmptyExpected -> "No more children expected"
  | DuplicateID id -> sprintf "ID '%s' used several times" id
  | MissingID idref -> sprintf "missing ID value for IDREF '%s'" idref
(* [to_string item] renders one DTD declaration back into its XML
   source form (<!ATTLIST ...> or <!ELEMENT ...>). *)
let to_string = function
  | DTDAttribute (tag,aname,atype,adef) ->
    let atype_to_string = function
      | DTDCData -> "CDATA"
      | DTDNMToken -> "NMTOKEN"
      | DTDEnum l -> sprintf "(%s)" (String.concat "|" l)
      | DTDID -> "ID"
      | DTDIDRef -> "IDREF"
    in
    let adefault_to_string = function
      | DTDDefault s -> sprintf "\"%s\"" s
      | DTDRequired -> "#REQUIRED"
      | DTDImplied -> "#IMPLIED"
      | DTDFixed s -> sprintf "#FIXED \"%s\"" s
    in
    sprintf "<!ATTLIST %s %s %s %s>" tag aname (atype_to_string atype) (adefault_to_string adef)
  | DTDElement (tag,etype) ->
    (* render a content-model node, parenthesizing nested groups *)
    let rec echild_to_string = function
      | DTDTag s -> s
      | DTDPCData -> "#PCDATA"
      | DTDOptional c -> sprintf "%s?" (echild_to_string c)
      | DTDZeroOrMore c -> sprintf "%s*" (echild_to_string c)
      | DTDOneOrMore c -> sprintf "%s+" (echild_to_string c)
      | DTDChoice [c] -> echild_to_string c
      | DTDChoice l -> sprintf "(%s)" (String.concat "|" (List.map echild_to_string l))
      | DTDChildren [c] -> echild_to_string c
      | DTDChildren l -> sprintf "(%s)" (String.concat "," (List.map echild_to_string l))
    in
    let etype_to_string = function
      | DTDEmpty -> "EMPTY"
      | DTDAny -> "ANY"
      | DTDChild x ->
        (* top-level occurrence markers (?, *, +) of the model *)
        let rec op_to_string = function
          | DTDOptional c -> sprintf "%s?" (op_to_string c)
          | DTDZeroOrMore c -> sprintf "%s*" (op_to_string c)
          | DTDOneOrMore c -> sprintf "%s+" (op_to_string c)
          | _ -> ""
        in
        (* strip occurrence markers and report whether the core is a
           multi-item group (already parenthesized by echild_to_string) *)
        let rec root = function
          | DTDOptional c
          | DTDZeroOrMore c
          | DTDOneOrMore c ->
            root c
          | DTDChoice [_]
          | DTDChildren [_] as x ->
            x, false
          | DTDChoice _
          | DTDChildren _ as x ->
            x, true
          | x -> x, false
        in
        match root x with
        | r, true -> sprintf "%s%s" (echild_to_string r) (op_to_string x)
        | r, false -> sprintf "(%s%s)" (echild_to_string r) (op_to_string x)
    in
    sprintf "<!ELEMENT %s %s>" tag (etype_to_string etype)
;;
(* install the printer for the forward reference used by [_trace] *)
to_string_ref := to_string
|
72994126eebcc346c90e49bf21a0a99926068ff6a123c213ebb44a443f649db5 | lymar/hastache | Context.hs | # LANGUAGE ScopedTypeVariables #
# LANGUAGE CPP #
{-# LANGUAGE RankNTypes #-}
Module : Text . Hastache . Context
Copyright : ( c ) 2011 - 2013
License : BSD3
-- Maintainer: Sergey S Lymar <>
-- Stability: experimental
-- Portability: portable
|
Hastache context helpers
Hastache context helpers
-}
module Text.Hastache.Context (
mkStrContext
, mkStrContextM
, mkGenericContext
, mkGenericContext'
, Ext
, defaultExt
) where
import Data.Data
import Data.Generics
import Data.Int
import Data.Version (Version)
import Data.Ratio (Ratio)
import Data.Word
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as LBS
import qualified Data.Map as Map
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.Text.Lazy as TL
import Text.Hastache
-- | Reverse application: pipe a value into a function.
(~>) :: a -> (a -> b) -> b
(~>) v g = g v
infixl 9 ~>
| Make Hastache context from String - > MuType function
-- | Build a 'MuContext' from a pure lookup function: the incoming key
-- is decoded via 'decodeStr' before being passed to the function.
mkStrContext :: Monad m => (String -> MuType m) -> MuContext m
mkStrContext f a = decodeStr a ~> f ~> return
| Make Hastache context from monadic String - > MuType function
-- | Like 'mkStrContext', but the lookup function runs in the monad,
-- so it may perform effects to produce the value.
mkStrContextM :: Monad m => (String -> m (MuType m)) -> MuContext m
mkStrContextM f a = decodeStr a ~> f
-- | Generic renderer applied by 'mkGenericContext' to field types it
-- does not handle natively.
type Ext = forall b. (Data b, Typeable b) => b -> String
-- | @defaultExt ==@ 'gshow'
defaultExt :: Ext
defaultExt = gshow
|
Make Hastache context from Data . Data deriving type
Supported field types :
* ( )
* String
*
* Double
* Float
* Int
* Int8
* Int16
* Int32
* Int64
* Integer
* Word
* Word8
* * * Word64
* Data . ByteString . ByteString
* Data . ByteString . Lazy . ByteString
* Data . Text . Text
* Data . Text . Lazy . Text
* * Version
* Maybe @a@ ( where @a@ is a supported datatype )
* Either @a@ @b@ ( where @a@ and are supported datatypes )
* Data . Text . Text - > Data . Text . Text
* Data . Text . Text - > Data . Text . Lazy . Text
* Data . Text . Lazy . Text - > Data . Text . Lazy . Text
* Data . ByteString . ByteString - > Data . ByteString . ByteString
* String - > String
* Data . ByteString . ByteString - > Data . ByteString . Lazy . * MonadIO m = > Data . Text . Text - > m Data . Text . Text
* MonadIO m = > Data . Text . Text - > m Data . Text . Lazy . Text
* MonadIO m = > Data . Text . Lazy . Text - > m Data . Text . Lazy . Text
* MonadIO m = > Data . ByteString . ByteString - > m Data . ByteString . ByteString
* MonadIO m = > String - > m String
* MonadIO m = > Data . ByteString . ByteString - > m Data . ByteString . Lazy . ByteString
Example :
@
import Text . Hastache
import Text . Hastache . Context
import qualified Data . Text as T
import qualified Data . Text . Lazy as TL
import qualified Data . Text . Lazy . IO as TL
import Data . Data
import Data . Generics
import Data .
data InternalData = InternalData {
: : String ,
anotherField : : Int
} deriving ( Data , Typeable , Show )
data Example = Example {
stringField : : String ,
intField : : Int ,
: : InternalData ,
simpleListField : : [ String ] ,
dataListField : : [ InternalData ] ,
stringFunc : : String - > String ,
textFunc : : T.Text - > T.Text ,
monadicStringFunc : : String - > IO String ,
monadicTextFunc : : T.Text - > IO T.Text
} deriving ( Data , Typeable )
example = hastacheStr defaultConfig ( encodeStr template )
( mkGenericContext context )
where
template = unlines [
\"string : { { stringField}}\ " ,
\"int : { { intField}}\ " ,
\"data : { { dataField.someField } } , { { dataField.anotherField}}\ " ,
\"data : { { # dataField}}{{someField } } , { { " ,
\"simple list : { { # simpleListField } } { { . } } { { /simpleListField}}\ " ,
\"data list:\ " ,
\"{{#dataListField}}\ " ,
\ " * { { } } , { { anotherField } } . top level var : { { intField}}\ " ,
\"{{/dataListField}}\ " ,
\"{{#stringFunc}}upper{{/stringFunc}}\ " ,
" ,
\"{{#monadicStringFunc}}upper ( monadic){{/monadicStringFunc}}\ " ,
( monadic){{/monadicTextFunc}}\ " ]
context = Example { stringField = , intField = 1 ,
= InternalData \"val\ " 123 , simpleListField = [ \"a\",\"b\",\"c\ " ] ,
dataListField = [ InternalData \"aaa\ " 1 , InternalData \"bbb\ " 2 ] ,
stringFunc = map toUpper ,
textFunc = T.reverse ,
monadicStringFunc = return . map toUpper ,
monadicTextFunc = return . T.reverse }
main = example > > = TL.putStrLn
@
Result :
@
string : string value
int : 1
data : val , 123
data : val , 123
simple list : a b c
data list :
* aaa , 1 . top level var : 1
* bbb , 2 . top level var : 1
UPPER
esrever
UPPER ( MONADIC )
) cidanom ( esrever
@
Hastache also supports datatypes with multiple constructors :
@
data A = A { str : : String }
| B { num : : Int }
{ { # A } }
A : { { str } }
{ { /A } }
{ { # B } }
B : { { num } }
{ { /B } }
@
Make Hastache context from Data.Data deriving type
Supported field types:
* ()
* String
* Char
* Double
* Float
* Int
* Int8
* Int16
* Int32
* Int64
* Integer
* Word
* Word8
* Word16
* Word32
* Word64
* Data.ByteString.ByteString
* Data.ByteString.Lazy.ByteString
* Data.Text.Text
* Data.Text.Lazy.Text
* Bool
* Version
* Maybe @a@ (where @a@ is a supported datatype)
* Either @a@ @b@ (where @a@ and @b@ are supported datatypes)
* Data.Text.Text -> Data.Text.Text
* Data.Text.Text -> Data.Text.Lazy.Text
* Data.Text.Lazy.Text -> Data.Text.Lazy.Text
* Data.ByteString.ByteString -> Data.ByteString.ByteString
* String -> String
* Data.ByteString.ByteString -> Data.ByteString.Lazy.ByteString
* MonadIO m => Data.Text.Text -> m Data.Text.Text
* MonadIO m => Data.Text.Text -> m Data.Text.Lazy.Text
* MonadIO m => Data.Text.Lazy.Text -> m Data.Text.Lazy.Text
* MonadIO m => Data.ByteString.ByteString -> m Data.ByteString.ByteString
* MonadIO m => String -> m String
* MonadIO m => Data.ByteString.ByteString -> m Data.ByteString.Lazy.ByteString
Example:
@
import Text.Hastache
import Text.Hastache.Context
import qualified Data.Text as T
import qualified Data.Text.Lazy as TL
import qualified Data.Text.Lazy.IO as TL
import Data.Data
import Data.Generics
import Data.Char
data InternalData = InternalData {
someField :: String,
anotherField :: Int
} deriving (Data, Typeable, Show)
data Example = Example {
stringField :: String,
intField :: Int,
dataField :: InternalData,
simpleListField :: [String],
dataListField :: [InternalData],
stringFunc :: String -> String,
textFunc :: T.Text -> T.Text,
monadicStringFunc :: String -> IO String,
monadicTextFunc :: T.Text -> IO T.Text
} deriving (Data, Typeable)
example = hastacheStr defaultConfig (encodeStr template)
(mkGenericContext context)
where
template = unlines [
\"string: {{stringField}}\",
\"int: {{intField}}\",
\"data: {{dataField.someField}}, {{dataField.anotherField}}\",
\"data: {{#dataField}}{{someField}}, {{anotherField}}{{/dataField}}\",
\"simple list: {{#simpleListField}}{{.}} {{/simpleListField}}\",
\"data list:\",
\"{{#dataListField}}\",
\" * {{someField}}, {{anotherField}}. top level var: {{intField}}\",
\"{{/dataListField}}\",
\"{{#stringFunc}}upper{{/stringFunc}}\",
\"{{#textFunc}}reverse{{/textFunc}}\",
\"{{#monadicStringFunc}}upper (monadic){{/monadicStringFunc}}\",
\"{{#monadicTextFunc}}reverse (monadic){{/monadicTextFunc}}\"]
context = Example { stringField = \"string value\", intField = 1,
dataField = InternalData \"val\" 123, simpleListField = [\"a\",\"b\",\"c\"],
dataListField = [InternalData \"aaa\" 1, InternalData \"bbb\" 2],
stringFunc = map toUpper,
textFunc = T.reverse,
monadicStringFunc = return . map toUpper,
monadicTextFunc = return . T.reverse }
main = example >>= TL.putStrLn
@
Result:
@
string: string value
int: 1
data: val, 123
data: val, 123
simple list: a b c
data list:
* aaa, 1. top level var: 1
* bbb, 2. top level var: 1
UPPER
esrever
UPPER (MONADIC)
)cidanom( esrever
@
Hastache also supports datatypes with multiple constructors:
@
data A = A { str :: String }
| B { num :: Int }
{{#A}}
A : {{str}}
{{/A}}
{{#B}}
B : {{num}}
{{/B}}
@
-}
-- | Build a 'MuContext' from any 'Data'-deriving value, keeping field
-- names unchanged and rendering unsupported field types with
-- 'defaultExt' (see the module documentation above for the supported
-- types).
#if MIN_VERSION_base(4,7,0)
mkGenericContext :: (Monad m, Data a, Typeable m) => a -> MuContext m
#else
mkGenericContext :: (Monad m, Data a, Typeable1 m) => a -> MuContext m
#endif
mkGenericContext val = toGenTemp id defaultExt val ~> convertGenTempToContext
|
Like ' mkGenericContext ' , but apply the first function to record field
names when constructing the context . The second function is used to
constructing values for context from datatypes that are nor supported
as primitives in the library . The resulting value can be accessed
using the @.DatatypeName@ field :
@
\{\-\ # LANGUAGE DeriveDataTypeable \#\-\ }
\{\-\ # LANGUAGE FlexibleInstances \#\-\ }
\{\-\ # LANGUAGE ScopedTypeVariables \#\-\ }
\{\-\ # LANGUAGE StandaloneDeriving \#\-\ }
\{\-\ # LANGUAGE TypeSynonymInstances \#\-\ }
import Text . Hastache
import Text . Hastache . Context
import qualified Data . Text . Lazy as TL
import qualified Data . Text . Lazy . IO as TL
import Data . Data ( Data , )
import Data . Decimal
import Data . Generics . Aliases ( extQ )
data Test = Test { n::Int , } deriving ( Data , Typeable )
deriving instance Data Decimal
val : : Test
val = Test 1 ( Decimal 3 1500 )
q : : Ext
q = defaultExt \`extQ\ ` ( \(i::Decimal ) - > " A decimal : " + + show i )
r " m " = " moo "
r x = x
example : : IO TL.Text
example = hastacheStr defaultConfig
( encodeStr template )
( mkGenericContext ' r q val )
template = concat [
" { { n}}\\n " ,
" { { moo . Decimal } } "
]
main = example > > = TL.putStrLn
@
Result :
@
1
A decimal : 1.500
@
Like 'mkGenericContext', but apply the first function to record field
names when constructing the context. The second function is used to
constructing values for context from datatypes that are nor supported
as primitives in the library. The resulting value can be accessed
using the @.DatatypeName@ field:
@
\{\-\# LANGUAGE DeriveDataTypeable \#\-\}
\{\-\# LANGUAGE FlexibleInstances \#\-\}
\{\-\# LANGUAGE ScopedTypeVariables \#\-\}
\{\-\# LANGUAGE StandaloneDeriving \#\-\}
\{\-\# LANGUAGE TypeSynonymInstances \#\-\}
import Text.Hastache
import Text.Hastache.Context
import qualified Data.Text.Lazy as TL
import qualified Data.Text.Lazy.IO as TL
import Data.Data (Data, Typeable)
import Data.Decimal
import Data.Generics.Aliases (extQ)
data Test = Test {n::Int, m::Decimal} deriving (Data, Typeable)
deriving instance Data Decimal
val :: Test
val = Test 1 (Decimal 3 1500)
q :: Ext
q = defaultExt \`extQ\` (\(i::Decimal) -> "A decimal: " ++ show i)
r "m" = "moo"
r x = x
example :: IO TL.Text
example = hastacheStr defaultConfig
(encodeStr template)
(mkGenericContext' r q val)
template = concat [
"{{n}}\\n",
"{{moo.Decimal}}"
]
main = example >>= TL.putStrLn
@
Result:
@
1
A decimal: 1.500
@
-}
-- | Like 'mkGenericContext', but the first argument rewrites record
-- field names before they become context keys, and the 'Ext' argument
-- renders values of types the library does not support natively.
#if MIN_VERSION_base(4,7,0)
mkGenericContext' :: (Monad m, Data a, Typeable m)
    => (String -> String) -> Ext -> a -> MuContext m
#else
mkGenericContext' :: (Monad m, Data a, Typeable1 m)
    => (String -> String) -> Ext -> a -> MuContext m
#endif
mkGenericContext' f ext val = toGenTemp f ext val ~> convertGenTempToContext
-- Intermediate tree built from a generic value before being flattened
-- into a 'MuContext'.
data TD m =
    TSimple (MuType m)       -- ^ leaf: a directly renderable value
    | TObj [(String, TD m)]  -- ^ constructor with named sub-trees
    | TList [TD m]           -- ^ list of sub-trees
    | TUnknown               -- ^ type not handled natively; rendered via 'Ext'
    deriving (Show)
-- Convert a generic value to the intermediate 'TD' tree.  @f@ rewrites
-- record-field names; @g@ renders values of unsupported types.  The
-- constructor name is added as an extra pseudo-field (rendered through
-- @g@), which is what makes multi-constructor matching like @{{#A}}@
-- work.
#if MIN_VERSION_base(4,7,0)
toGenTemp :: (Data a, Monad m, Typeable m)
    => (String -> String) -> Ext -> a -> TD m
#else
toGenTemp :: (Data a, Monad m, Typeable1 m)
    => (String -> String) -> Ext -> a -> TD m
#endif
toGenTemp f g a = TObj $ conName : zip fields (gmapQ (procField f g) a)
    where
    fields = toConstr a ~> constrFields ~> map f
    conName = (toConstr a ~> showConstr, TSimple . MuVariable $ g a)
-- Convert one field of a generic value to a 'TD'.  Dispatch is done
-- with SYB's @extQ@/@ext1Q@/@ext2Q@: the chain below tries each
-- supported primitive, function and container type in turn; anything
-- that falls through to 'TUnknown' is rendered with the 'Ext'
-- function @g@.
#if MIN_VERSION_base(4,7,0)
procField :: (Data a, Monad m, Typeable m)
    => (String -> String) -> Ext -> a -> TD m
#else
procField :: (Data a, Monad m, Typeable1 m)
    => (String -> String) -> Ext -> a -> TD m
#endif
procField f g a =
    case res a of
        TUnknown -> TSimple . MuVariable . g $ a
        b -> b
    where
    res = obj
        `ext1Q` list
        `extQ` (\(i::String) -> MuVariable (encodeStr i) ~> TSimple)
        `extQ` (\(i::Char) -> MuVariable i ~> TSimple)
        `extQ` (\(i::Double) -> MuVariable i ~> TSimple)
        `extQ` (\(i::Float) -> MuVariable i ~> TSimple)
        `extQ` (\(i::Int) -> MuVariable i ~> TSimple)
        `extQ` (\(i::Int8) -> MuVariable i ~> TSimple)
        `extQ` (\(i::Int16) -> MuVariable i ~> TSimple)
        `extQ` (\(i::Int32) -> MuVariable i ~> TSimple)
        `extQ` (\(i::Int64) -> MuVariable i ~> TSimple)
        `extQ` (\(i::Integer) -> MuVariable i ~> TSimple)
        `extQ` (\(i::Word) -> MuVariable i ~> TSimple)
        `extQ` (\(i::Word8) -> MuVariable i ~> TSimple)
        `extQ` (\(i::Word16) -> MuVariable i ~> TSimple)
        `extQ` (\(i::Word32) -> MuVariable i ~> TSimple)
        `extQ` (\(i::Word64) -> MuVariable i ~> TSimple)
        `extQ` (\(i::BS.ByteString) -> MuVariable i ~> TSimple)
        `extQ` (\(i::LBS.ByteString) -> MuVariable i ~> TSimple)
        `extQ` (\(i::T.Text) -> MuVariable i ~> TSimple)
        `extQ` (\(i::TL.Text) -> MuVariable i ~> TSimple)
        `extQ` (\(i::Bool) -> MuBool i ~> TSimple)
        `extQ` (\() -> MuVariable () ~> TSimple)
        `extQ` (\(i::Version) -> MuVariable i ~> TSimple)
        `extQ` muLambdaTT
        `extQ` muLambdaTTL
        `extQ` muLambdaTLTL
        `extQ` muLambdaBSBS
        `extQ` muLambdaSS
        `extQ` muLambdaBSLBS
        `extQ` muLambdaMTT
        `extQ` muLambdaMTTL
        `extQ` muLambdaMTLTL
        `extQ` muLambdaMBSBS
        `extQ` muLambdaMSS
        `extQ` muLambdaMBSLBS
        `ext1Q` muMaybe
        `ext2Q` muEither
    -- algebraic types with at least one constructor recurse generically
    obj a = case dataTypeRep (dataTypeOf a) of
        AlgRep (_:_) -> toGenTemp f g a
        _ -> TUnknown
    list a = map (procField f g) a ~> TList
    -- Nothing renders as absent; Just x as a one-element section
    muMaybe Nothing = TSimple MuNothing
    muMaybe (Just a) = TList [procField f g a]
    muEither (Left a) = procField f g a
    muEither (Right b) = procField f g b
    -- function fields become template lambdas; the local [f] below
    -- shadows the field-renaming [f] of the outer scope
    muLambdaTT :: (T.Text -> T.Text) -> TD m
    muLambdaTT f = MuLambda f ~> TSimple
    muLambdaTLTL :: (TL.Text -> TL.Text) -> TD m
    muLambdaTLTL f = MuLambda (f . TL.fromStrict) ~> TSimple
    muLambdaTTL :: (T.Text -> TL.Text) -> TD m
    muLambdaTTL f = MuLambda f ~> TSimple
    muLambdaBSBS :: (BS.ByteString -> BS.ByteString) -> TD m
    muLambdaBSBS f = MuLambda (f . T.encodeUtf8) ~> TSimple
    muLambdaBSLBS :: (BS.ByteString -> LBS.ByteString) -> TD m
    muLambdaBSLBS f = MuLambda (f . T.encodeUtf8) ~> TSimple
    muLambdaSS :: (String -> String) -> TD m
    muLambdaSS f = MuLambda fd ~> TSimple
        where
        fd s = decodeStr s ~> f
    -- monadic
    muLambdaMTT :: (T.Text -> m T.Text) -> TD m
    muLambdaMTT f = MuLambdaM f ~> TSimple
    muLambdaMTLTL :: (TL.Text -> m TL.Text) -> TD m
    muLambdaMTLTL f = MuLambdaM (f . TL.fromStrict) ~> TSimple
    muLambdaMTTL :: (T.Text -> m TL.Text) -> TD m
    muLambdaMTTL f = MuLambdaM f ~> TSimple
    muLambdaMBSBS :: (BS.ByteString -> m BS.ByteString) -> TD m
    muLambdaMBSBS f = MuLambdaM (f . T.encodeUtf8) ~> TSimple
    muLambdaMBSLBS :: (BS.ByteString -> m LBS.ByteString) -> TD m
    muLambdaMBSLBS f = MuLambdaM (f . T.encodeUtf8) ~> TSimple
    muLambdaMSS :: (String -> m String) -> TD m
    muLambdaMSS f = MuLambdaM fd ~> TSimple
        where
        fd s = decodeStr s ~> f
-- Flatten a 'TD' tree into a 'MuContext': keys are dotted paths
-- ("a.b.c"), and every 'TObj' additionally becomes a one-element
-- 'MuList' so it can be used as a section.  The implicit-iterator key
-- "." falls back to the empty-name binding.
convertGenTempToContext :: Monad m => TD m -> MuContext m
convertGenTempToContext v = mkMap "" Map.empty v ~> mkMapContext
    where
    mkMap name m (TSimple t) = Map.insert (encodeStr name) t m
    mkMap name m (TObj lst) = foldl (foldTObj name) m lst ~>
        Map.insert (encodeStr name)
        ([foldl (foldTObj "") Map.empty lst ~> mkMapContext] ~> MuList)
    mkMap name m (TList lst) = Map.insert (encodeStr name)
        (map convertGenTempToContext lst ~> MuList) m
    mkMap _ m _ = m
    -- join path components with '.' (no leading dot at the root)
    mkName name newName = if length name > 0
        then concat [name, ".", newName]
        else newName
    foldTObj name m (fn, fv) = mkMap (mkName name fn) m fv
    mkMapContext m a = return $ case Map.lookup a m of
        Nothing ->
            case a == dotT of
                True ->
                    case Map.lookup T.empty m of
                        Nothing -> MuNothing
                        Just a -> a
                _ -> MuNothing
        Just a -> a
-- | The implicit-iterator key @\".\"@ used by templates like @{{.}}@.
dotT :: T.Text
dotT = T.pack "."
| null | https://raw.githubusercontent.com/lymar/hastache/cd299ff1ac4c35259fbd333ea7fa9b3c280b9ff9/Text/Hastache/Context.hs | haskell | # LANGUAGE RankNTypes #
Maintainer: Sergey S Lymar <>
Stability: experimental
Portability: portable
| @defaultExt ==@ 'gshow'
monadic | # LANGUAGE ScopedTypeVariables #
# LANGUAGE CPP #
Module : Text . Hastache . Context
Copyright : ( c ) 2011 - 2013
License : BSD3
|
Hastache context helpers
Hastache context helpers
-}
module Text.Hastache.Context (
mkStrContext
, mkStrContextM
, mkGenericContext
, mkGenericContext'
, Ext
, defaultExt
) where
import Data.Data
import Data.Generics
import Data.Int
import Data.Version (Version)
import Data.Ratio (Ratio)
import Data.Word
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as LBS
import qualified Data.Map as Map
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.Text.Lazy as TL
import Text.Hastache
x ~> f = f $ x
infixl 9 ~>
| Make Hastache context from String - > MuType function
mkStrContext :: Monad m => (String -> MuType m) -> MuContext m
mkStrContext f a = decodeStr a ~> f ~> return
| Make Hastache context from monadic String - > MuType function
mkStrContextM :: Monad m => (String -> m (MuType m)) -> MuContext m
mkStrContextM f a = decodeStr a ~> f
type Ext = forall b. (Data b, Typeable b) => b -> String
defaultExt :: Ext
defaultExt = gshow
|
Make Hastache context from Data . Data deriving type
Supported field types :
* ( )
* String
*
* Double
* Float
* Int
* Int8
* Int16
* Int32
* Int64
* Integer
* Word
* Word8
* * * Word64
* Data . ByteString . ByteString
* Data . ByteString . Lazy . ByteString
* Data . Text . Text
* Data . Text . Lazy . Text
* * Version
* Maybe @a@ ( where @a@ is a supported datatype )
* Either @a@ @b@ ( where @a@ and are supported datatypes )
* Data . Text . Text - > Data . Text . Text
* Data . Text . Text - > Data . Text . Lazy . Text
* Data . Text . Lazy . Text - > Data . Text . Lazy . Text
* Data . ByteString . ByteString - > Data . ByteString . ByteString
* String - > String
* Data . ByteString . ByteString - > Data . ByteString . Lazy . * MonadIO m = > Data . Text . Text - > m Data . Text . Text
* MonadIO m = > Data . Text . Text - > m Data . Text . Lazy . Text
* MonadIO m = > Data . Text . Lazy . Text - > m Data . Text . Lazy . Text
* MonadIO m = > Data . ByteString . ByteString - > m Data . ByteString . ByteString
* MonadIO m = > String - > m String
* MonadIO m = > Data . ByteString . ByteString - > m Data . ByteString . Lazy . ByteString
Example :
@
import Text . Hastache
import Text . Hastache . Context
import qualified Data . Text as T
import qualified Data . Text . Lazy as TL
import qualified Data . Text . Lazy . IO as TL
import Data . Data
import Data . Generics
import Data .
data InternalData = InternalData {
: : String ,
anotherField : : Int
} deriving ( Data , Typeable , Show )
data Example = Example {
stringField : : String ,
intField : : Int ,
: : InternalData ,
simpleListField : : [ String ] ,
dataListField : : [ InternalData ] ,
stringFunc : : String - > String ,
textFunc : : T.Text - > T.Text ,
monadicStringFunc : : String - > IO String ,
monadicTextFunc : : T.Text - > IO T.Text
} deriving ( Data , Typeable )
example = hastacheStr defaultConfig ( encodeStr template )
( mkGenericContext context )
where
template = unlines [
\"string : { { stringField}}\ " ,
\"int : { { intField}}\ " ,
\"data : { { dataField.someField } } , { { dataField.anotherField}}\ " ,
\"data : { { # dataField}}{{someField } } , { { " ,
\"simple list : { { # simpleListField } } { { . } } { { /simpleListField}}\ " ,
\"data list:\ " ,
\"{{#dataListField}}\ " ,
\ " * { { } } , { { anotherField } } . top level var : { { intField}}\ " ,
\"{{/dataListField}}\ " ,
\"{{#stringFunc}}upper{{/stringFunc}}\ " ,
" ,
\"{{#monadicStringFunc}}upper ( monadic){{/monadicStringFunc}}\ " ,
( monadic){{/monadicTextFunc}}\ " ]
context = Example { stringField = , intField = 1 ,
= InternalData \"val\ " 123 , simpleListField = [ \"a\",\"b\",\"c\ " ] ,
dataListField = [ InternalData \"aaa\ " 1 , InternalData \"bbb\ " 2 ] ,
stringFunc = map toUpper ,
textFunc = T.reverse ,
monadicStringFunc = return . map toUpper ,
monadicTextFunc = return . T.reverse }
main = example > > = TL.putStrLn
@
Result :
@
string : string value
int : 1
data : val , 123
data : val , 123
simple list : a b c
data list :
* aaa , 1 . top level var : 1
* bbb , 2 . top level var : 1
UPPER
esrever
UPPER ( MONADIC )
) cidanom ( esrever
@
Hastache also supports datatypes with multiple constructors :
@
data A = A { str : : String }
| B { num : : Int }
{ { # A } }
A : { { str } }
{ { /A } }
{ { # B } }
B : { { num } }
{ { /B } }
@
Make Hastache context from Data.Data deriving type
Supported field types:
* ()
* String
* Char
* Double
* Float
* Int
* Int8
* Int16
* Int32
* Int64
* Integer
* Word
* Word8
* Word16
* Word32
* Word64
* Data.ByteString.ByteString
* Data.ByteString.Lazy.ByteString
* Data.Text.Text
* Data.Text.Lazy.Text
* Bool
* Version
* Maybe @a@ (where @a@ is a supported datatype)
* Either @a@ @b@ (where @a@ and @b@ are supported datatypes)
* Data.Text.Text -> Data.Text.Text
* Data.Text.Text -> Data.Text.Lazy.Text
* Data.Text.Lazy.Text -> Data.Text.Lazy.Text
* Data.ByteString.ByteString -> Data.ByteString.ByteString
* String -> String
* Data.ByteString.ByteString -> Data.ByteString.Lazy.ByteString
* MonadIO m => Data.Text.Text -> m Data.Text.Text
* MonadIO m => Data.Text.Text -> m Data.Text.Lazy.Text
* MonadIO m => Data.Text.Lazy.Text -> m Data.Text.Lazy.Text
* MonadIO m => Data.ByteString.ByteString -> m Data.ByteString.ByteString
* MonadIO m => String -> m String
* MonadIO m => Data.ByteString.ByteString -> m Data.ByteString.Lazy.ByteString
Example:
@
import Text.Hastache
import Text.Hastache.Context
import qualified Data.Text as T
import qualified Data.Text.Lazy as TL
import qualified Data.Text.Lazy.IO as TL
import Data.Data
import Data.Generics
import Data.Char
data InternalData = InternalData {
someField :: String,
anotherField :: Int
} deriving (Data, Typeable, Show)
data Example = Example {
stringField :: String,
intField :: Int,
dataField :: InternalData,
simpleListField :: [String],
dataListField :: [InternalData],
stringFunc :: String -> String,
textFunc :: T.Text -> T.Text,
monadicStringFunc :: String -> IO String,
monadicTextFunc :: T.Text -> IO T.Text
} deriving (Data, Typeable)
example = hastacheStr defaultConfig (encodeStr template)
(mkGenericContext context)
where
template = unlines [
\"string: {{stringField}}\",
\"int: {{intField}}\",
\"data: {{dataField.someField}}, {{dataField.anotherField}}\",
\"data: {{#dataField}}{{someField}}, {{anotherField}}{{/dataField}}\",
\"simple list: {{#simpleListField}}{{.}} {{/simpleListField}}\",
\"data list:\",
\"{{#dataListField}}\",
\" * {{someField}}, {{anotherField}}. top level var: {{intField}}\",
\"{{/dataListField}}\",
\"{{#stringFunc}}upper{{/stringFunc}}\",
\"{{#textFunc}}reverse{{/textFunc}}\",
\"{{#monadicStringFunc}}upper (monadic){{/monadicStringFunc}}\",
\"{{#monadicTextFunc}}reverse (monadic){{/monadicTextFunc}}\"]
context = Example { stringField = \"string value\", intField = 1,
dataField = InternalData \"val\" 123, simpleListField = [\"a\",\"b\",\"c\"],
dataListField = [InternalData \"aaa\" 1, InternalData \"bbb\" 2],
stringFunc = map toUpper,
textFunc = T.reverse,
monadicStringFunc = return . map toUpper,
monadicTextFunc = return . T.reverse }
main = example >>= TL.putStrLn
@
Result:
@
string: string value
int: 1
data: val, 123
data: val, 123
simple list: a b c
data list:
* aaa, 1. top level var: 1
* bbb, 2. top level var: 1
UPPER
esrever
UPPER (MONADIC)
)cidanom( esrever
@
Hastache also supports datatypes with multiple constructors:
@
data A = A { str :: String }
| B { num :: Int }
{{#A}}
A : {{str}}
{{/A}}
{{#B}}
B : {{num}}
{{/B}}
@
-}
-- Build a generic context: convert the value to the intermediate 'TD'
-- tree (field names unchanged via 'id', unknowns rendered with
-- 'defaultExt'), then flatten it into a lookup context.
#if MIN_VERSION_base(4,7,0)
mkGenericContext :: (Monad m, Data a, Typeable m) => a -> MuContext m
#else
mkGenericContext :: (Monad m, Data a, Typeable1 m) => a -> MuContext m
#endif
mkGenericContext val = toGenTemp id defaultExt val ~> convertGenTempToContext
|
Like ' mkGenericContext ' , but apply the first function to record field
names when constructing the context . The second function is used to
constructing values for context from datatypes that are nor supported
as primitives in the library . The resulting value can be accessed
using the @.DatatypeName@ field :
@
\{\-\ # LANGUAGE DeriveDataTypeable \#\-\ }
\{\-\ # LANGUAGE FlexibleInstances \#\-\ }
\{\-\ # LANGUAGE ScopedTypeVariables \#\-\ }
\{\-\ # LANGUAGE StandaloneDeriving \#\-\ }
\{\-\ # LANGUAGE TypeSynonymInstances \#\-\ }
import Text . Hastache
import Text . Hastache . Context
import qualified Data . Text . Lazy as TL
import qualified Data . Text . Lazy . IO as TL
import Data . Data ( Data , )
import Data . Decimal
import Data . Generics . Aliases ( extQ )
data Test = Test { n::Int , } deriving ( Data , Typeable )
deriving instance Data Decimal
val : : Test
val = Test 1 ( Decimal 3 1500 )
q : : Ext
q = defaultExt \`extQ\ ` ( \(i::Decimal ) - > " A decimal : " + + show i )
r " m " = " moo "
r x = x
example : : IO TL.Text
example = hastacheStr defaultConfig
( encodeStr template )
( mkGenericContext ' r q val )
template = concat [
" { { n}}\\n " ,
" { { moo . Decimal } } "
]
main = example > > = TL.putStrLn
@
Result :
@
1
A decimal : 1.500
@
Like 'mkGenericContext', but apply the first function to record field
names when constructing the context. The second function is used to
constructing values for context from datatypes that are nor supported
as primitives in the library. The resulting value can be accessed
using the @.DatatypeName@ field:
@
\{\-\# LANGUAGE DeriveDataTypeable \#\-\}
\{\-\# LANGUAGE FlexibleInstances \#\-\}
\{\-\# LANGUAGE ScopedTypeVariables \#\-\}
\{\-\# LANGUAGE StandaloneDeriving \#\-\}
\{\-\# LANGUAGE TypeSynonymInstances \#\-\}
import Text.Hastache
import Text.Hastache.Context
import qualified Data.Text.Lazy as TL
import qualified Data.Text.Lazy.IO as TL
import Data.Data (Data, Typeable)
import Data.Decimal
import Data.Generics.Aliases (extQ)
data Test = Test {n::Int, m::Decimal} deriving (Data, Typeable)
deriving instance Data Decimal
val :: Test
val = Test 1 (Decimal 3 1500)
q :: Ext
q = defaultExt \`extQ\` (\(i::Decimal) -> "A decimal: " ++ show i)
r "m" = "moo"
r x = x
example :: IO TL.Text
example = hastacheStr defaultConfig
(encodeStr template)
(mkGenericContext' r q val)
template = concat [
"{{n}}\\n",
"{{moo.Decimal}}"
]
main = example >>= TL.putStrLn
@
Result:
@
1
A decimal: 1.500
@
-}
-- Variant of 'mkGenericContext' taking a field-name mapping @f@ and an
-- extension function @ext@ used for values with no built-in rendering.
#if MIN_VERSION_base(4,7,0)
mkGenericContext' :: (Monad m, Data a, Typeable m)
=> (String -> String) -> Ext -> a -> MuContext m
#else
mkGenericContext' :: (Monad m, Data a, Typeable1 m)
=> (String -> String) -> Ext -> a -> MuContext m
#endif
mkGenericContext' f ext val = toGenTemp f ext val ~> convertGenTempToContext
-- Intermediate representation of a traversed value: a primitive
-- ('TSimple'), a record/constructor node ('TObj'), a list ('TList'),
-- or a value none of the built-in cases cover ('TUnknown').
data TD m =
TSimple (MuType m)
| TObj [(String, TD m)]
| TList [TD m]
| TUnknown
deriving (Show)
-- Convert a value into a 'TD' tree: one (field, sub-tree) pair per
-- record field (names mapped through @f@), plus a leading entry keyed
-- by the constructor name whose value is the whole datum rendered
-- with @g@.
#if MIN_VERSION_base(4,7,0)
toGenTemp :: (Data a, Monad m, Typeable m)
=> (String -> String) -> Ext -> a -> TD m
#else
toGenTemp :: (Data a, Monad m, Typeable1 m)
=> (String -> String) -> Ext -> a -> TD m
#endif
toGenTemp f g a = TObj $ conName : zip fields (gmapQ (procField f g) a)
where
fields = toConstr a ~> constrFields ~> map f
conName = (toConstr a ~> showConstr, TSimple . MuVariable $ g a)
-- Convert one field value into a 'TD'. The long `extQ` chain supplies
-- type-specific handling for the supported primitive, text, lambda and
-- container types; whatever falls through as 'TUnknown' is rendered
-- with the extension function @g@.
#if MIN_VERSION_base(4,7,0)
procField :: (Data a, Monad m, Typeable m)
=> (String -> String) -> Ext -> a -> TD m
#else
procField :: (Data a, Monad m, Typeable1 m)
=> (String -> String) -> Ext -> a -> TD m
#endif
procField f g a =
case res a of
TUnknown -> TSimple . MuVariable . g $ a
b -> b
where
res = obj
`ext1Q` list
`extQ` (\(i::String) -> MuVariable (encodeStr i) ~> TSimple)
`extQ` (\(i::Char) -> MuVariable i ~> TSimple)
`extQ` (\(i::Double) -> MuVariable i ~> TSimple)
`extQ` (\(i::Float) -> MuVariable i ~> TSimple)
`extQ` (\(i::Int) -> MuVariable i ~> TSimple)
`extQ` (\(i::Int8) -> MuVariable i ~> TSimple)
`extQ` (\(i::Int16) -> MuVariable i ~> TSimple)
`extQ` (\(i::Int32) -> MuVariable i ~> TSimple)
`extQ` (\(i::Int64) -> MuVariable i ~> TSimple)
`extQ` (\(i::Integer) -> MuVariable i ~> TSimple)
`extQ` (\(i::Word) -> MuVariable i ~> TSimple)
`extQ` (\(i::Word8) -> MuVariable i ~> TSimple)
`extQ` (\(i::Word16) -> MuVariable i ~> TSimple)
`extQ` (\(i::Word32) -> MuVariable i ~> TSimple)
`extQ` (\(i::Word64) -> MuVariable i ~> TSimple)
`extQ` (\(i::BS.ByteString) -> MuVariable i ~> TSimple)
`extQ` (\(i::LBS.ByteString) -> MuVariable i ~> TSimple)
`extQ` (\(i::T.Text) -> MuVariable i ~> TSimple)
`extQ` (\(i::TL.Text) -> MuVariable i ~> TSimple)
`extQ` (\(i::Bool) -> MuBool i ~> TSimple)
`extQ` (\() -> MuVariable () ~> TSimple)
`extQ` (\(i::Version) -> MuVariable i ~> TSimple)
`extQ` muLambdaTT
`extQ` muLambdaTTL
`extQ` muLambdaTLTL
`extQ` muLambdaBSBS
`extQ` muLambdaSS
`extQ` muLambdaBSLBS
`extQ` muLambdaMTT
`extQ` muLambdaMTTL
`extQ` muLambdaMTLTL
`extQ` muLambdaMBSBS
`extQ` muLambdaMSS
`extQ` muLambdaMBSLBS
`ext1Q` muMaybe
`ext2Q` muEither
-- Algebraic values with at least one constructor recurse as objects.
obj a = case dataTypeRep (dataTypeOf a) of
AlgRep (_:_) -> toGenTemp f g a
_ -> TUnknown
list a = map (procField f g) a ~> TList
-- Nothing renders as MuNothing; Just a as a one-element list.
muMaybe Nothing = TSimple MuNothing
muMaybe (Just a) = TList [procField f g a]
muEither (Left a) = procField f g a
muEither (Right b) = procField f g b
-- Wrappers turning plain and monadic text/bytestring/string functions
-- into MuLambda / MuLambdaM values (inputs decoded from Text first
-- where the argument type requires it).
muLambdaTT :: (T.Text -> T.Text) -> TD m
muLambdaTT f = MuLambda f ~> TSimple
muLambdaTLTL :: (TL.Text -> TL.Text) -> TD m
muLambdaTLTL f = MuLambda (f . TL.fromStrict) ~> TSimple
muLambdaTTL :: (T.Text -> TL.Text) -> TD m
muLambdaTTL f = MuLambda f ~> TSimple
muLambdaBSBS :: (BS.ByteString -> BS.ByteString) -> TD m
muLambdaBSBS f = MuLambda (f . T.encodeUtf8) ~> TSimple
muLambdaBSLBS :: (BS.ByteString -> LBS.ByteString) -> TD m
muLambdaBSLBS f = MuLambda (f . T.encodeUtf8) ~> TSimple
muLambdaSS :: (String -> String) -> TD m
muLambdaSS f = MuLambda fd ~> TSimple
where
fd s = decodeStr s ~> f
muLambdaMTT :: (T.Text -> m T.Text) -> TD m
muLambdaMTT f = MuLambdaM f ~> TSimple
muLambdaMTLTL :: (TL.Text -> m TL.Text) -> TD m
muLambdaMTLTL f = MuLambdaM (f . TL.fromStrict) ~> TSimple
muLambdaMTTL :: (T.Text -> m TL.Text) -> TD m
muLambdaMTTL f = MuLambdaM f ~> TSimple
muLambdaMBSBS :: (BS.ByteString -> m BS.ByteString) -> TD m
muLambdaMBSBS f = MuLambdaM (f . T.encodeUtf8) ~> TSimple
muLambdaMBSLBS :: (BS.ByteString -> m LBS.ByteString) -> TD m
muLambdaMBSLBS f = MuLambdaM (f . T.encodeUtf8) ~> TSimple
muLambdaMSS :: (String -> m String) -> TD m
muLambdaMSS f = MuLambdaM fd ~> TSimple
where
fd s = decodeStr s ~> f
-- Flatten a 'TD' tree into a lookup-based context. Nested fields are
-- inserted under dotted names ("a.b"); a lookup of "." falls back to
-- the entry stored under the empty name (used for {{.}} in sections).
convertGenTempToContext :: Monad m => TD m -> MuContext m
convertGenTempToContext v = mkMap "" Map.empty v ~> mkMapContext
where
mkMap name m (TSimple t) = Map.insert (encodeStr name) t m
mkMap name m (TObj lst) = foldl (foldTObj name) m lst ~>
Map.insert (encodeStr name)
([foldl (foldTObj "") Map.empty lst ~> mkMapContext] ~> MuList)
mkMap name m (TList lst) = Map.insert (encodeStr name)
(map convertGenTempToContext lst ~> MuList) m
mkMap _ m _ = m
-- Join parent and child names with a dot, unless at the root.
mkName name newName = if length name > 0
then concat [name, ".", newName]
else newName
foldTObj name m (fn, fv) = mkMap (mkName name fn) m fv
mkMapContext m a = return $ case Map.lookup a m of
Nothing ->
case a == dotT of
True ->
case Map.lookup T.empty m of
Nothing -> MuNothing
Just a -> a
_ -> MuNothing
Just a -> a
dotT :: T.Text
dotT = T.singleton '.'
|
81b72da06d5c3f8f6160329bfc8026c64668f1936a855125ce6abf1f02d3a190 | openbadgefactory/salava | profile.cljs | (ns salava.profile.ui.profile
(:require [salava.core.ui.helper :refer [plugin-fun path-for not-activated? private?]]
[salava.core.ui.ajax-utils :as ajax]
[salava.profile.ui.block :as pb]
[salava.core.ui.error :as err]
[reagent.core :as reagent :refer [atom cursor]]
[reagent.session :as session]
[salava.core.ui.layout :as layout]
[salava.core.i18n :refer [t]]
[salava.profile.ui.helper :as ph]
[reagent-modals.modals :as m]
[salava.profile.ui.edit :as pe]
[salava.page.themes :refer [themes borders]]
[salava.core.ui.modal :as mo]
[salava.profile.ui.modal :refer [userinfoblock]]))
(defn connect-user
  "Renders the 'connect user' block for user-id when a plugin provides
  one; otherwise renders an empty placeholder div."
  [user-id]
  (if-let [connectuser (first (plugin-fun (session/get :plugins) "block" "connectuser"))]
    [connectuser user-id]
    [:div ""]))
(defn profile-blocks
  "Renders the editable profile block list plus the trailing
  'field-after' control. Reads blocks from the :blocks cursor of state."
  [state]
  ;; NOTE(review): the original bound `block-count` and `position` here
  ;; but never used them; both pure computations were removed.
  (let [blocks (cursor state [:blocks])]
    [:div {:id "field-editor"}
     (into [:div {:id "page-blocks"}]
           (for [index (range (count @blocks))]
             (ph/block-for-edit (cursor blocks [index]) state index)))
     [ph/field-after blocks state nil]]))
;; "Content" edit tab as a form-2 Reagent component (returns a render
;; fn). For tab index 0 it shows the personal-information panel and the
;; editable block list; otherwise it renders the stored :tab-content.
(defn edit-profile-content [state]
(fn []
[:div (if (= 0 @(cursor state [:active-index]))
[:div#page-edit
[pe/action-buttons state]
[:div.panel.thumbnail
[:div.panel-heading
[:h3.sectiontitle (t :profile/Personalinformation)]]
[:div.panel-body
[pe/edit-profile state]]]
[profile-blocks state]]
@(cursor state [:tab-content]))]))
;; Read-only rendering of the profile. For the main tab (index 0) it
;; renders the themed page view (theme id read from [:theme], default
;; 0) with the user info block followed by every profile block; for
;; any other tab it renders the stored :tab-content.
(defn view-profile [state]
(let [blocks (cursor state [:blocks])]
[:div;#profile
(if (= 0 @(cursor state [:active-index]))
[:div#page-view
[:div {:id (str "theme-" (or @(cursor state [:theme]) 0))
:class "page-content"}
[:div.panel
[:div.panel-left
[:div.panel-right
[:div.panel-content
[:div.panel-body
[userinfoblock state]
(into [:div];#profile]
(for [index (range (count @blocks))]
(ph/block (cursor blocks [index]) state index)))]]]]]]]
@(cursor state [:tab-content]))]))
;; Renders one clickable thumbnail per theme; clicking stores the
;; theme's id (parsed to an int) into theme-atom. The currently
;; selected theme gets an extra "selected" CSS class.
(defn theme-selection [theme-atom themes]
(reduce (fn [r theme]
(conj r [:div {:class (str "theme-" (:id theme))}
[:a {:href "#" :on-click #(do
(.preventDefault %)
(reset! theme-atom (js/parseInt (:id theme))))
:alt (t (:name theme)) :title (t (:name theme))
:aria-label (str (t :page/Selecttheme) " " (:id theme))}
[:div {:class (str "panel-right theme-container" (if (= @theme-atom (:id theme)) " selected"))} " "]]]))
[:div {:id "theme-container"}] themes))
;; "Theme" edit tab: the theme picker panel, the save/cancel action
;; buttons, and a live preview rendered with view-profile.
(defn edit-theme [state]
[:div {:id "page-edit-theme"}
[:div {:class "panel thumbnail" :id "theme-panel"}
[:div.panel-heading
[:h2.sectiontitle (t :page/Selecttheme)]]
[:div.panel-body
[theme-selection (cursor state [:theme]) themes]]]
[pe/action-buttons state]
[view-profile state]])
;; "Settings" edit tab. Binds visibility-atom to the
;; profile_visibility field under [:edit-profile :user]; the radio
;; buttons write "internal" or "public" straight into that cursor.
;; The whole visibility chooser is omitted when the instance is
;; private (see `private?`).
(defn edit-settings [state]
(let [visibility-atom (cursor state [:edit-profile :user :profile_visibility])]
[:div#page-edit
[:div.panel.thumbnail
[:div.panel-heading
[:h2.sectiontitle (t :page/Settings)]]
[:div.panel-body
(if-not (private?)
[:div.col-md-12
[:div.row [:span._label.col-xs-12 (t :user/Profilevisibility)]]
[:div.radio {:id "visibility-radio-internal"}
[:fieldset
[:legend {:style {:display "none"}} ""]
[:label [:input {:name "visibility"
:value "internal"
:type "radio"
:checked (= "internal" @visibility-atom)
:on-change #(reset! visibility-atom (.-target.value %))}]
(t :user/Visibleonlytoregistered)]]
[:div.radio
[:label [:input {:name "visibility"
:value "public"
:type "radio"
:checked (= "public" @visibility-atom)
:on-change #(reset! visibility-atom (.-target.value %))}]
(t :core/Public)]]]])]]
[pe/action-buttons state]]))
(defn edit-profile
  "Dispatches the profile editor body on the active edit tab
  (:content, :theme, :settings or :preview); unknown tabs render an
  empty div."
  [state]
  (let [tab @(cursor state [:edit :active-tab])
        tab->view {:content edit-profile-content
                   :theme edit-theme
                   :settings edit-settings
                   :preview view-profile}]
    [:div
     (when-let [view (tab->view tab)]
       [view state])]))
;; Top-level page content: modal host plus either a "user does not
;; exist" notice (when :visibility is "gone") or the profile itself,
;; switching between edit and view mode on the :edit-mode flag.
(defn content [state]
[:div
[m/modal-window]
(if (= "gone" (:visibility @state))
[:div.col-md-12
[:p
[:b (t :profile/Userdoesnotexist)]]]
[:div#profile
[:h2.sr-only (t :user/Editprofile)]
[ph/profile-navi state]
(when @(cursor state [:show-manage-buttons]) [ph/manage-buttons state])
(if @(cursor state [:edit-mode])
[edit-profile state]
[view-profile state])])])
;; Fetches the profile for user-id and merges it into state. Every
;; fetched block gets a client-side :key (pe/random-key) for stable
;; React identity; the one-shot :edit-mode flag is consumed from the
;; session via get! (defaulting to false).
(defn init-data [user-id state]
(ajax/GET
(path-for (str "/obpv1/profile/" user-id) true)
{:handler (fn [data]
(let [data-with-uuids (assoc data :blocks (vec (map #(assoc % :key (pe/random-key))
(get data :blocks))))]
(swap! state assoc :permission "success" :edit {:active-tab :content} :edit-mode (session/get! :edit-mode false))
(swap! state merge data-with-uuids)))}))
;; Route handler: builds the page-state atom, starts the async profile
;; fetch, and returns a render fn that picks a layout from the fetch
;; :permission status and whether the viewer owns the profile (session
;; user id equal to the parsed user-id route param).
(defn handler [site-navi params]
(let [user-id (:user-id params)
state (atom {:user-id user-id
:permission "initial"
:badge-small-view false
:pages-small-view true
:active-index 0
:edit-mode nil
:toggle-move-mode false
:blocks []
:edit {:active-tab :content}
:theme 0
:alert nil
:tabs []
:show-manage-buttons true})
user (session/get :user)]
(init-data user-id state)
(fn []
(cond
(= "initial" (:permission @state)) (layout/default site-navi [:div])
(and user (= "error" (:permission @state))) (layout/default-no-sidebar site-navi (err/error-content))
(= "error" (:permission @state)) (layout/landing-page site-navi (err/error-content))
(= (:id user) (js/parseInt user-id)) (layout/default site-navi (content state))
(and (= "success" (:permission @state)) user) (layout/default-no-sidebar site-navi (content state))
:else (layout/landing-page site-navi (content state))))))
| null | https://raw.githubusercontent.com/openbadgefactory/salava/97f05992406e4dcbe3c4bff75c04378d19606b61/src/cljs/salava/profile/ui/profile.cljs | clojure | #profile
#profile] | (ns salava.profile.ui.profile
(:require [salava.core.ui.helper :refer [plugin-fun path-for not-activated? private?]]
[salava.core.ui.ajax-utils :as ajax]
[salava.profile.ui.block :as pb]
[salava.core.ui.error :as err]
[reagent.core :as reagent :refer [atom cursor]]
[reagent.session :as session]
[salava.core.ui.layout :as layout]
[salava.core.i18n :refer [t]]
[salava.profile.ui.helper :as ph]
[reagent-modals.modals :as m]
[salava.profile.ui.edit :as pe]
[salava.page.themes :refer [themes borders]]
[salava.core.ui.modal :as mo]
[salava.profile.ui.modal :refer [userinfoblock]]))
(defn connect-user [user-id]
(let [connectuser (first (plugin-fun (session/get :plugins) "block" "connectuser"))]
(if connectuser
[connectuser user-id]
[:div ""])))
(defn profile-blocks [state]
(let [blocks (cursor state [:blocks])
block-count (count @blocks)
position (if (pos? block-count) (dec block-count) nil)]
[:div {:id "field-editor"}
(into [:div {:id "page-blocks"}]
(for [index (range (count @blocks))]
(ph/block-for-edit (cursor blocks [index]) state index)))
[ph/field-after blocks state nil]]))
(defn edit-profile-content [state]
(fn []
[:div (if (= 0 @(cursor state [:active-index]))
[:div#page-edit
[pe/action-buttons state]
[:div.panel.thumbnail
[:div.panel-heading
[:h3.sectiontitle (t :profile/Personalinformation)]]
[:div.panel-body
[pe/edit-profile state]]]
[profile-blocks state]]
@(cursor state [:tab-content]))]))
(defn view-profile [state]
(let [blocks (cursor state [:blocks])]
(if (= 0 @(cursor state [:active-index]))
[:div#page-view
[:div {:id (str "theme-" (or @(cursor state [:theme]) 0))
:class "page-content"}
[:div.panel
[:div.panel-left
[:div.panel-right
[:div.panel-content
[:div.panel-body
[userinfoblock state]
(for [index (range (count @blocks))]
(ph/block (cursor blocks [index]) state index)))]]]]]]]
@(cursor state [:tab-content]))]))
(defn theme-selection [theme-atom themes]
(reduce (fn [r theme]
(conj r [:div {:class (str "theme-" (:id theme))}
[:a {:href "#" :on-click #(do
(.preventDefault %)
(reset! theme-atom (js/parseInt (:id theme))))
:alt (t (:name theme)) :title (t (:name theme))
:aria-label (str (t :page/Selecttheme) " " (:id theme))}
[:div {:class (str "panel-right theme-container" (if (= @theme-atom (:id theme)) " selected"))} " "]]]))
[:div {:id "theme-container"}] themes))
(defn edit-theme [state]
[:div {:id "page-edit-theme"}
[:div {:class "panel thumbnail" :id "theme-panel"}
[:div.panel-heading
[:h2.sectiontitle (t :page/Selecttheme)]]
[:div.panel-body
[theme-selection (cursor state [:theme]) themes]]]
[pe/action-buttons state]
[view-profile state]])
(defn edit-settings [state]
(let [visibility-atom (cursor state [:edit-profile :user :profile_visibility])]
[:div#page-edit
[:div.panel.thumbnail
[:div.panel-heading
[:h2.sectiontitle (t :page/Settings)]]
[:div.panel-body
(if-not (private?)
[:div.col-md-12
[:div.row [:span._label.col-xs-12 (t :user/Profilevisibility)]]
[:div.radio {:id "visibility-radio-internal"}
[:fieldset
[:legend {:style {:display "none"}} ""]
[:label [:input {:name "visibility"
:value "internal"
:type "radio"
:checked (= "internal" @visibility-atom)
:on-change #(reset! visibility-atom (.-target.value %))}]
(t :user/Visibleonlytoregistered)]]
[:div.radio
[:label [:input {:name "visibility"
:value "public"
:type "radio"
:checked (= "public" @visibility-atom)
:on-change #(reset! visibility-atom (.-target.value %))}]
(t :core/Public)]]]])]]
[pe/action-buttons state]]))
(defn edit-profile [state]
(let [content @(cursor state [:edit :active-tab])]
[:div
(case content
:content [edit-profile-content state]
:theme [edit-theme state]
:settings [edit-settings state]
:preview [view-profile state]
nil)]))
(defn content [state]
[:div
[m/modal-window]
(if (= "gone" (:visibility @state))
[:div.col-md-12
[:p
[:b (t :profile/Userdoesnotexist)]]]
[:div#profile
[:h2.sr-only (t :user/Editprofile)]
[ph/profile-navi state]
(when @(cursor state [:show-manage-buttons]) [ph/manage-buttons state])
(if @(cursor state [:edit-mode])
[edit-profile state]
[view-profile state])])])
(defn init-data [user-id state]
(ajax/GET
(path-for (str "/obpv1/profile/" user-id) true)
{:handler (fn [data]
(let [data-with-uuids (assoc data :blocks (vec (map #(assoc % :key (pe/random-key))
(get data :blocks))))]
(swap! state assoc :permission "success" :edit {:active-tab :content} :edit-mode (session/get! :edit-mode false))
(swap! state merge data-with-uuids)))}))
(defn handler [site-navi params]
(let [user-id (:user-id params)
state (atom {:user-id user-id
:permission "initial"
:badge-small-view false
:pages-small-view true
:active-index 0
:edit-mode nil
:toggle-move-mode false
:blocks []
:edit {:active-tab :content}
:theme 0
:alert nil
:tabs []
:show-manage-buttons true})
user (session/get :user)]
(init-data user-id state)
(fn []
(cond
(= "initial" (:permission @state)) (layout/default site-navi [:div])
(and user (= "error" (:permission @state))) (layout/default-no-sidebar site-navi (err/error-content))
(= "error" (:permission @state)) (layout/landing-page site-navi (err/error-content))
(= (:id user) (js/parseInt user-id)) (layout/default site-navi (content state))
(and (= "success" (:permission @state)) user) (layout/default-no-sidebar site-navi (content state))
:else (layout/landing-page site-navi (content state))))))
|
7a72058779b2a1eacedf9879315a85f18e85997d28f6d5a5e4279cdb0d36f158 | Frama-C/Frama-C-snapshot | analysis.mli | (**************************************************************************)
(* *)
This file is part of Frama - C.
(* *)
Copyright ( C ) 2007 - 2019
CEA ( Commissariat à l'énergie atomique et aux énergies
(* alternatives) *)
(* *)
(* you can redistribute it and/or modify it under the terms of the GNU *)
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
(* *)
(* It is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Lesser General Public License for more details. *)
(* *)
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
(* *)
(**************************************************************************)
open Cil_types
open Eval
module type Results = sig
type state
type value
type location
val get_stmt_state : after:bool -> stmt -> state or_bottom
val get_kinstr_state: after:bool -> kinstr -> state or_bottom
val get_stmt_state_by_callstack:
after:bool -> stmt -> state Value_types.Callstack.Hashtbl.t or_top_or_bottom
val get_initial_state_by_callstack:
kernel_function -> state Value_types.Callstack.Hashtbl.t or_top_or_bottom
val eval_expr : state -> exp -> value evaluated
val copy_lvalue: state -> lval -> value flagged_value evaluated
val eval_lval_to_loc: state -> lval -> location evaluated
val eval_function_exp:
state -> ?args:exp list -> exp -> kernel_function list evaluated
end
module Make (Abstract: Abstractions.S) : sig
val compute_from_entry_point : kernel_function -> lib_entry:bool -> unit
val compute_from_init_state: kernel_function -> Abstract.Dom.t -> unit
include Results with type state := Abstract.Dom.state
and type value := Abstract.Val.t
and type location := Abstract.Loc.location
end
module type S = sig
include Abstractions.S
include Results with type state := Dom.state
and type value := Val.t
and type location := Loc.location
end
val current_analyzer : unit -> (module S)
(** The abstractions used in the latest analysis, and its results. *)
val register_hook: ((module S) -> unit) -> unit
(** Registers a hook that will be called each time the [current] analyzer
is changed. This happens when a new analysis is run with different
abstractions than before, or when the current project is changed. *)
val force_compute : unit -> unit
(** Perform a full analysis, starting from the [main] function. *)
val cvalue_initial_state: unit -> Cvalue.Model.t
(** Return the initial state of the cvalue domain only. *)
| null | https://raw.githubusercontent.com/Frama-C/Frama-C-snapshot/639a3647736bf8ac127d00ebe4c4c259f75f9b87/src/plugins/value/engine/analysis.mli | ocaml | ************************************************************************
alternatives)
you can redistribute it and/or modify it under the terms of the GNU
It is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
************************************************************************
* The abstractions used in the latest analysis, and its results.
* Registers a hook that will be called each time the [current] analyzer
is changed. This happens when a new analysis is run with different
abstractions than before, or when the current project is changed.
* Perform a full analysis, starting from the [main] function.
* Return the initial state of the cvalue domain only. | This file is part of Frama - C.
Copyright ( C ) 2007 - 2019
CEA ( Commissariat à l'énergie atomique et aux énergies
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
open Cil_types
open Eval
module type Results = sig
type state
type value
type location
val get_stmt_state : after:bool -> stmt -> state or_bottom
val get_kinstr_state: after:bool -> kinstr -> state or_bottom
val get_stmt_state_by_callstack:
after:bool -> stmt -> state Value_types.Callstack.Hashtbl.t or_top_or_bottom
val get_initial_state_by_callstack:
kernel_function -> state Value_types.Callstack.Hashtbl.t or_top_or_bottom
val eval_expr : state -> exp -> value evaluated
val copy_lvalue: state -> lval -> value flagged_value evaluated
val eval_lval_to_loc: state -> lval -> location evaluated
val eval_function_exp:
state -> ?args:exp list -> exp -> kernel_function list evaluated
end
module Make (Abstract: Abstractions.S) : sig
val compute_from_entry_point : kernel_function -> lib_entry:bool -> unit
val compute_from_init_state: kernel_function -> Abstract.Dom.t -> unit
include Results with type state := Abstract.Dom.state
and type value := Abstract.Val.t
and type location := Abstract.Loc.location
end
module type S = sig
include Abstractions.S
include Results with type state := Dom.state
and type value := Val.t
and type location := Loc.location
end
val current_analyzer : unit -> (module S)
val register_hook: ((module S) -> unit) -> unit
val force_compute : unit -> unit
val cvalue_initial_state: unit -> Cvalue.Model.t
|
992a18be6c1388d2b7d974d050859d2ea503fd242804864b71866d613a81cabc | JohnLato/iteratee | Wave.hs | # LANGUAGE RankNTypes , FlexibleContexts #
{-
This module is not meant primarily for instructive and pedagogical purposes.
As such, it is not fully featured, and sacrifices performance and generality
for clarity of code.
-}
module Data.Iteratee.Codecs.Wave {-# DEPRECATED "This will be moved to a separate package in the future" #-} (
WAVEDE (..),
WAVEDE_ENUM (..),
WAVE_CHUNK (..),
AudioFormat (..),
waveReader,
readRiff,
waveChunk,
chunkToString,
dictReadFormat,
dictReadFirstFormat,
dictReadLastFormat,
dictReadFirstData,
dictReadLastData,
dictReadData,
dictProcessData
)
where
import Prelude as P
import Control.Monad (join)
import Control.Monad.Trans (lift)
import Data.Iteratee
import qualified Data.Iteratee as Iter
import Data.Iteratee.Binary
import Data.Char (chr, ord)
import Data.Int
import Data.Word
import Data.Bits (shiftL)
import Data.Maybe
import qualified Data.IntMap as IM
-- =====================================================
WAVE libary code
-- useful type synonyms
|A WAVE directory is a list associating WAVE chunks with
a record WAVEDE
type WAVEDict = IM.IntMap [WAVEDE]
data WAVEDE = WAVEDE{
wavede_count :: Int, -- ^length of chunk
wavede_type :: WAVE_CHUNK, -- ^type of chunk
wavede_enum :: WAVEDE_ENUM -- ^enumerator to get values of chunk
}
type EnumeratorM sFrom sTo m a = Iteratee sTo m a -> m (Iteratee sFrom m a)
joinL :: (Monad m, Nullable s) => m (Iteratee s m a) -> Iteratee s m a
joinL = join . lift
data WAVEDE_ENUM =
WEN_BYTE (forall a. EnumeratorM [Word8] [Word8] IO a)
| WEN_DUB (forall a. EnumeratorM [Word8] [Double] IO a)
-- |Standard WAVE Chunks
^Format
| WAVE_DATA -- ^Data
| WAVE_OTHER String -- ^Other
deriving (Eq, Ord, Show)
instance Enum WAVE_CHUNK where
fromEnum WAVE_FMT = 1
fromEnum WAVE_DATA = 2
fromEnum (WAVE_OTHER _) = 3
toEnum 1 = WAVE_FMT
toEnum 2 = WAVE_DATA
toEnum 3 = WAVE_OTHER ""
toEnum _ = error "Invalid enumeration value"
-- -----------------
-- wave chunk reading/writing functions
-- |Convert a string to WAVE_CHUNK type
waveChunk :: String -> Maybe WAVE_CHUNK
waveChunk str
| str == "fmt " = Just WAVE_FMT
| str == "data" = Just WAVE_DATA
| P.length str == 4 = Just $ WAVE_OTHER str
| otherwise = Nothing
-- |Convert a WAVE_CHUNK to the representative string
chunkToString :: WAVE_CHUNK -> String
chunkToString WAVE_FMT = "fmt "
chunkToString WAVE_DATA = "data"
chunkToString (WAVE_OTHER str) = str
-- -----------------
data AudioFormat = AudioFormat {
numberOfChannels :: NumChannels, -- ^Number of channels in the audio data
sampleRate :: SampleRate, -- ^Sample rate of the audio
bitDepth :: BitDepth -- ^Bit depth of the audio data
} deriving (Show, Eq)
type NumChannels = Integer
type SampleRate = Integer
type BitDepth = Integer
convenience function to read a 4 - byte ASCII string
stringRead4 :: Monad m => Iteratee [Word8] m String
stringRead4 = do
s1 <- Iter.head
s2 <- Iter.head
s3 <- Iter.head
s4 <- Iter.head
return $ map (chr . fromIntegral) [s1, s2, s3, s4]
-- -----------------
|The library function to read the WAVE dictionary
waveReader :: Iteratee [Word8] IO (Maybe WAVEDict)
waveReader = do
readRiff
tot_size <- endianRead4 LSB
readRiffWave
chunks_m <- findChunks $ fromIntegral tot_size
loadDict $ joinM chunks_m
-- |Read the RIFF header of a file.
readRiff :: Iteratee [Word8] IO ()
readRiff = do
cnt <- heads $ fmap (fromIntegral . ord) "RIFF"
if cnt == 4 then return () else throwErr $ iterStrExc "Bad RIFF header"
| Read the WAVE part of the RIFF header .
readRiffWave :: Iteratee [Word8] IO ()
readRiffWave = do
cnt <- heads $ fmap (fromIntegral . ord) "WAVE"
if cnt == 4 then return () else throwErr $ iterStrExc "Bad RIFF/WAVE header"
-- | An internal function to find all the chunks. It assumes that the
stream is positioned to read the first chunk .
findChunks :: Int -> Iteratee [Word8] IO (Maybe [(Int, WAVE_CHUNK, Int)])
findChunks n = findChunks' 12 []
where
findChunks' offset acc = do
typ <- stringRead4
count <- endianRead4 LSB
case waveChunk typ of
Nothing -> (throwErr . iterStrExc $ "Bad subchunk descriptor: " ++ show typ)
>> return Nothing
Just chk -> let newpos = offset + 8 + count in
case newpos >= fromIntegral n of
True -> return . Just $ reverse $
(fromIntegral offset, chk, fromIntegral count) : acc
False -> do
Iter.seek $ fromIntegral newpos
findChunks' newpos $
(fromIntegral offset, chk, fromIntegral count) : acc
loadDict :: [(Int, WAVE_CHUNK, Int)] ->
Iteratee [Word8] IO (Maybe WAVEDict)
loadDict = P.foldl read_entry (return (Just IM.empty))
where
read_entry dictM (offset, typ, count) = dictM >>=
maybe (return Nothing) (\dict -> do
enum_m <- readValue dict offset typ count
case (enum_m, IM.lookup (fromEnum typ) dict) of
(Just enum, Nothing) -> --insert new entry
return . Just $ IM.insert (fromEnum typ)
[WAVEDE (fromIntegral count) typ enum] dict
(Just enum, Just _vals) -> --existing entry
return . Just $ IM.update
(\ls -> Just $ ls ++ [WAVEDE (fromIntegral count) typ enum])
(fromEnum typ) dict
(Nothing, _) -> return (Just dict)
)
readValue :: WAVEDict ->
Int -> -- Offset
WAVE_CHUNK -> -- Chunk type
Int -> -- Count
Iteratee [Word8] IO (Maybe WAVEDE_ENUM)
readValue _dict offset _ 0 = do
throwErr . iterStrExc $ "Zero count in the entry of chunk at: " ++ show offset
return Nothing
readValue dict offset WAVE_DATA count = do
fmt_m <- dictReadLastFormat dict
case fmt_m of
Just fmt ->
return . Just . WEN_DUB $ \iter_dub -> return $ do
Iter.seek (8 + fromIntegral offset)
let iter = Iter.convStream (convFunc fmt) iter_dub
joinI . joinI . Iter.take count $ iter
Nothing -> do
throwErr . iterStrExc $ "No valid format for data chunk at: " ++ show offset
return Nothing
return the WaveFormat iteratee
readValue _dict offset WAVE_FMT count =
return . Just . WEN_BYTE $ \iter -> return $ do
Iter.seek (8 + fromIntegral offset)
Iter.joinI $ Iter.take count iter
-- for WAVE_OTHER, return Word8s and maybe the user can parse them
readValue _dict offset (WAVE_OTHER _str) count =
return . Just . WEN_BYTE $ \iter -> return $ do
Iter.seek (8 + fromIntegral offset)
Iter.joinI $ Iter.take count iter
-- |Convert Word8s to Doubles
convFunc :: AudioFormat -> Iteratee [Word8] IO [Double]
convFunc (AudioFormat _nc _sr 8) = fmap
((:[]) . normalize 8 . (fromIntegral :: Word8 -> Int8))
Iter.head
convFunc (AudioFormat _nc _sr 16) = fmap
((:[]) . normalize 16 . (fromIntegral :: Word16 -> Int16))
(endianRead2 LSB)
convFunc (AudioFormat _nc _sr 24) = fmap
((:[]) . normalize 24 . (fromIntegral :: Word32 -> Int32))
(endianRead3 LSB)
convFunc (AudioFormat _nc _sr 32) = fmap
((:[]) . normalize 32 . (fromIntegral :: Word32 -> Int32))
(endianRead4 LSB)
convFunc _ = error "unrecognized audio format in convFunc"
eitherToMaybe :: Either a b -> Maybe b
eitherToMaybe = either (const Nothing) Just
-- |An Iteratee to read a wave format chunk
sWaveFormat :: Iteratee [Word8] IO (Maybe AudioFormat)
sWaveFormat = do
data format , 1==PCM
nc <- endianRead2 LSB
sr <- endianRead4 LSB
Iter.drop 6
bd <- endianRead2 LSB
case f' == 1 of
True -> return . Just $ AudioFormat (fromIntegral nc)
(fromIntegral sr)
(fromIntegral bd)
False -> return Nothing
-- ---------------------
-- functions to assist with reading from the dictionary
|Read the first format chunk in the WAVE dictionary .
dictReadFirstFormat :: WAVEDict -> Iteratee [Word8] IO (Maybe AudioFormat)
dictReadFirstFormat dict = case IM.lookup (fromEnum WAVE_FMT) dict of
Just [] -> return Nothing
Just ((WAVEDE _ WAVE_FMT (WEN_BYTE enum)) : _xs) -> joinIM $ enum sWaveFormat
_ -> return Nothing
|Read the last fromat chunk from the WAVE dictionary . This is useful
-- when parsing all chunks in the dictionary.
dictReadLastFormat :: WAVEDict -> Iteratee [Word8] IO (Maybe AudioFormat)
dictReadLastFormat dict = case IM.lookup (fromEnum WAVE_FMT) dict of
Just [] -> return Nothing
Just xs -> let (WAVEDE _ WAVE_FMT (WEN_BYTE enum)) = last xs in
joinIM $ enum sWaveFormat
_ -> return Nothing
|Read the specified format chunk from the WAVE dictionary
dictReadFormat :: Int -> --Index in the format chunk list to read
WAVEDict -> --Dictionary
Iteratee [Word8] IO (Maybe AudioFormat)
dictReadFormat ix dict = case IM.lookup (fromEnum WAVE_FMT) dict of
Just xs -> let (WAVEDE _ WAVE_FMT (WEN_BYTE enum)) = (!!) xs ix in
joinIM $ enum sWaveFormat
_ -> return Nothing
|Read the first data chunk in the WAVE dictionary .
dictReadFirstData :: WAVEDict -> Iteratee [Word8] IO (Maybe [Double])
dictReadFirstData dict = case IM.lookup (fromEnum WAVE_DATA) dict of
Just [] -> return Nothing
Just ((WAVEDE _ WAVE_DATA (WEN_DUB enum)) : _xs) -> do
e <- joinIM $ enum Iter.stream2list
return $ Just e
_ -> return Nothing
|Read the last data chunk in the WAVE dictionary .
dictReadLastData :: WAVEDict -> Iteratee [Word8] IO (Maybe [Double])
dictReadLastData dict = case IM.lookup (fromEnum WAVE_DATA) dict of
Just [] -> return Nothing
Just xs -> let (WAVEDE _ WAVE_DATA (WEN_DUB enum)) = last xs in do
e <- joinIM $ enum Iter.stream2list
return $ Just e
_ -> return Nothing
|Read the specified data chunk from the WAVE dictionary .
dictReadData :: Int -> --Index in the data chunk list to read
WAVEDict -> --Dictionary
Iteratee [Word8] IO (Maybe [Double])
dictReadData ix dict = case IM.lookup (fromEnum WAVE_DATA) dict of
Just xs -> let (WAVEDE _ WAVE_DATA (WEN_DUB enum)) = (!!) xs ix in do
e <- joinIM $ enum Iter.stream2list
return $ Just e
_ -> return Nothing
-- |Read the specified data chunk from the dictionary, applying the
data to the specified Iteratee .
dictProcessData :: Int -> -- Index in the data chunk list to read
WAVEDict -> -- Dictionary
Iteratee [Double] IO a ->
Iteratee [Word8] IO (Maybe a)
dictProcessData ix dict iter = case IM.lookup (fromEnum WAVE_DATA) dict of
Just xs -> let (WAVEDE _ WAVE_DATA (WEN_DUB enum)) = (!!) xs ix in do
e <- joinIM $ enum iter
return $ Just e
_ -> return Nothing
-- ---------------------
-- convenience functions
-- |Convert (Maybe []) to []. Nothing maps to an empty list.
joinM :: Maybe [a] -> [a]
joinM Nothing = []
joinM (Just a) = a
-- |Normalize a given value for the provided bit depth.
normalize :: Integral a => BitDepth -> a -> Double
normalize 8 a = (fromIntegral a - 128) / 128
normalize bd a = case (a > 0) of
True -> fromIntegral a / divPos
False -> fromIntegral a / divNeg
where
divPos = fromIntegral (1 `shiftL` fromIntegral (bd - 1) :: Int) - 1
divNeg = fromIntegral (1 `shiftL` fromIntegral (bd - 1) :: Int)
| null | https://raw.githubusercontent.com/JohnLato/iteratee/83852cebab1051999d70d2abce86f5ab88c6d7ec/Examples/Wave.hs | haskell |
This module is not meant primarily for instructive and pedagogical purposes.
As such, it is not fully featured, and sacrifices performance and generality
for clarity of code.
# DEPRECATED "This will be moved to a separate package in the future" #
=====================================================
useful type synonyms
^length of chunk
^type of chunk
^enumerator to get values of chunk
|Standard WAVE Chunks
^Data
^Other
-----------------
wave chunk reading/writing functions
|Convert a string to WAVE_CHUNK type
|Convert a WAVE_CHUNK to the representative string
-----------------
^Number of channels in the audio data
^Sample rate of the audio
^Bit depth of the audio data
-----------------
|Read the RIFF header of a file.
| An internal function to find all the chunks. It assumes that the
insert new entry
existing entry
Offset
Chunk type
Count
for WAVE_OTHER, return Word8s and maybe the user can parse them
|Convert Word8s to Doubles
|An Iteratee to read a wave format chunk
---------------------
functions to assist with reading from the dictionary
when parsing all chunks in the dictionary.
Index in the format chunk list to read
Dictionary
Index in the data chunk list to read
Dictionary
|Read the specified data chunk from the dictionary, applying the
Index in the data chunk list to read
Dictionary
---------------------
convenience functions
|Convert (Maybe []) to []. Nothing maps to an empty list.
|Normalize a given value for the provided bit depth. | # LANGUAGE RankNTypes , FlexibleContexts #
WAVEDE (..),
WAVEDE_ENUM (..),
WAVE_CHUNK (..),
AudioFormat (..),
waveReader,
readRiff,
waveChunk,
chunkToString,
dictReadFormat,
dictReadFirstFormat,
dictReadLastFormat,
dictReadFirstData,
dictReadLastData,
dictReadData,
dictProcessData
)
where
import Prelude as P
import Control.Monad (join)
import Control.Monad.Trans (lift)
import Data.Iteratee
import qualified Data.Iteratee as Iter
import Data.Iteratee.Binary
import Data.Char (chr, ord)
import Data.Int
import Data.Word
import Data.Bits (shiftL)
import Data.Maybe
import qualified Data.IntMap as IM
WAVE libary code
|A WAVE directory is a list associating WAVE chunks with
a record WAVEDE
type WAVEDict = IM.IntMap [WAVEDE]
data WAVEDE = WAVEDE{
}
type EnumeratorM sFrom sTo m a = Iteratee sTo m a -> m (Iteratee sFrom m a)
joinL :: (Monad m, Nullable s) => m (Iteratee s m a) -> Iteratee s m a
joinL = join . lift
data WAVEDE_ENUM =
WEN_BYTE (forall a. EnumeratorM [Word8] [Word8] IO a)
| WEN_DUB (forall a. EnumeratorM [Word8] [Double] IO a)
^Format
deriving (Eq, Ord, Show)
instance Enum WAVE_CHUNK where
fromEnum WAVE_FMT = 1
fromEnum WAVE_DATA = 2
fromEnum (WAVE_OTHER _) = 3
toEnum 1 = WAVE_FMT
toEnum 2 = WAVE_DATA
toEnum 3 = WAVE_OTHER ""
toEnum _ = error "Invalid enumeration value"
waveChunk :: String -> Maybe WAVE_CHUNK
waveChunk str
| str == "fmt " = Just WAVE_FMT
| str == "data" = Just WAVE_DATA
| P.length str == 4 = Just $ WAVE_OTHER str
| otherwise = Nothing
chunkToString :: WAVE_CHUNK -> String
chunkToString WAVE_FMT = "fmt "
chunkToString WAVE_DATA = "data"
chunkToString (WAVE_OTHER str) = str
data AudioFormat = AudioFormat {
} deriving (Show, Eq)
type NumChannels = Integer
type SampleRate = Integer
type BitDepth = Integer
convenience function to read a 4 - byte ASCII string
stringRead4 :: Monad m => Iteratee [Word8] m String
stringRead4 = do
s1 <- Iter.head
s2 <- Iter.head
s3 <- Iter.head
s4 <- Iter.head
return $ map (chr . fromIntegral) [s1, s2, s3, s4]
|The library function to read the WAVE dictionary
waveReader :: Iteratee [Word8] IO (Maybe WAVEDict)
waveReader = do
readRiff
tot_size <- endianRead4 LSB
readRiffWave
chunks_m <- findChunks $ fromIntegral tot_size
loadDict $ joinM chunks_m
readRiff :: Iteratee [Word8] IO ()
readRiff = do
cnt <- heads $ fmap (fromIntegral . ord) "RIFF"
if cnt == 4 then return () else throwErr $ iterStrExc "Bad RIFF header"
| Read the WAVE part of the RIFF header .
readRiffWave :: Iteratee [Word8] IO ()
readRiffWave = do
cnt <- heads $ fmap (fromIntegral . ord) "WAVE"
if cnt == 4 then return () else throwErr $ iterStrExc "Bad RIFF/WAVE header"
stream is positioned to read the first chunk .
findChunks :: Int -> Iteratee [Word8] IO (Maybe [(Int, WAVE_CHUNK, Int)])
findChunks n = findChunks' 12 []
where
findChunks' offset acc = do
typ <- stringRead4
count <- endianRead4 LSB
case waveChunk typ of
Nothing -> (throwErr . iterStrExc $ "Bad subchunk descriptor: " ++ show typ)
>> return Nothing
Just chk -> let newpos = offset + 8 + count in
case newpos >= fromIntegral n of
True -> return . Just $ reverse $
(fromIntegral offset, chk, fromIntegral count) : acc
False -> do
Iter.seek $ fromIntegral newpos
findChunks' newpos $
(fromIntegral offset, chk, fromIntegral count) : acc
loadDict :: [(Int, WAVE_CHUNK, Int)] ->
Iteratee [Word8] IO (Maybe WAVEDict)
loadDict = P.foldl read_entry (return (Just IM.empty))
where
read_entry dictM (offset, typ, count) = dictM >>=
maybe (return Nothing) (\dict -> do
enum_m <- readValue dict offset typ count
case (enum_m, IM.lookup (fromEnum typ) dict) of
return . Just $ IM.insert (fromEnum typ)
[WAVEDE (fromIntegral count) typ enum] dict
return . Just $ IM.update
(\ls -> Just $ ls ++ [WAVEDE (fromIntegral count) typ enum])
(fromEnum typ) dict
(Nothing, _) -> return (Just dict)
)
readValue :: WAVEDict ->
Iteratee [Word8] IO (Maybe WAVEDE_ENUM)
readValue _dict offset _ 0 = do
throwErr . iterStrExc $ "Zero count in the entry of chunk at: " ++ show offset
return Nothing
readValue dict offset WAVE_DATA count = do
fmt_m <- dictReadLastFormat dict
case fmt_m of
Just fmt ->
return . Just . WEN_DUB $ \iter_dub -> return $ do
Iter.seek (8 + fromIntegral offset)
let iter = Iter.convStream (convFunc fmt) iter_dub
joinI . joinI . Iter.take count $ iter
Nothing -> do
throwErr . iterStrExc $ "No valid format for data chunk at: " ++ show offset
return Nothing
return the WaveFormat iteratee
readValue _dict offset WAVE_FMT count =
return . Just . WEN_BYTE $ \iter -> return $ do
Iter.seek (8 + fromIntegral offset)
Iter.joinI $ Iter.take count iter
readValue _dict offset (WAVE_OTHER _str) count =
return . Just . WEN_BYTE $ \iter -> return $ do
Iter.seek (8 + fromIntegral offset)
Iter.joinI $ Iter.take count iter
convFunc :: AudioFormat -> Iteratee [Word8] IO [Double]
convFunc (AudioFormat _nc _sr 8) = fmap
((:[]) . normalize 8 . (fromIntegral :: Word8 -> Int8))
Iter.head
convFunc (AudioFormat _nc _sr 16) = fmap
((:[]) . normalize 16 . (fromIntegral :: Word16 -> Int16))
(endianRead2 LSB)
convFunc (AudioFormat _nc _sr 24) = fmap
((:[]) . normalize 24 . (fromIntegral :: Word32 -> Int32))
(endianRead3 LSB)
convFunc (AudioFormat _nc _sr 32) = fmap
((:[]) . normalize 32 . (fromIntegral :: Word32 -> Int32))
(endianRead4 LSB)
convFunc _ = error "unrecognized audio format in convFunc"
eitherToMaybe :: Either a b -> Maybe b
eitherToMaybe = either (const Nothing) Just
sWaveFormat :: Iteratee [Word8] IO (Maybe AudioFormat)
sWaveFormat = do
data format , 1==PCM
nc <- endianRead2 LSB
sr <- endianRead4 LSB
Iter.drop 6
bd <- endianRead2 LSB
case f' == 1 of
True -> return . Just $ AudioFormat (fromIntegral nc)
(fromIntegral sr)
(fromIntegral bd)
False -> return Nothing
|Read the first format chunk in the WAVE dictionary .
dictReadFirstFormat :: WAVEDict -> Iteratee [Word8] IO (Maybe AudioFormat)
dictReadFirstFormat dict = case IM.lookup (fromEnum WAVE_FMT) dict of
Just [] -> return Nothing
Just ((WAVEDE _ WAVE_FMT (WEN_BYTE enum)) : _xs) -> joinIM $ enum sWaveFormat
_ -> return Nothing
|Read the last fromat chunk from the WAVE dictionary . This is useful
dictReadLastFormat :: WAVEDict -> Iteratee [Word8] IO (Maybe AudioFormat)
dictReadLastFormat dict = case IM.lookup (fromEnum WAVE_FMT) dict of
Just [] -> return Nothing
Just xs -> let (WAVEDE _ WAVE_FMT (WEN_BYTE enum)) = last xs in
joinIM $ enum sWaveFormat
_ -> return Nothing
|Read the specified format chunk from the WAVE dictionary
Iteratee [Word8] IO (Maybe AudioFormat)
dictReadFormat ix dict = case IM.lookup (fromEnum WAVE_FMT) dict of
Just xs -> let (WAVEDE _ WAVE_FMT (WEN_BYTE enum)) = (!!) xs ix in
joinIM $ enum sWaveFormat
_ -> return Nothing
|Read the first data chunk in the WAVE dictionary .
dictReadFirstData :: WAVEDict -> Iteratee [Word8] IO (Maybe [Double])
dictReadFirstData dict = case IM.lookup (fromEnum WAVE_DATA) dict of
Just [] -> return Nothing
Just ((WAVEDE _ WAVE_DATA (WEN_DUB enum)) : _xs) -> do
e <- joinIM $ enum Iter.stream2list
return $ Just e
_ -> return Nothing
|Read the last data chunk in the WAVE dictionary .
dictReadLastData :: WAVEDict -> Iteratee [Word8] IO (Maybe [Double])
dictReadLastData dict = case IM.lookup (fromEnum WAVE_DATA) dict of
Just [] -> return Nothing
Just xs -> let (WAVEDE _ WAVE_DATA (WEN_DUB enum)) = last xs in do
e <- joinIM $ enum Iter.stream2list
return $ Just e
_ -> return Nothing
|Read the specified data chunk from the WAVE dictionary .
Iteratee [Word8] IO (Maybe [Double])
dictReadData ix dict = case IM.lookup (fromEnum WAVE_DATA) dict of
Just xs -> let (WAVEDE _ WAVE_DATA (WEN_DUB enum)) = (!!) xs ix in do
e <- joinIM $ enum Iter.stream2list
return $ Just e
_ -> return Nothing
data to the specified Iteratee .
Iteratee [Double] IO a ->
Iteratee [Word8] IO (Maybe a)
dictProcessData ix dict iter = case IM.lookup (fromEnum WAVE_DATA) dict of
Just xs -> let (WAVEDE _ WAVE_DATA (WEN_DUB enum)) = (!!) xs ix in do
e <- joinIM $ enum iter
return $ Just e
_ -> return Nothing
joinM :: Maybe [a] -> [a]
joinM Nothing = []
joinM (Just a) = a
normalize :: Integral a => BitDepth -> a -> Double
normalize 8 a = (fromIntegral a - 128) / 128
normalize bd a = case (a > 0) of
True -> fromIntegral a / divPos
False -> fromIntegral a / divNeg
where
divPos = fromIntegral (1 `shiftL` fromIntegral (bd - 1) :: Int) - 1
divNeg = fromIntegral (1 `shiftL` fromIntegral (bd - 1) :: Int)
|
553306cca07ca15246e9d3cdb8e2aff64ed7db4f27b0aa3f580d2947be3ca1bf | Oblosys/proxima | MagicMonad.hs | {-# OPTIONS_GHC -fglasgow-exts -fallow-overlapping-instances -fallow-undecidable-instances -fno-monomorphism-restriction #-}
-----------------------------------------------------------------------------------------
{-| Module : Magic
Copyright :
License : All Rights Reserved
Maintainer :
Stability :
Portability :
-}
-----------------------------------------------------------------------------------------
module MagicMonad where
import Layers hiding (LayerFn, Simple (..))
data Simple m state map doc pres gest upd =
Simple { present :: LayerFn m state doc (map, state) pres
, interpret :: LayerFn m (map, state) gest state upd
}
--- from lib
fix :: (a->a) -> a
fix a = let fixa = a fixa
in fixa
type LayerFn m horArgs vertArg horRess vertRes =
horArgs -> vertArg -> m (vertRes, horRess)
liftStep :: Monad m => LayerFn m hArg vArg hRes vRes -> (hRes -> g m ns) -> hArg -> (Step d vArg vRes :.: g) m ns
liftStep f next horArgs = Comp . Step $
\vArg -> do { (vertRes, horRes) <- f horArgs vArg
; return (vertRes, next horRes)
}
lfix f = fix f' where f' n = Fix . (f . lNilStep) n
lNilStep next hRes = NilStep $ next hRes
cfix f = fix f'
where f' n (Fix u) (Fix l) = Fix $ f n u l
combineStepDown :: Monad m => (f m x -> g m y -> h m ns) ->
(Step Down a b :.: f) m x ->
(Step Down b c :.: g) m y ->
(Step Down a c :.: h) m ns
combineStepDown next (Comp (Step upper))
(Comp (Step lower)) = Comp . Step $
\h -> do { (m ,upperf) <- upper h
; (l, lowerf) <- lower m
; return (l, next upperf lowerf)
}
combineStepUp :: Monad m => (f m x -> g m y -> h m ns) ->
(Step Up b c :.: f) m x ->
(Step Up a b :.: g) m y ->
(Step Up a c :.: h) m ns
combineStepUp next (Comp (Step upper))
(Comp (Step lower)) = Comp . Step $
\l -> do { (m, lowerf) <- lower l
; (h, upperf) <- upper m
; return (h, next upperf lowerf)
}
unStep (Comp (Step step)) = step
unNil (NilStep step) = step
newtype Fix m f = Fix (f m (Fix m f))
infixr :.:
newtype (:.:) f g (m :: * -> *) ns = Comp (f m (g m ns))
-- kind sig because otherwise m may get * if there are no applications
newtype NilStep m t = NilStep t
newtype Step dir a b m ns = Step (a -> m (b, ns))
data Up
data Down
class Comp (cmp :: (* -> *) -> * -> *) r c | cmp -> r c where
compose :: cmp m t -> r -> c
instance Comp (NilStep) (b->res) (b->res) where
compose cmp r = r
instance Comp g (a->res) cmp =>
Comp (f :.: g) (y->res) ((a->y) -> cmp) where
compose cmp r = \ab -> compose (rightType cmp) (r.ab)
rightType :: (f :.: g) m t -> g m t
rightType = undefined
class App (cmp :: (* -> *) -> * -> *) f fx r | cmp f -> fx r where
app :: cmp m t -> f -> fx -> r
instance App (NilStep) (a->b) a b where
app cmp f a = f a
instance ( Monad m
, App g (a->b) d e ) =>
App (Step dr ar rs :.: g) (a->b)
(((hRes -> g m ns) -> hArg ->
(Step dr vArg vRes :.: g) m ns) ->d)
(LayerFn m hArg vArg hRes vRes ->e) where
app cmp f fx = \lf -> (app (rightType cmp) f
(fx (liftStep lf)))
class ResType f res | f -> res where
resType :: f -> res
resType = undefined
instance ResType (Fix m ct) (ct m t)
instance ResType f r => ResType (a -> f) r
genericLift = app (resType genericLift) lfix
(compose (resType genericLift) id)
-- combine
class Combine (cmp :: (* -> *) -> * -> *) t f | cmp t -> f where
combineC :: cmp m t -> f
instance Monad m => Combine NilStep t ((u -> l -> c) ->
(NilStep m u) -> (NilStep m l) -> NilStep m c) where
combineC _ = \next (NilStep u) (NilStep l) ->
NilStep (next u l)
instance ( Monad m
, Combine c ct ( (ut -> lt -> ct) ->
u m ut -> l m lt-> c m ct) ) =>
Combine (Step Down a r :.: c) ct
((ut -> lt -> ct) ->
(Step Down a m' :.: u) m ut ->
(Step Down m' r :.: l) m lt ->
(Step Down a r :.: c) m ct) where
combineC cmp = \next u l ->
combineStepDown (combineC (rightType cmp) next) u l
instance ( Monad m
, Combine c ct ( (ut -> lt -> ct) ->
u m ut -> l m lt-> c m ct) ) =>
Combine (Step Up a r :.: c) ct
((ut -> lt -> ct) ->
(Step Up m' r :.: u) m ut ->
(Step Up a m' :.: l) m lt ->
(Step Up a r :.: c) m ct) where
combineC cmp = \next f g ->
combineStepUp (combineC (rightType cmp) next) f g
-- derived sig is not accepted, but this one is: (replace comp by f)
non monadic
genericCombine : : ( Combine f t ( ( Fix t1 - > Fix t2 - > Fix f ) - >
t1 ( Fix t1 ) - > t2 ( Fix t2 ) - >
f ( Fix f ) )
, ResType ( Fix t1 - > Fix t2 - > Fix f ) ( f t )
) = >
Fix t1 - > Fix t2 - > Fix f
genericCombine :: (Combine f t ( (Fix t1 -> Fix t2 -> Fix f) ->
t1 (Fix t1) -> t2 (Fix t2) ->
f (Fix f))
, ResType (Fix t1 -> Fix t2 -> Fix f) (f t)
) =>
Fix t1 -> Fix t2 -> Fix f
-}
genericCombine = cfix (combineC (resType genericCombine))
-- testing
type Layer m dc prs gst upd =
Fix m (Step Down dc prs :.: Step Up gst upd :.: NilStep)
lift :: Monad m => Simple m state map doc pres gest upd ->
state -> Layer m doc pres gest upd
lift smpl = genericLift (present smpl) (interpret smpl)
main layer1 layer2 layer3 =
do { (state1, state2, state3) <- initStates
; doc <- initDoc
; let layers = lift layer1 state1 `genericCombine`
lift layer2 state2 `genericCombine`
lift layer3 state3
; editLoop layers doc
}
editLoop (Fix presentStep) doc =
do { (pres , interpretStep) <-
unStep presentStep $ doc
; showRendering pres
; gesture <- getGesture
; (update, presentStep') <-
unStep interpretStep $ gesture
; let doc' = updateDocument update doc
;
; editLoop (unNil presentStep') doc'
}
type Layer2 a b a2 b2 = Fix ( Step Down a b : . : Step Up a2 b2 : . : )
combineTest =
do { ( state0 , state1 , state2 ) < - initStates
; doc < - initDoc
-- ; let ( state0 , state1 , state2 ) = ( 0 , 10 , 20 )
-- ; let doc = " DOC "
; let lift : : Simple state map doc pres gest upd - >
state - > Layer2 doc pres gest upd
lift l = genericLift ( present l ) ( interpret l )
; let layers = lift layer0 state0
` genericCombine ` lift layer1 state1
` genericCombine ` lift layer2 state2
: : Layer2 Document Rendering EditRendering EditDocument
-- : : Layer2 String String String String
; let ( Fix ( compPresentStep ) ) = layers
; let ( Comp ( Step presentStep ) ) = compPresentStep
; let ( pres , Comp ( Step interpretStep ) ) = presentStep $ doc
; let interpretStep = interpretStep
; print pres
; gesture < - getGesture
-- ; let gesture = " Gest "
; let ( update::EditDocument , next ) = interpretStep $ gesture
-- ; print update
;
; return ( )
}
type Layer2 a b a2 b2 = Fix (Step Down a b :.: Step Up a2 b2 :.: NilStep)
combineTest =
do { (state0, state1, state2) <- initStates
; doc <- initDoc
--; let (state0, state1, state2) = (0, 10, 20)
--; let doc = "DOC"
; let lift :: Simple state map doc pres gest upd ->
state -> Layer2 doc pres gest upd
lift l = genericLift (present l) (interpret l)
; let layers = lift layer0 state0
`genericCombine` lift layer1 state1
`genericCombine` lift layer2 state2
:: Layer2 Document Rendering EditRendering EditDocument
-- :: Layer2 String String String String
; let (Fix (compPresentStep)) = layers
; let (Comp (Step presentStep)) = compPresentStep
; let (pres , Comp (Step interpretStep)) = presentStep $ doc
; let interpretStep = interpretStep
; print pres
; gesture <- getGesture
-- ; let gesture = "Gest"
; let (update::EditDocument, next) = interpretStep $ gesture
-- ; print update
; getChar
; return ()
}
-} | null | https://raw.githubusercontent.com/Oblosys/proxima/f154dff2ccb8afe00eeb325d9d06f5e2a5ee7589/papers/Haskell%202008/Haskell/src/MagicMonad.hs | haskell | # OPTIONS_GHC -fglasgow-exts -fallow-overlapping-instances -fallow-undecidable-instances -fno-monomorphism-restriction #
---------------------------------------------------------------------------------------
| Module : Magic
Copyright :
License : All Rights Reserved
Maintainer :
Stability :
Portability :
---------------------------------------------------------------------------------------
- from lib
kind sig because otherwise m may get * if there are no applications
combine
derived sig is not accepted, but this one is: (replace comp by f)
testing
; let ( state0 , state1 , state2 ) = ( 0 , 10 , 20 )
; let doc = " DOC "
: : Layer2 String String String String
; let gesture = " Gest "
; print update
; let (state0, state1, state2) = (0, 10, 20)
; let doc = "DOC"
:: Layer2 String String String String
; let gesture = "Gest"
; print update
|
module MagicMonad where
import Layers hiding (LayerFn, Simple (..))
data Simple m state map doc pres gest upd =
Simple { present :: LayerFn m state doc (map, state) pres
, interpret :: LayerFn m (map, state) gest state upd
}
fix :: (a->a) -> a
fix a = let fixa = a fixa
in fixa
type LayerFn m horArgs vertArg horRess vertRes =
horArgs -> vertArg -> m (vertRes, horRess)
liftStep :: Monad m => LayerFn m hArg vArg hRes vRes -> (hRes -> g m ns) -> hArg -> (Step d vArg vRes :.: g) m ns
liftStep f next horArgs = Comp . Step $
\vArg -> do { (vertRes, horRes) <- f horArgs vArg
; return (vertRes, next horRes)
}
lfix f = fix f' where f' n = Fix . (f . lNilStep) n
lNilStep next hRes = NilStep $ next hRes
cfix f = fix f'
where f' n (Fix u) (Fix l) = Fix $ f n u l
combineStepDown :: Monad m => (f m x -> g m y -> h m ns) ->
(Step Down a b :.: f) m x ->
(Step Down b c :.: g) m y ->
(Step Down a c :.: h) m ns
combineStepDown next (Comp (Step upper))
(Comp (Step lower)) = Comp . Step $
\h -> do { (m ,upperf) <- upper h
; (l, lowerf) <- lower m
; return (l, next upperf lowerf)
}
combineStepUp :: Monad m => (f m x -> g m y -> h m ns) ->
(Step Up b c :.: f) m x ->
(Step Up a b :.: g) m y ->
(Step Up a c :.: h) m ns
combineStepUp next (Comp (Step upper))
(Comp (Step lower)) = Comp . Step $
\l -> do { (m, lowerf) <- lower l
; (h, upperf) <- upper m
; return (h, next upperf lowerf)
}
unStep (Comp (Step step)) = step
unNil (NilStep step) = step
newtype Fix m f = Fix (f m (Fix m f))
infixr :.:
newtype (:.:) f g (m :: * -> *) ns = Comp (f m (g m ns))
newtype NilStep m t = NilStep t
newtype Step dir a b m ns = Step (a -> m (b, ns))
data Up
data Down
class Comp (cmp :: (* -> *) -> * -> *) r c | cmp -> r c where
compose :: cmp m t -> r -> c
instance Comp (NilStep) (b->res) (b->res) where
compose cmp r = r
instance Comp g (a->res) cmp =>
Comp (f :.: g) (y->res) ((a->y) -> cmp) where
compose cmp r = \ab -> compose (rightType cmp) (r.ab)
rightType :: (f :.: g) m t -> g m t
rightType = undefined
class App (cmp :: (* -> *) -> * -> *) f fx r | cmp f -> fx r where
app :: cmp m t -> f -> fx -> r
instance App (NilStep) (a->b) a b where
app cmp f a = f a
instance ( Monad m
, App g (a->b) d e ) =>
App (Step dr ar rs :.: g) (a->b)
(((hRes -> g m ns) -> hArg ->
(Step dr vArg vRes :.: g) m ns) ->d)
(LayerFn m hArg vArg hRes vRes ->e) where
app cmp f fx = \lf -> (app (rightType cmp) f
(fx (liftStep lf)))
class ResType f res | f -> res where
resType :: f -> res
resType = undefined
instance ResType (Fix m ct) (ct m t)
instance ResType f r => ResType (a -> f) r
genericLift = app (resType genericLift) lfix
(compose (resType genericLift) id)
class Combine (cmp :: (* -> *) -> * -> *) t f | cmp t -> f where
combineC :: cmp m t -> f
instance Monad m => Combine NilStep t ((u -> l -> c) ->
(NilStep m u) -> (NilStep m l) -> NilStep m c) where
combineC _ = \next (NilStep u) (NilStep l) ->
NilStep (next u l)
instance ( Monad m
, Combine c ct ( (ut -> lt -> ct) ->
u m ut -> l m lt-> c m ct) ) =>
Combine (Step Down a r :.: c) ct
((ut -> lt -> ct) ->
(Step Down a m' :.: u) m ut ->
(Step Down m' r :.: l) m lt ->
(Step Down a r :.: c) m ct) where
combineC cmp = \next u l ->
combineStepDown (combineC (rightType cmp) next) u l
instance ( Monad m
, Combine c ct ( (ut -> lt -> ct) ->
u m ut -> l m lt-> c m ct) ) =>
Combine (Step Up a r :.: c) ct
((ut -> lt -> ct) ->
(Step Up m' r :.: u) m ut ->
(Step Up a m' :.: l) m lt ->
(Step Up a r :.: c) m ct) where
combineC cmp = \next f g ->
combineStepUp (combineC (rightType cmp) next) f g
non monadic
genericCombine : : ( Combine f t ( ( Fix t1 - > Fix t2 - > Fix f ) - >
t1 ( Fix t1 ) - > t2 ( Fix t2 ) - >
f ( Fix f ) )
, ResType ( Fix t1 - > Fix t2 - > Fix f ) ( f t )
) = >
Fix t1 - > Fix t2 - > Fix f
genericCombine :: (Combine f t ( (Fix t1 -> Fix t2 -> Fix f) ->
t1 (Fix t1) -> t2 (Fix t2) ->
f (Fix f))
, ResType (Fix t1 -> Fix t2 -> Fix f) (f t)
) =>
Fix t1 -> Fix t2 -> Fix f
-}
genericCombine = cfix (combineC (resType genericCombine))
type Layer m dc prs gst upd =
Fix m (Step Down dc prs :.: Step Up gst upd :.: NilStep)
lift :: Monad m => Simple m state map doc pres gest upd ->
state -> Layer m doc pres gest upd
lift smpl = genericLift (present smpl) (interpret smpl)
main layer1 layer2 layer3 =
do { (state1, state2, state3) <- initStates
; doc <- initDoc
; let layers = lift layer1 state1 `genericCombine`
lift layer2 state2 `genericCombine`
lift layer3 state3
; editLoop layers doc
}
editLoop (Fix presentStep) doc =
do { (pres , interpretStep) <-
unStep presentStep $ doc
; showRendering pres
; gesture <- getGesture
; (update, presentStep') <-
unStep interpretStep $ gesture
; let doc' = updateDocument update doc
;
; editLoop (unNil presentStep') doc'
}
type Layer2 a b a2 b2 = Fix ( Step Down a b : . : Step Up a2 b2 : . : )
combineTest =
do { ( state0 , state1 , state2 ) < - initStates
; doc < - initDoc
; let lift : : Simple state map doc pres gest upd - >
state - > Layer2 doc pres gest upd
lift l = genericLift ( present l ) ( interpret l )
; let layers = lift layer0 state0
` genericCombine ` lift layer1 state1
` genericCombine ` lift layer2 state2
: : Layer2 Document Rendering EditRendering EditDocument
; let ( Fix ( compPresentStep ) ) = layers
; let ( Comp ( Step presentStep ) ) = compPresentStep
; let ( pres , Comp ( Step interpretStep ) ) = presentStep $ doc
; let interpretStep = interpretStep
; print pres
; gesture < - getGesture
; let ( update::EditDocument , next ) = interpretStep $ gesture
;
; return ( )
}
type Layer2 a b a2 b2 = Fix (Step Down a b :.: Step Up a2 b2 :.: NilStep)
combineTest =
do { (state0, state1, state2) <- initStates
; doc <- initDoc
; let lift :: Simple state map doc pres gest upd ->
state -> Layer2 doc pres gest upd
lift l = genericLift (present l) (interpret l)
; let layers = lift layer0 state0
`genericCombine` lift layer1 state1
`genericCombine` lift layer2 state2
:: Layer2 Document Rendering EditRendering EditDocument
; let (Fix (compPresentStep)) = layers
; let (Comp (Step presentStep)) = compPresentStep
; let (pres , Comp (Step interpretStep)) = presentStep $ doc
; let interpretStep = interpretStep
; print pres
; gesture <- getGesture
; let (update::EditDocument, next) = interpretStep $ gesture
; getChar
; return ()
}
-} |
e2d6d9d653e1808f9a4e7d06d77148c4abd5cd4b2fb88e0f1ea4817d6765ba00 | lvh/caesium | sodium_test.clj | (ns caesium.sodium-test
(:require [caesium.sodium :as s]
[clojure.test :refer [deftest is]]))
(deftest init-test
(is (#{0 1} (s/init)))
(is (= 1 (s/init))))
| null | https://raw.githubusercontent.com/lvh/caesium/cb90a4325fa48a2e4fb6cad810f340125a53fc57/test/caesium/sodium_test.clj | clojure | (ns caesium.sodium-test
(:require [caesium.sodium :as s]
[clojure.test :refer [deftest is]]))
(deftest init-test
(is (#{0 1} (s/init)))
(is (= 1 (s/init))))
|
|
a09ad4a0dcb3f3b3caf485385bc8ca51584f4a7da8cfce954e59b9e4867d445c | rmloveland/scheme48-0.53 | env.scm | Copyright ( c ) 1993 - 1999 by and . See file COPYING .
(define *env*)
(define (current-env) *env*)
(define (set-current-env! env) (set! *env* env))
; Access to environment slots
(define env-ref vm-vector-ref)
(define env-set! vm-vector-set!)
(define (env-parent env) (env-ref env 0))
(define (set-env-parent! env x) (env-set! env 0 x))
Resembles
(do ((env env (env-parent env))
(i back (- i 1)))
((= i 0) env)))
; Making new environments
(define (pop-args-into-env count)
(push *env*)
(push (make-header (enum stob vector) (cells->bytes (+ count 1))))
(add-env-stats count)
(set! *env* (address->stob-descriptor (address2+ *stack*))))
; Alternative method for making environments - put the values into the heap.
(define (heap-env-space count)
(+ stob-overhead (+ count 1))) ; includes superior environment
(define (pop-args-into-heap-env count key)
(let ((stob (make-d-vector (enum stob vector) (+ count 1) key)))
(copy-memory! (address1+ *stack*)
(address+ (address-after-header stob)
(cells->a-units 1))
(cells->bytes count))
(add-cells-to-stack! (- 0 count))
(vm-vector-set! stob 0 *env*)
(set! *env* stob)))
; Migrate the current environment to the heap. Used when creating a closure.
; CURRENT-ENV-SIZE size is conservative.
(define (current-env-size)
(if (within-stack? *env*)
(stack-size)
0))
; This is what the interpreter calls when it needs to put the current
; environment in a closure.
(define (preserve-current-env key)
(preserve-current-env-with-reason key (enum copy closure)))
(define (preserve-current-env-with-reason key reason)
(if (within-stack? *env*)
(set! *env* (save-env-in-heap *env* *cont* key reason)))
*env*)
1 ) Copy ENV and its ancestors into heap , adding forwarding pointers
2 ) Go down the continuation chain updating the env pointers
;
; This code depends on continuation-cont pointers not crossing environment
; parent pointers on the stack.
(define (save-env-in-heap env cont key reason)
(let ((top (copy-env env key reason)))
(let loop ((env top))
(cond ((within-stack? (env-parent env))
(let ((new (copy-env (env-parent env) key reason)))
(set-env-parent! env new)
(loop new)))))
(let loop ((cont cont))
(let ((env (continuation-env cont)))
(cond ((and (stob? env)
(stob? (stob-header env)))
(set-continuation-env! cont (stob-header env))
(loop (continuation-cont cont))))))
top))
ARGUMENTS - ON - STACK needs to walk down the stack and find the end of the
; current arguments. It looks for headers, which we clobber with forwarding
pointers , so we put a marker in the first slot of the environment and
ARGUMENTS - ON - STACK knows to back up one if it finds the marker .
( Putting the forwarding pointer in the first slot does n't work , because
we ca n't distinguish between it and a normal first slot . )
(define (copy-env env key reason)
(let ((new (header+contents->stob (stob-header env)
(address-after-header env)
key)))
(add-copy-env-stats env reason)
(vm-vector-set! env 0 argument-limit-marker)
(stob-header-set! env new)
new))
| null | https://raw.githubusercontent.com/rmloveland/scheme48-0.53/1ae4531fac7150bd2af42d124da9b50dd1b89ec1/scheme/vm/env.scm | scheme | Access to environment slots
Making new environments
Alternative method for making environments - put the values into the heap.
includes superior environment
Migrate the current environment to the heap. Used when creating a closure.
CURRENT-ENV-SIZE size is conservative.
This is what the interpreter calls when it needs to put the current
environment in a closure.
This code depends on continuation-cont pointers not crossing environment
parent pointers on the stack.
current arguments. It looks for headers, which we clobber with forwarding | Copyright ( c ) 1993 - 1999 by and . See file COPYING .
(define *env*)
(define (current-env) *env*)
(define (set-current-env! env) (set! *env* env))
(define env-ref vm-vector-ref)
(define env-set! vm-vector-set!)
(define (env-parent env) (env-ref env 0))
(define (set-env-parent! env x) (env-set! env 0 x))
Resembles
(do ((env env (env-parent env))
(i back (- i 1)))
((= i 0) env)))
(define (pop-args-into-env count)
(push *env*)
(push (make-header (enum stob vector) (cells->bytes (+ count 1))))
(add-env-stats count)
(set! *env* (address->stob-descriptor (address2+ *stack*))))
(define (heap-env-space count)
(define (pop-args-into-heap-env count key)
(let ((stob (make-d-vector (enum stob vector) (+ count 1) key)))
(copy-memory! (address1+ *stack*)
(address+ (address-after-header stob)
(cells->a-units 1))
(cells->bytes count))
(add-cells-to-stack! (- 0 count))
(vm-vector-set! stob 0 *env*)
(set! *env* stob)))
(define (current-env-size)
(if (within-stack? *env*)
(stack-size)
0))
(define (preserve-current-env key)
(preserve-current-env-with-reason key (enum copy closure)))
(define (preserve-current-env-with-reason key reason)
(if (within-stack? *env*)
(set! *env* (save-env-in-heap *env* *cont* key reason)))
*env*)
1 ) Copy ENV and its ancestors into heap , adding forwarding pointers
2 ) Go down the continuation chain updating the env pointers
(define (save-env-in-heap env cont key reason)
(let ((top (copy-env env key reason)))
(let loop ((env top))
(cond ((within-stack? (env-parent env))
(let ((new (copy-env (env-parent env) key reason)))
(set-env-parent! env new)
(loop new)))))
(let loop ((cont cont))
(let ((env (continuation-env cont)))
(cond ((and (stob? env)
(stob? (stob-header env)))
(set-continuation-env! cont (stob-header env))
(loop (continuation-cont cont))))))
top))
ARGUMENTS - ON - STACK needs to walk down the stack and find the end of the
pointers , so we put a marker in the first slot of the environment and
ARGUMENTS - ON - STACK knows to back up one if it finds the marker .
( Putting the forwarding pointer in the first slot does n't work , because
we ca n't distinguish between it and a normal first slot . )
(define (copy-env env key reason)
(let ((new (header+contents->stob (stob-header env)
(address-after-header env)
key)))
(add-copy-env-stats env reason)
(vm-vector-set! env 0 argument-limit-marker)
(stob-header-set! env new)
new))
|
05a7daba2293877a928faa09ab109c5e3f85e483909daaf230128bc20fa3c0e5 | p2k/ecoinpool | ebitcoin_crash_repo.erl |
%%
Copyright ( C ) 2011 Patrick " p2k " < >
%%
This file is part of ebitcoin .
%%
%% ebitcoin is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
%% (at your option) any later version.
%%
%% ebitcoin is distributed in the hope that it will be useful,
%% but WITHOUT ANY WARRANTY; without even the implied warranty of
%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
%% GNU General Public License for more details.
%%
You should have received a copy of the GNU General Public License
along with ebitcoin . If not , see < / > .
%%
% The "crash repository" stores data between module crashes to allow consistent
% operation. You typically use the terminate function of gen_server for that
% opportunity and check for data in the init function. Note that fetching will
% remove data from the repo.
-module(ebitcoin_crash_repo).
-behaviour(gen_server).
-export([
start_link/1,
store/3,
fetch/2,
transfer_ets/2
]).
% Callbacks from gen_server
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
% Internal state record
-record(state, {
kvstorage,
etsstorage
}).
%% ===================================================================
%% API functions
%% ===================================================================
start_link(ServerName) ->
gen_server:start_link(ServerName, ?MODULE, [], []).
store(ServerRef, Key, Value) ->
gen_server:cast(ServerRef, {store, Key, Value}).
fetch(ServerRef, Key) ->
gen_server:call(ServerRef, {fetch, Key}).
transfer_ets(ServerRef, Key) ->
gen_server:call(ServerRef, {transfer_ets, Key}).
%% ===================================================================
%% Gen_Server callbacks
%% ===================================================================
init([]) ->
{ok, #state{kvstorage=dict:new(), etsstorage=dict:new()}}.
handle_call({fetch, Key}, _From, State=#state{kvstorage=KVStorage}) ->
case dict:find(Key, KVStorage) of
{ok, Value} ->
{reply, {ok, Value}, State#state{kvstorage=dict:erase(Key, KVStorage)}};
error ->
{reply, error, State}
end;
handle_call({transfer_ets, Key}, {Pid, _}, State=#state{etsstorage=ETSStorage}) ->
case dict:find(Key, ETSStorage) of
{ok, Tab} ->
ets:give_away(Tab, Pid, Key),
{reply, ok, State#state{etsstorage=dict:erase(Key, ETSStorage)}};
error ->
{reply, error, State}
end;
handle_call(_Message, _From, State) ->
{reply, {error, no_such_call}, State}.
handle_cast({store, Key, Value}, State=#state{kvstorage=KVStorage}) ->
{noreply, State#state{kvstorage=dict:store(Key, Value, KVStorage)}};
handle_cast(_Message, State) ->
{noreply, State}.
handle_info({'ETS-TRANSFER', Tab, _FromPid, Key}, State=#state{etsstorage=ETSStorage}) ->
{noreply, State#state{etsstorage=dict:store(Key, Tab, ETSStorage)}};
handle_info(_Message, State) ->
{noreply, State}.
terminate(_Reason, _State) ->
ok.
code_change(_OldVersion, State, _Extra) ->
{ok, State}.
| null | https://raw.githubusercontent.com/p2k/ecoinpool/01ba76a7ab4b17b60cb0c525786fddef43ea80e1/apps/ebitcoin/src/ebitcoin_crash_repo.erl | erlang |
ebitcoin is free software: you can redistribute it and/or modify
(at your option) any later version.
ebitcoin is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
The "crash repository" stores data between module crashes to allow consistent
operation. You typically use the terminate function of gen_server for that
opportunity and check for data in the init function. Note that fetching will
remove data from the repo.
Callbacks from gen_server
Internal state record
===================================================================
API functions
===================================================================
===================================================================
Gen_Server callbacks
=================================================================== |
Copyright ( C ) 2011 Patrick " p2k " < >
This file is part of ebitcoin .
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU General Public License
along with ebitcoin . If not , see < / > .
-module(ebitcoin_crash_repo).
-behaviour(gen_server).
-export([
start_link/1,
store/3,
fetch/2,
transfer_ets/2
]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
-record(state, {
kvstorage,
etsstorage
}).
start_link(ServerName) ->
gen_server:start_link(ServerName, ?MODULE, [], []).
store(ServerRef, Key, Value) ->
gen_server:cast(ServerRef, {store, Key, Value}).
fetch(ServerRef, Key) ->
gen_server:call(ServerRef, {fetch, Key}).
transfer_ets(ServerRef, Key) ->
gen_server:call(ServerRef, {transfer_ets, Key}).
init([]) ->
{ok, #state{kvstorage=dict:new(), etsstorage=dict:new()}}.
handle_call({fetch, Key}, _From, State=#state{kvstorage=KVStorage}) ->
case dict:find(Key, KVStorage) of
{ok, Value} ->
{reply, {ok, Value}, State#state{kvstorage=dict:erase(Key, KVStorage)}};
error ->
{reply, error, State}
end;
handle_call({transfer_ets, Key}, {Pid, _}, State=#state{etsstorage=ETSStorage}) ->
case dict:find(Key, ETSStorage) of
{ok, Tab} ->
ets:give_away(Tab, Pid, Key),
{reply, ok, State#state{etsstorage=dict:erase(Key, ETSStorage)}};
error ->
{reply, error, State}
end;
handle_call(_Message, _From, State) ->
{reply, {error, no_such_call}, State}.
handle_cast({store, Key, Value}, State=#state{kvstorage=KVStorage}) ->
{noreply, State#state{kvstorage=dict:store(Key, Value, KVStorage)}};
handle_cast(_Message, State) ->
{noreply, State}.
handle_info({'ETS-TRANSFER', Tab, _FromPid, Key}, State=#state{etsstorage=ETSStorage}) ->
{noreply, State#state{etsstorage=dict:store(Key, Tab, ETSStorage)}};
handle_info(_Message, State) ->
{noreply, State}.
terminate(_Reason, _State) ->
ok.
code_change(_OldVersion, State, _Extra) ->
{ok, State}.
|
2c6cfef249e40b8c828af44692bf43a13c6843475c4fe80c62c1a009b4a08357 | AeneasVerif/aeneas | InterpreterExpressions.ml | module T = Types
module PV = PrimitiveValues
module V = Values
module LA = LlbcAst
open Scalars
module E = Expressions
open Utils
module C = Contexts
module Subst = Substitute
module L = Logging
open TypesUtils
open ValuesUtils
module Inv = Invariants
module S = SynthesizeSymbolic
open Cps
open InterpreterUtils
open InterpreterExpansion
open InterpreterPaths
(** The local logger *)
let log = L.expressions_log
(** As long as there are symbolic values at a given place (potentially in subvalues)
which contain borrows and are primitively copyable, expand them.
We use this function before copying values.
Note that the place should have been prepared so that there are no remaining
loans.
*)
let expand_primitively_copyable_at_place (config : C.config)
(access : access_kind) (p : E.place) : cm_fun =
fun cf ctx ->
(* Small helper *)
let rec expand : cm_fun =
fun cf ctx ->
let v = read_place access p ctx in
match
find_first_primitively_copyable_sv_with_borrows
ctx.type_context.type_infos v
with
| None -> cf ctx
| Some sv ->
let cc =
expand_symbolic_value_no_branching config sv
(Some (S.mk_mplace p ctx))
in
comp cc expand cf ctx
in
(* Apply *)
expand cf ctx
* Read a place ( CPS - style function ) .
We also check that the value * does n't contain bottoms or reserved
borrows * .
We also check that the value *doesn't contain bottoms or reserved
borrows*.
*)
let read_place (access : access_kind) (p : E.place)
(cf : V.typed_value -> m_fun) : m_fun =
fun ctx ->
let v = read_place access p ctx in
(* Check that there are no bottoms in the value *)
assert (not (bottom_in_value ctx.ended_regions v));
(* Check that there are no reserved borrows in the value *)
assert (not (reserved_in_value v));
(* Call the continuation *)
cf v ctx
let access_rplace_reorganize_and_read (config : C.config)
(expand_prim_copy : bool) (access : access_kind) (p : E.place)
(cf : V.typed_value -> m_fun) : m_fun =
fun ctx ->
(* Make sure we can evaluate the path *)
let cc = update_ctx_along_read_place config access p in
(* End the proper loans at the place itself *)
let cc = comp cc (end_loans_at_place config access p) in
(* Expand the copyable values which contain borrows (which are necessarily shared
* borrows) *)
let cc =
if expand_prim_copy then
comp cc (expand_primitively_copyable_at_place config access p)
else cc
in
(* Read the place - note that this checks that the value doesn't contain bottoms *)
let read_place = read_place access p in
(* Compose *)
comp cc read_place cf ctx
let access_rplace_reorganize (config : C.config) (expand_prim_copy : bool)
(access : access_kind) (p : E.place) : cm_fun =
fun cf ctx ->
access_rplace_reorganize_and_read config expand_prim_copy access p
(fun _v -> cf)
ctx
(** Convert an operand constant operand value to a typed value *)
let primitive_to_typed_value (ty : T.ety) (cv : V.primitive_value) :
V.typed_value =
(* Check the type while converting - we actually need some information
* contained in the type *)
log#ldebug
(lazy
("primitive_to_typed_value:" ^ "\n- cv: "
^ Print.PrimitiveValues.primitive_value_to_string cv));
match (ty, cv) with
Scalar , boolean ...
| T.Bool, Bool v -> { V.value = V.Primitive (Bool v); ty }
| T.Char, Char v -> { V.value = V.Primitive (Char v); ty }
| T.Str, String v -> { V.value = V.Primitive (String v); ty }
| T.Integer int_ty, PV.Scalar v ->
(* Check the type and the ranges *)
assert (int_ty = v.int_ty);
assert (check_scalar_value_in_range v);
{ V.value = V.Primitive (PV.Scalar v); ty }
(* Remaining cases (invalid) *)
| _, _ -> raise (Failure "Improperly typed constant value")
* Copy a value , and return the resulting value .
Note that copying values might update the context . For instance , when
copying shared borrows , we need to insert new shared borrows in the context .
Also , this function is actually more general than it should be : it can be
used to copy concrete ADT values , while ADT copy should be done through the
Copy trait ( i.e. , by calling a dedicated function ) . This is why we added a
parameter to control this copy ( [ allow_adt_copy ] ) . Note that here by ADT we
mean the user - defined ADTs ( not tuples or assumed types ) .
Note that copying values might update the context. For instance, when
copying shared borrows, we need to insert new shared borrows in the context.
Also, this function is actually more general than it should be: it can be
used to copy concrete ADT values, while ADT copy should be done through the
Copy trait (i.e., by calling a dedicated function). This is why we added a
parameter to control this copy ([allow_adt_copy]). Note that here by ADT we
mean the user-defined ADTs (not tuples or assumed types).
*)
let rec copy_value (allow_adt_copy : bool) (config : C.config)
(ctx : C.eval_ctx) (v : V.typed_value) : C.eval_ctx * V.typed_value =
log#ldebug
(lazy
("copy_value: "
^ typed_value_to_string ctx v
^ "\n- context:\n" ^ eval_ctx_to_string ctx));
(* Remark: at some point we rewrote this function to use iterators, but then
* we reverted the changes: the result was less clear actually. In particular,
* the fact that we have exhaustive matches below makes very obvious the cases
* in which we need to fail *)
match v.V.value with
| V.Primitive _ -> (ctx, v)
| V.Adt av ->
(* Sanity check *)
(match v.V.ty with
| T.Adt (T.Assumed (T.Box | Vec), _, _) ->
raise (Failure "Can't copy an assumed value other than Option")
| T.Adt (T.AdtId _, _, _) -> assert allow_adt_copy
| T.Adt ((T.Assumed Option | T.Tuple), _, _) -> () (* Ok *)
| _ -> raise (Failure "Unreachable"));
let ctx, fields =
List.fold_left_map
(copy_value allow_adt_copy config)
ctx av.field_values
in
(ctx, { v with V.value = V.Adt { av with field_values = fields } })
| V.Bottom -> raise (Failure "Can't copy ⊥")
| V.Borrow bc -> (
(* We can only copy shared borrows *)
match bc with
| SharedBorrow bid ->
(* We need to create a new borrow id for the copied borrow, and
* update the context accordingly *)
let bid' = C.fresh_borrow_id () in
let ctx = InterpreterBorrows.reborrow_shared bid bid' ctx in
(ctx, { v with V.value = V.Borrow (SharedBorrow bid') })
| MutBorrow (_, _) -> raise (Failure "Can't copy a mutable borrow")
| V.ReservedMutBorrow _ ->
raise (Failure "Can't copy a reserved mut borrow"))
| V.Loan lc -> (
(* We can only copy shared loans *)
match lc with
| V.MutLoan _ -> raise (Failure "Can't copy a mutable loan")
| V.SharedLoan (_, sv) ->
(* We don't copy the shared loan: only the shared value inside *)
copy_value allow_adt_copy config ctx sv)
| V.Symbolic sp ->
(* We can copy only if the type is "primitively" copyable.
* Note that in the general case, copy is a trait: copying values
* thus requires calling the proper function. Here, we copy values
* for very simple types such as integers, shared borrows, etc. *)
assert (ty_is_primitively_copyable (Subst.erase_regions sp.V.sv_ty));
If the type is copyable , we simply return the current value . Side
* remark : what is important to look at when copying symbolic values
* is symbolic expansion . The important subcase is the expansion of shared
* borrows : when doing so , every occurrence of the same symbolic value
* must use a fresh borrow i d.
* remark: what is important to look at when copying symbolic values
* is symbolic expansion. The important subcase is the expansion of shared
* borrows: when doing so, every occurrence of the same symbolic value
* must use a fresh borrow id. *)
(ctx, v)
* Reorganize the environment in preparation for the evaluation of an operand .
Evaluating an operand requires reorganizing the environment to get access
to a given place ( by ending borrows , expanding symbolic values ... ) then
applying the operand operation ( move , copy , etc . ) .
Sometimes , we want to decouple the two operations .
Consider the following example :
{ [
context = {
x - > shared_borrow l0
y - > shared_loan { l0 } v
}
dest < - f(move x , move y ) ;
...
] }
Because of the way { ! end_borrow } is implemented , when giving back the borrow
[ l0 ] upon evaluating [ move y ] , we wo n't notice that [ shared_borrow l0 ] has
disappeared from the environment ( it has been moved and not assigned yet ,
and so is hanging in " thin air " ) .
By first " preparing " the operands evaluation , we make sure no such thing
happens . To be more precise , we make sure all the updates to borrows triggered
by access * and * move operations have already been applied .
. : in the formalization , we always have an explicit " reorganization " step
in the rule premises , before the actual operand evaluation , that allows to
reorganize the environment so that it satisfies the proper conditions . This
function 's role is to do the reorganization .
. : doing this is actually not completely necessary because when
generating MIR , rustc introduces intermediate assignments for all the function
parameters . Still , it is better for soundness purposes , and corresponds to
what we do in the formalization ( because we do n't enforce the same constraints
as MIR in the formalization ) .
Evaluating an operand requires reorganizing the environment to get access
to a given place (by ending borrows, expanding symbolic values...) then
applying the operand operation (move, copy, etc.).
Sometimes, we want to decouple the two operations.
Consider the following example:
{[
context = {
x -> shared_borrow l0
y -> shared_loan {l0} v
}
dest <- f(move x, move y);
...
]}
Because of the way {!end_borrow} is implemented, when giving back the borrow
[l0] upon evaluating [move y], we won't notice that [shared_borrow l0] has
disappeared from the environment (it has been moved and not assigned yet,
and so is hanging in "thin air").
By first "preparing" the operands evaluation, we make sure no such thing
happens. To be more precise, we make sure all the updates to borrows triggered
by access *and* move operations have already been applied.
Rk.: in the formalization, we always have an explicit "reorganization" step
in the rule premises, before the actual operand evaluation, that allows to
reorganize the environment so that it satisfies the proper conditions. This
function's role is to do the reorganization.
Rk.: doing this is actually not completely necessary because when
generating MIR, rustc introduces intermediate assignments for all the function
parameters. Still, it is better for soundness purposes, and corresponds to
what we do in the formalization (because we don't enforce the same constraints
as MIR in the formalization).
*)
let prepare_eval_operand_reorganize (config : C.config) (op : E.operand) :
cm_fun =
fun cf ctx ->
let prepare : cm_fun =
fun cf ctx ->
match op with
| Expressions.Constant (ty, cv) ->
(* No need to reorganize the context *)
primitive_to_typed_value ty cv |> ignore;
cf ctx
| Expressions.Copy p ->
(* Access the value *)
let access = Read in
(* Expand the symbolic values, if necessary *)
let expand_prim_copy = true in
access_rplace_reorganize config expand_prim_copy access p cf ctx
| Expressions.Move p ->
(* Access the value *)
let access = Move in
let expand_prim_copy = false in
access_rplace_reorganize config expand_prim_copy access p cf ctx
in
(* Apply *)
prepare cf ctx
(** Evaluate an operand, without reorganizing the context before *)
let eval_operand_no_reorganize (config : C.config) (op : E.operand)
(cf : V.typed_value -> m_fun) : m_fun =
fun ctx ->
(* Debug *)
log#ldebug
(lazy
("eval_operand_no_reorganize: op: " ^ operand_to_string ctx op
^ "\n- ctx:\n" ^ eval_ctx_to_string ctx ^ "\n"));
(* Evaluate *)
match op with
| Expressions.Constant (ty, cv) -> cf (primitive_to_typed_value ty cv) ctx
| Expressions.Copy p ->
(* Access the value *)
let access = Read in
let cc = read_place access p in
(* Copy the value *)
let copy cf v : m_fun =
fun ctx ->
(* Sanity checks *)
assert (not (bottom_in_value ctx.ended_regions v));
assert (
Option.is_none
(find_first_primitively_copyable_sv_with_borrows
ctx.type_context.type_infos v));
(* Actually perform the copy *)
let allow_adt_copy = false in
let ctx, v = copy_value allow_adt_copy config ctx v in
(* Continue *)
cf v ctx
in
(* Compose and apply *)
comp cc copy cf ctx
| Expressions.Move p ->
(* Access the value *)
let access = Move in
let cc = read_place access p in
(* Move the value *)
let move cf v : m_fun =
fun ctx ->
(* Check that there are no bottoms in the value we are about to move *)
assert (not (bottom_in_value ctx.ended_regions v));
let bottom : V.typed_value = { V.value = Bottom; ty = v.ty } in
let ctx = write_place access p bottom ctx in
cf v ctx
in
(* Compose and apply *)
comp cc move cf ctx
let eval_operand (config : C.config) (op : E.operand)
(cf : V.typed_value -> m_fun) : m_fun =
fun ctx ->
(* Debug *)
log#ldebug
(lazy
("eval_operand: op: " ^ operand_to_string ctx op ^ "\n- ctx:\n"
^ eval_ctx_to_string ctx ^ "\n"));
(* We reorganize the context, then evaluate the operand *)
comp
(prepare_eval_operand_reorganize config op)
(eval_operand_no_reorganize config op)
cf ctx
(** Small utility.
See [prepare_eval_operand_reorganize].
*)
let prepare_eval_operands_reorganize (config : C.config) (ops : E.operand list)
: cm_fun =
fold_left_apply_continuation (prepare_eval_operand_reorganize config) ops
(** Evaluate several operands. *)
let eval_operands (config : C.config) (ops : E.operand list)
(cf : V.typed_value list -> m_fun) : m_fun =
fun ctx ->
(* Prepare the operands *)
let prepare = prepare_eval_operands_reorganize config ops in
(* Evaluate the operands *)
let eval =
fold_left_list_apply_continuation (eval_operand_no_reorganize config) ops
in
(* Compose and apply *)
comp prepare eval cf ctx
let eval_two_operands (config : C.config) (op1 : E.operand) (op2 : E.operand)
(cf : V.typed_value * V.typed_value -> m_fun) : m_fun =
let eval_op = eval_operands config [ op1; op2 ] in
let use_res cf res =
match res with
| [ v1; v2 ] -> cf (v1, v2)
| _ -> raise (Failure "Unreachable")
in
comp eval_op use_res cf
let eval_unary_op_concrete (config : C.config) (unop : E.unop) (op : E.operand)
(cf : (V.typed_value, eval_error) result -> m_fun) : m_fun =
(* Evaluate the operand *)
let eval_op = eval_operand config op in
(* Apply the unop *)
let apply cf (v : V.typed_value) : m_fun =
match (unop, v.V.value) with
| E.Not, V.Primitive (Bool b) ->
cf (Ok { v with V.value = V.Primitive (Bool (not b)) })
| E.Neg, V.Primitive (PV.Scalar sv) -> (
let i = Z.neg sv.PV.value in
match mk_scalar sv.int_ty i with
| Error _ -> cf (Error EPanic)
| Ok sv -> cf (Ok { v with V.value = V.Primitive (PV.Scalar sv) }))
| E.Cast (src_ty, tgt_ty), V.Primitive (PV.Scalar sv) -> (
assert (src_ty = sv.int_ty);
let i = sv.PV.value in
match mk_scalar tgt_ty i with
| Error _ -> cf (Error EPanic)
| Ok sv ->
let ty = T.Integer tgt_ty in
let value = V.Primitive (PV.Scalar sv) in
cf (Ok { V.ty; value }))
| _ -> raise (Failure "Invalid input for unop")
in
comp eval_op apply cf
let eval_unary_op_symbolic (config : C.config) (unop : E.unop) (op : E.operand)
(cf : (V.typed_value, eval_error) result -> m_fun) : m_fun =
fun ctx ->
(* Evaluate the operand *)
let eval_op = eval_operand config op in
(* Generate a fresh symbolic value to store the result *)
let apply cf (v : V.typed_value) : m_fun =
fun ctx ->
let res_sv_id = C.fresh_symbolic_value_id () in
let res_sv_ty =
match (unop, v.V.ty) with
| E.Not, T.Bool -> T.Bool
| E.Neg, T.Integer int_ty -> T.Integer int_ty
| E.Cast (_, tgt_ty), _ -> T.Integer tgt_ty
| _ -> raise (Failure "Invalid input for unop")
in
let res_sv =
{ V.sv_kind = V.FunCallRet; V.sv_id = res_sv_id; sv_ty = res_sv_ty }
in
(* Call the continuation *)
let expr = cf (Ok (mk_typed_value_from_symbolic_value res_sv)) ctx in
Synthesize the symbolic AST
S.synthesize_unary_op ctx unop v
(S.mk_opt_place_from_op op ctx)
res_sv None expr
in
(* Compose and apply *)
comp eval_op apply cf ctx
let eval_unary_op (config : C.config) (unop : E.unop) (op : E.operand)
(cf : (V.typed_value, eval_error) result -> m_fun) : m_fun =
match config.mode with
| C.ConcreteMode -> eval_unary_op_concrete config unop op cf
| C.SymbolicMode -> eval_unary_op_symbolic config unop op cf
(** Small helper for [eval_binary_op_concrete]: computes the result of applying
    the binop *after* the operands have been successfully evaluated.

    Returns [Error EPanic] when the operation panics at run-time (division or
    remainder by zero, or an out-of-range result detected by [mk_scalar]). *)
let eval_binary_op_concrete_compute (binop : E.binop) (v1 : V.typed_value)
    (v2 : V.typed_value) : (V.typed_value, eval_error) result =
  (* Equality check binops (Eq, Ne) accept values from a wide variety of types.
   * The remaining binops only operate on scalars. *)
  if binop = Eq || binop = Ne then (
    (* Equality operations *)
    assert (v1.ty = v2.ty);
    (* Equality/inequality check is primitive only for a subset of types *)
    assert (ty_is_primitively_copyable v1.ty);
    let b = v1 = v2 in
    Ok { V.value = V.Primitive (Bool b); ty = T.Bool })
  else
    (* For the non-equality operations, the input values are necessarily scalars *)
    match (v1.V.value, v2.V.value) with
    | V.Primitive (PV.Scalar sv1), V.Primitive (PV.Scalar sv2) -> (
        (* There are binops which require the two operands to have the same
           type, and binops for which it is not the case.
           There are also binops which return booleans, and binops which
           return integers. *)
        match binop with
        | E.Lt | E.Le | E.Ge | E.Gt ->
            (* The two operands must have the same type and the result is a
               boolean *)
            assert (sv1.int_ty = sv2.int_ty);
            let b =
              match binop with
              | E.Lt -> Z.lt sv1.PV.value sv2.PV.value
              | E.Le -> Z.leq sv1.PV.value sv2.PV.value
              | E.Ge -> Z.geq sv1.PV.value sv2.PV.value
              | E.Gt -> Z.gt sv1.PV.value sv2.PV.value
              | E.Div | E.Rem | E.Add | E.Sub | E.Mul | E.BitXor | E.BitAnd
              | E.BitOr | E.Shl | E.Shr | E.Ne | E.Eq ->
                  raise (Failure "Unreachable")
            in
            Ok ({ V.value = V.Primitive (Bool b); ty = T.Bool } : V.typed_value)
        | E.Div | E.Rem | E.Add | E.Sub | E.Mul | E.BitXor | E.BitAnd | E.BitOr
          -> (
            (* The two operands must have the same type and the result is an
               integer *)
            assert (sv1.int_ty = sv2.int_ty);
            let res =
              match binop with
              | E.Div ->
                  if sv2.PV.value = Z.zero then Error ()
                  else mk_scalar sv1.int_ty (Z.div sv1.PV.value sv2.PV.value)
              | E.Rem ->
                  (* [Z.rem] truncates towards zero (the sign of the result is
                     the sign of the dividend), which matches the semantics of
                     Rust's [%] operator *)
                  if sv2.PV.value = Z.zero then Error ()
                  else mk_scalar sv1.int_ty (Z.rem sv1.PV.value sv2.PV.value)
              | E.Add -> mk_scalar sv1.int_ty (Z.add sv1.PV.value sv2.PV.value)
              | E.Sub -> mk_scalar sv1.int_ty (Z.sub sv1.PV.value sv2.PV.value)
              | E.Mul -> mk_scalar sv1.int_ty (Z.mul sv1.PV.value sv2.PV.value)
              | E.BitXor -> raise Unimplemented
              | E.BitAnd -> raise Unimplemented
              | E.BitOr -> raise Unimplemented
              | E.Lt | E.Le | E.Ge | E.Gt | E.Shl | E.Shr | E.Ne | E.Eq ->
                  raise (Failure "Unreachable")
            in
            (* [mk_scalar] fails if the result is out of range: panic *)
            match res with
            | Error _ -> Error EPanic
            | Ok sv ->
                Ok
                  {
                    V.value = V.Primitive (PV.Scalar sv);
                    ty = Integer sv1.int_ty;
                  })
        | E.Shl | E.Shr -> raise Unimplemented
        | E.Ne | E.Eq -> raise (Failure "Unreachable"))
    | _ -> raise (Failure "Invalid inputs for binop")
(** Evaluate a binary operation in concrete mode: evaluate the two operands,
    then apply the operation on the resulting concrete values. *)
let eval_binary_op_concrete (config : C.config) (binop : E.binop)
    (op1 : E.operand) (op2 : E.operand)
    (cf : (V.typed_value, eval_error) result -> m_fun) : m_fun =
  (* Once both operands are evaluated, compute and forward the result *)
  let apply_binop cf ((v1, v2) : V.typed_value * V.typed_value) =
    cf (eval_binary_op_concrete_compute binop v1 v2)
  in
  comp (eval_two_operands config op1 op2) apply_binop cf
(** Evaluate a binary operation in symbolic mode: evaluate the operands,
    check the types of the inputs, introduce a fresh symbolic value for the
    result and record the operation in the synthesized AST. *)
let eval_binary_op_symbolic (config : C.config) (binop : E.binop)
    (op1 : E.operand) (op2 : E.operand)
    (cf : (V.typed_value, eval_error) result -> m_fun) : m_fun =
 fun ctx ->
  (* Evaluate the operands *)
  let eval_ops = eval_two_operands config op1 op2 in
  (* Compute the result of applying the binop *)
  let compute cf ((v1, v2) : V.typed_value * V.typed_value) : m_fun =
   fun ctx ->
    (* Generate a fresh symbolic value to store the result *)
    let res_sv_id = C.fresh_symbolic_value_id () in
    let res_sv_ty =
      if binop = Eq || binop = Ne then (
        (* Equality operations *)
        assert (v1.ty = v2.ty);
        (* Equality/inequality check is primitive only for a subset of types *)
        assert (ty_is_primitively_copyable v1.ty);
        T.Bool)
      else
        (* Other operations: input types are integers *)
        match (v1.V.ty, v2.V.ty) with
        | T.Integer int_ty1, T.Integer int_ty2 -> (
            match binop with
            | E.Lt | E.Le | E.Ge | E.Gt ->
                assert (int_ty1 = int_ty2);
                T.Bool
            | E.Div | E.Rem | E.Add | E.Sub | E.Mul | E.BitXor | E.BitAnd
            | E.BitOr ->
                assert (int_ty1 = int_ty2);
                T.Integer int_ty1
            | E.Shl | E.Shr -> raise Unimplemented
            | E.Ne | E.Eq -> raise (Failure "Unreachable"))
        | _ -> raise (Failure "Invalid inputs for binop")
    in
    let res_sv =
      { V.sv_kind = V.FunCallRet; V.sv_id = res_sv_id; sv_ty = res_sv_ty }
    in
    (* Call the continuation *)
    let v = mk_typed_value_from_symbolic_value res_sv in
    let expr = cf (Ok v) ctx in
    (* Synthesize the symbolic AST *)
    let p1 = S.mk_opt_place_from_op op1 ctx in
    let p2 = S.mk_opt_place_from_op op2 ctx in
    S.synthesize_binary_op ctx binop v1 p1 v2 p2 res_sv None expr
  in
  (* Compose and apply *)
  comp eval_ops compute cf ctx
(** Evaluate a binary operation, dispatching on the interpreter mode. *)
let eval_binary_op (config : C.config) (binop : E.binop) (op1 : E.operand)
    (op2 : E.operand) (cf : (V.typed_value, eval_error) result -> m_fun) : m_fun
    =
  let eval =
    match config.mode with
    | C.ConcreteMode -> eval_binary_op_concrete
    | C.SymbolicMode -> eval_binary_op_symbolic
  in
  eval config binop op1 op2 cf
(** Evaluate an rvalue which borrows a place (shared, two-phase mutable,
    shallow or mutable borrow): update the borrowed value in the context and
    pass the fresh borrow value to the continuation. *)
let eval_rvalue_ref (config : C.config) (p : E.place) (bkind : E.borrow_kind)
    (cf : V.typed_value -> m_fun) : m_fun =
 fun ctx ->
  match bkind with
  | E.Shared | E.TwoPhaseMut | E.Shallow ->
      (* **REMARK**: we initially treated shallow borrows like shared borrows.
         In practice this restricted the behaviour too much, so for now we
         forbid them.
      *)
      assert (bkind <> E.Shallow);
      (* Access the value *)
      let access =
        match bkind with
        | E.Shared | E.Shallow -> Read
        | E.TwoPhaseMut -> Write
        | _ -> raise (Failure "Unreachable")
      in
      let expand_prim_copy = false in
      let prepare =
        access_rplace_reorganize_and_read config expand_prim_copy access p
      in
      (* Evaluate the borrowing operation *)
      let eval (cf : V.typed_value -> m_fun) (v : V.typed_value) : m_fun =
       fun ctx ->
        (* Generate the fresh borrow id *)
        let bid = C.fresh_borrow_id () in
        (* Compute the loan value, with which to replace the value at place p *)
        let nv =
          match v.V.value with
          | V.Loan (V.SharedLoan (bids, sv)) ->
              (* Shared loan: insert the new borrow id *)
              let bids1 = V.BorrowId.Set.add bid bids in
              { v with V.value = V.Loan (V.SharedLoan (bids1, sv)) }
          | _ ->
              (* Not a shared loan: add a wrapper *)
              let v' =
                V.Loan (V.SharedLoan (V.BorrowId.Set.singleton bid, v))
              in
              { v with V.value = v' }
        in
        (* Update the borrowed value in the context *)
        let ctx = write_place access p nv ctx in
        (* Compute the rvalue - simply a shared borrow with the fresh id.
         * Note that the reference is *mutable* if we do a two-phase borrow *)
        let ref_kind =
          match bkind with
          | E.Shared | E.Shallow -> T.Shared
          | E.TwoPhaseMut -> T.Mut
          | _ -> raise (Failure "Unreachable")
        in
        let rv_ty = T.Ref (T.Erased, v.ty, ref_kind) in
        let bc =
          match bkind with
          | E.Shared | E.Shallow ->
              (* See the remark at the beginning of the match branch: we
                 handle shallow borrows like shared borrows *)
              V.SharedBorrow bid
          | E.TwoPhaseMut -> V.ReservedMutBorrow bid
          | _ -> raise (Failure "Unreachable")
        in
        let rv : V.typed_value = { V.value = V.Borrow bc; ty = rv_ty } in
        (* Continue *)
        cf rv ctx
      in
      (* Compose and apply *)
      comp prepare eval cf ctx
  | E.Mut ->
      (* Access the value *)
      let access = Write in
      let expand_prim_copy = false in
      let prepare =
        access_rplace_reorganize_and_read config expand_prim_copy access p
      in
      (* Evaluate the borrowing operation *)
      let eval (cf : V.typed_value -> m_fun) (v : V.typed_value) : m_fun =
       fun ctx ->
        (* Compute the rvalue - wrap the value in a mutable borrow with a fresh id *)
        let bid = C.fresh_borrow_id () in
        let rv_ty = T.Ref (T.Erased, v.ty, Mut) in
        let rv : V.typed_value =
          { V.value = V.Borrow (V.MutBorrow (bid, v)); ty = rv_ty }
        in
        (* Compute the value with which to replace the value at place p *)
        let nv = { v with V.value = V.Loan (V.MutLoan bid) } in
        (* Update the value in the context *)
        let ctx = write_place access p nv ctx in
        (* Continue *)
        cf rv ctx
      in
      (* Compose and apply *)
      comp prepare eval cf ctx
(** Evaluate an aggregate rvalue (tuple, Option, user-defined ADT): evaluate
    the field operands, then build the aggregated value. *)
let eval_rvalue_aggregate (config : C.config)
    (aggregate_kind : E.aggregate_kind) (ops : E.operand list)
    (cf : V.typed_value -> m_fun) : m_fun =
  (* Evaluate the operands *)
  let eval_ops = eval_operands config ops in
  (* Compute the value *)
  let compute (cf : V.typed_value -> m_fun) (values : V.typed_value list) :
      m_fun =
   fun ctx ->
    (* Match on the aggregate kind *)
    match aggregate_kind with
    | E.AggregatedTuple ->
        let tys = List.map (fun (v : V.typed_value) -> v.V.ty) values in
        let v = V.Adt { variant_id = None; field_values = values } in
        let ty = T.Adt (T.Tuple, [], tys) in
        let aggregated : V.typed_value = { V.value = v; ty } in
        (* Call the continuation *)
        cf aggregated ctx
    | E.AggregatedOption (variant_id, ty) ->
        (* Sanity check: [None] has no field, [Some] has exactly one *)
        if variant_id = T.option_none_id then assert (values = [])
        else if variant_id = T.option_some_id then
          assert (List.length values = 1)
        else raise (Failure "Unreachable");
        (* Construct the value *)
        let aty = T.Adt (T.Assumed T.Option, [], [ ty ]) in
        let av : V.adt_value =
          { V.variant_id = Some variant_id; V.field_values = values }
        in
        let aggregated : V.typed_value = { V.value = Adt av; ty = aty } in
        (* Call the continuation *)
        cf aggregated ctx
    | E.AggregatedAdt (def_id, opt_variant_id, regions, types) ->
        (* Sanity checks: the region arguments and field types must be
           consistent with the type declaration *)
        let type_decl = C.ctx_lookup_type_decl ctx def_id in
        assert (List.length type_decl.region_params = List.length regions);
        let expected_field_types =
          Subst.ctx_adt_get_instantiated_field_etypes ctx def_id opt_variant_id
            types
        in
        assert (
          expected_field_types
          = List.map (fun (v : V.typed_value) -> v.V.ty) values);
        (* Construct the value *)
        let av : V.adt_value =
          { V.variant_id = opt_variant_id; V.field_values = values }
        in
        let aty = T.Adt (T.AdtId def_id, regions, types) in
        let aggregated : V.typed_value = { V.value = Adt av; ty = aty } in
        (* Call the continuation *)
        cf aggregated ctx
  in
  (* Compose and apply *)
  comp eval_ops compute cf
(** Evaluate an rvalue which is not a global access.

    The continuation receives [Error EPanic] if the evaluation panics (this
    can only happen for unary/binary operations), and [Ok v] otherwise. *)
let eval_rvalue_not_global (config : C.config) (rvalue : E.rvalue)
    (cf : (V.typed_value, eval_error) result -> m_fun) : m_fun =
 fun ctx ->
  log#ldebug (lazy "eval_rvalue");
  (* Helper: lift a continuation over plain values to one over results, for
     the evaluations which can not fail *)
  let ok_wrap (cf : (V.typed_value, eval_error) result -> m_fun)
      (v : V.typed_value) : m_fun =
    cf (Ok v)
  in
  let with_ok f = comp f ok_wrap cf in
  (* Dispatch on the shape of the rvalue *)
  match rvalue with
  | E.Use op -> with_ok (eval_operand config op) ctx
  | E.Ref (p, bkind) -> with_ok (eval_rvalue_ref config p bkind) ctx
  | E.UnaryOp (unop, op) -> eval_unary_op config unop op cf ctx
  | E.BinaryOp (binop, op1, op2) -> eval_binary_op config binop op1 op2 cf ctx
  | E.Aggregate (aggregate_kind, ops) ->
      with_ok (eval_rvalue_aggregate config aggregate_kind ops) ctx
  | E.Discriminant _ ->
      raise
        (Failure
           "Unreachable: discriminant reads should have been eliminated from \
            the AST")
  | E.Global _ -> raise (Failure "Unreachable")
(** Evaluate a fake read: access the place (reorganizing the environment if
    necessary) and check that the value it contains is not bottom. *)
let eval_fake_read (config : C.config) (p : E.place) : cm_fun =
 fun cf ctx ->
  (* Read the place, without expanding primitively copyable values *)
  let prepare cf =
    access_rplace_reorganize_and_read config false Read p cf
  in
  (* Sanity check on the value which was read, then continue *)
  let check cf v : m_fun =
   fun ctx ->
    assert (not (bottom_in_value ctx.ended_regions v));
    cf ctx
  in
  comp prepare check cf ctx
| null | https://raw.githubusercontent.com/AeneasVerif/aeneas/1535b37ae84ddb1d2679c19b8fcc734351e5ce5d/compiler/InterpreterExpressions.ml | ocaml | * The local logger
* As long as there are symbolic values at a given place (potentially in subvalues)
which contain borrows and are primitively copyable, expand them.
We use this function before copying values.
Note that the place should have been prepared so that there are no remaining
loans.
Small helper
Apply
Check that there are no bottoms in the value
Check that there are no reserved borrows in the value
Call the continuation
Make sure we can evaluate the path
End the proper loans at the place itself
Expand the copyable values which contain borrows (which are necessarily shared
* borrows)
Read the place - note that this checks that the value doesn't contain bottoms
Compose
* Convert an operand constant operand value to a typed value
Check the type while converting - we actually need some information
* contained in the type
Check the type and the ranges
Remaining cases (invalid)
Remark: at some point we rewrote this function to use iterators, but then
* we reverted the changes: the result was less clear actually. In particular,
* the fact that we have exhaustive matches below makes very obvious the cases
* in which we need to fail
Sanity check
Ok
We can only copy shared borrows
We need to create a new borrow id for the copied borrow, and
* update the context accordingly
We can only copy shared loans
We don't copy the shared loan: only the shared value inside
We can copy only if the type is "primitively" copyable.
* Note that in the general case, copy is a trait: copying values
* thus requires calling the proper function. Here, we copy values
* for very simple types such as integers, shared borrows, etc.
No need to reorganize the context
Access the value
Expand the symbolic values, if necessary
Access the value
Apply
* Evaluate an operand, without reorganizing the context before
Debug
Evaluate
Access the value
Copy the value
Sanity checks
Actually perform the copy
Continue
Compose and apply
Access the value
Move the value
Check that there are no bottoms in the value we are about to move
Compose and apply
Debug
We reorganize the context, then evaluate the operand
* Small utility.
See [prepare_eval_operand_reorganize].
* Evaluate several operands.
Prepare the operands
Evaluate the operands
Compose and apply
Evaluate the operand
Apply the unop
Evaluate the operand
Generate a fresh symbolic value to store the result
Call the continuation
Compose and apply
* Small helper for [eval_binary_op_concrete]: computes the result of applying
the binop *after* the operands have been successfully evaluated
Equality operations
Equality/inequality check is primitive only for a subset of types
For the non-equality operations, the input values are necessarily scalars
See []
Evaluate the operands
Compute the result of the binop
Compose and apply
Evaluate the operands
Compute the result of applying the binop
Generate a fresh symbolic value to store the result
Equality operations
Equality/inequality check is primitive only for a subset of types
Other operations: input types are integers
Call the continuattion
Compose and apply
**REMARK**: we initially treated shallow borrows like shared borrows.
In practice this restricted the behaviour too much, so for now we
forbid them.
Access the value
Evaluate the borrowing operation
Generate the fresh borrow id
Compute the loan value, with which to replace the value at place p
Shared loan: insert the new borrow id
Not a shared loan: add a wrapper
Update the borrowed value in the context
See the remark at the beginning of the match branch: we
handle shallow borrows like shared borrows
Continue
Compose and apply
Access the value
Evaluate the borrowing operation
Compute the rvalue - wrap the value in a mutable borrow with a fresh id
Compute the value with which to replace the value at place p
Update the value in the context
Continue
Compose and apply
Evaluate the operands
Compute the value
Match on the aggregate kind
Call the continuation
Sanity check
Construt the value
Call the continuation
Sanity checks
Call the continuation
Compose and apply
Small helpers
Delegate to the proper auxiliary function | module T = Types
module PV = PrimitiveValues
module V = Values
module LA = LlbcAst
open Scalars
module E = Expressions
open Utils
module C = Contexts
module Subst = Substitute
module L = Logging
open TypesUtils
open ValuesUtils
module Inv = Invariants
module S = SynthesizeSymbolic
open Cps
open InterpreterUtils
open InterpreterExpansion
open InterpreterPaths
(** The local logger *)
let log = L.expressions_log
(** As long as there are symbolic values at a given place (potentially in
    subvalues) which contain borrows and are primitively copyable, expand them.

    We use this function before copying values.

    Note that the place should have been prepared so that there are no
    remaining loans. *)
let expand_primitively_copyable_at_place (config : C.config)
    (access : access_kind) (p : E.place) : cm_fun =
 fun cf ctx ->
  (* Small helper: expand one symbolic value at a time, and recurse until
     there is nothing left to expand *)
  let rec expand : cm_fun =
   fun cf ctx ->
    let v = read_place access p ctx in
    match
      find_first_primitively_copyable_sv_with_borrows
        ctx.type_context.type_infos v
    with
    | None -> cf ctx
    | Some sv ->
        let cc =
          expand_symbolic_value_no_branching config sv
            (Some (S.mk_mplace p ctx))
        in
        comp cc expand cf ctx
  in
  (* Apply *)
  expand cf ctx
(** Read a place (CPS-style function).

    We also check that the value *doesn't contain bottoms or reserved
    borrows*. *)
let read_place (access : access_kind) (p : E.place)
    (cf : V.typed_value -> m_fun) : m_fun =
 fun ctx ->
  let v = read_place access p ctx in
  (* Check that there are no bottoms in the value *)
  assert (not (bottom_in_value ctx.ended_regions v));
  (* Check that there are no reserved borrows in the value *)
  assert (not (reserved_in_value v));
  (* Call the continuation *)
  cf v ctx
(** Reorganize the environment to make the place [p] accessible, then read
    the value at [p] and pass it to the continuation. *)
let access_rplace_reorganize_and_read (config : C.config)
    (expand_prim_copy : bool) (access : access_kind) (p : E.place)
    (cf : V.typed_value -> m_fun) : m_fun =
 fun ctx ->
  (* Make sure we can evaluate the path *)
  let cc = update_ctx_along_read_place config access p in
  (* End the proper loans at the place itself *)
  let cc = comp cc (end_loans_at_place config access p) in
  (* Expand the copyable values which contain borrows (which are necessarily
   * shared borrows) *)
  let cc =
    if expand_prim_copy then
      comp cc (expand_primitively_copyable_at_place config access p)
    else cc
  in
  (* Read the place - note that this checks that the value doesn't contain
     bottoms *)
  let read_place = read_place access p in
  (* Compose *)
  comp cc read_place cf ctx
(** Same as [access_rplace_reorganize_and_read], but simply reorganizes the
    environment: the value which was read is dropped. *)
let access_rplace_reorganize (config : C.config) (expand_prim_copy : bool)
    (access : access_kind) (p : E.place) : cm_fun =
 fun cf ctx ->
  access_rplace_reorganize_and_read config expand_prim_copy access p
    (fun _v -> cf)
    ctx
(** Convert an operand constant operand value to a typed value *)
let primitive_to_typed_value (ty : T.ety) (cv : V.primitive_value) :
    V.typed_value =
  log#ldebug
    (lazy
      ("primitive_to_typed_value:" ^ "\n- cv: "
      ^ Print.PrimitiveValues.primitive_value_to_string cv));
  (* Check the type while converting - we actually need some information
   * contained in the type *)
  match (ty, cv) with
  (* Scalar, boolean... *)
  | T.Bool, Bool v -> { V.value = V.Primitive (Bool v); ty }
  | T.Char, Char v -> { V.value = V.Primitive (Char v); ty }
  | T.Str, String v -> { V.value = V.Primitive (String v); ty }
  | T.Integer int_ty, PV.Scalar v ->
      (* Check the type and the ranges *)
      assert (int_ty = v.int_ty);
      assert (check_scalar_value_in_range v);
      { V.value = V.Primitive (PV.Scalar v); ty }
  (* Remaining cases (invalid) *)
  | _, _ -> raise (Failure "Improperly typed constant value")
(** Copy a value, and return the resulting value.

    Note that copying values might update the context. For instance, when
    copying shared borrows, we need to insert new shared borrows in the
    context.

    Also, this function is actually more general than it should be: it can be
    used to copy concrete ADT values, while ADT copy should be done through
    the Copy trait (i.e., by calling a dedicated function). This is why we
    added a parameter to control this copy ([allow_adt_copy]). Note that here
    by ADT we mean the user-defined ADTs (not tuples or assumed types). *)
let rec copy_value (allow_adt_copy : bool) (config : C.config)
    (ctx : C.eval_ctx) (v : V.typed_value) : C.eval_ctx * V.typed_value =
  log#ldebug
    (lazy
      ("copy_value: "
      ^ typed_value_to_string ctx v
      ^ "\n- context:\n" ^ eval_ctx_to_string ctx));
  match v.V.value with
  | V.Primitive _ -> (ctx, v)
  | V.Adt av ->
      (* Sanity check: user-defined ADTs may only be copied when explicitly
         allowed, and assumed types other than Option can not be copied *)
      (match v.V.ty with
      | T.Adt (T.Assumed (T.Box | Vec), _, _) ->
          raise (Failure "Can't copy an assumed value other than Option")
      | T.Adt (T.AdtId _, _, _) -> assert allow_adt_copy
      | _ -> raise (Failure "Unreachable"));
      (* Ok: copy the fields one by one, threading the context *)
      let ctx, fields =
        List.fold_left_map
          (copy_value allow_adt_copy config)
          ctx av.field_values
      in
      (ctx, { v with V.value = V.Adt { av with field_values = fields } })
  | V.Bottom -> raise (Failure "Can't copy ⊥")
  | V.Borrow bc -> (
      (* We can only copy shared borrows *)
      match bc with
      | SharedBorrow bid ->
          (* We need to create a new borrow id for the copied borrow, and
           * update the context accordingly *)
          let bid' = C.fresh_borrow_id () in
          let ctx = InterpreterBorrows.reborrow_shared bid bid' ctx in
          (ctx, { v with V.value = V.Borrow (SharedBorrow bid') })
      | MutBorrow (_, _) -> raise (Failure "Can't copy a mutable borrow")
      | V.ReservedMutBorrow _ ->
          raise (Failure "Can't copy a reserved mut borrow"))
  | V.Loan lc -> (
      (* We can only copy shared loans *)
      match lc with
      | V.MutLoan _ -> raise (Failure "Can't copy a mutable loan")
      | V.SharedLoan (_, sv) ->
          (* We don't copy the shared loan: only the shared value inside *)
          copy_value allow_adt_copy config ctx sv)
  | V.Symbolic sp ->
      (* We can copy only if the type is "primitively" copyable.
       * Note that in the general case, copy is a trait: copying values
       * thus requires calling the proper function. Here, we copy values
       * for very simple types such as integers, shared borrows, etc. *)
      assert (ty_is_primitively_copyable (Subst.erase_regions sp.V.sv_ty));
      (* If the type is copyable, we simply return the current value. Side
       * remark: what is important to look at when copying symbolic values
       * is symbolic expansion. The important subcase is the expansion of
       * shared borrows: when doing so, every occurrence of the same symbolic
       * value must use a fresh borrow id. *)
      (ctx, v)
(** Reorganize the environment in preparation for the evaluation of an
    operand.

    Evaluating an operand requires reorganizing the environment to get access
    to a given place (by ending borrows, expanding symbolic values...) then
    applying the operand operation (move, copy, etc.).

    Sometimes, we want to decouple the two operations.
    Consider the following example:
    {[
      context = {
        x -> shared_borrow l0
        y -> shared_loan {l0} v
      }

      dest <- f(move x, move y);
      ...
    ]}
    Because of the way {!end_borrow} is implemented, when giving back the
    borrow [l0] upon evaluating [move y], we won't notice that
    [shared_borrow l0] has disappeared from the environment (it has been
    moved and not assigned yet, and so is hanging in "thin air").

    By first "preparing" the operands evaluation, we make sure no such thing
    happens. To be more precise, we make sure all the updates to borrows
    triggered by access *and* move operations have already been applied.

    Rk.: in the formalization, we always have an explicit "reorganization"
    step in the rule premises, before the actual operand evaluation, that
    allows to reorganize the environment so that it satisfies the proper
    conditions. This function's role is to do the reorganization.

    Rk.: doing this is actually not completely necessary because when
    generating MIR, rustc introduces intermediate assignments for all the
    function parameters. Still, it is better for soundness purposes, and
    corresponds to what we do in the formalization (because we don't enforce
    the same constraints as MIR in the formalization). *)
let prepare_eval_operand_reorganize (config : C.config) (op : E.operand) :
    cm_fun =
 fun cf ctx ->
  let prepare : cm_fun =
   fun cf ctx ->
    match op with
    | Expressions.Constant (ty, cv) ->
        (* No need to reorganize the context - the conversion only performs
           sanity checks on the constant *)
        primitive_to_typed_value ty cv |> ignore;
        cf ctx
    | Expressions.Copy p ->
        (* Access the value *)
        let access = Read in
        (* Expand the symbolic values, if necessary *)
        let expand_prim_copy = true in
        access_rplace_reorganize config expand_prim_copy access p cf ctx
    | Expressions.Move p ->
        (* Access the value *)
        let access = Move in
        let expand_prim_copy = false in
        access_rplace_reorganize config expand_prim_copy access p cf ctx
  in
  (* Apply *)
  prepare cf ctx
(** Evaluate an operand, without reorganizing the context before *)
let eval_operand_no_reorganize (config : C.config) (op : E.operand)
    (cf : V.typed_value -> m_fun) : m_fun =
 fun ctx ->
  (* Debug *)
  log#ldebug
    (lazy
      ("eval_operand_no_reorganize: op: " ^ operand_to_string ctx op
     ^ "\n- ctx:\n" ^ eval_ctx_to_string ctx ^ "\n"));
  (* Evaluate *)
  match op with
  | Expressions.Constant (ty, cv) -> cf (primitive_to_typed_value ty cv) ctx
  | Expressions.Copy p ->
      (* Access the value *)
      let access = Read in
      let cc = read_place access p in
      (* Copy the value *)
      let copy cf v : m_fun =
       fun ctx ->
        (* Sanity checks *)
        assert (not (bottom_in_value ctx.ended_regions v));
        assert (
          Option.is_none
            (find_first_primitively_copyable_sv_with_borrows
               ctx.type_context.type_infos v));
        (* Actually perform the copy *)
        let allow_adt_copy = false in
        let ctx, v = copy_value allow_adt_copy config ctx v in
        (* Continue *)
        cf v ctx
      in
      (* Compose and apply *)
      comp cc copy cf ctx
  | Expressions.Move p ->
      (* Access the value *)
      let access = Move in
      let cc = read_place access p in
      (* Move the value: replace it with bottom at place [p] *)
      let move cf v : m_fun =
       fun ctx ->
        (* Check that there are no bottoms in the value we are about to move *)
        assert (not (bottom_in_value ctx.ended_regions v));
        let bottom : V.typed_value = { V.value = Bottom; ty = v.ty } in
        let ctx = write_place access p bottom ctx in
        cf v ctx
      in
      (* Compose and apply *)
      comp cc move cf ctx
(** Evaluate an operand.

    We first reorganize the context (to end loans, expand symbolic values,
    etc.), then evaluate the operand proper. *)
let eval_operand (config : C.config) (op : E.operand)
    (cf : V.typed_value -> m_fun) : m_fun =
 fun ctx ->
  (* Debug *)
  log#ldebug
    (lazy
      ("eval_operand: op: " ^ operand_to_string ctx op ^ "\n- ctx:\n"
     ^ eval_ctx_to_string ctx ^ "\n"));
  (* Reorganize, then evaluate *)
  let reorganize = prepare_eval_operand_reorganize config op in
  let eval = eval_operand_no_reorganize config op in
  comp reorganize eval cf ctx
(** Small utility: reorganize the context in preparation for the evaluation
    of a list of operands (see [prepare_eval_operand_reorganize]). *)
let prepare_eval_operands_reorganize (config : C.config) (ops : E.operand list)
    : cm_fun =
  fold_left_apply_continuation (prepare_eval_operand_reorganize config) ops
(** Evaluate several operands.

    All the contexts are reorganized first, then the operands are evaluated
    one after the other. *)
let eval_operands (config : C.config) (ops : E.operand list)
    (cf : V.typed_value list -> m_fun) : m_fun =
 fun ctx ->
  comp
    (prepare_eval_operands_reorganize config ops)
    (fold_left_list_apply_continuation (eval_operand_no_reorganize config) ops)
    cf ctx
(** Specialization of [eval_operands] for exactly two operands: the
    continuation receives a pair instead of a list. *)
let eval_two_operands (config : C.config) (op1 : E.operand) (op2 : E.operand)
    (cf : V.typed_value * V.typed_value -> m_fun) : m_fun =
  (* Turn the two-element result list into a pair *)
  let to_pair cf = function
    | [ v1; v2 ] -> cf (v1, v2)
    | _ -> raise (Failure "Unreachable")
  in
  comp (eval_operands config [ op1; op2 ]) to_pair cf
(** Evaluate a unary operation in concrete mode: evaluate the operand, then
    apply the unop on the resulting concrete value. Out-of-range results
    (detected by [mk_scalar]) are reported as [Error EPanic]. *)
let eval_unary_op_concrete (config : C.config) (unop : E.unop) (op : E.operand)
    (cf : (V.typed_value, eval_error) result -> m_fun) : m_fun =
  (* Evaluate the operand *)
  let eval_op = eval_operand config op in
  (* Apply the unop *)
  let apply cf (v : V.typed_value) : m_fun =
    match (unop, v.V.value) with
    | E.Not, V.Primitive (Bool b) ->
        cf (Ok { v with V.value = V.Primitive (Bool (not b)) })
    | E.Neg, V.Primitive (PV.Scalar sv) -> (
        let i = Z.neg sv.PV.value in
        match mk_scalar sv.int_ty i with
        | Error _ -> cf (Error EPanic)
        | Ok sv -> cf (Ok { v with V.value = V.Primitive (PV.Scalar sv) }))
    | E.Cast (src_ty, tgt_ty), V.Primitive (PV.Scalar sv) -> (
        (* Cast between integer types *)
        assert (src_ty = sv.int_ty);
        let i = sv.PV.value in
        match mk_scalar tgt_ty i with
        | Error _ -> cf (Error EPanic)
        | Ok sv ->
            let ty = T.Integer tgt_ty in
            let value = V.Primitive (PV.Scalar sv) in
            cf (Ok { V.ty; value }))
    | _ -> raise (Failure "Invalid input for unop")
  in
  comp eval_op apply cf
(** Evaluate a unary operation in symbolic mode: evaluate the operand,
    introduce a fresh symbolic value for the result and record the operation
    in the synthesized AST. *)
let eval_unary_op_symbolic (config : C.config) (unop : E.unop) (op : E.operand)
    (cf : (V.typed_value, eval_error) result -> m_fun) : m_fun =
 fun ctx ->
  (* Evaluate the operand *)
  let eval_op = eval_operand config op in
  let apply cf (v : V.typed_value) : m_fun =
   fun ctx ->
    (* Generate a fresh symbolic value to store the result *)
    let res_sv_id = C.fresh_symbolic_value_id () in
    let res_sv_ty =
      match (unop, v.V.ty) with
      | E.Not, T.Bool -> T.Bool
      | E.Neg, T.Integer int_ty -> T.Integer int_ty
      | E.Cast (_, tgt_ty), _ -> T.Integer tgt_ty
      | _ -> raise (Failure "Invalid input for unop")
    in
    let res_sv =
      { V.sv_kind = V.FunCallRet; V.sv_id = res_sv_id; sv_ty = res_sv_ty }
    in
    (* Call the continuation *)
    let expr = cf (Ok (mk_typed_value_from_symbolic_value res_sv)) ctx in
    (* Synthesize the symbolic AST *)
    S.synthesize_unary_op ctx unop v
      (S.mk_opt_place_from_op op ctx)
      res_sv None expr
  in
  (* Compose and apply *)
  comp eval_op apply cf ctx
(** Evaluate a unary operation, dispatching on the interpreter mode. *)
let eval_unary_op (config : C.config) (unop : E.unop) (op : E.operand)
    (cf : (V.typed_value, eval_error) result -> m_fun) : m_fun =
  let eval =
    match config.mode with
    | C.ConcreteMode -> eval_unary_op_concrete
    | C.SymbolicMode -> eval_unary_op_symbolic
  in
  eval config unop op cf
(** Small helper for [eval_binary_op_concrete]: computes the result of applying
    the binop *after* the operands have been successfully evaluated.

    Returns [Error EPanic] when the operation panics at run-time (division or
    remainder by zero, or an out-of-range result detected by [mk_scalar]). *)
let eval_binary_op_concrete_compute (binop : E.binop) (v1 : V.typed_value)
    (v2 : V.typed_value) : (V.typed_value, eval_error) result =
  (* Equality check binops (Eq, Ne) accept values from a wide variety of types.
   * The remaining binops only operate on scalars. *)
  if binop = Eq || binop = Ne then (
    (* Equality operations *)
    assert (v1.ty = v2.ty);
    (* Equality/inequality check is primitive only for a subset of types *)
    assert (ty_is_primitively_copyable v1.ty);
    let b = v1 = v2 in
    Ok { V.value = V.Primitive (Bool b); ty = T.Bool })
  else
    (* For the non-equality operations, the input values are necessarily scalars *)
    match (v1.V.value, v2.V.value) with
    | V.Primitive (PV.Scalar sv1), V.Primitive (PV.Scalar sv2) -> (
        (* There are binops which require the two operands to have the same
           type, and binops for which it is not the case.
           There are also binops which return booleans, and binops which
           return integers. *)
        match binop with
        | E.Lt | E.Le | E.Ge | E.Gt ->
            (* The two operands must have the same type and the result is a
               boolean *)
            assert (sv1.int_ty = sv2.int_ty);
            let b =
              match binop with
              | E.Lt -> Z.lt sv1.PV.value sv2.PV.value
              | E.Le -> Z.leq sv1.PV.value sv2.PV.value
              | E.Ge -> Z.geq sv1.PV.value sv2.PV.value
              | E.Gt -> Z.gt sv1.PV.value sv2.PV.value
              | E.Div | E.Rem | E.Add | E.Sub | E.Mul | E.BitXor | E.BitAnd
              | E.BitOr | E.Shl | E.Shr | E.Ne | E.Eq ->
                  raise (Failure "Unreachable")
            in
            Ok ({ V.value = V.Primitive (Bool b); ty = T.Bool } : V.typed_value)
        | E.Div | E.Rem | E.Add | E.Sub | E.Mul | E.BitXor | E.BitAnd | E.BitOr
          -> (
            (* The two operands must have the same type and the result is an
               integer *)
            assert (sv1.int_ty = sv2.int_ty);
            let res =
              match binop with
              | E.Div ->
                  if sv2.PV.value = Z.zero then Error ()
                  else mk_scalar sv1.int_ty (Z.div sv1.PV.value sv2.PV.value)
              | E.Rem ->
                  (* [Z.rem] truncates towards zero (the sign of the result is
                     the sign of the dividend), which matches the semantics of
                     Rust's [%] operator *)
                  if sv2.PV.value = Z.zero then Error ()
                  else mk_scalar sv1.int_ty (Z.rem sv1.PV.value sv2.PV.value)
              | E.Add -> mk_scalar sv1.int_ty (Z.add sv1.PV.value sv2.PV.value)
              | E.Sub -> mk_scalar sv1.int_ty (Z.sub sv1.PV.value sv2.PV.value)
              | E.Mul -> mk_scalar sv1.int_ty (Z.mul sv1.PV.value sv2.PV.value)
              | E.BitXor -> raise Unimplemented
              | E.BitAnd -> raise Unimplemented
              | E.BitOr -> raise Unimplemented
              | E.Lt | E.Le | E.Ge | E.Gt | E.Shl | E.Shr | E.Ne | E.Eq ->
                  raise (Failure "Unreachable")
            in
            (* [mk_scalar] fails if the result is out of range: panic *)
            match res with
            | Error _ -> Error EPanic
            | Ok sv ->
                Ok
                  {
                    V.value = V.Primitive (PV.Scalar sv);
                    ty = Integer sv1.int_ty;
                  })
        | E.Shl | E.Shr -> raise Unimplemented
        | E.Ne | E.Eq -> raise (Failure "Unreachable"))
    | _ -> raise (Failure "Invalid inputs for binop")
(** Evaluate a binary operation in concrete mode: evaluate the two operands,
    then apply the operation on the resulting concrete values. *)
let eval_binary_op_concrete (config : C.config) (binop : E.binop)
    (op1 : E.operand) (op2 : E.operand)
    (cf : (V.typed_value, eval_error) result -> m_fun) : m_fun =
  (* Once both operands are evaluated, compute and forward the result *)
  let apply_binop cf ((v1, v2) : V.typed_value * V.typed_value) =
    cf (eval_binary_op_concrete_compute binop v1 v2)
  in
  comp (eval_two_operands config op1 op2) apply_binop cf
(** Evaluate a binary operation in symbolic mode: evaluate the operands,
    check the types of the inputs, introduce a fresh symbolic value for the
    result and record the operation in the synthesized AST. *)
let eval_binary_op_symbolic (config : C.config) (binop : E.binop)
    (op1 : E.operand) (op2 : E.operand)
    (cf : (V.typed_value, eval_error) result -> m_fun) : m_fun =
 fun ctx ->
  (* Evaluate the operands *)
  let eval_ops = eval_two_operands config op1 op2 in
  (* Compute the result of applying the binop *)
  let compute cf ((v1, v2) : V.typed_value * V.typed_value) : m_fun =
   fun ctx ->
    (* Generate a fresh symbolic value to store the result *)
    let res_sv_id = C.fresh_symbolic_value_id () in
    let res_sv_ty =
      if binop = Eq || binop = Ne then (
        (* Equality operations *)
        assert (v1.ty = v2.ty);
        (* Equality/inequality check is primitive only for a subset of types *)
        assert (ty_is_primitively_copyable v1.ty);
        T.Bool)
      else
        (* Other operations: input types are integers *)
        match (v1.V.ty, v2.V.ty) with
        | T.Integer int_ty1, T.Integer int_ty2 -> (
            match binop with
            | E.Lt | E.Le | E.Ge | E.Gt ->
                assert (int_ty1 = int_ty2);
                T.Bool
            | E.Div | E.Rem | E.Add | E.Sub | E.Mul | E.BitXor | E.BitAnd
            | E.BitOr ->
                assert (int_ty1 = int_ty2);
                T.Integer int_ty1
            | E.Shl | E.Shr -> raise Unimplemented
            | E.Ne | E.Eq -> raise (Failure "Unreachable"))
        | _ -> raise (Failure "Invalid inputs for binop")
    in
    let res_sv =
      { V.sv_kind = V.FunCallRet; V.sv_id = res_sv_id; sv_ty = res_sv_ty }
    in
    (* Call the continuation *)
    let v = mk_typed_value_from_symbolic_value res_sv in
    let expr = cf (Ok v) ctx in
    (* Synthesize the symbolic AST *)
    let p1 = S.mk_opt_place_from_op op1 ctx in
    let p2 = S.mk_opt_place_from_op op2 ctx in
    S.synthesize_binary_op ctx binop v1 p1 v2 p2 res_sv None expr
  in
  (* Compose and apply *)
  comp eval_ops compute cf ctx
(** Evaluate a binary operation, dispatching on the interpreter mode. *)
let eval_binary_op (config : C.config) (binop : E.binop) (op1 : E.operand)
    (op2 : E.operand) (cf : (V.typed_value, eval_error) result -> m_fun) : m_fun
    =
  let eval =
    match config.mode with
    | C.ConcreteMode -> eval_binary_op_concrete
    | C.SymbolicMode -> eval_binary_op_symbolic
  in
  eval config binop op1 op2 cf
(** Evaluate an rvalue which borrows a place (shared, two-phase mutable,
    shallow or mutable borrow): update the borrowed value in the context and
    pass the fresh borrow value to the continuation. *)
let eval_rvalue_ref (config : C.config) (p : E.place) (bkind : E.borrow_kind)
    (cf : V.typed_value -> m_fun) : m_fun =
 fun ctx ->
  match bkind with
  | E.Shared | E.TwoPhaseMut | E.Shallow ->
      (* **REMARK**: we initially treated shallow borrows like shared borrows.
         In practice this restricted the behaviour too much, so for now we
         forbid them. *)
      assert (bkind <> E.Shallow);
      (* Access the value *)
      let access =
        match bkind with
        | E.Shared | E.Shallow -> Read
        | E.TwoPhaseMut -> Write
        | _ -> raise (Failure "Unreachable")
      in
      let expand_prim_copy = false in
      let prepare =
        access_rplace_reorganize_and_read config expand_prim_copy access p
      in
      (* Evaluate the borrowing operation *)
      let eval (cf : V.typed_value -> m_fun) (v : V.typed_value) : m_fun =
       fun ctx ->
        (* Generate the fresh borrow id *)
        let bid = C.fresh_borrow_id () in
        (* Compute the loan value, with which to replace the value at place p *)
        let nv =
          match v.V.value with
          | V.Loan (V.SharedLoan (bids, sv)) ->
              (* Shared loan: insert the new borrow id *)
              let bids1 = V.BorrowId.Set.add bid bids in
              { v with V.value = V.Loan (V.SharedLoan (bids1, sv)) }
          | _ ->
              (* Not a shared loan: add a wrapper *)
              let v' =
                V.Loan (V.SharedLoan (V.BorrowId.Set.singleton bid, v))
              in
              { v with V.value = v' }
        in
        (* Update the borrowed value in the context *)
        let ctx = write_place access p nv ctx in
        (* Compute the rvalue - simply a shared borrow with the fresh id.
         * Note that the reference is *mutable* if we do a two-phase borrow *)
        let ref_kind =
          match bkind with
          | E.Shared | E.Shallow -> T.Shared
          | E.TwoPhaseMut -> T.Mut
          | _ -> raise (Failure "Unreachable")
        in
        let rv_ty = T.Ref (T.Erased, v.ty, ref_kind) in
        let bc =
          match bkind with
          | E.Shared | E.Shallow ->
              (* See the remark at the beginning of the match branch: we
                 handle shallow borrows like shared borrows *)
              V.SharedBorrow bid
          | E.TwoPhaseMut -> V.ReservedMutBorrow bid
          | _ -> raise (Failure "Unreachable")
        in
        let rv : V.typed_value = { V.value = V.Borrow bc; ty = rv_ty } in
        (* Continue *)
        cf rv ctx
      in
      (* Compose and apply *)
      comp prepare eval cf ctx
  | E.Mut ->
      (* Access the value *)
      let access = Write in
      let expand_prim_copy = false in
      let prepare =
        access_rplace_reorganize_and_read config expand_prim_copy access p
      in
      (* Evaluate the borrowing operation *)
      let eval (cf : V.typed_value -> m_fun) (v : V.typed_value) : m_fun =
       fun ctx ->
        (* Compute the rvalue - wrap the value in a mutable borrow with a fresh id *)
        let bid = C.fresh_borrow_id () in
        let rv_ty = T.Ref (T.Erased, v.ty, Mut) in
        let rv : V.typed_value =
          { V.value = V.Borrow (V.MutBorrow (bid, v)); ty = rv_ty }
        in
        (* Compute the value with which to replace the value at place p *)
        let nv = { v with V.value = V.Loan (V.MutLoan bid) } in
        (* Update the value in the context *)
        let ctx = write_place access p nv ctx in
        (* Continue *)
        cf rv ctx
      in
      (* Compose and apply *)
      comp prepare eval cf ctx
(** Evaluate an aggregate rvalue: a tuple, an [Option] value, or an ADT.

    The operands [ops] are evaluated first; [compute] then assembles the
    resulting values into a single typed aggregate value and passes it to
    the continuation [cf]. For ADTs we check that the number of region
    parameters and the field types match the type declaration.

    Note: the original extraction of this file had dropped the comment
    delimiters around "Construct the value"; they are restored below. *)
let eval_rvalue_aggregate (config : C.config)
    (aggregate_kind : E.aggregate_kind) (ops : E.operand list)
    (cf : V.typed_value -> m_fun) : m_fun =
  (* Evaluate all the operands *)
  let eval_ops = eval_operands config ops in
  (* Assemble the operand values into the aggregate *)
  let compute (cf : V.typed_value -> m_fun) (values : V.typed_value list) :
      m_fun =
   fun ctx ->
    match aggregate_kind with
    | E.AggregatedTuple ->
        let tys = List.map (fun (v : V.typed_value) -> v.V.ty) values in
        let v = V.Adt { variant_id = None; field_values = values } in
        let ty = T.Adt (T.Tuple, [], tys) in
        let aggregated : V.typed_value = { V.value = v; ty } in
        cf aggregated ctx
    | E.AggregatedOption (variant_id, ty) ->
        (* Sanity check: [None] has no payload, [Some] exactly one *)
        if variant_id = T.option_none_id then assert (values = [])
        else if variant_id = T.option_some_id then
          assert (List.length values = 1)
        else raise (Failure "Unreachable");
        let aty = T.Adt (T.Assumed T.Option, [], [ ty ]) in
        let av : V.adt_value =
          { V.variant_id = Some variant_id; V.field_values = values }
        in
        let aggregated : V.typed_value = { V.value = Adt av; ty = aty } in
        cf aggregated ctx
    | E.AggregatedAdt (def_id, opt_variant_id, regions, types) ->
        (* Sanity checks against the type declaration *)
        let type_decl = C.ctx_lookup_type_decl ctx def_id in
        assert (List.length type_decl.region_params = List.length regions);
        let expected_field_types =
          Subst.ctx_adt_get_instantiated_field_etypes ctx def_id opt_variant_id
            types
        in
        assert (
          expected_field_types
          = List.map (fun (v : V.typed_value) -> v.V.ty) values);
        (* Construct the value *)
        let av : V.adt_value =
          { V.variant_id = opt_variant_id; V.field_values = values }
        in
        let aty = T.Adt (T.AdtId def_id, regions, types) in
        let aggregated : V.typed_value = { V.value = Adt av; ty = aty } in
        cf aggregated ctx
  in
  comp eval_ops compute cf
(** Evaluate an rvalue that is not a global access.

    Globals are handled elsewhere, so [E.Global] is unreachable here, as are
    discriminant reads (eliminated from the AST in an earlier pass). The
    continuation [cf] receives [Ok v] on success; only the unary/binary
    operator cases can produce an error directly. *)
let eval_rvalue_not_global (config : C.config) (rvalue : E.rvalue)
    (cf : (V.typed_value, eval_error) result -> m_fun) : m_fun =
 fun ctx ->
  log#ldebug (lazy "eval_rvalue");
  (* Lift a plain value continuation into the [result] continuation [cf] *)
  let cf_ok (cf : (V.typed_value, eval_error) result -> m_fun)
      (v : V.typed_value) : m_fun =
    cf (Ok v)
  in
  (* Compose an evaluation function with the [Ok]-wrapping continuation *)
  let with_ok f = comp f cf_ok cf in
  match rvalue with
  | E.Use op -> with_ok (eval_operand config op) ctx
  | E.Ref (p, bkind) -> with_ok (eval_rvalue_ref config p bkind) ctx
  | E.UnaryOp (unop, op) -> eval_unary_op config unop op cf ctx
  | E.BinaryOp (binop, op1, op2) -> eval_binary_op config binop op1 op2 cf ctx
  | E.Aggregate (aggregate_kind, ops) ->
      with_ok (eval_rvalue_aggregate config aggregate_kind ops) ctx
  | E.Discriminant _ ->
      raise
        (Failure
           "Unreachable: discriminant reads should have been eliminated from \
            the AST")
  | E.Global _ -> raise (Failure "Unreachable")
(** Evaluate a fake read of the place [p].

    The value is read (forcing the context reorganizations a real read
    would trigger) but then discarded; we only check that it does not
    contain [Bottom]. *)
let eval_fake_read (config : C.config) (p : E.place) : cm_fun =
 fun cf ctx ->
  let expand_prim_copy = false in
  (* Reorganize the context so that [p] is readable, then read it *)
  let prepare cf =
    access_rplace_reorganize_and_read config expand_prim_copy Read p cf
  in
  (* Sanity check on the value read: it must not contain bottom *)
  let check cf v : m_fun =
   fun ctx ->
    assert (not (bottom_in_value ctx.ended_regions v));
    cf ctx
  in
  comp prepare check cf ctx
(* ===== End of the previous source file. The following content comes from a
   different file: reasonml/reason, migrate_parsetree_500_414_migrate.ml
   (extraction artifact: the file-boundary metadata row was fused with the
   first line of code, restored below). ===== *)
open Stdlib0
(* Short aliases for the migration's source (OCaml 5.00) and target (4.14)
   AST versions *)
module From = Ast_500
module To = Ast_414
let rec copy_out_type_extension :
Ast_500.Outcometree.out_type_extension ->
Ast_414.Outcometree.out_type_extension
=
fun
{ Ast_500.Outcometree.otyext_name = otyext_name;
Ast_500.Outcometree.otyext_params = otyext_params;
Ast_500.Outcometree.otyext_constructors = otyext_constructors;
Ast_500.Outcometree.otyext_private = otyext_private }
->
{
Ast_414.Outcometree.otyext_name = otyext_name;
Ast_414.Outcometree.otyext_params =
(List.map (fun x -> x) otyext_params);
Ast_414.Outcometree.otyext_constructors =
(List.map copy_out_constructor otyext_constructors);
Ast_414.Outcometree.otyext_private = (copy_private_flag otyext_private)
}
and copy_out_phrase :
Ast_500.Outcometree.out_phrase -> Ast_414.Outcometree.out_phrase =
function
| Ast_500.Outcometree.Ophr_eval (x0, x1) ->
Ast_414.Outcometree.Ophr_eval ((copy_out_value x0), (copy_out_type x1))
| Ast_500.Outcometree.Ophr_signature x0 ->
Ast_414.Outcometree.Ophr_signature
(List.map
(fun x ->
let (x0, x1) = x in
((copy_out_sig_item x0), (Option.map copy_out_value x1))) x0)
| Ast_500.Outcometree.Ophr_exception x0 ->
Ast_414.Outcometree.Ophr_exception
(let (x0, x1) = x0 in (x0, (copy_out_value x1)))
and copy_out_sig_item :
Ast_500.Outcometree.out_sig_item -> Ast_414.Outcometree.out_sig_item =
function
| Ast_500.Outcometree.Osig_class (x0, x1, x2, x3, x4) ->
Ast_414.Outcometree.Osig_class
(x0, x1, (List.map copy_out_type_param x2), (copy_out_class_type x3),
(copy_out_rec_status x4))
| Ast_500.Outcometree.Osig_class_type (x0, x1, x2, x3, x4) ->
Ast_414.Outcometree.Osig_class_type
(x0, x1, (List.map copy_out_type_param x2), (copy_out_class_type x3),
(copy_out_rec_status x4))
| Ast_500.Outcometree.Osig_typext (x0, x1) ->
Ast_414.Outcometree.Osig_typext
((copy_out_extension_constructor x0), (copy_out_ext_status x1))
| Ast_500.Outcometree.Osig_modtype (x0, x1) ->
Ast_414.Outcometree.Osig_modtype (x0, (copy_out_module_type x1))
| Ast_500.Outcometree.Osig_module (x0, x1, x2) ->
Ast_414.Outcometree.Osig_module
(x0, (copy_out_module_type x1), (copy_out_rec_status x2))
| Ast_500.Outcometree.Osig_type (x0, x1) ->
Ast_414.Outcometree.Osig_type
((copy_out_type_decl x0), (copy_out_rec_status x1))
| Ast_500.Outcometree.Osig_value x0 ->
Ast_414.Outcometree.Osig_value (copy_out_val_decl x0)
| Ast_500.Outcometree.Osig_ellipsis -> Ast_414.Outcometree.Osig_ellipsis
and copy_out_val_decl :
Ast_500.Outcometree.out_val_decl -> Ast_414.Outcometree.out_val_decl =
fun
{ Ast_500.Outcometree.oval_name = oval_name;
Ast_500.Outcometree.oval_type = oval_type;
Ast_500.Outcometree.oval_prims = oval_prims;
Ast_500.Outcometree.oval_attributes = oval_attributes }
->
{
Ast_414.Outcometree.oval_name = oval_name;
Ast_414.Outcometree.oval_type = (copy_out_type oval_type);
Ast_414.Outcometree.oval_prims = (List.map (fun x -> x) oval_prims);
Ast_414.Outcometree.oval_attributes =
(List.map copy_out_attribute oval_attributes)
}
and copy_out_type_decl :
Ast_500.Outcometree.out_type_decl -> Ast_414.Outcometree.out_type_decl =
fun
{ Ast_500.Outcometree.otype_name = otype_name;
Ast_500.Outcometree.otype_params = otype_params;
Ast_500.Outcometree.otype_type = otype_type;
Ast_500.Outcometree.otype_private = otype_private;
Ast_500.Outcometree.otype_immediate = otype_immediate;
Ast_500.Outcometree.otype_unboxed = otype_unboxed;
Ast_500.Outcometree.otype_cstrs = otype_cstrs }
->
{
Ast_414.Outcometree.otype_name = otype_name;
Ast_414.Outcometree.otype_params =
(List.map copy_out_type_param otype_params);
Ast_414.Outcometree.otype_type = (copy_out_type otype_type);
Ast_414.Outcometree.otype_private = (copy_private_flag otype_private);
Ast_414.Outcometree.otype_immediate =
(copy_Type_immediacy_t otype_immediate);
Ast_414.Outcometree.otype_unboxed = otype_unboxed;
Ast_414.Outcometree.otype_cstrs =
(List.map
(fun x ->
let (x0, x1) = x in ((copy_out_type x0), (copy_out_type x1)))
otype_cstrs)
}
and copy_Type_immediacy_t :
Ast_500.Type_immediacy.t -> Ast_414.Type_immediacy.t =
function
| Ast_500.Type_immediacy.Unknown -> Ast_414.Type_immediacy.Unknown
| Ast_500.Type_immediacy.Always -> Ast_414.Type_immediacy.Always
| Ast_500.Type_immediacy.Always_on_64bits ->
Ast_414.Type_immediacy.Always_on_64bits
and copy_out_module_type :
Ast_500.Outcometree.out_module_type -> Ast_414.Outcometree.out_module_type
=
function
| Ast_500.Outcometree.Omty_abstract -> Ast_414.Outcometree.Omty_abstract
| Ast_500.Outcometree.Omty_functor (x0, x1) ->
Ast_414.Outcometree.Omty_functor
((Option.map
(fun x ->
let (x0, x1) = x in
((Option.map (fun x -> x) x0), (copy_out_module_type x1))) x0),
(copy_out_module_type x1))
| Ast_500.Outcometree.Omty_ident x0 ->
Ast_414.Outcometree.Omty_ident (copy_out_ident x0)
| Ast_500.Outcometree.Omty_signature x0 ->
Ast_414.Outcometree.Omty_signature (List.map copy_out_sig_item x0)
| Ast_500.Outcometree.Omty_alias x0 ->
Ast_414.Outcometree.Omty_alias (copy_out_ident x0)
and copy_out_ext_status :
Ast_500.Outcometree.out_ext_status -> Ast_414.Outcometree.out_ext_status =
function
| Ast_500.Outcometree.Oext_first -> Ast_414.Outcometree.Oext_first
| Ast_500.Outcometree.Oext_next -> Ast_414.Outcometree.Oext_next
| Ast_500.Outcometree.Oext_exception -> Ast_414.Outcometree.Oext_exception
and copy_out_extension_constructor :
Ast_500.Outcometree.out_extension_constructor ->
Ast_414.Outcometree.out_extension_constructor
=
fun
{ Ast_500.Outcometree.oext_name = oext_name;
Ast_500.Outcometree.oext_type_name = oext_type_name;
Ast_500.Outcometree.oext_type_params = oext_type_params;
Ast_500.Outcometree.oext_args = oext_args;
Ast_500.Outcometree.oext_ret_type = oext_ret_type;
Ast_500.Outcometree.oext_private = oext_private }
->
{
Ast_414.Outcometree.oext_name = oext_name;
Ast_414.Outcometree.oext_type_name = oext_type_name;
Ast_414.Outcometree.oext_type_params =
(List.map (fun x -> x) oext_type_params);
Ast_414.Outcometree.oext_args = (List.map copy_out_type oext_args);
Ast_414.Outcometree.oext_ret_type =
(Option.map copy_out_type oext_ret_type);
Ast_414.Outcometree.oext_private = (copy_private_flag oext_private)
}
and copy_out_rec_status :
Ast_500.Outcometree.out_rec_status -> Ast_414.Outcometree.out_rec_status =
function
| Ast_500.Outcometree.Orec_not -> Ast_414.Outcometree.Orec_not
| Ast_500.Outcometree.Orec_first -> Ast_414.Outcometree.Orec_first
| Ast_500.Outcometree.Orec_next -> Ast_414.Outcometree.Orec_next
and copy_out_class_type :
Ast_500.Outcometree.out_class_type -> Ast_414.Outcometree.out_class_type =
function
| Ast_500.Outcometree.Octy_constr (x0, x1) ->
Ast_414.Outcometree.Octy_constr
((copy_out_ident x0), (List.map copy_out_type x1))
| Ast_500.Outcometree.Octy_arrow (x0, x1, x2) ->
Ast_414.Outcometree.Octy_arrow
(x0, (copy_out_type x1), (copy_out_class_type x2))
| Ast_500.Outcometree.Octy_signature (x0, x1) ->
Ast_414.Outcometree.Octy_signature
((Option.map copy_out_type x0),
(List.map copy_out_class_sig_item x1))
and copy_out_class_sig_item :
Ast_500.Outcometree.out_class_sig_item ->
Ast_414.Outcometree.out_class_sig_item
=
function
| Ast_500.Outcometree.Ocsg_constraint (x0, x1) ->
Ast_414.Outcometree.Ocsg_constraint
((copy_out_type x0), (copy_out_type x1))
| Ast_500.Outcometree.Ocsg_method (x0, x1, x2, x3) ->
Ast_414.Outcometree.Ocsg_method (x0, x1, x2, (copy_out_type x3))
| Ast_500.Outcometree.Ocsg_value (x0, x1, x2, x3) ->
Ast_414.Outcometree.Ocsg_value (x0, x1, x2, (copy_out_type x3))
and copy_out_type_param :
Ast_500.Outcometree.out_type_param -> Ast_414.Outcometree.out_type_param =
fun x ->
let (x0, x1) = x in
(x0, (let (x0, x1) = x1 in ((copy_variance x0), (copy_injectivity x1))))
and copy_out_type :
Ast_500.Outcometree.out_type -> Ast_414.Outcometree.out_type =
function
| Ast_500.Outcometree.Otyp_abstract -> Ast_414.Outcometree.Otyp_abstract
| Ast_500.Outcometree.Otyp_open -> Ast_414.Outcometree.Otyp_open
| Ast_500.Outcometree.Otyp_alias (x0, x1) ->
Ast_414.Outcometree.Otyp_alias ((copy_out_type x0), x1)
| Ast_500.Outcometree.Otyp_arrow (x0, x1, x2) ->
Ast_414.Outcometree.Otyp_arrow
(x0, (copy_out_type x1), (copy_out_type x2))
| Ast_500.Outcometree.Otyp_class (x0, x1, x2) ->
Ast_414.Outcometree.Otyp_class
(x0, (copy_out_ident x1), (List.map copy_out_type x2))
| Ast_500.Outcometree.Otyp_constr (x0, x1) ->
Ast_414.Outcometree.Otyp_constr
((copy_out_ident x0), (List.map copy_out_type x1))
| Ast_500.Outcometree.Otyp_manifest (x0, x1) ->
Ast_414.Outcometree.Otyp_manifest
((copy_out_type x0), (copy_out_type x1))
| Ast_500.Outcometree.Otyp_object (x0, x1) ->
Ast_414.Outcometree.Otyp_object
((List.map (fun x -> let (x0, x1) = x in (x0, (copy_out_type x1))) x0),
(Option.map (fun x -> x) x1))
| Ast_500.Outcometree.Otyp_record x0 ->
Ast_414.Outcometree.Otyp_record
(List.map
(fun x -> let (x0, x1, x2) = x in (x0, x1, (copy_out_type x2))) x0)
| Ast_500.Outcometree.Otyp_stuff x0 -> Ast_414.Outcometree.Otyp_stuff x0
| Ast_500.Outcometree.Otyp_sum x0 ->
Ast_414.Outcometree.Otyp_sum (List.map copy_out_constructor x0)
| Ast_500.Outcometree.Otyp_tuple x0 ->
Ast_414.Outcometree.Otyp_tuple (List.map copy_out_type x0)
| Ast_500.Outcometree.Otyp_var (x0, x1) ->
Ast_414.Outcometree.Otyp_var (x0, x1)
| Ast_500.Outcometree.Otyp_variant (x0, x1, x2, x3) ->
Ast_414.Outcometree.Otyp_variant
(x0, (copy_out_variant x1), x2,
(Option.map (fun x -> List.map (fun x -> x) x) x3))
| Ast_500.Outcometree.Otyp_poly (x0, x1) ->
Ast_414.Outcometree.Otyp_poly
((List.map (fun x -> x) x0), (copy_out_type x1))
| Ast_500.Outcometree.Otyp_module (x0, x1) ->
Ast_414.Outcometree.Otyp_module
((copy_out_ident x0),
(List.map (fun x -> let (x0, x1) = x in (x0, (copy_out_type x1)))
x1))
| Ast_500.Outcometree.Otyp_attribute (x0, x1) ->
Ast_414.Outcometree.Otyp_attribute
((copy_out_type x0), (copy_out_attribute x1))
and copy_out_attribute :
Ast_500.Outcometree.out_attribute -> Ast_414.Outcometree.out_attribute =
fun { Ast_500.Outcometree.oattr_name = oattr_name } ->
{ Ast_414.Outcometree.oattr_name = oattr_name }
and copy_out_variant :
Ast_500.Outcometree.out_variant -> Ast_414.Outcometree.out_variant =
function
| Ast_500.Outcometree.Ovar_fields x0 ->
Ast_414.Outcometree.Ovar_fields
(List.map
(fun x ->
let (x0, x1, x2) = x in (x0, x1, (List.map copy_out_type x2)))
x0)
| Ast_500.Outcometree.Ovar_typ x0 ->
Ast_414.Outcometree.Ovar_typ (copy_out_type x0)
and copy_out_constructor :
Ast_500.Outcometree.out_constructor -> Ast_414.Outcometree.out_constructor
=
fun
{ Ast_500.Outcometree.ocstr_name = ocstr_name;
Ast_500.Outcometree.ocstr_args = ocstr_args;
Ast_500.Outcometree.ocstr_return_type = ocstr_return_type }
->
{
Ast_414.Outcometree.ocstr_name = ocstr_name;
Ast_414.Outcometree.ocstr_args = (List.map copy_out_type ocstr_args);
Ast_414.Outcometree.ocstr_return_type =
(Option.map copy_out_type ocstr_return_type)
}
and copy_out_value :
Ast_500.Outcometree.out_value -> Ast_414.Outcometree.out_value =
function
| Ast_500.Outcometree.Oval_array x0 ->
Ast_414.Outcometree.Oval_array (List.map copy_out_value x0)
| Ast_500.Outcometree.Oval_char x0 -> Ast_414.Outcometree.Oval_char x0
| Ast_500.Outcometree.Oval_constr (x0, x1) ->
Ast_414.Outcometree.Oval_constr
((copy_out_ident x0), (List.map copy_out_value x1))
| Ast_500.Outcometree.Oval_ellipsis -> Ast_414.Outcometree.Oval_ellipsis
| Ast_500.Outcometree.Oval_float x0 -> Ast_414.Outcometree.Oval_float x0
| Ast_500.Outcometree.Oval_int x0 -> Ast_414.Outcometree.Oval_int x0
| Ast_500.Outcometree.Oval_int32 x0 -> Ast_414.Outcometree.Oval_int32 x0
| Ast_500.Outcometree.Oval_int64 x0 -> Ast_414.Outcometree.Oval_int64 x0
| Ast_500.Outcometree.Oval_nativeint x0 ->
Ast_414.Outcometree.Oval_nativeint x0
| Ast_500.Outcometree.Oval_list x0 ->
Ast_414.Outcometree.Oval_list (List.map copy_out_value x0)
| Ast_500.Outcometree.Oval_printer x0 ->
Ast_414.Outcometree.Oval_printer x0
| Ast_500.Outcometree.Oval_record x0 ->
Ast_414.Outcometree.Oval_record
(List.map
(fun x ->
let (x0, x1) = x in ((copy_out_ident x0), (copy_out_value x1)))
x0)
| Ast_500.Outcometree.Oval_string (x0, x1, x2) ->
Ast_414.Outcometree.Oval_string (x0, x1, (copy_out_string x2))
| Ast_500.Outcometree.Oval_stuff x0 -> Ast_414.Outcometree.Oval_stuff x0
| Ast_500.Outcometree.Oval_tuple x0 ->
Ast_414.Outcometree.Oval_tuple (List.map copy_out_value x0)
| Ast_500.Outcometree.Oval_variant (x0, x1) ->
Ast_414.Outcometree.Oval_variant (x0, (Option.map copy_out_value x1))
and copy_out_string :
Ast_500.Outcometree.out_string -> Ast_414.Outcometree.out_string =
function
| Ast_500.Outcometree.Ostr_string -> Ast_414.Outcometree.Ostr_string
| Ast_500.Outcometree.Ostr_bytes -> Ast_414.Outcometree.Ostr_bytes
and copy_out_ident :
Ast_500.Outcometree.out_ident -> Ast_414.Outcometree.out_ident =
function
| Ast_500.Outcometree.Oide_apply (x0, x1) ->
Ast_414.Outcometree.Oide_apply
((copy_out_ident x0), (copy_out_ident x1))
| Ast_500.Outcometree.Oide_dot (x0, x1) ->
Ast_414.Outcometree.Oide_dot ((copy_out_ident x0), x1)
| Ast_500.Outcometree.Oide_ident x0 ->
Ast_414.Outcometree.Oide_ident (copy_out_name x0)
and copy_out_name :
Ast_500.Outcometree.out_name -> Ast_414.Outcometree.out_name =
fun { Ast_500.Outcometree.printed_name = printed_name } ->
{ Ast_414.Outcometree.printed_name = printed_name }
and copy_toplevel_phrase :
Ast_500.Parsetree.toplevel_phrase -> Ast_414.Parsetree.toplevel_phrase =
function
| Ast_500.Parsetree.Ptop_def x0 ->
Ast_414.Parsetree.Ptop_def (copy_structure x0)
| Ast_500.Parsetree.Ptop_dir x0 ->
Ast_414.Parsetree.Ptop_dir (copy_toplevel_directive x0)
and copy_toplevel_directive :
Ast_500.Parsetree.toplevel_directive ->
Ast_414.Parsetree.toplevel_directive
=
fun
{ Ast_500.Parsetree.pdir_name = pdir_name;
Ast_500.Parsetree.pdir_arg = pdir_arg;
Ast_500.Parsetree.pdir_loc = pdir_loc }
->
{
Ast_414.Parsetree.pdir_name = (copy_loc (fun x -> x) pdir_name);
Ast_414.Parsetree.pdir_arg =
(Option.map copy_directive_argument pdir_arg);
Ast_414.Parsetree.pdir_loc = (copy_location pdir_loc)
}
and copy_directive_argument :
Ast_500.Parsetree.directive_argument ->
Ast_414.Parsetree.directive_argument
=
fun
{ Ast_500.Parsetree.pdira_desc = pdira_desc;
Ast_500.Parsetree.pdira_loc = pdira_loc }
->
{
Ast_414.Parsetree.pdira_desc =
(copy_directive_argument_desc pdira_desc);
Ast_414.Parsetree.pdira_loc = (copy_location pdira_loc)
}
and copy_directive_argument_desc :
Ast_500.Parsetree.directive_argument_desc ->
Ast_414.Parsetree.directive_argument_desc
=
function
| Ast_500.Parsetree.Pdir_string x0 -> Ast_414.Parsetree.Pdir_string x0
| Ast_500.Parsetree.Pdir_int (x0, x1) ->
Ast_414.Parsetree.Pdir_int (x0, (Option.map (fun x -> x) x1))
| Ast_500.Parsetree.Pdir_ident x0 ->
Ast_414.Parsetree.Pdir_ident (copy_Longident_t x0)
| Ast_500.Parsetree.Pdir_bool x0 -> Ast_414.Parsetree.Pdir_bool x0
and copy_expression :
Ast_500.Parsetree.expression -> Ast_414.Parsetree.expression =
fun
{ Ast_500.Parsetree.pexp_desc = pexp_desc;
Ast_500.Parsetree.pexp_loc = pexp_loc;
Ast_500.Parsetree.pexp_loc_stack = pexp_loc_stack;
Ast_500.Parsetree.pexp_attributes = pexp_attributes }
->
{
Ast_414.Parsetree.pexp_desc = (copy_expression_desc pexp_desc);
Ast_414.Parsetree.pexp_loc = (copy_location pexp_loc);
Ast_414.Parsetree.pexp_loc_stack = (copy_location_stack pexp_loc_stack);
Ast_414.Parsetree.pexp_attributes = (copy_attributes pexp_attributes)
}
and copy_expression_desc :
Ast_500.Parsetree.expression_desc -> Ast_414.Parsetree.expression_desc =
function
| Ast_500.Parsetree.Pexp_ident x0 ->
Ast_414.Parsetree.Pexp_ident (copy_loc copy_Longident_t x0)
| Ast_500.Parsetree.Pexp_constant x0 ->
Ast_414.Parsetree.Pexp_constant (copy_constant x0)
| Ast_500.Parsetree.Pexp_let (x0, x1, x2) ->
Ast_414.Parsetree.Pexp_let
((copy_rec_flag x0), (List.map copy_value_binding x1),
(copy_expression x2))
| Ast_500.Parsetree.Pexp_function x0 ->
Ast_414.Parsetree.Pexp_function (List.map copy_case x0)
| Ast_500.Parsetree.Pexp_fun (x0, x1, x2, x3) ->
Ast_414.Parsetree.Pexp_fun
((copy_arg_label x0), (Option.map copy_expression x1),
(copy_pattern x2), (copy_expression x3))
| Ast_500.Parsetree.Pexp_apply (x0, x1) ->
Ast_414.Parsetree.Pexp_apply
((copy_expression x0),
(List.map
(fun x ->
let (x0, x1) = x in
((copy_arg_label x0), (copy_expression x1))) x1))
| Ast_500.Parsetree.Pexp_match (x0, x1) ->
Ast_414.Parsetree.Pexp_match
((copy_expression x0), (List.map copy_case x1))
| Ast_500.Parsetree.Pexp_try (x0, x1) ->
Ast_414.Parsetree.Pexp_try
((copy_expression x0), (List.map copy_case x1))
| Ast_500.Parsetree.Pexp_tuple x0 ->
Ast_414.Parsetree.Pexp_tuple (List.map copy_expression x0)
| Ast_500.Parsetree.Pexp_construct (x0, x1) ->
Ast_414.Parsetree.Pexp_construct
((copy_loc copy_Longident_t x0), (Option.map copy_expression x1))
| Ast_500.Parsetree.Pexp_variant (x0, x1) ->
Ast_414.Parsetree.Pexp_variant
((copy_label x0), (Option.map copy_expression x1))
| Ast_500.Parsetree.Pexp_record (x0, x1) ->
Ast_414.Parsetree.Pexp_record
((List.map
(fun x ->
let (x0, x1) = x in
((copy_loc copy_Longident_t x0), (copy_expression x1))) x0),
(Option.map copy_expression x1))
| Ast_500.Parsetree.Pexp_field (x0, x1) ->
Ast_414.Parsetree.Pexp_field
((copy_expression x0), (copy_loc copy_Longident_t x1))
| Ast_500.Parsetree.Pexp_setfield (x0, x1, x2) ->
Ast_414.Parsetree.Pexp_setfield
((copy_expression x0), (copy_loc copy_Longident_t x1),
(copy_expression x2))
| Ast_500.Parsetree.Pexp_array x0 ->
Ast_414.Parsetree.Pexp_array (List.map copy_expression x0)
| Ast_500.Parsetree.Pexp_ifthenelse (x0, x1, x2) ->
Ast_414.Parsetree.Pexp_ifthenelse
((copy_expression x0), (copy_expression x1),
(Option.map copy_expression x2))
| Ast_500.Parsetree.Pexp_sequence (x0, x1) ->
Ast_414.Parsetree.Pexp_sequence
((copy_expression x0), (copy_expression x1))
| Ast_500.Parsetree.Pexp_while (x0, x1) ->
Ast_414.Parsetree.Pexp_while
((copy_expression x0), (copy_expression x1))
| Ast_500.Parsetree.Pexp_for (x0, x1, x2, x3, x4) ->
Ast_414.Parsetree.Pexp_for
((copy_pattern x0), (copy_expression x1), (copy_expression x2),
(copy_direction_flag x3), (copy_expression x4))
| Ast_500.Parsetree.Pexp_constraint (x0, x1) ->
Ast_414.Parsetree.Pexp_constraint
((copy_expression x0), (copy_core_type x1))
| Ast_500.Parsetree.Pexp_coerce (x0, x1, x2) ->
Ast_414.Parsetree.Pexp_coerce
((copy_expression x0), (Option.map copy_core_type x1),
(copy_core_type x2))
| Ast_500.Parsetree.Pexp_send (x0, x1) ->
Ast_414.Parsetree.Pexp_send
((copy_expression x0), (copy_loc copy_label x1))
| Ast_500.Parsetree.Pexp_new x0 ->
Ast_414.Parsetree.Pexp_new (copy_loc copy_Longident_t x0)
| Ast_500.Parsetree.Pexp_setinstvar (x0, x1) ->
Ast_414.Parsetree.Pexp_setinstvar
((copy_loc copy_label x0), (copy_expression x1))
| Ast_500.Parsetree.Pexp_override x0 ->
Ast_414.Parsetree.Pexp_override
(List.map
(fun x ->
let (x0, x1) = x in
((copy_loc copy_label x0), (copy_expression x1))) x0)
| Ast_500.Parsetree.Pexp_letmodule (x0, x1, x2) ->
Ast_414.Parsetree.Pexp_letmodule
((copy_loc (fun x -> Option.map (fun x -> x) x) x0),
(copy_module_expr x1), (copy_expression x2))
| Ast_500.Parsetree.Pexp_letexception (x0, x1) ->
Ast_414.Parsetree.Pexp_letexception
((copy_extension_constructor x0), (copy_expression x1))
| Ast_500.Parsetree.Pexp_assert x0 ->
Ast_414.Parsetree.Pexp_assert (copy_expression x0)
| Ast_500.Parsetree.Pexp_lazy x0 ->
Ast_414.Parsetree.Pexp_lazy (copy_expression x0)
| Ast_500.Parsetree.Pexp_poly (x0, x1) ->
Ast_414.Parsetree.Pexp_poly
((copy_expression x0), (Option.map copy_core_type x1))
| Ast_500.Parsetree.Pexp_object x0 ->
Ast_414.Parsetree.Pexp_object (copy_class_structure x0)
| Ast_500.Parsetree.Pexp_newtype (x0, x1) ->
Ast_414.Parsetree.Pexp_newtype
((copy_loc (fun x -> x) x0), (copy_expression x1))
| Ast_500.Parsetree.Pexp_pack x0 ->
Ast_414.Parsetree.Pexp_pack (copy_module_expr x0)
| Ast_500.Parsetree.Pexp_open (x0, x1) ->
Ast_414.Parsetree.Pexp_open
((copy_open_declaration x0), (copy_expression x1))
| Ast_500.Parsetree.Pexp_letop x0 ->
Ast_414.Parsetree.Pexp_letop (copy_letop x0)
| Ast_500.Parsetree.Pexp_extension x0 ->
Ast_414.Parsetree.Pexp_extension (copy_extension x0)
| Ast_500.Parsetree.Pexp_unreachable -> Ast_414.Parsetree.Pexp_unreachable
and copy_letop : Ast_500.Parsetree.letop -> Ast_414.Parsetree.letop =
fun
{ Ast_500.Parsetree.let_ = let_; Ast_500.Parsetree.ands = ands;
Ast_500.Parsetree.body = body }
->
{
Ast_414.Parsetree.let_ = (copy_binding_op let_);
Ast_414.Parsetree.ands = (List.map copy_binding_op ands);
Ast_414.Parsetree.body = (copy_expression body)
}
and copy_binding_op :
Ast_500.Parsetree.binding_op -> Ast_414.Parsetree.binding_op =
fun
{ Ast_500.Parsetree.pbop_op = pbop_op;
Ast_500.Parsetree.pbop_pat = pbop_pat;
Ast_500.Parsetree.pbop_exp = pbop_exp;
Ast_500.Parsetree.pbop_loc = pbop_loc }
->
{
Ast_414.Parsetree.pbop_op = (copy_loc (fun x -> x) pbop_op);
Ast_414.Parsetree.pbop_pat = (copy_pattern pbop_pat);
Ast_414.Parsetree.pbop_exp = (copy_expression pbop_exp);
Ast_414.Parsetree.pbop_loc = (copy_location pbop_loc)
}
and copy_direction_flag :
Ast_500.Asttypes.direction_flag -> Ast_414.Asttypes.direction_flag =
function
| Ast_500.Asttypes.Upto -> Ast_414.Asttypes.Upto
| Ast_500.Asttypes.Downto -> Ast_414.Asttypes.Downto
and copy_case : Ast_500.Parsetree.case -> Ast_414.Parsetree.case =
fun
{ Ast_500.Parsetree.pc_lhs = pc_lhs;
Ast_500.Parsetree.pc_guard = pc_guard;
Ast_500.Parsetree.pc_rhs = pc_rhs }
->
{
Ast_414.Parsetree.pc_lhs = (copy_pattern pc_lhs);
Ast_414.Parsetree.pc_guard = (Option.map copy_expression pc_guard);
Ast_414.Parsetree.pc_rhs = (copy_expression pc_rhs)
}
and copy_value_binding :
Ast_500.Parsetree.value_binding -> Ast_414.Parsetree.value_binding =
fun
{ Ast_500.Parsetree.pvb_pat = pvb_pat;
Ast_500.Parsetree.pvb_expr = pvb_expr;
Ast_500.Parsetree.pvb_attributes = pvb_attributes;
Ast_500.Parsetree.pvb_loc = pvb_loc }
->
{
Ast_414.Parsetree.pvb_pat = (copy_pattern pvb_pat);
Ast_414.Parsetree.pvb_expr = (copy_expression pvb_expr);
Ast_414.Parsetree.pvb_attributes = (copy_attributes pvb_attributes);
Ast_414.Parsetree.pvb_loc = (copy_location pvb_loc)
}
and copy_pattern : Ast_500.Parsetree.pattern -> Ast_414.Parsetree.pattern =
fun
{ Ast_500.Parsetree.ppat_desc = ppat_desc;
Ast_500.Parsetree.ppat_loc = ppat_loc;
Ast_500.Parsetree.ppat_loc_stack = ppat_loc_stack;
Ast_500.Parsetree.ppat_attributes = ppat_attributes }
->
{
Ast_414.Parsetree.ppat_desc = (copy_pattern_desc ppat_desc);
Ast_414.Parsetree.ppat_loc = (copy_location ppat_loc);
Ast_414.Parsetree.ppat_loc_stack = (copy_location_stack ppat_loc_stack);
Ast_414.Parsetree.ppat_attributes = (copy_attributes ppat_attributes)
}
and copy_pattern_desc :
Ast_500.Parsetree.pattern_desc -> Ast_414.Parsetree.pattern_desc =
function
| Ast_500.Parsetree.Ppat_any -> Ast_414.Parsetree.Ppat_any
| Ast_500.Parsetree.Ppat_var x0 ->
Ast_414.Parsetree.Ppat_var (copy_loc (fun x -> x) x0)
| Ast_500.Parsetree.Ppat_alias (x0, x1) ->
Ast_414.Parsetree.Ppat_alias
((copy_pattern x0), (copy_loc (fun x -> x) x1))
| Ast_500.Parsetree.Ppat_constant x0 ->
Ast_414.Parsetree.Ppat_constant (copy_constant x0)
| Ast_500.Parsetree.Ppat_interval (x0, x1) ->
Ast_414.Parsetree.Ppat_interval
((copy_constant x0), (copy_constant x1))
| Ast_500.Parsetree.Ppat_tuple x0 ->
Ast_414.Parsetree.Ppat_tuple (List.map copy_pattern x0)
| Ast_500.Parsetree.Ppat_construct (x0, x1) ->
Ast_414.Parsetree.Ppat_construct
((copy_loc copy_Longident_t x0),
(Option.map
(fun x ->
let (x0, x1) = x in
((List.map (fun x -> copy_loc (fun x -> x) x) x0),
(copy_pattern x1))) x1))
| Ast_500.Parsetree.Ppat_variant (x0, x1) ->
Ast_414.Parsetree.Ppat_variant
((copy_label x0), (Option.map copy_pattern x1))
| Ast_500.Parsetree.Ppat_record (x0, x1) ->
Ast_414.Parsetree.Ppat_record
((List.map
(fun x ->
let (x0, x1) = x in
((copy_loc copy_Longident_t x0), (copy_pattern x1))) x0),
(copy_closed_flag x1))
| Ast_500.Parsetree.Ppat_array x0 ->
Ast_414.Parsetree.Ppat_array (List.map copy_pattern x0)
| Ast_500.Parsetree.Ppat_or (x0, x1) ->
Ast_414.Parsetree.Ppat_or ((copy_pattern x0), (copy_pattern x1))
| Ast_500.Parsetree.Ppat_constraint (x0, x1) ->
Ast_414.Parsetree.Ppat_constraint
((copy_pattern x0), (copy_core_type x1))
| Ast_500.Parsetree.Ppat_type x0 ->
Ast_414.Parsetree.Ppat_type (copy_loc copy_Longident_t x0)
| Ast_500.Parsetree.Ppat_lazy x0 ->
Ast_414.Parsetree.Ppat_lazy (copy_pattern x0)
| Ast_500.Parsetree.Ppat_unpack x0 ->
Ast_414.Parsetree.Ppat_unpack
(copy_loc (fun x -> Option.map (fun x -> x) x) x0)
| Ast_500.Parsetree.Ppat_exception x0 ->
Ast_414.Parsetree.Ppat_exception (copy_pattern x0)
| Ast_500.Parsetree.Ppat_extension x0 ->
Ast_414.Parsetree.Ppat_extension (copy_extension x0)
| Ast_500.Parsetree.Ppat_open (x0, x1) ->
Ast_414.Parsetree.Ppat_open
((copy_loc copy_Longident_t x0), (copy_pattern x1))
and copy_core_type :
Ast_500.Parsetree.core_type -> Ast_414.Parsetree.core_type =
fun
{ Ast_500.Parsetree.ptyp_desc = ptyp_desc;
Ast_500.Parsetree.ptyp_loc = ptyp_loc;
Ast_500.Parsetree.ptyp_loc_stack = ptyp_loc_stack;
Ast_500.Parsetree.ptyp_attributes = ptyp_attributes }
->
{
Ast_414.Parsetree.ptyp_desc = (copy_core_type_desc ptyp_desc);
Ast_414.Parsetree.ptyp_loc = (copy_location ptyp_loc);
Ast_414.Parsetree.ptyp_loc_stack = (copy_location_stack ptyp_loc_stack);
Ast_414.Parsetree.ptyp_attributes = (copy_attributes ptyp_attributes)
}
and copy_location_stack :
Ast_500.Parsetree.location_stack -> Ast_414.Parsetree.location_stack =
fun x -> List.map copy_location x
and copy_core_type_desc :
Ast_500.Parsetree.core_type_desc -> Ast_414.Parsetree.core_type_desc =
function
| Ast_500.Parsetree.Ptyp_any -> Ast_414.Parsetree.Ptyp_any
| Ast_500.Parsetree.Ptyp_var x0 -> Ast_414.Parsetree.Ptyp_var x0
| Ast_500.Parsetree.Ptyp_arrow (x0, x1, x2) ->
Ast_414.Parsetree.Ptyp_arrow
((copy_arg_label x0), (copy_core_type x1), (copy_core_type x2))
| Ast_500.Parsetree.Ptyp_tuple x0 ->
Ast_414.Parsetree.Ptyp_tuple (List.map copy_core_type x0)
| Ast_500.Parsetree.Ptyp_constr (x0, x1) ->
Ast_414.Parsetree.Ptyp_constr
((copy_loc copy_Longident_t x0), (List.map copy_core_type x1))
| Ast_500.Parsetree.Ptyp_object (x0, x1) ->
Ast_414.Parsetree.Ptyp_object
((List.map copy_object_field x0), (copy_closed_flag x1))
| Ast_500.Parsetree.Ptyp_class (x0, x1) ->
Ast_414.Parsetree.Ptyp_class
((copy_loc copy_Longident_t x0), (List.map copy_core_type x1))
| Ast_500.Parsetree.Ptyp_alias (x0, x1) ->
Ast_414.Parsetree.Ptyp_alias ((copy_core_type x0), x1)
| Ast_500.Parsetree.Ptyp_variant (x0, x1, x2) ->
Ast_414.Parsetree.Ptyp_variant
((List.map copy_row_field x0), (copy_closed_flag x1),
(Option.map (fun x -> List.map copy_label x) x2))
| Ast_500.Parsetree.Ptyp_poly (x0, x1) ->
Ast_414.Parsetree.Ptyp_poly
((List.map (fun x -> copy_loc (fun x -> x) x) x0),
(copy_core_type x1))
| Ast_500.Parsetree.Ptyp_package x0 ->
Ast_414.Parsetree.Ptyp_package (copy_package_type x0)
| Ast_500.Parsetree.Ptyp_extension x0 ->
Ast_414.Parsetree.Ptyp_extension (copy_extension x0)
(* Migrate a first-class-module type: the package path plus its list of
   [with type] constraints (each a longident/type pair). *)
and copy_package_type :
  Ast_500.Parsetree.package_type -> Ast_414.Parsetree.package_type =
  fun (path, constraints) ->
    let copy_constraint (name, ty) =
      ((copy_loc copy_Longident_t name), (copy_core_type ty))
    in
    ((copy_loc copy_Longident_t path), (List.map copy_constraint constraints))
and copy_row_field :
Ast_500.Parsetree.row_field -> Ast_414.Parsetree.row_field =
fun
{ Ast_500.Parsetree.prf_desc = prf_desc;
Ast_500.Parsetree.prf_loc = prf_loc;
Ast_500.Parsetree.prf_attributes = prf_attributes }
->
{
Ast_414.Parsetree.prf_desc = (copy_row_field_desc prf_desc);
Ast_414.Parsetree.prf_loc = (copy_location prf_loc);
Ast_414.Parsetree.prf_attributes = (copy_attributes prf_attributes)
}
(* Migrate one row of a polymorphic variant type: either a tag with its
   constancy flag and argument types, or an inherited (embedded) type. *)
and copy_row_field_desc :
  Ast_500.Parsetree.row_field_desc -> Ast_414.Parsetree.row_field_desc =
  fun desc ->
    match desc with
    | Ast_500.Parsetree.Rtag (tag, has_constant, args) ->
        Ast_414.Parsetree.Rtag
          ((copy_loc copy_label tag), has_constant,
           (List.map copy_core_type args))
    | Ast_500.Parsetree.Rinherit ty ->
        Ast_414.Parsetree.Rinherit (copy_core_type ty)
and copy_object_field :
Ast_500.Parsetree.object_field -> Ast_414.Parsetree.object_field =
fun
{ Ast_500.Parsetree.pof_desc = pof_desc;
Ast_500.Parsetree.pof_loc = pof_loc;
Ast_500.Parsetree.pof_attributes = pof_attributes }
->
{
Ast_414.Parsetree.pof_desc = (copy_object_field_desc pof_desc);
Ast_414.Parsetree.pof_loc = (copy_location pof_loc);
Ast_414.Parsetree.pof_attributes = (copy_attributes pof_attributes)
}
(* Migrate a list of attributes ([@...] annotations) element by element. *)
and copy_attributes :
  Ast_500.Parsetree.attributes -> Ast_414.Parsetree.attributes =
  fun x -> List.map copy_attribute x
and copy_attribute :
Ast_500.Parsetree.attribute -> Ast_414.Parsetree.attribute =
fun
{ Ast_500.Parsetree.attr_name = attr_name;
Ast_500.Parsetree.attr_payload = attr_payload;
Ast_500.Parsetree.attr_loc = attr_loc }
->
{
Ast_414.Parsetree.attr_name = (copy_loc (fun x -> x) attr_name);
Ast_414.Parsetree.attr_payload = (copy_payload attr_payload);
Ast_414.Parsetree.attr_loc = (copy_location attr_loc)
}
(* Migrate an attribute/extension payload: a structure, a signature, a core
   type, or a pattern with an optional [when] guard expression. *)
and copy_payload : Ast_500.Parsetree.payload -> Ast_414.Parsetree.payload =
  fun payload ->
    match payload with
    | Ast_500.Parsetree.PStr items ->
        Ast_414.Parsetree.PStr (copy_structure items)
    | Ast_500.Parsetree.PSig items ->
        Ast_414.Parsetree.PSig (copy_signature items)
    | Ast_500.Parsetree.PTyp ty -> Ast_414.Parsetree.PTyp (copy_core_type ty)
    | Ast_500.Parsetree.PPat (pat, guard) ->
        Ast_414.Parsetree.PPat
          ((copy_pattern pat), (Option.map copy_expression guard))
(* Migrate a whole implementation (structure) by copying each item. *)
and copy_structure :
  Ast_500.Parsetree.structure -> Ast_414.Parsetree.structure =
  fun x -> List.map copy_structure_item x
and copy_structure_item :
Ast_500.Parsetree.structure_item -> Ast_414.Parsetree.structure_item =
fun
{ Ast_500.Parsetree.pstr_desc = pstr_desc;
Ast_500.Parsetree.pstr_loc = pstr_loc }
->
{
Ast_414.Parsetree.pstr_desc = (copy_structure_item_desc pstr_desc);
Ast_414.Parsetree.pstr_loc = (copy_location pstr_loc)
}
and copy_structure_item_desc :
Ast_500.Parsetree.structure_item_desc ->
Ast_414.Parsetree.structure_item_desc
=
function
| Ast_500.Parsetree.Pstr_eval (x0, x1) ->
Ast_414.Parsetree.Pstr_eval
((copy_expression x0), (copy_attributes x1))
| Ast_500.Parsetree.Pstr_value (x0, x1) ->
Ast_414.Parsetree.Pstr_value
((copy_rec_flag x0), (List.map copy_value_binding x1))
| Ast_500.Parsetree.Pstr_primitive x0 ->
Ast_414.Parsetree.Pstr_primitive (copy_value_description x0)
| Ast_500.Parsetree.Pstr_type (x0, x1) ->
Ast_414.Parsetree.Pstr_type
((copy_rec_flag x0), (List.map copy_type_declaration x1))
| Ast_500.Parsetree.Pstr_typext x0 ->
Ast_414.Parsetree.Pstr_typext (copy_type_extension x0)
| Ast_500.Parsetree.Pstr_exception x0 ->
Ast_414.Parsetree.Pstr_exception (copy_type_exception x0)
| Ast_500.Parsetree.Pstr_module x0 ->
Ast_414.Parsetree.Pstr_module (copy_module_binding x0)
| Ast_500.Parsetree.Pstr_recmodule x0 ->
Ast_414.Parsetree.Pstr_recmodule (List.map copy_module_binding x0)
| Ast_500.Parsetree.Pstr_modtype x0 ->
Ast_414.Parsetree.Pstr_modtype (copy_module_type_declaration x0)
| Ast_500.Parsetree.Pstr_open x0 ->
Ast_414.Parsetree.Pstr_open (copy_open_declaration x0)
| Ast_500.Parsetree.Pstr_class x0 ->
Ast_414.Parsetree.Pstr_class (List.map copy_class_declaration x0)
| Ast_500.Parsetree.Pstr_class_type x0 ->
Ast_414.Parsetree.Pstr_class_type
(List.map copy_class_type_declaration x0)
| Ast_500.Parsetree.Pstr_include x0 ->
Ast_414.Parsetree.Pstr_include (copy_include_declaration x0)
| Ast_500.Parsetree.Pstr_attribute x0 ->
Ast_414.Parsetree.Pstr_attribute (copy_attribute x0)
| Ast_500.Parsetree.Pstr_extension (x0, x1) ->
Ast_414.Parsetree.Pstr_extension
((copy_extension x0), (copy_attributes x1))
and copy_include_declaration :
Ast_500.Parsetree.include_declaration ->
Ast_414.Parsetree.include_declaration
= fun x -> copy_include_infos copy_module_expr x
and copy_class_declaration :
Ast_500.Parsetree.class_declaration -> Ast_414.Parsetree.class_declaration
= fun x -> copy_class_infos copy_class_expr x
and copy_class_expr :
Ast_500.Parsetree.class_expr -> Ast_414.Parsetree.class_expr =
fun
{ Ast_500.Parsetree.pcl_desc = pcl_desc;
Ast_500.Parsetree.pcl_loc = pcl_loc;
Ast_500.Parsetree.pcl_attributes = pcl_attributes }
->
{
Ast_414.Parsetree.pcl_desc = (copy_class_expr_desc pcl_desc);
Ast_414.Parsetree.pcl_loc = (copy_location pcl_loc);
Ast_414.Parsetree.pcl_attributes = (copy_attributes pcl_attributes)
}
and copy_class_expr_desc :
Ast_500.Parsetree.class_expr_desc -> Ast_414.Parsetree.class_expr_desc =
function
| Ast_500.Parsetree.Pcl_constr (x0, x1) ->
Ast_414.Parsetree.Pcl_constr
((copy_loc copy_Longident_t x0), (List.map copy_core_type x1))
| Ast_500.Parsetree.Pcl_structure x0 ->
Ast_414.Parsetree.Pcl_structure (copy_class_structure x0)
| Ast_500.Parsetree.Pcl_fun (x0, x1, x2, x3) ->
Ast_414.Parsetree.Pcl_fun
((copy_arg_label x0), (Option.map copy_expression x1),
(copy_pattern x2), (copy_class_expr x3))
| Ast_500.Parsetree.Pcl_apply (x0, x1) ->
Ast_414.Parsetree.Pcl_apply
((copy_class_expr x0),
(List.map
(fun x ->
let (x0, x1) = x in
((copy_arg_label x0), (copy_expression x1))) x1))
| Ast_500.Parsetree.Pcl_let (x0, x1, x2) ->
Ast_414.Parsetree.Pcl_let
((copy_rec_flag x0), (List.map copy_value_binding x1),
(copy_class_expr x2))
| Ast_500.Parsetree.Pcl_constraint (x0, x1) ->
Ast_414.Parsetree.Pcl_constraint
((copy_class_expr x0), (copy_class_type x1))
| Ast_500.Parsetree.Pcl_extension x0 ->
Ast_414.Parsetree.Pcl_extension (copy_extension x0)
| Ast_500.Parsetree.Pcl_open (x0, x1) ->
Ast_414.Parsetree.Pcl_open
((copy_open_description x0), (copy_class_expr x1))
and copy_class_structure :
Ast_500.Parsetree.class_structure -> Ast_414.Parsetree.class_structure =
fun
{ Ast_500.Parsetree.pcstr_self = pcstr_self;
Ast_500.Parsetree.pcstr_fields = pcstr_fields }
->
{
Ast_414.Parsetree.pcstr_self = (copy_pattern pcstr_self);
Ast_414.Parsetree.pcstr_fields =
(List.map copy_class_field pcstr_fields)
}
and copy_class_field :
Ast_500.Parsetree.class_field -> Ast_414.Parsetree.class_field =
fun
{ Ast_500.Parsetree.pcf_desc = pcf_desc;
Ast_500.Parsetree.pcf_loc = pcf_loc;
Ast_500.Parsetree.pcf_attributes = pcf_attributes }
->
{
Ast_414.Parsetree.pcf_desc = (copy_class_field_desc pcf_desc);
Ast_414.Parsetree.pcf_loc = (copy_location pcf_loc);
Ast_414.Parsetree.pcf_attributes = (copy_attributes pcf_attributes)
}
and copy_class_field_desc :
Ast_500.Parsetree.class_field_desc -> Ast_414.Parsetree.class_field_desc =
function
| Ast_500.Parsetree.Pcf_inherit (x0, x1, x2) ->
Ast_414.Parsetree.Pcf_inherit
((copy_override_flag x0), (copy_class_expr x1),
(Option.map (fun x -> copy_loc (fun x -> x) x) x2))
| Ast_500.Parsetree.Pcf_val x0 ->
Ast_414.Parsetree.Pcf_val
(let (x0, x1, x2) = x0 in
((copy_loc copy_label x0), (copy_mutable_flag x1),
(copy_class_field_kind x2)))
| Ast_500.Parsetree.Pcf_method x0 ->
Ast_414.Parsetree.Pcf_method
(let (x0, x1, x2) = x0 in
((copy_loc copy_label x0), (copy_private_flag x1),
(copy_class_field_kind x2)))
| Ast_500.Parsetree.Pcf_constraint x0 ->
Ast_414.Parsetree.Pcf_constraint
(let (x0, x1) = x0 in ((copy_core_type x0), (copy_core_type x1)))
| Ast_500.Parsetree.Pcf_initializer x0 ->
Ast_414.Parsetree.Pcf_initializer (copy_expression x0)
| Ast_500.Parsetree.Pcf_attribute x0 ->
Ast_414.Parsetree.Pcf_attribute (copy_attribute x0)
| Ast_500.Parsetree.Pcf_extension x0 ->
Ast_414.Parsetree.Pcf_extension (copy_extension x0)
(* Migrate a class field body: a virtual declaration carries only its type,
   a concrete one carries an override flag and a defining expression. *)
and copy_class_field_kind :
  Ast_500.Parsetree.class_field_kind -> Ast_414.Parsetree.class_field_kind =
  fun kind ->
    match kind with
    | Ast_500.Parsetree.Cfk_virtual ty ->
        Ast_414.Parsetree.Cfk_virtual (copy_core_type ty)
    | Ast_500.Parsetree.Cfk_concrete (flag, body) ->
        Ast_414.Parsetree.Cfk_concrete
          ((copy_override_flag flag), (copy_expression body))
and copy_open_declaration :
Ast_500.Parsetree.open_declaration -> Ast_414.Parsetree.open_declaration =
fun x -> copy_open_infos copy_module_expr x
and copy_module_binding :
Ast_500.Parsetree.module_binding -> Ast_414.Parsetree.module_binding =
fun
{ Ast_500.Parsetree.pmb_name = pmb_name;
Ast_500.Parsetree.pmb_expr = pmb_expr;
Ast_500.Parsetree.pmb_attributes = pmb_attributes;
Ast_500.Parsetree.pmb_loc = pmb_loc }
->
{
Ast_414.Parsetree.pmb_name =
(copy_loc (fun x -> Option.map (fun x -> x) x) pmb_name);
Ast_414.Parsetree.pmb_expr = (copy_module_expr pmb_expr);
Ast_414.Parsetree.pmb_attributes = (copy_attributes pmb_attributes);
Ast_414.Parsetree.pmb_loc = (copy_location pmb_loc)
}
and copy_module_expr :
Ast_500.Parsetree.module_expr -> Ast_414.Parsetree.module_expr =
fun
{ Ast_500.Parsetree.pmod_desc = pmod_desc;
Ast_500.Parsetree.pmod_loc = pmod_loc;
Ast_500.Parsetree.pmod_attributes = pmod_attributes }
->
{
Ast_414.Parsetree.pmod_desc = (copy_module_expr_desc pmod_desc);
Ast_414.Parsetree.pmod_loc = (copy_location pmod_loc);
Ast_414.Parsetree.pmod_attributes = (copy_attributes pmod_attributes)
}
and copy_module_expr_desc :
Ast_500.Parsetree.module_expr_desc -> Ast_414.Parsetree.module_expr_desc =
function
| Ast_500.Parsetree.Pmod_ident x0 ->
Ast_414.Parsetree.Pmod_ident (copy_loc copy_Longident_t x0)
| Ast_500.Parsetree.Pmod_structure x0 ->
Ast_414.Parsetree.Pmod_structure (copy_structure x0)
| Ast_500.Parsetree.Pmod_functor (x0, x1) ->
Ast_414.Parsetree.Pmod_functor
((copy_functor_parameter x0), (copy_module_expr x1))
| Ast_500.Parsetree.Pmod_apply (x0, x1) ->
Ast_414.Parsetree.Pmod_apply
((copy_module_expr x0), (copy_module_expr x1))
| Ast_500.Parsetree.Pmod_constraint (x0, x1) ->
Ast_414.Parsetree.Pmod_constraint
((copy_module_expr x0), (copy_module_type x1))
| Ast_500.Parsetree.Pmod_unpack x0 ->
Ast_414.Parsetree.Pmod_unpack (copy_expression x0)
| Ast_500.Parsetree.Pmod_extension x0 ->
Ast_414.Parsetree.Pmod_extension (copy_extension x0)
(* Migrate a functor parameter: either [()] (generative) or a named/anonymous
   parameter with its module type. *)
and copy_functor_parameter :
  Ast_500.Parsetree.functor_parameter -> Ast_414.Parsetree.functor_parameter =
  fun param ->
    match param with
    | Ast_500.Parsetree.Unit -> Ast_414.Parsetree.Unit
    | Ast_500.Parsetree.Named (name, mty) ->
        Ast_414.Parsetree.Named
          ((copy_loc (fun x -> Option.map (fun x -> x) x) name),
           (copy_module_type mty))
and copy_module_type :
Ast_500.Parsetree.module_type -> Ast_414.Parsetree.module_type =
fun
{ Ast_500.Parsetree.pmty_desc = pmty_desc;
Ast_500.Parsetree.pmty_loc = pmty_loc;
Ast_500.Parsetree.pmty_attributes = pmty_attributes }
->
{
Ast_414.Parsetree.pmty_desc = (copy_module_type_desc pmty_desc);
Ast_414.Parsetree.pmty_loc = (copy_location pmty_loc);
Ast_414.Parsetree.pmty_attributes = (copy_attributes pmty_attributes)
}
and copy_module_type_desc :
Ast_500.Parsetree.module_type_desc -> Ast_414.Parsetree.module_type_desc =
function
| Ast_500.Parsetree.Pmty_ident x0 ->
Ast_414.Parsetree.Pmty_ident (copy_loc copy_Longident_t x0)
| Ast_500.Parsetree.Pmty_signature x0 ->
Ast_414.Parsetree.Pmty_signature (copy_signature x0)
| Ast_500.Parsetree.Pmty_functor (x0, x1) ->
Ast_414.Parsetree.Pmty_functor
((copy_functor_parameter x0), (copy_module_type x1))
| Ast_500.Parsetree.Pmty_with (x0, x1) ->
Ast_414.Parsetree.Pmty_with
((copy_module_type x0), (List.map copy_with_constraint x1))
| Ast_500.Parsetree.Pmty_typeof x0 ->
Ast_414.Parsetree.Pmty_typeof (copy_module_expr x0)
| Ast_500.Parsetree.Pmty_extension x0 ->
Ast_414.Parsetree.Pmty_extension (copy_extension x0)
| Ast_500.Parsetree.Pmty_alias x0 ->
Ast_414.Parsetree.Pmty_alias (copy_loc copy_Longident_t x0)
and copy_with_constraint :
Ast_500.Parsetree.with_constraint -> Ast_414.Parsetree.with_constraint =
function
| Ast_500.Parsetree.Pwith_type (x0, x1) ->
Ast_414.Parsetree.Pwith_type
((copy_loc copy_Longident_t x0), (copy_type_declaration x1))
| Ast_500.Parsetree.Pwith_module (x0, x1) ->
Ast_414.Parsetree.Pwith_module
((copy_loc copy_Longident_t x0), (copy_loc copy_Longident_t x1))
| Ast_500.Parsetree.Pwith_modtype (x0, x1) ->
Ast_414.Parsetree.Pwith_modtype
((copy_loc copy_Longident_t x0), (copy_module_type x1))
| Ast_500.Parsetree.Pwith_modtypesubst (x0, x1) ->
Ast_414.Parsetree.Pwith_modtypesubst
((copy_loc copy_Longident_t x0), (copy_module_type x1))
| Ast_500.Parsetree.Pwith_typesubst (x0, x1) ->
Ast_414.Parsetree.Pwith_typesubst
((copy_loc copy_Longident_t x0), (copy_type_declaration x1))
| Ast_500.Parsetree.Pwith_modsubst (x0, x1) ->
Ast_414.Parsetree.Pwith_modsubst
((copy_loc copy_Longident_t x0), (copy_loc copy_Longident_t x1))
(* Migrate a whole interface (signature) by copying each item. *)
and copy_signature :
  Ast_500.Parsetree.signature -> Ast_414.Parsetree.signature =
  fun x -> List.map copy_signature_item x
and copy_signature_item :
Ast_500.Parsetree.signature_item -> Ast_414.Parsetree.signature_item =
fun
{ Ast_500.Parsetree.psig_desc = psig_desc;
Ast_500.Parsetree.psig_loc = psig_loc }
->
{
Ast_414.Parsetree.psig_desc = (copy_signature_item_desc psig_desc);
Ast_414.Parsetree.psig_loc = (copy_location psig_loc)
}
and copy_signature_item_desc :
Ast_500.Parsetree.signature_item_desc ->
Ast_414.Parsetree.signature_item_desc
=
function
| Ast_500.Parsetree.Psig_value x0 ->
Ast_414.Parsetree.Psig_value (copy_value_description x0)
| Ast_500.Parsetree.Psig_type (x0, x1) ->
Ast_414.Parsetree.Psig_type
((copy_rec_flag x0), (List.map copy_type_declaration x1))
| Ast_500.Parsetree.Psig_typesubst x0 ->
Ast_414.Parsetree.Psig_typesubst (List.map copy_type_declaration x0)
| Ast_500.Parsetree.Psig_typext x0 ->
Ast_414.Parsetree.Psig_typext (copy_type_extension x0)
| Ast_500.Parsetree.Psig_exception x0 ->
Ast_414.Parsetree.Psig_exception (copy_type_exception x0)
| Ast_500.Parsetree.Psig_module x0 ->
Ast_414.Parsetree.Psig_module (copy_module_declaration x0)
| Ast_500.Parsetree.Psig_modsubst x0 ->
Ast_414.Parsetree.Psig_modsubst (copy_module_substitution x0)
| Ast_500.Parsetree.Psig_recmodule x0 ->
Ast_414.Parsetree.Psig_recmodule (List.map copy_module_declaration x0)
| Ast_500.Parsetree.Psig_modtype x0 ->
Ast_414.Parsetree.Psig_modtype (copy_module_type_declaration x0)
| Ast_500.Parsetree.Psig_modtypesubst x0 ->
Ast_414.Parsetree.Psig_modtypesubst (copy_module_type_declaration x0)
| Ast_500.Parsetree.Psig_open x0 ->
Ast_414.Parsetree.Psig_open (copy_open_description x0)
| Ast_500.Parsetree.Psig_include x0 ->
Ast_414.Parsetree.Psig_include (copy_include_description x0)
| Ast_500.Parsetree.Psig_class x0 ->
Ast_414.Parsetree.Psig_class (List.map copy_class_description x0)
| Ast_500.Parsetree.Psig_class_type x0 ->
Ast_414.Parsetree.Psig_class_type
(List.map copy_class_type_declaration x0)
| Ast_500.Parsetree.Psig_attribute x0 ->
Ast_414.Parsetree.Psig_attribute (copy_attribute x0)
| Ast_500.Parsetree.Psig_extension (x0, x1) ->
Ast_414.Parsetree.Psig_extension
((copy_extension x0), (copy_attributes x1))
and copy_class_type_declaration :
Ast_500.Parsetree.class_type_declaration ->
Ast_414.Parsetree.class_type_declaration
= fun x -> copy_class_infos copy_class_type x
and copy_class_description :
Ast_500.Parsetree.class_description -> Ast_414.Parsetree.class_description
= fun x -> copy_class_infos copy_class_type x
and copy_class_type :
Ast_500.Parsetree.class_type -> Ast_414.Parsetree.class_type =
fun
{ Ast_500.Parsetree.pcty_desc = pcty_desc;
Ast_500.Parsetree.pcty_loc = pcty_loc;
Ast_500.Parsetree.pcty_attributes = pcty_attributes }
->
{
Ast_414.Parsetree.pcty_desc = (copy_class_type_desc pcty_desc);
Ast_414.Parsetree.pcty_loc = (copy_location pcty_loc);
Ast_414.Parsetree.pcty_attributes = (copy_attributes pcty_attributes)
}
and copy_class_type_desc :
Ast_500.Parsetree.class_type_desc -> Ast_414.Parsetree.class_type_desc =
function
| Ast_500.Parsetree.Pcty_constr (x0, x1) ->
Ast_414.Parsetree.Pcty_constr
((copy_loc copy_Longident_t x0), (List.map copy_core_type x1))
| Ast_500.Parsetree.Pcty_signature x0 ->
Ast_414.Parsetree.Pcty_signature (copy_class_signature x0)
| Ast_500.Parsetree.Pcty_arrow (x0, x1, x2) ->
Ast_414.Parsetree.Pcty_arrow
((copy_arg_label x0), (copy_core_type x1), (copy_class_type x2))
| Ast_500.Parsetree.Pcty_extension x0 ->
Ast_414.Parsetree.Pcty_extension (copy_extension x0)
| Ast_500.Parsetree.Pcty_open (x0, x1) ->
Ast_414.Parsetree.Pcty_open
((copy_open_description x0), (copy_class_type x1))
and copy_class_signature :
Ast_500.Parsetree.class_signature -> Ast_414.Parsetree.class_signature =
fun
{ Ast_500.Parsetree.pcsig_self = pcsig_self;
Ast_500.Parsetree.pcsig_fields = pcsig_fields }
->
{
Ast_414.Parsetree.pcsig_self = (copy_core_type pcsig_self);
Ast_414.Parsetree.pcsig_fields =
(List.map copy_class_type_field pcsig_fields)
}
and copy_class_type_field :
Ast_500.Parsetree.class_type_field -> Ast_414.Parsetree.class_type_field =
fun
{ Ast_500.Parsetree.pctf_desc = pctf_desc;
Ast_500.Parsetree.pctf_loc = pctf_loc;
Ast_500.Parsetree.pctf_attributes = pctf_attributes }
->
{
Ast_414.Parsetree.pctf_desc = (copy_class_type_field_desc pctf_desc);
Ast_414.Parsetree.pctf_loc = (copy_location pctf_loc);
Ast_414.Parsetree.pctf_attributes = (copy_attributes pctf_attributes)
}
and copy_class_type_field_desc :
Ast_500.Parsetree.class_type_field_desc ->
Ast_414.Parsetree.class_type_field_desc
=
function
| Ast_500.Parsetree.Pctf_inherit x0 ->
Ast_414.Parsetree.Pctf_inherit (copy_class_type x0)
| Ast_500.Parsetree.Pctf_val x0 ->
Ast_414.Parsetree.Pctf_val
(let (x0, x1, x2, x3) = x0 in
((copy_loc copy_label x0), (copy_mutable_flag x1),
(copy_virtual_flag x2), (copy_core_type x3)))
| Ast_500.Parsetree.Pctf_method x0 ->
Ast_414.Parsetree.Pctf_method
(let (x0, x1, x2, x3) = x0 in
((copy_loc copy_label x0), (copy_private_flag x1),
(copy_virtual_flag x2), (copy_core_type x3)))
| Ast_500.Parsetree.Pctf_constraint x0 ->
Ast_414.Parsetree.Pctf_constraint
(let (x0, x1) = x0 in ((copy_core_type x0), (copy_core_type x1)))
| Ast_500.Parsetree.Pctf_attribute x0 ->
Ast_414.Parsetree.Pctf_attribute (copy_attribute x0)
| Ast_500.Parsetree.Pctf_extension x0 ->
Ast_414.Parsetree.Pctf_extension (copy_extension x0)
(* Migrate an extension node ([%name payload]): the name is kept verbatim,
   the payload is converted recursively. *)
and copy_extension :
  Ast_500.Parsetree.extension -> Ast_414.Parsetree.extension =
  fun (name, payload) ->
    ((copy_loc (fun x -> x) name), (copy_payload payload))
and copy_class_infos :
'f0 'g0 .
('f0 -> 'g0) ->
'f0 Ast_500.Parsetree.class_infos -> 'g0 Ast_414.Parsetree.class_infos
=
fun f0 ->
fun
{ Ast_500.Parsetree.pci_virt = pci_virt;
Ast_500.Parsetree.pci_params = pci_params;
Ast_500.Parsetree.pci_name = pci_name;
Ast_500.Parsetree.pci_expr = pci_expr;
Ast_500.Parsetree.pci_loc = pci_loc;
Ast_500.Parsetree.pci_attributes = pci_attributes }
->
{
Ast_414.Parsetree.pci_virt = (copy_virtual_flag pci_virt);
Ast_414.Parsetree.pci_params =
(List.map
(fun x ->
let (x0, x1) = x in
((copy_core_type x0),
(let (x0, x1) = x1 in
((copy_variance x0), (copy_injectivity x1))))) pci_params);
Ast_414.Parsetree.pci_name = (copy_loc (fun x -> x) pci_name);
Ast_414.Parsetree.pci_expr = (f0 pci_expr);
Ast_414.Parsetree.pci_loc = (copy_location pci_loc);
Ast_414.Parsetree.pci_attributes = (copy_attributes pci_attributes)
}
(* One-to-one translation of the virtual/concrete flag between AST versions. *)
and copy_virtual_flag :
  Ast_500.Asttypes.virtual_flag -> Ast_414.Asttypes.virtual_flag =
  fun flag ->
    match flag with
    | Ast_500.Asttypes.Virtual -> Ast_414.Asttypes.Virtual
    | Ast_500.Asttypes.Concrete -> Ast_414.Asttypes.Concrete
and copy_include_description :
Ast_500.Parsetree.include_description ->
Ast_414.Parsetree.include_description
= fun x -> copy_include_infos copy_module_type x
and copy_include_infos :
'f0 'g0 .
('f0 -> 'g0) ->
'f0 Ast_500.Parsetree.include_infos ->
'g0 Ast_414.Parsetree.include_infos
=
fun f0 ->
fun
{ Ast_500.Parsetree.pincl_mod = pincl_mod;
Ast_500.Parsetree.pincl_loc = pincl_loc;
Ast_500.Parsetree.pincl_attributes = pincl_attributes }
->
{
Ast_414.Parsetree.pincl_mod = (f0 pincl_mod);
Ast_414.Parsetree.pincl_loc = (copy_location pincl_loc);
Ast_414.Parsetree.pincl_attributes =
(copy_attributes pincl_attributes)
}
and copy_open_description :
Ast_500.Parsetree.open_description -> Ast_414.Parsetree.open_description =
fun x -> copy_open_infos (fun x -> copy_loc copy_Longident_t x) x
and copy_open_infos :
'f0 'g0 .
('f0 -> 'g0) ->
'f0 Ast_500.Parsetree.open_infos -> 'g0 Ast_414.Parsetree.open_infos
=
fun f0 ->
fun
{ Ast_500.Parsetree.popen_expr = popen_expr;
Ast_500.Parsetree.popen_override = popen_override;
Ast_500.Parsetree.popen_loc = popen_loc;
Ast_500.Parsetree.popen_attributes = popen_attributes }
->
{
Ast_414.Parsetree.popen_expr = (f0 popen_expr);
Ast_414.Parsetree.popen_override =
(copy_override_flag popen_override);
Ast_414.Parsetree.popen_loc = (copy_location popen_loc);
Ast_414.Parsetree.popen_attributes =
(copy_attributes popen_attributes)
}
(* One-to-one translation of the override flag ([!] vs plain) between
   AST versions. *)
and copy_override_flag :
  Ast_500.Asttypes.override_flag -> Ast_414.Asttypes.override_flag =
  fun flag ->
    match flag with
    | Ast_500.Asttypes.Override -> Ast_414.Asttypes.Override
    | Ast_500.Asttypes.Fresh -> Ast_414.Asttypes.Fresh
and copy_module_type_declaration :
Ast_500.Parsetree.module_type_declaration ->
Ast_414.Parsetree.module_type_declaration
=
fun
{ Ast_500.Parsetree.pmtd_name = pmtd_name;
Ast_500.Parsetree.pmtd_type = pmtd_type;
Ast_500.Parsetree.pmtd_attributes = pmtd_attributes;
Ast_500.Parsetree.pmtd_loc = pmtd_loc }
->
{
Ast_414.Parsetree.pmtd_name = (copy_loc (fun x -> x) pmtd_name);
Ast_414.Parsetree.pmtd_type = (Option.map copy_module_type pmtd_type);
Ast_414.Parsetree.pmtd_attributes = (copy_attributes pmtd_attributes);
Ast_414.Parsetree.pmtd_loc = (copy_location pmtd_loc)
}
and copy_module_substitution :
Ast_500.Parsetree.module_substitution ->
Ast_414.Parsetree.module_substitution
=
fun
{ Ast_500.Parsetree.pms_name = pms_name;
Ast_500.Parsetree.pms_manifest = pms_manifest;
Ast_500.Parsetree.pms_attributes = pms_attributes;
Ast_500.Parsetree.pms_loc = pms_loc }
->
{
Ast_414.Parsetree.pms_name = (copy_loc (fun x -> x) pms_name);
Ast_414.Parsetree.pms_manifest =
(copy_loc copy_Longident_t pms_manifest);
Ast_414.Parsetree.pms_attributes = (copy_attributes pms_attributes);
Ast_414.Parsetree.pms_loc = (copy_location pms_loc)
}
and copy_module_declaration :
Ast_500.Parsetree.module_declaration ->
Ast_414.Parsetree.module_declaration
=
fun
{ Ast_500.Parsetree.pmd_name = pmd_name;
Ast_500.Parsetree.pmd_type = pmd_type;
Ast_500.Parsetree.pmd_attributes = pmd_attributes;
Ast_500.Parsetree.pmd_loc = pmd_loc }
->
{
Ast_414.Parsetree.pmd_name =
(copy_loc (fun x -> Option.map (fun x -> x) x) pmd_name);
Ast_414.Parsetree.pmd_type = (copy_module_type pmd_type);
Ast_414.Parsetree.pmd_attributes = (copy_attributes pmd_attributes);
Ast_414.Parsetree.pmd_loc = (copy_location pmd_loc)
}
and copy_type_exception :
Ast_500.Parsetree.type_exception -> Ast_414.Parsetree.type_exception =
fun
{ Ast_500.Parsetree.ptyexn_constructor = ptyexn_constructor;
Ast_500.Parsetree.ptyexn_loc = ptyexn_loc;
Ast_500.Parsetree.ptyexn_attributes = ptyexn_attributes }
->
{
Ast_414.Parsetree.ptyexn_constructor =
(copy_extension_constructor ptyexn_constructor);
Ast_414.Parsetree.ptyexn_loc = (copy_location ptyexn_loc);
Ast_414.Parsetree.ptyexn_attributes =
(copy_attributes ptyexn_attributes)
}
and copy_type_extension :
Ast_500.Parsetree.type_extension -> Ast_414.Parsetree.type_extension =
fun
{ Ast_500.Parsetree.ptyext_path = ptyext_path;
Ast_500.Parsetree.ptyext_params = ptyext_params;
Ast_500.Parsetree.ptyext_constructors = ptyext_constructors;
Ast_500.Parsetree.ptyext_private = ptyext_private;
Ast_500.Parsetree.ptyext_loc = ptyext_loc;
Ast_500.Parsetree.ptyext_attributes = ptyext_attributes }
->
{
Ast_414.Parsetree.ptyext_path = (copy_loc copy_Longident_t ptyext_path);
Ast_414.Parsetree.ptyext_params =
(List.map
(fun x ->
let (x0, x1) = x in
((copy_core_type x0),
(let (x0, x1) = x1 in
((copy_variance x0), (copy_injectivity x1))))) ptyext_params);
Ast_414.Parsetree.ptyext_constructors =
(List.map copy_extension_constructor ptyext_constructors);
Ast_414.Parsetree.ptyext_private = (copy_private_flag ptyext_private);
Ast_414.Parsetree.ptyext_loc = (copy_location ptyext_loc);
Ast_414.Parsetree.ptyext_attributes =
(copy_attributes ptyext_attributes)
}
and copy_extension_constructor :
Ast_500.Parsetree.extension_constructor ->
Ast_414.Parsetree.extension_constructor
=
fun
{ Ast_500.Parsetree.pext_name = pext_name;
Ast_500.Parsetree.pext_kind = pext_kind;
Ast_500.Parsetree.pext_loc = pext_loc;
Ast_500.Parsetree.pext_attributes = pext_attributes }
->
{
Ast_414.Parsetree.pext_name = (copy_loc (fun x -> x) pext_name);
Ast_414.Parsetree.pext_kind =
(copy_extension_constructor_kind pext_kind);
Ast_414.Parsetree.pext_loc = (copy_location pext_loc);
Ast_414.Parsetree.pext_attributes = (copy_attributes pext_attributes)
}
and copy_extension_constructor_kind :
Ast_500.Parsetree.extension_constructor_kind ->
Ast_414.Parsetree.extension_constructor_kind
=
function
| Ast_500.Parsetree.Pext_decl (x0, x1, x2) ->
Ast_414.Parsetree.Pext_decl
((List.map (fun x -> copy_loc (fun x -> x) x) x0),
(copy_constructor_arguments x1), (Option.map copy_core_type x2))
| Ast_500.Parsetree.Pext_rebind x0 ->
Ast_414.Parsetree.Pext_rebind (copy_loc copy_Longident_t x0)
and copy_type_declaration :
Ast_500.Parsetree.type_declaration -> Ast_414.Parsetree.type_declaration =
fun
{ Ast_500.Parsetree.ptype_name = ptype_name;
Ast_500.Parsetree.ptype_params = ptype_params;
Ast_500.Parsetree.ptype_cstrs = ptype_cstrs;
Ast_500.Parsetree.ptype_kind = ptype_kind;
Ast_500.Parsetree.ptype_private = ptype_private;
Ast_500.Parsetree.ptype_manifest = ptype_manifest;
Ast_500.Parsetree.ptype_attributes = ptype_attributes;
Ast_500.Parsetree.ptype_loc = ptype_loc }
->
{
Ast_414.Parsetree.ptype_name = (copy_loc (fun x -> x) ptype_name);
Ast_414.Parsetree.ptype_params =
(List.map
(fun x ->
let (x0, x1) = x in
((copy_core_type x0),
(let (x0, x1) = x1 in
((copy_variance x0), (copy_injectivity x1))))) ptype_params);
Ast_414.Parsetree.ptype_cstrs =
(List.map
(fun x ->
let (x0, x1, x2) = x in
((copy_core_type x0), (copy_core_type x1), (copy_location x2)))
ptype_cstrs);
Ast_414.Parsetree.ptype_kind = (copy_type_kind ptype_kind);
Ast_414.Parsetree.ptype_private = (copy_private_flag ptype_private);
Ast_414.Parsetree.ptype_manifest =
(Option.map copy_core_type ptype_manifest);
Ast_414.Parsetree.ptype_attributes = (copy_attributes ptype_attributes);
Ast_414.Parsetree.ptype_loc = (copy_location ptype_loc)
}
(* One-to-one translation of the private/public flag between AST versions. *)
and copy_private_flag :
  Ast_500.Asttypes.private_flag -> Ast_414.Asttypes.private_flag =
  fun flag ->
    match flag with
    | Ast_500.Asttypes.Private -> Ast_414.Asttypes.Private
    | Ast_500.Asttypes.Public -> Ast_414.Asttypes.Public
(* Migrate a type declaration body: abstract, a variant (constructor list),
   a record (field list), or an open extensible type. *)
and copy_type_kind :
  Ast_500.Parsetree.type_kind -> Ast_414.Parsetree.type_kind =
  fun kind ->
    match kind with
    | Ast_500.Parsetree.Ptype_abstract -> Ast_414.Parsetree.Ptype_abstract
    | Ast_500.Parsetree.Ptype_variant ctors ->
        Ast_414.Parsetree.Ptype_variant
          (List.map copy_constructor_declaration ctors)
    | Ast_500.Parsetree.Ptype_record fields ->
        Ast_414.Parsetree.Ptype_record
          (List.map copy_label_declaration fields)
    | Ast_500.Parsetree.Ptype_open -> Ast_414.Parsetree.Ptype_open
and copy_constructor_declaration :
Ast_500.Parsetree.constructor_declaration ->
Ast_414.Parsetree.constructor_declaration
=
fun
{ Ast_500.Parsetree.pcd_name = pcd_name;
Ast_500.Parsetree.pcd_vars = pcd_vars;
Ast_500.Parsetree.pcd_args = pcd_args;
Ast_500.Parsetree.pcd_res = pcd_res;
Ast_500.Parsetree.pcd_loc = pcd_loc;
Ast_500.Parsetree.pcd_attributes = pcd_attributes }
->
{
Ast_414.Parsetree.pcd_name = (copy_loc (fun x -> x) pcd_name);
Ast_414.Parsetree.pcd_vars =
(List.map (fun x -> copy_loc (fun x -> x) x) pcd_vars);
Ast_414.Parsetree.pcd_args = (copy_constructor_arguments pcd_args);
Ast_414.Parsetree.pcd_res = (Option.map copy_core_type pcd_res);
Ast_414.Parsetree.pcd_loc = (copy_location pcd_loc);
Ast_414.Parsetree.pcd_attributes = (copy_attributes pcd_attributes)
}
(* Migrate constructor arguments: a tuple of types or an inline record. *)
and copy_constructor_arguments :
  Ast_500.Parsetree.constructor_arguments ->
    Ast_414.Parsetree.constructor_arguments
  =
  fun args ->
    match args with
    | Ast_500.Parsetree.Pcstr_tuple tys ->
        Ast_414.Parsetree.Pcstr_tuple (List.map copy_core_type tys)
    | Ast_500.Parsetree.Pcstr_record fields ->
        Ast_414.Parsetree.Pcstr_record
          (List.map copy_label_declaration fields)
and copy_label_declaration :
Ast_500.Parsetree.label_declaration -> Ast_414.Parsetree.label_declaration
=
fun
{ Ast_500.Parsetree.pld_name = pld_name;
Ast_500.Parsetree.pld_mutable = pld_mutable;
Ast_500.Parsetree.pld_type = pld_type;
Ast_500.Parsetree.pld_loc = pld_loc;
Ast_500.Parsetree.pld_attributes = pld_attributes }
->
{
Ast_414.Parsetree.pld_name = (copy_loc (fun x -> x) pld_name);
Ast_414.Parsetree.pld_mutable = (copy_mutable_flag pld_mutable);
Ast_414.Parsetree.pld_type = (copy_core_type pld_type);
Ast_414.Parsetree.pld_loc = (copy_location pld_loc);
Ast_414.Parsetree.pld_attributes = (copy_attributes pld_attributes)
}
(* One-to-one translation of the mutable/immutable flag between AST
   versions. *)
and copy_mutable_flag :
  Ast_500.Asttypes.mutable_flag -> Ast_414.Asttypes.mutable_flag =
  fun flag ->
    match flag with
    | Ast_500.Asttypes.Immutable -> Ast_414.Asttypes.Immutable
    | Ast_500.Asttypes.Mutable -> Ast_414.Asttypes.Mutable
(* One-to-one translation of the type-parameter injectivity annotation. *)
and copy_injectivity :
  Ast_500.Asttypes.injectivity -> Ast_414.Asttypes.injectivity =
  fun inj ->
    match inj with
    | Ast_500.Asttypes.Injective -> Ast_414.Asttypes.Injective
    | Ast_500.Asttypes.NoInjectivity -> Ast_414.Asttypes.NoInjectivity
(* One-to-one translation of the type-parameter variance annotation. *)
and copy_variance : Ast_500.Asttypes.variance -> Ast_414.Asttypes.variance =
  fun v ->
    match v with
    | Ast_500.Asttypes.Covariant -> Ast_414.Asttypes.Covariant
    | Ast_500.Asttypes.Contravariant -> Ast_414.Asttypes.Contravariant
    | Ast_500.Asttypes.NoVariance -> Ast_414.Asttypes.NoVariance
and copy_value_description :
Ast_500.Parsetree.value_description -> Ast_414.Parsetree.value_description
=
fun
{ Ast_500.Parsetree.pval_name = pval_name;
Ast_500.Parsetree.pval_type = pval_type;
Ast_500.Parsetree.pval_prim = pval_prim;
Ast_500.Parsetree.pval_attributes = pval_attributes;
Ast_500.Parsetree.pval_loc = pval_loc }
->
{
Ast_414.Parsetree.pval_name = (copy_loc (fun x -> x) pval_name);
Ast_414.Parsetree.pval_type = (copy_core_type pval_type);
Ast_414.Parsetree.pval_prim = (List.map (fun x -> x) pval_prim);
Ast_414.Parsetree.pval_attributes = (copy_attributes pval_attributes);
Ast_414.Parsetree.pval_loc = (copy_location pval_loc)
}
(* Migrate one field of an object type: a tagged method type or an
   inherited object type. *)
and copy_object_field_desc :
  Ast_500.Parsetree.object_field_desc -> Ast_414.Parsetree.object_field_desc
  =
  fun desc ->
    match desc with
    | Ast_500.Parsetree.Otag (tag, ty) ->
        Ast_414.Parsetree.Otag
          ((copy_loc copy_label tag), (copy_core_type ty))
    | Ast_500.Parsetree.Oinherit ty ->
        Ast_414.Parsetree.Oinherit (copy_core_type ty)
(* Migrate an argument label: unlabelled, [~label], or [?label]; the label
   text itself is carried over unchanged. *)
and copy_arg_label : Ast_500.Asttypes.arg_label -> Ast_414.Asttypes.arg_label
  =
  fun lbl ->
    match lbl with
    | Ast_500.Asttypes.Nolabel -> Ast_414.Asttypes.Nolabel
    | Ast_500.Asttypes.Labelled name -> Ast_414.Asttypes.Labelled name
    | Ast_500.Asttypes.Optional name -> Ast_414.Asttypes.Optional name
(* One-to-one translation of the closed/open row flag between AST versions. *)
and copy_closed_flag :
  Ast_500.Asttypes.closed_flag -> Ast_414.Asttypes.closed_flag =
  fun flag ->
    match flag with
    | Ast_500.Asttypes.Closed -> Ast_414.Asttypes.Closed
    | Ast_500.Asttypes.Open -> Ast_414.Asttypes.Open
(* Labels are plain strings in both AST versions; copying is the identity. *)
and copy_label : Ast_500.Asttypes.label -> Ast_414.Asttypes.label =
  fun x -> x
(* One-to-one translation of the [rec]/non-[rec] flag between AST versions. *)
and copy_rec_flag : Ast_500.Asttypes.rec_flag -> Ast_414.Asttypes.rec_flag =
  fun flag ->
    match flag with
    | Ast_500.Asttypes.Nonrecursive -> Ast_414.Asttypes.Nonrecursive
    | Ast_500.Asttypes.Recursive -> Ast_414.Asttypes.Recursive
(* Migrate a literal constant.  Integer and float literals keep their source
   text plus an optional suffix character; string literals keep their
   location and optional {delimiter|...|delimiter}. *)
and copy_constant : Ast_500.Parsetree.constant -> Ast_414.Parsetree.constant
  =
  fun const ->
    match const with
    | Ast_500.Parsetree.Pconst_integer (text, suffix) ->
        Ast_414.Parsetree.Pconst_integer
          (text, (Option.map (fun x -> x) suffix))
    | Ast_500.Parsetree.Pconst_char c -> Ast_414.Parsetree.Pconst_char c
    | Ast_500.Parsetree.Pconst_string (text, loc, delim) ->
        Ast_414.Parsetree.Pconst_string
          (text, (copy_location loc), (Option.map (fun x -> x) delim))
    | Ast_500.Parsetree.Pconst_float (text, suffix) ->
        Ast_414.Parsetree.Pconst_float
          (text, (Option.map (fun x -> x) suffix))
(* Deep-copy a long identifier (Longident is shared between versions, so
   this is a structural identity walk over Lident/Ldot/Lapply). *)
and copy_Longident_t : Longident.t -> Longident.t =
  fun lid ->
    match lid with
    | Longident.Lident name -> Longident.Lident name
    | Longident.Ldot (prefix, name) ->
        Longident.Ldot ((copy_Longident_t prefix), name)
    | Longident.Lapply (functor_, arg) ->
        Longident.Lapply
          ((copy_Longident_t functor_), (copy_Longident_t arg))
(* Migrate a located value: apply [f] to the payload and copy the source
   location.  Polymorphic in the payload type on both sides. *)
and copy_loc :
  'f0 'g0 .
    ('f0 -> 'g0) -> 'f0 Ast_500.Asttypes.loc -> 'g0 Ast_414.Asttypes.loc
  =
  fun f located ->
    {
      Ast_414.Asttypes.txt = (f located.Ast_500.Asttypes.txt);
      Ast_414.Asttypes.loc = (copy_location located.Ast_500.Asttypes.loc)
    }
(* Copy a source location (Location.t is shared between versions): both
   endpoint positions are copied, the ghost flag is carried over as-is. *)
and copy_location : Location.t -> Location.t =
  fun loc ->
    {
      Location.loc_start = (copy_position loc.Location.loc_start);
      Location.loc_end = (copy_position loc.Location.loc_end);
      Location.loc_ghost = loc.Location.loc_ghost
    }
(* Copy a lexer position (Lexing.position is a shared stdlib type): all
   four fields are carried over unchanged, yielding a fresh record. *)
and copy_position : Lexing.position -> Lexing.position =
  fun pos ->
    {
      Lexing.pos_fname = pos.Lexing.pos_fname;
      Lexing.pos_lnum = pos.Lexing.pos_lnum;
      Lexing.pos_bol = pos.Lexing.pos_bol;
      Lexing.pos_cnum = pos.Lexing.pos_cnum
    }
| null | https://raw.githubusercontent.com/reasonml/reason/3f433821d93bdc8f85677d4ee5d276ff6551c777/src/vendored-omp/src/migrate_parsetree_500_414_migrate.ml | ocaml | open Stdlib0
module From = Ast_500
module To = Ast_414
(* Second recursive chain: copiers for the toplevel printer's [Outcometree]
   representation, from the 5.00 to the 4.14 shape. *)
let rec copy_out_type_extension :
  Ast_500.Outcometree.out_type_extension ->
    Ast_414.Outcometree.out_type_extension
  =
  fun
    { Ast_500.Outcometree.otyext_name = otyext_name;
      Ast_500.Outcometree.otyext_params = otyext_params;
      Ast_500.Outcometree.otyext_constructors = otyext_constructors;
      Ast_500.Outcometree.otyext_private = otyext_private }
    ->
    {
      Ast_414.Outcometree.otyext_name = otyext_name;
      Ast_414.Outcometree.otyext_params =
        (List.map (fun x -> x) otyext_params);
      Ast_414.Outcometree.otyext_constructors =
        (List.map copy_out_constructor otyext_constructors);
      Ast_414.Outcometree.otyext_private = (copy_private_flag otyext_private)
    }
(* Copy a toplevel phrase result: an evaluated value, a signature of new
   bindings, or an uncaught exception (with its optional printed value). *)
and copy_out_phrase :
  Ast_500.Outcometree.out_phrase -> Ast_414.Outcometree.out_phrase =
  function
  | Ast_500.Outcometree.Ophr_eval (x0, x1) ->
      Ast_414.Outcometree.Ophr_eval ((copy_out_value x0), (copy_out_type x1))
  | Ast_500.Outcometree.Ophr_signature x0 ->
      Ast_414.Outcometree.Ophr_signature
        (List.map
           (fun x ->
              let (x0, x1) = x in
              ((copy_out_sig_item x0), (Option.map copy_out_value x1))) x0)
  | Ast_500.Outcometree.Ophr_exception x0 ->
      Ast_414.Outcometree.Ophr_exception
        (let (x0, x1) = x0 in (x0, (copy_out_value x1)))
(* Copy one printed signature item; each constructor maps one-to-one to its
   4.14 counterpart, recursing into the component copiers. *)
and copy_out_sig_item :
  Ast_500.Outcometree.out_sig_item -> Ast_414.Outcometree.out_sig_item =
  function
  | Ast_500.Outcometree.Osig_class (x0, x1, x2, x3, x4) ->
      Ast_414.Outcometree.Osig_class
        (x0, x1, (List.map copy_out_type_param x2), (copy_out_class_type x3),
          (copy_out_rec_status x4))
  | Ast_500.Outcometree.Osig_class_type (x0, x1, x2, x3, x4) ->
      Ast_414.Outcometree.Osig_class_type
        (x0, x1, (List.map copy_out_type_param x2), (copy_out_class_type x3),
          (copy_out_rec_status x4))
  | Ast_500.Outcometree.Osig_typext (x0, x1) ->
      Ast_414.Outcometree.Osig_typext
        ((copy_out_extension_constructor x0), (copy_out_ext_status x1))
  | Ast_500.Outcometree.Osig_modtype (x0, x1) ->
      Ast_414.Outcometree.Osig_modtype (x0, (copy_out_module_type x1))
  | Ast_500.Outcometree.Osig_module (x0, x1, x2) ->
      Ast_414.Outcometree.Osig_module
        (x0, (copy_out_module_type x1), (copy_out_rec_status x2))
  | Ast_500.Outcometree.Osig_type (x0, x1) ->
      Ast_414.Outcometree.Osig_type
        ((copy_out_type_decl x0), (copy_out_rec_status x1))
  | Ast_500.Outcometree.Osig_value x0 ->
      Ast_414.Outcometree.Osig_value (copy_out_val_decl x0)
  | Ast_500.Outcometree.Osig_ellipsis -> Ast_414.Outcometree.Osig_ellipsis
(* Copy a printed value declaration (name, type, primitives, attributes). *)
and copy_out_val_decl :
  Ast_500.Outcometree.out_val_decl -> Ast_414.Outcometree.out_val_decl =
  fun
    { Ast_500.Outcometree.oval_name = oval_name;
      Ast_500.Outcometree.oval_type = oval_type;
      Ast_500.Outcometree.oval_prims = oval_prims;
      Ast_500.Outcometree.oval_attributes = oval_attributes }
    ->
    {
      Ast_414.Outcometree.oval_name = oval_name;
      Ast_414.Outcometree.oval_type = (copy_out_type oval_type);
      Ast_414.Outcometree.oval_prims = (List.map (fun x -> x) oval_prims);
      Ast_414.Outcometree.oval_attributes =
        (List.map copy_out_attribute oval_attributes)
    }
(* Copy a printed type declaration, including its constraints
   (pairs of types) and immediacy information. *)
and copy_out_type_decl :
  Ast_500.Outcometree.out_type_decl -> Ast_414.Outcometree.out_type_decl =
  fun
    { Ast_500.Outcometree.otype_name = otype_name;
      Ast_500.Outcometree.otype_params = otype_params;
      Ast_500.Outcometree.otype_type = otype_type;
      Ast_500.Outcometree.otype_private = otype_private;
      Ast_500.Outcometree.otype_immediate = otype_immediate;
      Ast_500.Outcometree.otype_unboxed = otype_unboxed;
      Ast_500.Outcometree.otype_cstrs = otype_cstrs }
    ->
    {
      Ast_414.Outcometree.otype_name = otype_name;
      Ast_414.Outcometree.otype_params =
        (List.map copy_out_type_param otype_params);
      Ast_414.Outcometree.otype_type = (copy_out_type otype_type);
      Ast_414.Outcometree.otype_private = (copy_private_flag otype_private);
      Ast_414.Outcometree.otype_immediate =
        (copy_Type_immediacy_t otype_immediate);
      Ast_414.Outcometree.otype_unboxed = otype_unboxed;
      Ast_414.Outcometree.otype_cstrs =
        (List.map
           (fun x ->
              let (x0, x1) = x in ((copy_out_type x0), (copy_out_type x1)))
           otype_cstrs)
    }
(* Copy the type-immediacy classification; the three cases are identical
   across versions. *)
and copy_Type_immediacy_t :
  Ast_500.Type_immediacy.t -> Ast_414.Type_immediacy.t =
  function
  | Ast_500.Type_immediacy.Unknown -> Ast_414.Type_immediacy.Unknown
  | Ast_500.Type_immediacy.Always -> Ast_414.Type_immediacy.Always
  | Ast_500.Type_immediacy.Always_on_64bits ->
      Ast_414.Type_immediacy.Always_on_64bits
(* Copy a printed module type.  [Omty_functor] carries an optional named
   parameter (optional name + parameter module type) plus the result. *)
and copy_out_module_type :
  Ast_500.Outcometree.out_module_type -> Ast_414.Outcometree.out_module_type
  =
  function
  | Ast_500.Outcometree.Omty_abstract -> Ast_414.Outcometree.Omty_abstract
  | Ast_500.Outcometree.Omty_functor (x0, x1) ->
      Ast_414.Outcometree.Omty_functor
        ((Option.map
            (fun x ->
               let (x0, x1) = x in
               ((Option.map (fun x -> x) x0), (copy_out_module_type x1))) x0),
          (copy_out_module_type x1))
  | Ast_500.Outcometree.Omty_ident x0 ->
      Ast_414.Outcometree.Omty_ident (copy_out_ident x0)
  | Ast_500.Outcometree.Omty_signature x0 ->
      Ast_414.Outcometree.Omty_signature (List.map copy_out_sig_item x0)
  | Ast_500.Outcometree.Omty_alias x0 ->
      Ast_414.Outcometree.Omty_alias (copy_out_ident x0)
(* Copy the extension-status marker (first/next/exception). *)
and copy_out_ext_status :
  Ast_500.Outcometree.out_ext_status -> Ast_414.Outcometree.out_ext_status =
  function
  | Ast_500.Outcometree.Oext_first -> Ast_414.Outcometree.Oext_first
  | Ast_500.Outcometree.Oext_next -> Ast_414.Outcometree.Oext_next
  | Ast_500.Outcometree.Oext_exception -> Ast_414.Outcometree.Oext_exception
(* Copy a printed extension constructor (name, host type, arguments,
   optional return type, privacy). *)
and copy_out_extension_constructor :
  Ast_500.Outcometree.out_extension_constructor ->
    Ast_414.Outcometree.out_extension_constructor
  =
  fun
    { Ast_500.Outcometree.oext_name = oext_name;
      Ast_500.Outcometree.oext_type_name = oext_type_name;
      Ast_500.Outcometree.oext_type_params = oext_type_params;
      Ast_500.Outcometree.oext_args = oext_args;
      Ast_500.Outcometree.oext_ret_type = oext_ret_type;
      Ast_500.Outcometree.oext_private = oext_private }
    ->
    {
      Ast_414.Outcometree.oext_name = oext_name;
      Ast_414.Outcometree.oext_type_name = oext_type_name;
      Ast_414.Outcometree.oext_type_params =
        (List.map (fun x -> x) oext_type_params);
      Ast_414.Outcometree.oext_args = (List.map copy_out_type oext_args);
      Ast_414.Outcometree.oext_ret_type =
        (Option.map copy_out_type oext_ret_type);
      Ast_414.Outcometree.oext_private = (copy_private_flag oext_private)
    }
(* Copy the mutual-recursion marker for printed items (not/first/next). *)
and copy_out_rec_status :
  Ast_500.Outcometree.out_rec_status -> Ast_414.Outcometree.out_rec_status =
  function
  | Ast_500.Outcometree.Orec_not -> Ast_414.Outcometree.Orec_not
  | Ast_500.Outcometree.Orec_first -> Ast_414.Outcometree.Orec_first
  | Ast_500.Outcometree.Orec_next -> Ast_414.Outcometree.Orec_next
(* Copy a printed class type (constructor application, arrow, or signature
   with an optional self type). *)
and copy_out_class_type :
  Ast_500.Outcometree.out_class_type -> Ast_414.Outcometree.out_class_type =
  function
  | Ast_500.Outcometree.Octy_constr (x0, x1) ->
      Ast_414.Outcometree.Octy_constr
        ((copy_out_ident x0), (List.map copy_out_type x1))
  | Ast_500.Outcometree.Octy_arrow (x0, x1, x2) ->
      Ast_414.Outcometree.Octy_arrow
        (x0, (copy_out_type x1), (copy_out_class_type x2))
  | Ast_500.Outcometree.Octy_signature (x0, x1) ->
      Ast_414.Outcometree.Octy_signature
        ((Option.map copy_out_type x0),
          (List.map copy_out_class_sig_item x1))
(* Copy one printed class-signature item (constraint, method, or value). *)
and copy_out_class_sig_item :
  Ast_500.Outcometree.out_class_sig_item ->
    Ast_414.Outcometree.out_class_sig_item
  =
  function
  | Ast_500.Outcometree.Ocsg_constraint (x0, x1) ->
      Ast_414.Outcometree.Ocsg_constraint
        ((copy_out_type x0), (copy_out_type x1))
  | Ast_500.Outcometree.Ocsg_method (x0, x1, x2, x3) ->
      Ast_414.Outcometree.Ocsg_method (x0, x1, x2, (copy_out_type x3))
  | Ast_500.Outcometree.Ocsg_value (x0, x1, x2, x3) ->
      Ast_414.Outcometree.Ocsg_value (x0, x1, x2, (copy_out_type x3))
(* Copy a printed type parameter: its name plus the (variance, injectivity)
   pair, using copiers defined elsewhere in the file. *)
and copy_out_type_param :
  Ast_500.Outcometree.out_type_param -> Ast_414.Outcometree.out_type_param =
  fun x ->
    let (x0, x1) = x in
    (x0, (let (x0, x1) = x1 in ((copy_variance x0), (copy_injectivity x1))))
(* Copy a printed type expression.  Every constructor is mapped one-to-one;
   string payloads (variable names, labels) are carried over unchanged. *)
and copy_out_type :
  Ast_500.Outcometree.out_type -> Ast_414.Outcometree.out_type =
  function
  | Ast_500.Outcometree.Otyp_abstract -> Ast_414.Outcometree.Otyp_abstract
  | Ast_500.Outcometree.Otyp_open -> Ast_414.Outcometree.Otyp_open
  | Ast_500.Outcometree.Otyp_alias (x0, x1) ->
      Ast_414.Outcometree.Otyp_alias ((copy_out_type x0), x1)
  | Ast_500.Outcometree.Otyp_arrow (x0, x1, x2) ->
      Ast_414.Outcometree.Otyp_arrow
        (x0, (copy_out_type x1), (copy_out_type x2))
  | Ast_500.Outcometree.Otyp_class (x0, x1, x2) ->
      Ast_414.Outcometree.Otyp_class
        (x0, (copy_out_ident x1), (List.map copy_out_type x2))
  | Ast_500.Outcometree.Otyp_constr (x0, x1) ->
      Ast_414.Outcometree.Otyp_constr
        ((copy_out_ident x0), (List.map copy_out_type x1))
  | Ast_500.Outcometree.Otyp_manifest (x0, x1) ->
      Ast_414.Outcometree.Otyp_manifest
        ((copy_out_type x0), (copy_out_type x1))
  | Ast_500.Outcometree.Otyp_object (x0, x1) ->
      Ast_414.Outcometree.Otyp_object
        ((List.map (fun x -> let (x0, x1) = x in (x0, (copy_out_type x1))) x0),
          (Option.map (fun x -> x) x1))
  | Ast_500.Outcometree.Otyp_record x0 ->
      Ast_414.Outcometree.Otyp_record
        (List.map
           (fun x -> let (x0, x1, x2) = x in (x0, x1, (copy_out_type x2))) x0)
  | Ast_500.Outcometree.Otyp_stuff x0 -> Ast_414.Outcometree.Otyp_stuff x0
  | Ast_500.Outcometree.Otyp_sum x0 ->
      Ast_414.Outcometree.Otyp_sum (List.map copy_out_constructor x0)
  | Ast_500.Outcometree.Otyp_tuple x0 ->
      Ast_414.Outcometree.Otyp_tuple (List.map copy_out_type x0)
  | Ast_500.Outcometree.Otyp_var (x0, x1) ->
      Ast_414.Outcometree.Otyp_var (x0, x1)
  | Ast_500.Outcometree.Otyp_variant (x0, x1, x2, x3) ->
      Ast_414.Outcometree.Otyp_variant
        (x0, (copy_out_variant x1), x2,
          (Option.map (fun x -> List.map (fun x -> x) x) x3))
  | Ast_500.Outcometree.Otyp_poly (x0, x1) ->
      Ast_414.Outcometree.Otyp_poly
        ((List.map (fun x -> x) x0), (copy_out_type x1))
  | Ast_500.Outcometree.Otyp_module (x0, x1) ->
      Ast_414.Outcometree.Otyp_module
        ((copy_out_ident x0),
          (List.map (fun x -> let (x0, x1) = x in (x0, (copy_out_type x1)))
             x1))
  | Ast_500.Outcometree.Otyp_attribute (x0, x1) ->
      Ast_414.Outcometree.Otyp_attribute
        ((copy_out_type x0), (copy_out_attribute x1))
(* Copy a printed attribute (just its name). *)
and copy_out_attribute :
  Ast_500.Outcometree.out_attribute -> Ast_414.Outcometree.out_attribute =
  fun { Ast_500.Outcometree.oattr_name = oattr_name } ->
    { Ast_414.Outcometree.oattr_name = oattr_name }
(* Copy a printed polymorphic-variant row (explicit fields or an inherited
   type). *)
and copy_out_variant :
  Ast_500.Outcometree.out_variant -> Ast_414.Outcometree.out_variant =
  function
  | Ast_500.Outcometree.Ovar_fields x0 ->
      Ast_414.Outcometree.Ovar_fields
        (List.map
           (fun x ->
              let (x0, x1, x2) = x in (x0, x1, (List.map copy_out_type x2)))
           x0)
  | Ast_500.Outcometree.Ovar_typ x0 ->
      Ast_414.Outcometree.Ovar_typ (copy_out_type x0)
(* Copy a printed data constructor (name, argument types, optional GADT
   return type). *)
and copy_out_constructor :
  Ast_500.Outcometree.out_constructor -> Ast_414.Outcometree.out_constructor
  =
  fun
    { Ast_500.Outcometree.ocstr_name = ocstr_name;
      Ast_500.Outcometree.ocstr_args = ocstr_args;
      Ast_500.Outcometree.ocstr_return_type = ocstr_return_type }
    ->
    {
      Ast_414.Outcometree.ocstr_name = ocstr_name;
      Ast_414.Outcometree.ocstr_args = (List.map copy_out_type ocstr_args);
      Ast_414.Outcometree.ocstr_return_type =
        (Option.map copy_out_type ocstr_return_type)
    }
(* Copy a printed runtime value.  Scalar payloads (ints, floats, chars,
   strings, printers) pass through untouched; structured cases recurse. *)
and copy_out_value :
  Ast_500.Outcometree.out_value -> Ast_414.Outcometree.out_value =
  function
  | Ast_500.Outcometree.Oval_array x0 ->
      Ast_414.Outcometree.Oval_array (List.map copy_out_value x0)
  | Ast_500.Outcometree.Oval_char x0 -> Ast_414.Outcometree.Oval_char x0
  | Ast_500.Outcometree.Oval_constr (x0, x1) ->
      Ast_414.Outcometree.Oval_constr
        ((copy_out_ident x0), (List.map copy_out_value x1))
  | Ast_500.Outcometree.Oval_ellipsis -> Ast_414.Outcometree.Oval_ellipsis
  | Ast_500.Outcometree.Oval_float x0 -> Ast_414.Outcometree.Oval_float x0
  | Ast_500.Outcometree.Oval_int x0 -> Ast_414.Outcometree.Oval_int x0
  | Ast_500.Outcometree.Oval_int32 x0 -> Ast_414.Outcometree.Oval_int32 x0
  | Ast_500.Outcometree.Oval_int64 x0 -> Ast_414.Outcometree.Oval_int64 x0
  | Ast_500.Outcometree.Oval_nativeint x0 ->
      Ast_414.Outcometree.Oval_nativeint x0
  | Ast_500.Outcometree.Oval_list x0 ->
      Ast_414.Outcometree.Oval_list (List.map copy_out_value x0)
  | Ast_500.Outcometree.Oval_printer x0 ->
      Ast_414.Outcometree.Oval_printer x0
  | Ast_500.Outcometree.Oval_record x0 ->
      Ast_414.Outcometree.Oval_record
        (List.map
           (fun x ->
              let (x0, x1) = x in ((copy_out_ident x0), (copy_out_value x1)))
           x0)
  | Ast_500.Outcometree.Oval_string (x0, x1, x2) ->
      Ast_414.Outcometree.Oval_string (x0, x1, (copy_out_string x2))
  | Ast_500.Outcometree.Oval_stuff x0 -> Ast_414.Outcometree.Oval_stuff x0
  | Ast_500.Outcometree.Oval_tuple x0 ->
      Ast_414.Outcometree.Oval_tuple (List.map copy_out_value x0)
  | Ast_500.Outcometree.Oval_variant (x0, x1) ->
      Ast_414.Outcometree.Oval_variant (x0, (Option.map copy_out_value x1))
(* Copy the string/bytes display tag. *)
and copy_out_string :
  Ast_500.Outcometree.out_string -> Ast_414.Outcometree.out_string =
  function
  | Ast_500.Outcometree.Ostr_string -> Ast_414.Outcometree.Ostr_string
  | Ast_500.Outcometree.Ostr_bytes -> Ast_414.Outcometree.Ostr_bytes
(* Copy a printed identifier path (ident, dot access, or functor
   application), recursing through nested paths. *)
and copy_out_ident :
  Ast_500.Outcometree.out_ident -> Ast_414.Outcometree.out_ident =
  function
  | Ast_500.Outcometree.Oide_apply (x0, x1) ->
      Ast_414.Outcometree.Oide_apply
        ((copy_out_ident x0), (copy_out_ident x1))
  | Ast_500.Outcometree.Oide_dot (x0, x1) ->
      Ast_414.Outcometree.Oide_dot ((copy_out_ident x0), x1)
  | Ast_500.Outcometree.Oide_ident x0 ->
      Ast_414.Outcometree.Oide_ident (copy_out_name x0)
(* Copy a printed name record. *)
and copy_out_name :
  Ast_500.Outcometree.out_name -> Ast_414.Outcometree.out_name =
  fun { Ast_500.Outcometree.printed_name = printed_name } ->
    { Ast_414.Outcometree.printed_name = printed_name }
(* Copy a toplevel phrase: either a structure definition or a toplevel
   directive such as [#load]. *)
and copy_toplevel_phrase :
  Ast_500.Parsetree.toplevel_phrase -> Ast_414.Parsetree.toplevel_phrase =
  function
  | Ast_500.Parsetree.Ptop_def x0 ->
      Ast_414.Parsetree.Ptop_def (copy_structure x0)
  | Ast_500.Parsetree.Ptop_dir x0 ->
      Ast_414.Parsetree.Ptop_dir (copy_toplevel_directive x0)
(* Copy a toplevel directive (name, optional argument, location). *)
and copy_toplevel_directive :
  Ast_500.Parsetree.toplevel_directive ->
    Ast_414.Parsetree.toplevel_directive
  =
  fun
    { Ast_500.Parsetree.pdir_name = pdir_name;
      Ast_500.Parsetree.pdir_arg = pdir_arg;
      Ast_500.Parsetree.pdir_loc = pdir_loc }
    ->
    {
      Ast_414.Parsetree.pdir_name = (copy_loc (fun x -> x) pdir_name);
      Ast_414.Parsetree.pdir_arg =
        (Option.map copy_directive_argument pdir_arg);
      Ast_414.Parsetree.pdir_loc = (copy_location pdir_loc)
    }
(* Copy a located directive argument. *)
and copy_directive_argument :
  Ast_500.Parsetree.directive_argument ->
    Ast_414.Parsetree.directive_argument
  =
  fun
    { Ast_500.Parsetree.pdira_desc = pdira_desc;
      Ast_500.Parsetree.pdira_loc = pdira_loc }
    ->
    {
      Ast_414.Parsetree.pdira_desc =
        (copy_directive_argument_desc pdira_desc);
      Ast_414.Parsetree.pdira_loc = (copy_location pdira_loc)
    }
(* Copy a directive-argument payload (string, int with optional suffix,
   identifier, or bool). *)
and copy_directive_argument_desc :
  Ast_500.Parsetree.directive_argument_desc ->
    Ast_414.Parsetree.directive_argument_desc
  =
  function
  | Ast_500.Parsetree.Pdir_string x0 -> Ast_414.Parsetree.Pdir_string x0
  | Ast_500.Parsetree.Pdir_int (x0, x1) ->
      Ast_414.Parsetree.Pdir_int (x0, (Option.map (fun x -> x) x1))
  | Ast_500.Parsetree.Pdir_ident x0 ->
      Ast_414.Parsetree.Pdir_ident (copy_Longident_t x0)
  | Ast_500.Parsetree.Pdir_bool x0 -> Ast_414.Parsetree.Pdir_bool x0
(* Copy an expression node: its description, location, location stack, and
   attributes. *)
and copy_expression :
  Ast_500.Parsetree.expression -> Ast_414.Parsetree.expression =
  fun
    { Ast_500.Parsetree.pexp_desc = pexp_desc;
      Ast_500.Parsetree.pexp_loc = pexp_loc;
      Ast_500.Parsetree.pexp_loc_stack = pexp_loc_stack;
      Ast_500.Parsetree.pexp_attributes = pexp_attributes }
    ->
    {
      Ast_414.Parsetree.pexp_desc = (copy_expression_desc pexp_desc);
      Ast_414.Parsetree.pexp_loc = (copy_location pexp_loc);
      Ast_414.Parsetree.pexp_loc_stack = (copy_location_stack pexp_loc_stack);
      Ast_414.Parsetree.pexp_attributes = (copy_attributes pexp_attributes)
    }
(* Copy an expression description.  All 5.00 expression constructors exist
   unchanged in 4.14, so each maps one-to-one onto its counterpart. *)
and copy_expression_desc :
  Ast_500.Parsetree.expression_desc -> Ast_414.Parsetree.expression_desc =
  function
  | Ast_500.Parsetree.Pexp_ident x0 ->
      Ast_414.Parsetree.Pexp_ident (copy_loc copy_Longident_t x0)
  | Ast_500.Parsetree.Pexp_constant x0 ->
      Ast_414.Parsetree.Pexp_constant (copy_constant x0)
  | Ast_500.Parsetree.Pexp_let (x0, x1, x2) ->
      Ast_414.Parsetree.Pexp_let
        ((copy_rec_flag x0), (List.map copy_value_binding x1),
          (copy_expression x2))
  | Ast_500.Parsetree.Pexp_function x0 ->
      Ast_414.Parsetree.Pexp_function (List.map copy_case x0)
  | Ast_500.Parsetree.Pexp_fun (x0, x1, x2, x3) ->
      Ast_414.Parsetree.Pexp_fun
        ((copy_arg_label x0), (Option.map copy_expression x1),
          (copy_pattern x2), (copy_expression x3))
  | Ast_500.Parsetree.Pexp_apply (x0, x1) ->
      Ast_414.Parsetree.Pexp_apply
        ((copy_expression x0),
          (List.map
             (fun x ->
                let (x0, x1) = x in
                ((copy_arg_label x0), (copy_expression x1))) x1))
  | Ast_500.Parsetree.Pexp_match (x0, x1) ->
      Ast_414.Parsetree.Pexp_match
        ((copy_expression x0), (List.map copy_case x1))
  | Ast_500.Parsetree.Pexp_try (x0, x1) ->
      Ast_414.Parsetree.Pexp_try
        ((copy_expression x0), (List.map copy_case x1))
  | Ast_500.Parsetree.Pexp_tuple x0 ->
      Ast_414.Parsetree.Pexp_tuple (List.map copy_expression x0)
  | Ast_500.Parsetree.Pexp_construct (x0, x1) ->
      Ast_414.Parsetree.Pexp_construct
        ((copy_loc copy_Longident_t x0), (Option.map copy_expression x1))
  | Ast_500.Parsetree.Pexp_variant (x0, x1) ->
      Ast_414.Parsetree.Pexp_variant
        ((copy_label x0), (Option.map copy_expression x1))
  | Ast_500.Parsetree.Pexp_record (x0, x1) ->
      Ast_414.Parsetree.Pexp_record
        ((List.map
            (fun x ->
               let (x0, x1) = x in
               ((copy_loc copy_Longident_t x0), (copy_expression x1))) x0),
          (Option.map copy_expression x1))
  | Ast_500.Parsetree.Pexp_field (x0, x1) ->
      Ast_414.Parsetree.Pexp_field
        ((copy_expression x0), (copy_loc copy_Longident_t x1))
  | Ast_500.Parsetree.Pexp_setfield (x0, x1, x2) ->
      Ast_414.Parsetree.Pexp_setfield
        ((copy_expression x0), (copy_loc copy_Longident_t x1),
          (copy_expression x2))
  | Ast_500.Parsetree.Pexp_array x0 ->
      Ast_414.Parsetree.Pexp_array (List.map copy_expression x0)
  | Ast_500.Parsetree.Pexp_ifthenelse (x0, x1, x2) ->
      Ast_414.Parsetree.Pexp_ifthenelse
        ((copy_expression x0), (copy_expression x1),
          (Option.map copy_expression x2))
  | Ast_500.Parsetree.Pexp_sequence (x0, x1) ->
      Ast_414.Parsetree.Pexp_sequence
        ((copy_expression x0), (copy_expression x1))
  | Ast_500.Parsetree.Pexp_while (x0, x1) ->
      Ast_414.Parsetree.Pexp_while
        ((copy_expression x0), (copy_expression x1))
  | Ast_500.Parsetree.Pexp_for (x0, x1, x2, x3, x4) ->
      Ast_414.Parsetree.Pexp_for
        ((copy_pattern x0), (copy_expression x1), (copy_expression x2),
          (copy_direction_flag x3), (copy_expression x4))
  | Ast_500.Parsetree.Pexp_constraint (x0, x1) ->
      Ast_414.Parsetree.Pexp_constraint
        ((copy_expression x0), (copy_core_type x1))
  | Ast_500.Parsetree.Pexp_coerce (x0, x1, x2) ->
      Ast_414.Parsetree.Pexp_coerce
        ((copy_expression x0), (Option.map copy_core_type x1),
          (copy_core_type x2))
  | Ast_500.Parsetree.Pexp_send (x0, x1) ->
      Ast_414.Parsetree.Pexp_send
        ((copy_expression x0), (copy_loc copy_label x1))
  | Ast_500.Parsetree.Pexp_new x0 ->
      Ast_414.Parsetree.Pexp_new (copy_loc copy_Longident_t x0)
  | Ast_500.Parsetree.Pexp_setinstvar (x0, x1) ->
      Ast_414.Parsetree.Pexp_setinstvar
        ((copy_loc copy_label x0), (copy_expression x1))
  | Ast_500.Parsetree.Pexp_override x0 ->
      Ast_414.Parsetree.Pexp_override
        (List.map
           (fun x ->
              let (x0, x1) = x in
              ((copy_loc copy_label x0), (copy_expression x1))) x0)
  | Ast_500.Parsetree.Pexp_letmodule (x0, x1, x2) ->
      Ast_414.Parsetree.Pexp_letmodule
        ((copy_loc (fun x -> Option.map (fun x -> x) x) x0),
          (copy_module_expr x1), (copy_expression x2))
  | Ast_500.Parsetree.Pexp_letexception (x0, x1) ->
      Ast_414.Parsetree.Pexp_letexception
        ((copy_extension_constructor x0), (copy_expression x1))
  | Ast_500.Parsetree.Pexp_assert x0 ->
      Ast_414.Parsetree.Pexp_assert (copy_expression x0)
  | Ast_500.Parsetree.Pexp_lazy x0 ->
      Ast_414.Parsetree.Pexp_lazy (copy_expression x0)
  | Ast_500.Parsetree.Pexp_poly (x0, x1) ->
      Ast_414.Parsetree.Pexp_poly
        ((copy_expression x0), (Option.map copy_core_type x1))
  | Ast_500.Parsetree.Pexp_object x0 ->
      Ast_414.Parsetree.Pexp_object (copy_class_structure x0)
  | Ast_500.Parsetree.Pexp_newtype (x0, x1) ->
      Ast_414.Parsetree.Pexp_newtype
        ((copy_loc (fun x -> x) x0), (copy_expression x1))
  | Ast_500.Parsetree.Pexp_pack x0 ->
      Ast_414.Parsetree.Pexp_pack (copy_module_expr x0)
  | Ast_500.Parsetree.Pexp_open (x0, x1) ->
      Ast_414.Parsetree.Pexp_open
        ((copy_open_declaration x0), (copy_expression x1))
  | Ast_500.Parsetree.Pexp_letop x0 ->
      Ast_414.Parsetree.Pexp_letop (copy_letop x0)
  | Ast_500.Parsetree.Pexp_extension x0 ->
      Ast_414.Parsetree.Pexp_extension (copy_extension x0)
  | Ast_500.Parsetree.Pexp_unreachable -> Ast_414.Parsetree.Pexp_unreachable
(* Copy a [let*]-style binding-operator group: the leading binding, the
   [and*] bindings, and the body expression. *)
and copy_letop : Ast_500.Parsetree.letop -> Ast_414.Parsetree.letop =
  fun
    { Ast_500.Parsetree.let_ = let_; Ast_500.Parsetree.ands = ands;
      Ast_500.Parsetree.body = body }
    ->
    {
      Ast_414.Parsetree.let_ = (copy_binding_op let_);
      Ast_414.Parsetree.ands = (List.map copy_binding_op ands);
      Ast_414.Parsetree.body = (copy_expression body)
    }
(* Copy one binding operator (operator name, bound pattern, bound
   expression, location). *)
and copy_binding_op :
  Ast_500.Parsetree.binding_op -> Ast_414.Parsetree.binding_op =
  fun
    { Ast_500.Parsetree.pbop_op = pbop_op;
      Ast_500.Parsetree.pbop_pat = pbop_pat;
      Ast_500.Parsetree.pbop_exp = pbop_exp;
      Ast_500.Parsetree.pbop_loc = pbop_loc }
    ->
    {
      Ast_414.Parsetree.pbop_op = (copy_loc (fun x -> x) pbop_op);
      Ast_414.Parsetree.pbop_pat = (copy_pattern pbop_pat);
      Ast_414.Parsetree.pbop_exp = (copy_expression pbop_exp);
      Ast_414.Parsetree.pbop_loc = (copy_location pbop_loc)
    }
and copy_direction_flag :
  Ast_500.Asttypes.direction_flag -> Ast_414.Asttypes.direction_flag =
  fun df ->
    (* [for]-loop direction is structurally identical in both AST versions;
       map each constructor to its same-named counterpart. *)
    match df with
    | Ast_500.Asttypes.Upto -> Ast_414.Asttypes.Upto
    | Ast_500.Asttypes.Downto -> Ast_414.Asttypes.Downto
(* Copy one match case: left-hand pattern, optional [when] guard, and
   right-hand expression. *)
and copy_case : Ast_500.Parsetree.case -> Ast_414.Parsetree.case =
  fun
    { Ast_500.Parsetree.pc_lhs = pc_lhs;
      Ast_500.Parsetree.pc_guard = pc_guard;
      Ast_500.Parsetree.pc_rhs = pc_rhs }
    ->
    {
      Ast_414.Parsetree.pc_lhs = (copy_pattern pc_lhs);
      Ast_414.Parsetree.pc_guard = (Option.map copy_expression pc_guard);
      Ast_414.Parsetree.pc_rhs = (copy_expression pc_rhs)
    }
(* Copy a [let] binding: pattern, expression, attributes, and location. *)
and copy_value_binding :
  Ast_500.Parsetree.value_binding -> Ast_414.Parsetree.value_binding =
  fun
    { Ast_500.Parsetree.pvb_pat = pvb_pat;
      Ast_500.Parsetree.pvb_expr = pvb_expr;
      Ast_500.Parsetree.pvb_attributes = pvb_attributes;
      Ast_500.Parsetree.pvb_loc = pvb_loc }
    ->
    {
      Ast_414.Parsetree.pvb_pat = (copy_pattern pvb_pat);
      Ast_414.Parsetree.pvb_expr = (copy_expression pvb_expr);
      Ast_414.Parsetree.pvb_attributes = (copy_attributes pvb_attributes);
      Ast_414.Parsetree.pvb_loc = (copy_location pvb_loc)
    }
(* Copy a pattern node: description, location, location stack, attributes. *)
and copy_pattern : Ast_500.Parsetree.pattern -> Ast_414.Parsetree.pattern =
  fun
    { Ast_500.Parsetree.ppat_desc = ppat_desc;
      Ast_500.Parsetree.ppat_loc = ppat_loc;
      Ast_500.Parsetree.ppat_loc_stack = ppat_loc_stack;
      Ast_500.Parsetree.ppat_attributes = ppat_attributes }
    ->
    {
      Ast_414.Parsetree.ppat_desc = (copy_pattern_desc ppat_desc);
      Ast_414.Parsetree.ppat_loc = (copy_location ppat_loc);
      Ast_414.Parsetree.ppat_loc_stack = (copy_location_stack ppat_loc_stack);
      Ast_414.Parsetree.ppat_attributes = (copy_attributes ppat_attributes)
    }
(* Copy a pattern description.  [Ppat_construct]'s optional payload is a
   pair of locally-abstract type names and the argument pattern. *)
and copy_pattern_desc :
  Ast_500.Parsetree.pattern_desc -> Ast_414.Parsetree.pattern_desc =
  function
  | Ast_500.Parsetree.Ppat_any -> Ast_414.Parsetree.Ppat_any
  | Ast_500.Parsetree.Ppat_var x0 ->
      Ast_414.Parsetree.Ppat_var (copy_loc (fun x -> x) x0)
  | Ast_500.Parsetree.Ppat_alias (x0, x1) ->
      Ast_414.Parsetree.Ppat_alias
        ((copy_pattern x0), (copy_loc (fun x -> x) x1))
  | Ast_500.Parsetree.Ppat_constant x0 ->
      Ast_414.Parsetree.Ppat_constant (copy_constant x0)
  | Ast_500.Parsetree.Ppat_interval (x0, x1) ->
      Ast_414.Parsetree.Ppat_interval
        ((copy_constant x0), (copy_constant x1))
  | Ast_500.Parsetree.Ppat_tuple x0 ->
      Ast_414.Parsetree.Ppat_tuple (List.map copy_pattern x0)
  | Ast_500.Parsetree.Ppat_construct (x0, x1) ->
      Ast_414.Parsetree.Ppat_construct
        ((copy_loc copy_Longident_t x0),
          (Option.map
             (fun x ->
                let (x0, x1) = x in
                ((List.map (fun x -> copy_loc (fun x -> x) x) x0),
                  (copy_pattern x1))) x1))
  | Ast_500.Parsetree.Ppat_variant (x0, x1) ->
      Ast_414.Parsetree.Ppat_variant
        ((copy_label x0), (Option.map copy_pattern x1))
  | Ast_500.Parsetree.Ppat_record (x0, x1) ->
      Ast_414.Parsetree.Ppat_record
        ((List.map
            (fun x ->
               let (x0, x1) = x in
               ((copy_loc copy_Longident_t x0), (copy_pattern x1))) x0),
          (copy_closed_flag x1))
  | Ast_500.Parsetree.Ppat_array x0 ->
      Ast_414.Parsetree.Ppat_array (List.map copy_pattern x0)
  | Ast_500.Parsetree.Ppat_or (x0, x1) ->
      Ast_414.Parsetree.Ppat_or ((copy_pattern x0), (copy_pattern x1))
  | Ast_500.Parsetree.Ppat_constraint (x0, x1) ->
      Ast_414.Parsetree.Ppat_constraint
        ((copy_pattern x0), (copy_core_type x1))
  | Ast_500.Parsetree.Ppat_type x0 ->
      Ast_414.Parsetree.Ppat_type (copy_loc copy_Longident_t x0)
  | Ast_500.Parsetree.Ppat_lazy x0 ->
      Ast_414.Parsetree.Ppat_lazy (copy_pattern x0)
  | Ast_500.Parsetree.Ppat_unpack x0 ->
      Ast_414.Parsetree.Ppat_unpack
        (copy_loc (fun x -> Option.map (fun x -> x) x) x0)
  | Ast_500.Parsetree.Ppat_exception x0 ->
      Ast_414.Parsetree.Ppat_exception (copy_pattern x0)
  | Ast_500.Parsetree.Ppat_extension x0 ->
      Ast_414.Parsetree.Ppat_extension (copy_extension x0)
  | Ast_500.Parsetree.Ppat_open (x0, x1) ->
      Ast_414.Parsetree.Ppat_open
        ((copy_loc copy_Longident_t x0), (copy_pattern x1))
(* Copy a core (type-expression) node: description, location, location
   stack, and attributes. *)
and copy_core_type :
  Ast_500.Parsetree.core_type -> Ast_414.Parsetree.core_type =
  fun
    { Ast_500.Parsetree.ptyp_desc = ptyp_desc;
      Ast_500.Parsetree.ptyp_loc = ptyp_loc;
      Ast_500.Parsetree.ptyp_loc_stack = ptyp_loc_stack;
      Ast_500.Parsetree.ptyp_attributes = ptyp_attributes }
    ->
    {
      Ast_414.Parsetree.ptyp_desc = (copy_core_type_desc ptyp_desc);
      Ast_414.Parsetree.ptyp_loc = (copy_location ptyp_loc);
      Ast_414.Parsetree.ptyp_loc_stack = (copy_location_stack ptyp_loc_stack);
      Ast_414.Parsetree.ptyp_attributes = (copy_attributes ptyp_attributes)
    }
(* Copy the ghost-location stack attached to expression/pattern/type nodes. *)
and copy_location_stack :
  Ast_500.Parsetree.location_stack -> Ast_414.Parsetree.location_stack =
  fun x -> List.map copy_location x
(* Copy a type-expression description; each constructor maps one-to-one. *)
and copy_core_type_desc :
  Ast_500.Parsetree.core_type_desc -> Ast_414.Parsetree.core_type_desc =
  function
  | Ast_500.Parsetree.Ptyp_any -> Ast_414.Parsetree.Ptyp_any
  | Ast_500.Parsetree.Ptyp_var x0 -> Ast_414.Parsetree.Ptyp_var x0
  | Ast_500.Parsetree.Ptyp_arrow (x0, x1, x2) ->
      Ast_414.Parsetree.Ptyp_arrow
        ((copy_arg_label x0), (copy_core_type x1), (copy_core_type x2))
  | Ast_500.Parsetree.Ptyp_tuple x0 ->
      Ast_414.Parsetree.Ptyp_tuple (List.map copy_core_type x0)
  | Ast_500.Parsetree.Ptyp_constr (x0, x1) ->
      Ast_414.Parsetree.Ptyp_constr
        ((copy_loc copy_Longident_t x0), (List.map copy_core_type x1))
  | Ast_500.Parsetree.Ptyp_object (x0, x1) ->
      Ast_414.Parsetree.Ptyp_object
        ((List.map copy_object_field x0), (copy_closed_flag x1))
  | Ast_500.Parsetree.Ptyp_class (x0, x1) ->
      Ast_414.Parsetree.Ptyp_class
        ((copy_loc copy_Longident_t x0), (List.map copy_core_type x1))
  | Ast_500.Parsetree.Ptyp_alias (x0, x1) ->
      Ast_414.Parsetree.Ptyp_alias ((copy_core_type x0), x1)
  | Ast_500.Parsetree.Ptyp_variant (x0, x1, x2) ->
      Ast_414.Parsetree.Ptyp_variant
        ((List.map copy_row_field x0), (copy_closed_flag x1),
          (Option.map (fun x -> List.map copy_label x) x2))
  | Ast_500.Parsetree.Ptyp_poly (x0, x1) ->
      Ast_414.Parsetree.Ptyp_poly
        ((List.map (fun x -> copy_loc (fun x -> x) x) x0),
          (copy_core_type x1))
  | Ast_500.Parsetree.Ptyp_package x0 ->
      Ast_414.Parsetree.Ptyp_package (copy_package_type x0)
  | Ast_500.Parsetree.Ptyp_extension x0 ->
      Ast_414.Parsetree.Ptyp_extension (copy_extension x0)
(* Copy a first-class-module package type: the module-type path and its
   [with type] constraints. *)
and copy_package_type :
  Ast_500.Parsetree.package_type -> Ast_414.Parsetree.package_type =
  fun x ->
    let (x0, x1) = x in
    ((copy_loc copy_Longident_t x0),
      (List.map
         (fun x ->
            let (x0, x1) = x in
            ((copy_loc copy_Longident_t x0), (copy_core_type x1))) x1))
(* Copy a polymorphic-variant row field wrapper (desc, loc, attributes). *)
and copy_row_field :
  Ast_500.Parsetree.row_field -> Ast_414.Parsetree.row_field =
  fun
    { Ast_500.Parsetree.prf_desc = prf_desc;
      Ast_500.Parsetree.prf_loc = prf_loc;
      Ast_500.Parsetree.prf_attributes = prf_attributes }
    ->
    {
      Ast_414.Parsetree.prf_desc = (copy_row_field_desc prf_desc);
      Ast_414.Parsetree.prf_loc = (copy_location prf_loc);
      Ast_414.Parsetree.prf_attributes = (copy_attributes prf_attributes)
    }
(* Copy a row-field description: a tag with argument types, or an
   inherited type. *)
and copy_row_field_desc :
  Ast_500.Parsetree.row_field_desc -> Ast_414.Parsetree.row_field_desc =
  function
  | Ast_500.Parsetree.Rtag (x0, x1, x2) ->
      Ast_414.Parsetree.Rtag
        ((copy_loc copy_label x0), x1, (List.map copy_core_type x2))
  | Ast_500.Parsetree.Rinherit x0 ->
      Ast_414.Parsetree.Rinherit (copy_core_type x0)
(* Copy an object-type field wrapper (desc, loc, attributes); the
   description copier is defined later in the chain. *)
and copy_object_field :
  Ast_500.Parsetree.object_field -> Ast_414.Parsetree.object_field =
  fun
    { Ast_500.Parsetree.pof_desc = pof_desc;
      Ast_500.Parsetree.pof_loc = pof_loc;
      Ast_500.Parsetree.pof_attributes = pof_attributes }
    ->
    {
      Ast_414.Parsetree.pof_desc = (copy_object_field_desc pof_desc);
      Ast_414.Parsetree.pof_loc = (copy_location pof_loc);
      Ast_414.Parsetree.pof_attributes = (copy_attributes pof_attributes)
    }
(* Copy an attribute list. *)
and copy_attributes :
  Ast_500.Parsetree.attributes -> Ast_414.Parsetree.attributes =
  fun x -> List.map copy_attribute x
(* Copy a single attribute (name, payload, location). *)
and copy_attribute :
  Ast_500.Parsetree.attribute -> Ast_414.Parsetree.attribute =
  fun
    { Ast_500.Parsetree.attr_name = attr_name;
      Ast_500.Parsetree.attr_payload = attr_payload;
      Ast_500.Parsetree.attr_loc = attr_loc }
    ->
    {
      Ast_414.Parsetree.attr_name = (copy_loc (fun x -> x) attr_name);
      Ast_414.Parsetree.attr_payload = (copy_payload attr_payload);
      Ast_414.Parsetree.attr_loc = (copy_location attr_loc)
    }
(* Copy an attribute/extension payload: structure, signature, type, or
   pattern with an optional [when] expression. *)
and copy_payload : Ast_500.Parsetree.payload -> Ast_414.Parsetree.payload =
  function
  | Ast_500.Parsetree.PStr x0 -> Ast_414.Parsetree.PStr (copy_structure x0)
  | Ast_500.Parsetree.PSig x0 -> Ast_414.Parsetree.PSig (copy_signature x0)
  | Ast_500.Parsetree.PTyp x0 -> Ast_414.Parsetree.PTyp (copy_core_type x0)
  | Ast_500.Parsetree.PPat (x0, x1) ->
      Ast_414.Parsetree.PPat
        ((copy_pattern x0), (Option.map copy_expression x1))
(* Copy a structure (list of structure items). *)
and copy_structure :
  Ast_500.Parsetree.structure -> Ast_414.Parsetree.structure =
  fun x -> List.map copy_structure_item x
(* Copy one structure item (description plus location). *)
and copy_structure_item :
  Ast_500.Parsetree.structure_item -> Ast_414.Parsetree.structure_item =
  fun
    { Ast_500.Parsetree.pstr_desc = pstr_desc;
      Ast_500.Parsetree.pstr_loc = pstr_loc }
    ->
    {
      Ast_414.Parsetree.pstr_desc = (copy_structure_item_desc pstr_desc);
      Ast_414.Parsetree.pstr_loc = (copy_location pstr_loc)
    }
(* Copy a structure-item description; each constructor maps one-to-one onto
   its 4.14 counterpart via the relevant component copiers. *)
and copy_structure_item_desc :
  Ast_500.Parsetree.structure_item_desc ->
    Ast_414.Parsetree.structure_item_desc
  =
  function
  | Ast_500.Parsetree.Pstr_eval (x0, x1) ->
      Ast_414.Parsetree.Pstr_eval
        ((copy_expression x0), (copy_attributes x1))
  | Ast_500.Parsetree.Pstr_value (x0, x1) ->
      Ast_414.Parsetree.Pstr_value
        ((copy_rec_flag x0), (List.map copy_value_binding x1))
  | Ast_500.Parsetree.Pstr_primitive x0 ->
      Ast_414.Parsetree.Pstr_primitive (copy_value_description x0)
  | Ast_500.Parsetree.Pstr_type (x0, x1) ->
      Ast_414.Parsetree.Pstr_type
        ((copy_rec_flag x0), (List.map copy_type_declaration x1))
  | Ast_500.Parsetree.Pstr_typext x0 ->
      Ast_414.Parsetree.Pstr_typext (copy_type_extension x0)
  | Ast_500.Parsetree.Pstr_exception x0 ->
      Ast_414.Parsetree.Pstr_exception (copy_type_exception x0)
  | Ast_500.Parsetree.Pstr_module x0 ->
      Ast_414.Parsetree.Pstr_module (copy_module_binding x0)
  | Ast_500.Parsetree.Pstr_recmodule x0 ->
      Ast_414.Parsetree.Pstr_recmodule (List.map copy_module_binding x0)
  | Ast_500.Parsetree.Pstr_modtype x0 ->
      Ast_414.Parsetree.Pstr_modtype (copy_module_type_declaration x0)
  | Ast_500.Parsetree.Pstr_open x0 ->
      Ast_414.Parsetree.Pstr_open (copy_open_declaration x0)
  | Ast_500.Parsetree.Pstr_class x0 ->
      Ast_414.Parsetree.Pstr_class (List.map copy_class_declaration x0)
  | Ast_500.Parsetree.Pstr_class_type x0 ->
      Ast_414.Parsetree.Pstr_class_type
        (List.map copy_class_type_declaration x0)
  | Ast_500.Parsetree.Pstr_include x0 ->
      Ast_414.Parsetree.Pstr_include (copy_include_declaration x0)
  | Ast_500.Parsetree.Pstr_attribute x0 ->
      Ast_414.Parsetree.Pstr_attribute (copy_attribute x0)
  | Ast_500.Parsetree.Pstr_extension (x0, x1) ->
      Ast_414.Parsetree.Pstr_extension
        ((copy_extension x0), (copy_attributes x1))
(* Copy an [include] declaration by instantiating the generic
   [copy_include_infos] with the module-expression copier. *)
and copy_include_declaration :
  Ast_500.Parsetree.include_declaration ->
    Ast_414.Parsetree.include_declaration
  = fun x -> copy_include_infos copy_module_expr x
(* Copy a class declaration via the generic [copy_class_infos]. *)
and copy_class_declaration :
  Ast_500.Parsetree.class_declaration -> Ast_414.Parsetree.class_declaration
  = fun x -> copy_class_infos copy_class_expr x
(* Copy a class-expression node (description, location, attributes). *)
and copy_class_expr :
  Ast_500.Parsetree.class_expr -> Ast_414.Parsetree.class_expr =
  fun
    { Ast_500.Parsetree.pcl_desc = pcl_desc;
      Ast_500.Parsetree.pcl_loc = pcl_loc;
      Ast_500.Parsetree.pcl_attributes = pcl_attributes }
    ->
    {
      Ast_414.Parsetree.pcl_desc = (copy_class_expr_desc pcl_desc);
      Ast_414.Parsetree.pcl_loc = (copy_location pcl_loc);
      Ast_414.Parsetree.pcl_attributes = (copy_attributes pcl_attributes)
    }
(* Copy a class-expression description; each constructor maps one-to-one. *)
and copy_class_expr_desc :
  Ast_500.Parsetree.class_expr_desc -> Ast_414.Parsetree.class_expr_desc =
  function
  | Ast_500.Parsetree.Pcl_constr (x0, x1) ->
      Ast_414.Parsetree.Pcl_constr
        ((copy_loc copy_Longident_t x0), (List.map copy_core_type x1))
  | Ast_500.Parsetree.Pcl_structure x0 ->
      Ast_414.Parsetree.Pcl_structure (copy_class_structure x0)
  | Ast_500.Parsetree.Pcl_fun (x0, x1, x2, x3) ->
      Ast_414.Parsetree.Pcl_fun
        ((copy_arg_label x0), (Option.map copy_expression x1),
          (copy_pattern x2), (copy_class_expr x3))
  | Ast_500.Parsetree.Pcl_apply (x0, x1) ->
      Ast_414.Parsetree.Pcl_apply
        ((copy_class_expr x0),
          (List.map
             (fun x ->
                let (x0, x1) = x in
                ((copy_arg_label x0), (copy_expression x1))) x1))
  | Ast_500.Parsetree.Pcl_let (x0, x1, x2) ->
      Ast_414.Parsetree.Pcl_let
        ((copy_rec_flag x0), (List.map copy_value_binding x1),
          (copy_class_expr x2))
  | Ast_500.Parsetree.Pcl_constraint (x0, x1) ->
      Ast_414.Parsetree.Pcl_constraint
        ((copy_class_expr x0), (copy_class_type x1))
  | Ast_500.Parsetree.Pcl_extension x0 ->
      Ast_414.Parsetree.Pcl_extension (copy_extension x0)
  | Ast_500.Parsetree.Pcl_open (x0, x1) ->
      Ast_414.Parsetree.Pcl_open
        ((copy_open_description x0), (copy_class_expr x1))
and copy_class_structure :
Ast_500.Parsetree.class_structure -> Ast_414.Parsetree.class_structure =
fun
{ Ast_500.Parsetree.pcstr_self = pcstr_self;
Ast_500.Parsetree.pcstr_fields = pcstr_fields }
->
{
Ast_414.Parsetree.pcstr_self = (copy_pattern pcstr_self);
Ast_414.Parsetree.pcstr_fields =
(List.map copy_class_field pcstr_fields)
}
and copy_class_field :
Ast_500.Parsetree.class_field -> Ast_414.Parsetree.class_field =
fun
{ Ast_500.Parsetree.pcf_desc = pcf_desc;
Ast_500.Parsetree.pcf_loc = pcf_loc;
Ast_500.Parsetree.pcf_attributes = pcf_attributes }
->
{
Ast_414.Parsetree.pcf_desc = (copy_class_field_desc pcf_desc);
Ast_414.Parsetree.pcf_loc = (copy_location pcf_loc);
Ast_414.Parsetree.pcf_attributes = (copy_attributes pcf_attributes)
}
and copy_class_field_desc :
Ast_500.Parsetree.class_field_desc -> Ast_414.Parsetree.class_field_desc =
function
| Ast_500.Parsetree.Pcf_inherit (x0, x1, x2) ->
Ast_414.Parsetree.Pcf_inherit
((copy_override_flag x0), (copy_class_expr x1),
(Option.map (fun x -> copy_loc (fun x -> x) x) x2))
| Ast_500.Parsetree.Pcf_val x0 ->
Ast_414.Parsetree.Pcf_val
(let (x0, x1, x2) = x0 in
((copy_loc copy_label x0), (copy_mutable_flag x1),
(copy_class_field_kind x2)))
| Ast_500.Parsetree.Pcf_method x0 ->
Ast_414.Parsetree.Pcf_method
(let (x0, x1, x2) = x0 in
((copy_loc copy_label x0), (copy_private_flag x1),
(copy_class_field_kind x2)))
| Ast_500.Parsetree.Pcf_constraint x0 ->
Ast_414.Parsetree.Pcf_constraint
(let (x0, x1) = x0 in ((copy_core_type x0), (copy_core_type x1)))
| Ast_500.Parsetree.Pcf_initializer x0 ->
Ast_414.Parsetree.Pcf_initializer (copy_expression x0)
| Ast_500.Parsetree.Pcf_attribute x0 ->
Ast_414.Parsetree.Pcf_attribute (copy_attribute x0)
| Ast_500.Parsetree.Pcf_extension x0 ->
Ast_414.Parsetree.Pcf_extension (copy_extension x0)
(* Migrate a class-field kind (the body of a [val]/[method] class member):
   [Cfk_virtual] carries only a type, [Cfk_concrete] an override flag plus
   the defining expression.  Constructors are identical across versions. *)
and copy_class_field_kind :
  Ast_500.Parsetree.class_field_kind -> Ast_414.Parsetree.class_field_kind =
  function
  | Ast_500.Parsetree.Cfk_virtual x0 ->
      Ast_414.Parsetree.Cfk_virtual (copy_core_type x0)
  | Ast_500.Parsetree.Cfk_concrete (x0, x1) ->
      Ast_414.Parsetree.Cfk_concrete
        ((copy_override_flag x0), (copy_expression x1))
and copy_open_declaration :
Ast_500.Parsetree.open_declaration -> Ast_414.Parsetree.open_declaration =
fun x -> copy_open_infos copy_module_expr x
and copy_module_binding :
Ast_500.Parsetree.module_binding -> Ast_414.Parsetree.module_binding =
fun
{ Ast_500.Parsetree.pmb_name = pmb_name;
Ast_500.Parsetree.pmb_expr = pmb_expr;
Ast_500.Parsetree.pmb_attributes = pmb_attributes;
Ast_500.Parsetree.pmb_loc = pmb_loc }
->
{
Ast_414.Parsetree.pmb_name =
(copy_loc (fun x -> Option.map (fun x -> x) x) pmb_name);
Ast_414.Parsetree.pmb_expr = (copy_module_expr pmb_expr);
Ast_414.Parsetree.pmb_attributes = (copy_attributes pmb_attributes);
Ast_414.Parsetree.pmb_loc = (copy_location pmb_loc)
}
and copy_module_expr :
Ast_500.Parsetree.module_expr -> Ast_414.Parsetree.module_expr =
fun
{ Ast_500.Parsetree.pmod_desc = pmod_desc;
Ast_500.Parsetree.pmod_loc = pmod_loc;
Ast_500.Parsetree.pmod_attributes = pmod_attributes }
->
{
Ast_414.Parsetree.pmod_desc = (copy_module_expr_desc pmod_desc);
Ast_414.Parsetree.pmod_loc = (copy_location pmod_loc);
Ast_414.Parsetree.pmod_attributes = (copy_attributes pmod_attributes)
}
and copy_module_expr_desc :
Ast_500.Parsetree.module_expr_desc -> Ast_414.Parsetree.module_expr_desc =
function
| Ast_500.Parsetree.Pmod_ident x0 ->
Ast_414.Parsetree.Pmod_ident (copy_loc copy_Longident_t x0)
| Ast_500.Parsetree.Pmod_structure x0 ->
Ast_414.Parsetree.Pmod_structure (copy_structure x0)
| Ast_500.Parsetree.Pmod_functor (x0, x1) ->
Ast_414.Parsetree.Pmod_functor
((copy_functor_parameter x0), (copy_module_expr x1))
| Ast_500.Parsetree.Pmod_apply (x0, x1) ->
Ast_414.Parsetree.Pmod_apply
((copy_module_expr x0), (copy_module_expr x1))
| Ast_500.Parsetree.Pmod_constraint (x0, x1) ->
Ast_414.Parsetree.Pmod_constraint
((copy_module_expr x0), (copy_module_type x1))
| Ast_500.Parsetree.Pmod_unpack x0 ->
Ast_414.Parsetree.Pmod_unpack (copy_expression x0)
| Ast_500.Parsetree.Pmod_extension x0 ->
Ast_414.Parsetree.Pmod_extension (copy_extension x0)
(* Migrate a functor parameter: [Unit] for [()], [Named] for
   [(X : S)] where the name is an optional string (None encodes [_]).
   The inner [Option.map (fun x -> x)] is a generated identity copy. *)
and copy_functor_parameter :
  Ast_500.Parsetree.functor_parameter -> Ast_414.Parsetree.functor_parameter
  =
  function
  | Ast_500.Parsetree.Unit -> Ast_414.Parsetree.Unit
  | Ast_500.Parsetree.Named (x0, x1) ->
      Ast_414.Parsetree.Named
        ((copy_loc (fun x -> Option.map (fun x -> x) x) x0),
          (copy_module_type x1))
and copy_module_type :
Ast_500.Parsetree.module_type -> Ast_414.Parsetree.module_type =
fun
{ Ast_500.Parsetree.pmty_desc = pmty_desc;
Ast_500.Parsetree.pmty_loc = pmty_loc;
Ast_500.Parsetree.pmty_attributes = pmty_attributes }
->
{
Ast_414.Parsetree.pmty_desc = (copy_module_type_desc pmty_desc);
Ast_414.Parsetree.pmty_loc = (copy_location pmty_loc);
Ast_414.Parsetree.pmty_attributes = (copy_attributes pmty_attributes)
}
and copy_module_type_desc :
Ast_500.Parsetree.module_type_desc -> Ast_414.Parsetree.module_type_desc =
function
| Ast_500.Parsetree.Pmty_ident x0 ->
Ast_414.Parsetree.Pmty_ident (copy_loc copy_Longident_t x0)
| Ast_500.Parsetree.Pmty_signature x0 ->
Ast_414.Parsetree.Pmty_signature (copy_signature x0)
| Ast_500.Parsetree.Pmty_functor (x0, x1) ->
Ast_414.Parsetree.Pmty_functor
((copy_functor_parameter x0), (copy_module_type x1))
| Ast_500.Parsetree.Pmty_with (x0, x1) ->
Ast_414.Parsetree.Pmty_with
((copy_module_type x0), (List.map copy_with_constraint x1))
| Ast_500.Parsetree.Pmty_typeof x0 ->
Ast_414.Parsetree.Pmty_typeof (copy_module_expr x0)
| Ast_500.Parsetree.Pmty_extension x0 ->
Ast_414.Parsetree.Pmty_extension (copy_extension x0)
| Ast_500.Parsetree.Pmty_alias x0 ->
Ast_414.Parsetree.Pmty_alias (copy_loc copy_Longident_t x0)
and copy_with_constraint :
Ast_500.Parsetree.with_constraint -> Ast_414.Parsetree.with_constraint =
function
| Ast_500.Parsetree.Pwith_type (x0, x1) ->
Ast_414.Parsetree.Pwith_type
((copy_loc copy_Longident_t x0), (copy_type_declaration x1))
| Ast_500.Parsetree.Pwith_module (x0, x1) ->
Ast_414.Parsetree.Pwith_module
((copy_loc copy_Longident_t x0), (copy_loc copy_Longident_t x1))
| Ast_500.Parsetree.Pwith_modtype (x0, x1) ->
Ast_414.Parsetree.Pwith_modtype
((copy_loc copy_Longident_t x0), (copy_module_type x1))
| Ast_500.Parsetree.Pwith_modtypesubst (x0, x1) ->
Ast_414.Parsetree.Pwith_modtypesubst
((copy_loc copy_Longident_t x0), (copy_module_type x1))
| Ast_500.Parsetree.Pwith_typesubst (x0, x1) ->
Ast_414.Parsetree.Pwith_typesubst
((copy_loc copy_Longident_t x0), (copy_type_declaration x1))
| Ast_500.Parsetree.Pwith_modsubst (x0, x1) ->
Ast_414.Parsetree.Pwith_modsubst
((copy_loc copy_Longident_t x0), (copy_loc copy_Longident_t x1))
and copy_signature :
Ast_500.Parsetree.signature -> Ast_414.Parsetree.signature =
fun x -> List.map copy_signature_item x
and copy_signature_item :
Ast_500.Parsetree.signature_item -> Ast_414.Parsetree.signature_item =
fun
{ Ast_500.Parsetree.psig_desc = psig_desc;
Ast_500.Parsetree.psig_loc = psig_loc }
->
{
Ast_414.Parsetree.psig_desc = (copy_signature_item_desc psig_desc);
Ast_414.Parsetree.psig_loc = (copy_location psig_loc)
}
and copy_signature_item_desc :
Ast_500.Parsetree.signature_item_desc ->
Ast_414.Parsetree.signature_item_desc
=
function
| Ast_500.Parsetree.Psig_value x0 ->
Ast_414.Parsetree.Psig_value (copy_value_description x0)
| Ast_500.Parsetree.Psig_type (x0, x1) ->
Ast_414.Parsetree.Psig_type
((copy_rec_flag x0), (List.map copy_type_declaration x1))
| Ast_500.Parsetree.Psig_typesubst x0 ->
Ast_414.Parsetree.Psig_typesubst (List.map copy_type_declaration x0)
| Ast_500.Parsetree.Psig_typext x0 ->
Ast_414.Parsetree.Psig_typext (copy_type_extension x0)
| Ast_500.Parsetree.Psig_exception x0 ->
Ast_414.Parsetree.Psig_exception (copy_type_exception x0)
| Ast_500.Parsetree.Psig_module x0 ->
Ast_414.Parsetree.Psig_module (copy_module_declaration x0)
| Ast_500.Parsetree.Psig_modsubst x0 ->
Ast_414.Parsetree.Psig_modsubst (copy_module_substitution x0)
| Ast_500.Parsetree.Psig_recmodule x0 ->
Ast_414.Parsetree.Psig_recmodule (List.map copy_module_declaration x0)
| Ast_500.Parsetree.Psig_modtype x0 ->
Ast_414.Parsetree.Psig_modtype (copy_module_type_declaration x0)
| Ast_500.Parsetree.Psig_modtypesubst x0 ->
Ast_414.Parsetree.Psig_modtypesubst (copy_module_type_declaration x0)
| Ast_500.Parsetree.Psig_open x0 ->
Ast_414.Parsetree.Psig_open (copy_open_description x0)
| Ast_500.Parsetree.Psig_include x0 ->
Ast_414.Parsetree.Psig_include (copy_include_description x0)
| Ast_500.Parsetree.Psig_class x0 ->
Ast_414.Parsetree.Psig_class (List.map copy_class_description x0)
| Ast_500.Parsetree.Psig_class_type x0 ->
Ast_414.Parsetree.Psig_class_type
(List.map copy_class_type_declaration x0)
| Ast_500.Parsetree.Psig_attribute x0 ->
Ast_414.Parsetree.Psig_attribute (copy_attribute x0)
| Ast_500.Parsetree.Psig_extension (x0, x1) ->
Ast_414.Parsetree.Psig_extension
((copy_extension x0), (copy_attributes x1))
and copy_class_type_declaration :
Ast_500.Parsetree.class_type_declaration ->
Ast_414.Parsetree.class_type_declaration
= fun x -> copy_class_infos copy_class_type x
and copy_class_description :
Ast_500.Parsetree.class_description -> Ast_414.Parsetree.class_description
= fun x -> copy_class_infos copy_class_type x
and copy_class_type :
Ast_500.Parsetree.class_type -> Ast_414.Parsetree.class_type =
fun
{ Ast_500.Parsetree.pcty_desc = pcty_desc;
Ast_500.Parsetree.pcty_loc = pcty_loc;
Ast_500.Parsetree.pcty_attributes = pcty_attributes }
->
{
Ast_414.Parsetree.pcty_desc = (copy_class_type_desc pcty_desc);
Ast_414.Parsetree.pcty_loc = (copy_location pcty_loc);
Ast_414.Parsetree.pcty_attributes = (copy_attributes pcty_attributes)
}
and copy_class_type_desc :
Ast_500.Parsetree.class_type_desc -> Ast_414.Parsetree.class_type_desc =
function
| Ast_500.Parsetree.Pcty_constr (x0, x1) ->
Ast_414.Parsetree.Pcty_constr
((copy_loc copy_Longident_t x0), (List.map copy_core_type x1))
| Ast_500.Parsetree.Pcty_signature x0 ->
Ast_414.Parsetree.Pcty_signature (copy_class_signature x0)
| Ast_500.Parsetree.Pcty_arrow (x0, x1, x2) ->
Ast_414.Parsetree.Pcty_arrow
((copy_arg_label x0), (copy_core_type x1), (copy_class_type x2))
| Ast_500.Parsetree.Pcty_extension x0 ->
Ast_414.Parsetree.Pcty_extension (copy_extension x0)
| Ast_500.Parsetree.Pcty_open (x0, x1) ->
Ast_414.Parsetree.Pcty_open
((copy_open_description x0), (copy_class_type x1))
and copy_class_signature :
Ast_500.Parsetree.class_signature -> Ast_414.Parsetree.class_signature =
fun
{ Ast_500.Parsetree.pcsig_self = pcsig_self;
Ast_500.Parsetree.pcsig_fields = pcsig_fields }
->
{
Ast_414.Parsetree.pcsig_self = (copy_core_type pcsig_self);
Ast_414.Parsetree.pcsig_fields =
(List.map copy_class_type_field pcsig_fields)
}
and copy_class_type_field :
Ast_500.Parsetree.class_type_field -> Ast_414.Parsetree.class_type_field =
fun
{ Ast_500.Parsetree.pctf_desc = pctf_desc;
Ast_500.Parsetree.pctf_loc = pctf_loc;
Ast_500.Parsetree.pctf_attributes = pctf_attributes }
->
{
Ast_414.Parsetree.pctf_desc = (copy_class_type_field_desc pctf_desc);
Ast_414.Parsetree.pctf_loc = (copy_location pctf_loc);
Ast_414.Parsetree.pctf_attributes = (copy_attributes pctf_attributes)
}
and copy_class_type_field_desc :
Ast_500.Parsetree.class_type_field_desc ->
Ast_414.Parsetree.class_type_field_desc
=
function
| Ast_500.Parsetree.Pctf_inherit x0 ->
Ast_414.Parsetree.Pctf_inherit (copy_class_type x0)
| Ast_500.Parsetree.Pctf_val x0 ->
Ast_414.Parsetree.Pctf_val
(let (x0, x1, x2, x3) = x0 in
((copy_loc copy_label x0), (copy_mutable_flag x1),
(copy_virtual_flag x2), (copy_core_type x3)))
| Ast_500.Parsetree.Pctf_method x0 ->
Ast_414.Parsetree.Pctf_method
(let (x0, x1, x2, x3) = x0 in
((copy_loc copy_label x0), (copy_private_flag x1),
(copy_virtual_flag x2), (copy_core_type x3)))
| Ast_500.Parsetree.Pctf_constraint x0 ->
Ast_414.Parsetree.Pctf_constraint
(let (x0, x1) = x0 in ((copy_core_type x0), (copy_core_type x1)))
| Ast_500.Parsetree.Pctf_attribute x0 ->
Ast_414.Parsetree.Pctf_attribute (copy_attribute x0)
| Ast_500.Parsetree.Pctf_extension x0 ->
Ast_414.Parsetree.Pctf_extension (copy_extension x0)
and copy_extension :
Ast_500.Parsetree.extension -> Ast_414.Parsetree.extension =
fun x ->
let (x0, x1) = x in ((copy_loc (fun x -> x) x0), (copy_payload x1))
and copy_class_infos :
'f0 'g0 .
('f0 -> 'g0) ->
'f0 Ast_500.Parsetree.class_infos -> 'g0 Ast_414.Parsetree.class_infos
=
fun f0 ->
fun
{ Ast_500.Parsetree.pci_virt = pci_virt;
Ast_500.Parsetree.pci_params = pci_params;
Ast_500.Parsetree.pci_name = pci_name;
Ast_500.Parsetree.pci_expr = pci_expr;
Ast_500.Parsetree.pci_loc = pci_loc;
Ast_500.Parsetree.pci_attributes = pci_attributes }
->
{
Ast_414.Parsetree.pci_virt = (copy_virtual_flag pci_virt);
Ast_414.Parsetree.pci_params =
(List.map
(fun x ->
let (x0, x1) = x in
((copy_core_type x0),
(let (x0, x1) = x1 in
((copy_variance x0), (copy_injectivity x1))))) pci_params);
Ast_414.Parsetree.pci_name = (copy_loc (fun x -> x) pci_name);
Ast_414.Parsetree.pci_expr = (f0 pci_expr);
Ast_414.Parsetree.pci_loc = (copy_location pci_loc);
Ast_414.Parsetree.pci_attributes = (copy_attributes pci_attributes)
}
and copy_virtual_flag :
Ast_500.Asttypes.virtual_flag -> Ast_414.Asttypes.virtual_flag =
function
| Ast_500.Asttypes.Virtual -> Ast_414.Asttypes.Virtual
| Ast_500.Asttypes.Concrete -> Ast_414.Asttypes.Concrete
and copy_include_description :
Ast_500.Parsetree.include_description ->
Ast_414.Parsetree.include_description
= fun x -> copy_include_infos copy_module_type x
and copy_include_infos :
'f0 'g0 .
('f0 -> 'g0) ->
'f0 Ast_500.Parsetree.include_infos ->
'g0 Ast_414.Parsetree.include_infos
=
fun f0 ->
fun
{ Ast_500.Parsetree.pincl_mod = pincl_mod;
Ast_500.Parsetree.pincl_loc = pincl_loc;
Ast_500.Parsetree.pincl_attributes = pincl_attributes }
->
{
Ast_414.Parsetree.pincl_mod = (f0 pincl_mod);
Ast_414.Parsetree.pincl_loc = (copy_location pincl_loc);
Ast_414.Parsetree.pincl_attributes =
(copy_attributes pincl_attributes)
}
and copy_open_description :
Ast_500.Parsetree.open_description -> Ast_414.Parsetree.open_description =
fun x -> copy_open_infos (fun x -> copy_loc copy_Longident_t x) x
and copy_open_infos :
'f0 'g0 .
('f0 -> 'g0) ->
'f0 Ast_500.Parsetree.open_infos -> 'g0 Ast_414.Parsetree.open_infos
=
fun f0 ->
fun
{ Ast_500.Parsetree.popen_expr = popen_expr;
Ast_500.Parsetree.popen_override = popen_override;
Ast_500.Parsetree.popen_loc = popen_loc;
Ast_500.Parsetree.popen_attributes = popen_attributes }
->
{
Ast_414.Parsetree.popen_expr = (f0 popen_expr);
Ast_414.Parsetree.popen_override =
(copy_override_flag popen_override);
Ast_414.Parsetree.popen_loc = (copy_location popen_loc);
Ast_414.Parsetree.popen_attributes =
(copy_attributes popen_attributes)
}
(* Migrate an override flag ([method!] / [open!] bang syntax);
   one-to-one constructor mapping. *)
and copy_override_flag :
  Ast_500.Asttypes.override_flag -> Ast_414.Asttypes.override_flag =
  function
  | Ast_500.Asttypes.Override -> Ast_414.Asttypes.Override
  | Ast_500.Asttypes.Fresh -> Ast_414.Asttypes.Fresh
and copy_module_type_declaration :
Ast_500.Parsetree.module_type_declaration ->
Ast_414.Parsetree.module_type_declaration
=
fun
{ Ast_500.Parsetree.pmtd_name = pmtd_name;
Ast_500.Parsetree.pmtd_type = pmtd_type;
Ast_500.Parsetree.pmtd_attributes = pmtd_attributes;
Ast_500.Parsetree.pmtd_loc = pmtd_loc }
->
{
Ast_414.Parsetree.pmtd_name = (copy_loc (fun x -> x) pmtd_name);
Ast_414.Parsetree.pmtd_type = (Option.map copy_module_type pmtd_type);
Ast_414.Parsetree.pmtd_attributes = (copy_attributes pmtd_attributes);
Ast_414.Parsetree.pmtd_loc = (copy_location pmtd_loc)
}
and copy_module_substitution :
Ast_500.Parsetree.module_substitution ->
Ast_414.Parsetree.module_substitution
=
fun
{ Ast_500.Parsetree.pms_name = pms_name;
Ast_500.Parsetree.pms_manifest = pms_manifest;
Ast_500.Parsetree.pms_attributes = pms_attributes;
Ast_500.Parsetree.pms_loc = pms_loc }
->
{
Ast_414.Parsetree.pms_name = (copy_loc (fun x -> x) pms_name);
Ast_414.Parsetree.pms_manifest =
(copy_loc copy_Longident_t pms_manifest);
Ast_414.Parsetree.pms_attributes = (copy_attributes pms_attributes);
Ast_414.Parsetree.pms_loc = (copy_location pms_loc)
}
and copy_module_declaration :
Ast_500.Parsetree.module_declaration ->
Ast_414.Parsetree.module_declaration
=
fun
{ Ast_500.Parsetree.pmd_name = pmd_name;
Ast_500.Parsetree.pmd_type = pmd_type;
Ast_500.Parsetree.pmd_attributes = pmd_attributes;
Ast_500.Parsetree.pmd_loc = pmd_loc }
->
{
Ast_414.Parsetree.pmd_name =
(copy_loc (fun x -> Option.map (fun x -> x) x) pmd_name);
Ast_414.Parsetree.pmd_type = (copy_module_type pmd_type);
Ast_414.Parsetree.pmd_attributes = (copy_attributes pmd_attributes);
Ast_414.Parsetree.pmd_loc = (copy_location pmd_loc)
}
and copy_type_exception :
Ast_500.Parsetree.type_exception -> Ast_414.Parsetree.type_exception =
fun
{ Ast_500.Parsetree.ptyexn_constructor = ptyexn_constructor;
Ast_500.Parsetree.ptyexn_loc = ptyexn_loc;
Ast_500.Parsetree.ptyexn_attributes = ptyexn_attributes }
->
{
Ast_414.Parsetree.ptyexn_constructor =
(copy_extension_constructor ptyexn_constructor);
Ast_414.Parsetree.ptyexn_loc = (copy_location ptyexn_loc);
Ast_414.Parsetree.ptyexn_attributes =
(copy_attributes ptyexn_attributes)
}
and copy_type_extension :
Ast_500.Parsetree.type_extension -> Ast_414.Parsetree.type_extension =
fun
{ Ast_500.Parsetree.ptyext_path = ptyext_path;
Ast_500.Parsetree.ptyext_params = ptyext_params;
Ast_500.Parsetree.ptyext_constructors = ptyext_constructors;
Ast_500.Parsetree.ptyext_private = ptyext_private;
Ast_500.Parsetree.ptyext_loc = ptyext_loc;
Ast_500.Parsetree.ptyext_attributes = ptyext_attributes }
->
{
Ast_414.Parsetree.ptyext_path = (copy_loc copy_Longident_t ptyext_path);
Ast_414.Parsetree.ptyext_params =
(List.map
(fun x ->
let (x0, x1) = x in
((copy_core_type x0),
(let (x0, x1) = x1 in
((copy_variance x0), (copy_injectivity x1))))) ptyext_params);
Ast_414.Parsetree.ptyext_constructors =
(List.map copy_extension_constructor ptyext_constructors);
Ast_414.Parsetree.ptyext_private = (copy_private_flag ptyext_private);
Ast_414.Parsetree.ptyext_loc = (copy_location ptyext_loc);
Ast_414.Parsetree.ptyext_attributes =
(copy_attributes ptyext_attributes)
}
and copy_extension_constructor :
Ast_500.Parsetree.extension_constructor ->
Ast_414.Parsetree.extension_constructor
=
fun
{ Ast_500.Parsetree.pext_name = pext_name;
Ast_500.Parsetree.pext_kind = pext_kind;
Ast_500.Parsetree.pext_loc = pext_loc;
Ast_500.Parsetree.pext_attributes = pext_attributes }
->
{
Ast_414.Parsetree.pext_name = (copy_loc (fun x -> x) pext_name);
Ast_414.Parsetree.pext_kind =
(copy_extension_constructor_kind pext_kind);
Ast_414.Parsetree.pext_loc = (copy_location pext_loc);
Ast_414.Parsetree.pext_attributes = (copy_attributes pext_attributes)
}
and copy_extension_constructor_kind :
Ast_500.Parsetree.extension_constructor_kind ->
Ast_414.Parsetree.extension_constructor_kind
=
function
| Ast_500.Parsetree.Pext_decl (x0, x1, x2) ->
Ast_414.Parsetree.Pext_decl
((List.map (fun x -> copy_loc (fun x -> x) x) x0),
(copy_constructor_arguments x1), (Option.map copy_core_type x2))
| Ast_500.Parsetree.Pext_rebind x0 ->
Ast_414.Parsetree.Pext_rebind (copy_loc copy_Longident_t x0)
and copy_type_declaration :
Ast_500.Parsetree.type_declaration -> Ast_414.Parsetree.type_declaration =
fun
{ Ast_500.Parsetree.ptype_name = ptype_name;
Ast_500.Parsetree.ptype_params = ptype_params;
Ast_500.Parsetree.ptype_cstrs = ptype_cstrs;
Ast_500.Parsetree.ptype_kind = ptype_kind;
Ast_500.Parsetree.ptype_private = ptype_private;
Ast_500.Parsetree.ptype_manifest = ptype_manifest;
Ast_500.Parsetree.ptype_attributes = ptype_attributes;
Ast_500.Parsetree.ptype_loc = ptype_loc }
->
{
Ast_414.Parsetree.ptype_name = (copy_loc (fun x -> x) ptype_name);
Ast_414.Parsetree.ptype_params =
(List.map
(fun x ->
let (x0, x1) = x in
((copy_core_type x0),
(let (x0, x1) = x1 in
((copy_variance x0), (copy_injectivity x1))))) ptype_params);
Ast_414.Parsetree.ptype_cstrs =
(List.map
(fun x ->
let (x0, x1, x2) = x in
((copy_core_type x0), (copy_core_type x1), (copy_location x2)))
ptype_cstrs);
Ast_414.Parsetree.ptype_kind = (copy_type_kind ptype_kind);
Ast_414.Parsetree.ptype_private = (copy_private_flag ptype_private);
Ast_414.Parsetree.ptype_manifest =
(Option.map copy_core_type ptype_manifest);
Ast_414.Parsetree.ptype_attributes = (copy_attributes ptype_attributes);
Ast_414.Parsetree.ptype_loc = (copy_location ptype_loc)
}
(* Migrate a privacy flag (e.g. [type t = private ...]);
   one-to-one constructor mapping. *)
and copy_private_flag :
  Ast_500.Asttypes.private_flag -> Ast_414.Asttypes.private_flag =
  function
  | Ast_500.Asttypes.Private -> Ast_414.Asttypes.Private
  | Ast_500.Asttypes.Public -> Ast_414.Asttypes.Public
(* Migrate a type-declaration kind: abstract, variant, record, or
   extensible ([= ..]).  Variant/record payloads are migrated element-wise. *)
and copy_type_kind :
  Ast_500.Parsetree.type_kind -> Ast_414.Parsetree.type_kind =
  function
  | Ast_500.Parsetree.Ptype_abstract -> Ast_414.Parsetree.Ptype_abstract
  | Ast_500.Parsetree.Ptype_variant x0 ->
      Ast_414.Parsetree.Ptype_variant
        (List.map copy_constructor_declaration x0)
  | Ast_500.Parsetree.Ptype_record x0 ->
      Ast_414.Parsetree.Ptype_record (List.map copy_label_declaration x0)
  | Ast_500.Parsetree.Ptype_open -> Ast_414.Parsetree.Ptype_open
and copy_constructor_declaration :
Ast_500.Parsetree.constructor_declaration ->
Ast_414.Parsetree.constructor_declaration
=
fun
{ Ast_500.Parsetree.pcd_name = pcd_name;
Ast_500.Parsetree.pcd_vars = pcd_vars;
Ast_500.Parsetree.pcd_args = pcd_args;
Ast_500.Parsetree.pcd_res = pcd_res;
Ast_500.Parsetree.pcd_loc = pcd_loc;
Ast_500.Parsetree.pcd_attributes = pcd_attributes }
->
{
Ast_414.Parsetree.pcd_name = (copy_loc (fun x -> x) pcd_name);
Ast_414.Parsetree.pcd_vars =
(List.map (fun x -> copy_loc (fun x -> x) x) pcd_vars);
Ast_414.Parsetree.pcd_args = (copy_constructor_arguments pcd_args);
Ast_414.Parsetree.pcd_res = (Option.map copy_core_type pcd_res);
Ast_414.Parsetree.pcd_loc = (copy_location pcd_loc);
Ast_414.Parsetree.pcd_attributes = (copy_attributes pcd_attributes)
}
(* Migrate constructor arguments: a tuple of core types ([A of t1 * t2])
   or an inline record ([A of {x : t}]). *)
and copy_constructor_arguments :
  Ast_500.Parsetree.constructor_arguments ->
    Ast_414.Parsetree.constructor_arguments
  =
  function
  | Ast_500.Parsetree.Pcstr_tuple x0 ->
      Ast_414.Parsetree.Pcstr_tuple (List.map copy_core_type x0)
  | Ast_500.Parsetree.Pcstr_record x0 ->
      Ast_414.Parsetree.Pcstr_record (List.map copy_label_declaration x0)
and copy_label_declaration :
Ast_500.Parsetree.label_declaration -> Ast_414.Parsetree.label_declaration
=
fun
{ Ast_500.Parsetree.pld_name = pld_name;
Ast_500.Parsetree.pld_mutable = pld_mutable;
Ast_500.Parsetree.pld_type = pld_type;
Ast_500.Parsetree.pld_loc = pld_loc;
Ast_500.Parsetree.pld_attributes = pld_attributes }
->
{
Ast_414.Parsetree.pld_name = (copy_loc (fun x -> x) pld_name);
Ast_414.Parsetree.pld_mutable = (copy_mutable_flag pld_mutable);
Ast_414.Parsetree.pld_type = (copy_core_type pld_type);
Ast_414.Parsetree.pld_loc = (copy_location pld_loc);
Ast_414.Parsetree.pld_attributes = (copy_attributes pld_attributes)
}
(* Trivial flag migrations: mutability of record fields / class vals,
   injectivity annotations ([!'a]), and variance annotations ([+'a]/[-'a]).
   All are one-to-one constructor mappings between the two versions. *)
and copy_mutable_flag :
  Ast_500.Asttypes.mutable_flag -> Ast_414.Asttypes.mutable_flag =
  function
  | Ast_500.Asttypes.Immutable -> Ast_414.Asttypes.Immutable
  | Ast_500.Asttypes.Mutable -> Ast_414.Asttypes.Mutable
and copy_injectivity :
  Ast_500.Asttypes.injectivity -> Ast_414.Asttypes.injectivity =
  function
  | Ast_500.Asttypes.Injective -> Ast_414.Asttypes.Injective
  | Ast_500.Asttypes.NoInjectivity -> Ast_414.Asttypes.NoInjectivity
and copy_variance : Ast_500.Asttypes.variance -> Ast_414.Asttypes.variance =
  function
  | Ast_500.Asttypes.Covariant -> Ast_414.Asttypes.Covariant
  | Ast_500.Asttypes.Contravariant -> Ast_414.Asttypes.Contravariant
  | Ast_500.Asttypes.NoVariance -> Ast_414.Asttypes.NoVariance
(* Migrate a value description ([val name : type] in signatures, possibly
   with [external] primitive names in [pval_prim]).  Record is rebuilt
   field-by-field; strings are copied via generated identity functions. *)
and copy_value_description :
  Ast_500.Parsetree.value_description -> Ast_414.Parsetree.value_description
  =
  fun
    { Ast_500.Parsetree.pval_name = pval_name;
      Ast_500.Parsetree.pval_type = pval_type;
      Ast_500.Parsetree.pval_prim = pval_prim;
      Ast_500.Parsetree.pval_attributes = pval_attributes;
      Ast_500.Parsetree.pval_loc = pval_loc }
    ->
    {
      Ast_414.Parsetree.pval_name = (copy_loc (fun x -> x) pval_name);
      Ast_414.Parsetree.pval_type = (copy_core_type pval_type);
      Ast_414.Parsetree.pval_prim = (List.map (fun x -> x) pval_prim);
      Ast_414.Parsetree.pval_attributes = (copy_attributes pval_attributes);
      Ast_414.Parsetree.pval_loc = (copy_location pval_loc)
    }
(* Migrate an object-type field: a tagged method entry ([Otag]) or an
   inherited object type ([Oinherit]). *)
and copy_object_field_desc :
  Ast_500.Parsetree.object_field_desc -> Ast_414.Parsetree.object_field_desc
  =
  function
  | Ast_500.Parsetree.Otag (x0, x1) ->
      Ast_414.Parsetree.Otag ((copy_loc copy_label x0), (copy_core_type x1))
  | Ast_500.Parsetree.Oinherit x0 ->
      Ast_414.Parsetree.Oinherit (copy_core_type x0)
(* Asttypes/constant migrations.  Argument labels, closed flags, labels and
   recursion flags are one-to-one; [copy_label] is the identity on strings.
   Constants carry their textual payload unchanged; only the location inside
   [Pconst_string] needs an actual copy. *)
and copy_arg_label : Ast_500.Asttypes.arg_label -> Ast_414.Asttypes.arg_label
  =
  function
  | Ast_500.Asttypes.Nolabel -> Ast_414.Asttypes.Nolabel
  | Ast_500.Asttypes.Labelled x0 -> Ast_414.Asttypes.Labelled x0
  | Ast_500.Asttypes.Optional x0 -> Ast_414.Asttypes.Optional x0
and copy_closed_flag :
  Ast_500.Asttypes.closed_flag -> Ast_414.Asttypes.closed_flag =
  function
  | Ast_500.Asttypes.Closed -> Ast_414.Asttypes.Closed
  | Ast_500.Asttypes.Open -> Ast_414.Asttypes.Open
and copy_label : Ast_500.Asttypes.label -> Ast_414.Asttypes.label =
  fun x -> x
and copy_rec_flag : Ast_500.Asttypes.rec_flag -> Ast_414.Asttypes.rec_flag =
  function
  | Ast_500.Asttypes.Nonrecursive -> Ast_414.Asttypes.Nonrecursive
  | Ast_500.Asttypes.Recursive -> Ast_414.Asttypes.Recursive
and copy_constant : Ast_500.Parsetree.constant -> Ast_414.Parsetree.constant
  =
  function
  | Ast_500.Parsetree.Pconst_integer (x0, x1) ->
      Ast_414.Parsetree.Pconst_integer (x0, (Option.map (fun x -> x) x1))
  | Ast_500.Parsetree.Pconst_char x0 -> Ast_414.Parsetree.Pconst_char x0
  | Ast_500.Parsetree.Pconst_string (x0, x1, x2) ->
      Ast_414.Parsetree.Pconst_string
        (x0, (copy_location x1), (Option.map (fun x -> x) x2))
  | Ast_500.Parsetree.Pconst_float (x0, x1) ->
      Ast_414.Parsetree.Pconst_float (x0, (Option.map (fun x -> x) x1))
(* Location/identifier helpers shared by every node above.
   [Longident.t], [Location.t] and [Lexing.position] are the same compiler
   types on both sides, so these functions rebuild structurally identical
   values; [copy_loc] is polymorphic in the payload and applies [f0] to the
   [txt] field while copying the attached location. *)
and copy_Longident_t : Longident.t -> Longident.t =
  function
  | Longident.Lident x0 -> Longident.Lident x0
  | Longident.Ldot (x0, x1) -> Longident.Ldot ((copy_Longident_t x0), x1)
  | Longident.Lapply (x0, x1) ->
      Longident.Lapply ((copy_Longident_t x0), (copy_Longident_t x1))
and copy_loc :
  'f0 'g0 .
    ('f0 -> 'g0) -> 'f0 Ast_500.Asttypes.loc -> 'g0 Ast_414.Asttypes.loc
  =
  fun f0 ->
    fun { Ast_500.Asttypes.txt = txt; Ast_500.Asttypes.loc = loc } ->
      {
        Ast_414.Asttypes.txt = (f0 txt);
        Ast_414.Asttypes.loc = (copy_location loc)
      }
and copy_location : Location.t -> Location.t =
  fun
    { Location.loc_start = loc_start; Location.loc_end = loc_end;
      Location.loc_ghost = loc_ghost }
    ->
    {
      Location.loc_start = (copy_position loc_start);
      Location.loc_end = (copy_position loc_end);
      Location.loc_ghost = loc_ghost
    }
and copy_position : Lexing.position -> Lexing.position =
  fun
    { Lexing.pos_fname = pos_fname; Lexing.pos_lnum = pos_lnum;
      Lexing.pos_bol = pos_bol; Lexing.pos_cnum = pos_cnum }
    ->
    {
      Lexing.pos_fname = pos_fname;
      Lexing.pos_lnum = pos_lnum;
      Lexing.pos_bol = pos_bol;
      Lexing.pos_cnum = pos_cnum
    }
|
|
a45ba0c1bba082ac7021a7dc33b81966a6f63a72840eda6ad6a8f305cbeab0c5 | roosta/herb | core.clj | (ns herb.core
(:require [herb.spec]
[garden.types])
(:import [garden.types CSSAtRule])
)
(defmacro defkeyframes
  "Define a CSS @keyframes animation:
  ```clojure
  (defkeyframes my-animation
    [:from
     {:background \"red\"}]
    [:to
     {:background \"yellow\"}])
  ```
  CLJS: the keyframes CSS gets injected into head under data-herb=\"keyframes\"
  CLJ: Use `<keyframes` macro with the defined keyframes returns a CSS string
  containing the animation"
  [sym & frames]
  ;; Build a garden CSSAtRule whose identifier is the symbol name; the frames
  ;; are spliced unevaluated into the expansion.
  (let [value {:identifier `(str '~sym)
               :frames `(list ~@frames)}
        s# `'~sym
        ;; Namespace of the *defining* ns, captured at macro-expansion time,
        ;; used to build a globally unique registry key "ns/sym".
        n# (name (ns-name *ns*))
        obj `(CSSAtRule. :keyframes ~value)]
    ;; Expansion: register the at-rule with the runtime, then def the symbol
    ;; so it can be referenced (e.g. by the <keyframes macro).
    `(do
       (herb.runtime/inject-obj! (str ~n# "/" ~s#) :keyframes ~obj)
       (def ~sym ~obj))))
(defmacro defglobal
  "Define global CSS:
  ```clojure
  (defglobal some-global-style
    [:body {:box-sizing \"border-box\"
            :font-size (px 14)}
     [:button {:border \"none\"}]])
  ```
  The CSS output of garden style vectors gets appended to head under
  data-herb=\"global\"
  "
  [sym & styles]
  ;; Collect the garden style vectors into a list (unevaluated splice) and
  ;; register them under the unique key \"ns/sym\" before defing the symbol.
  (let [styles# `(list ~@styles)
        s# `'~sym
        n# (name (ns-name *ns*))]
    `(do
       (herb.runtime/inject-obj! (str ~n# "/" ~s# ) :global ~styles#)
       (def ~sym ~styles#))))
(defmacro <keyframes
  "Returns a CSS string from defined keyframes using the defkeyframes macro.
  ```clojure
  (defkeyframes pulse
    [:from {:opacity 1}]
    [:to {:opacity 0}])
  user=> (<keyframes pulse)
  @keyframes pulse {
    from {
      opacity: 1;
    }
    to {
      opacity: 0;
    }
  }
  ```"
  [sym]
  ;; Look up the compiled CSS in the runtime registry using the same
  ;; \"ns/sym\" key that defkeyframes used when registering.
  (let [s# `'~sym
        n# (name (ns-name *ns*))]
    `(-> @herb.runtime/injected-keyframes
         (get (str ~n# "/" ~s#))
         :css)))
(defmacro defgroup
  "Define a style group, takes a name and a map of styles in the form:
  ```clojure
  (defgroup my-group
    {:a-component {:color \"red\"}})
  ```
  To use a group, use one of `<class` or `<id` macro, where the first argument is
  the key for whatever component stylesheet you want:
  ```clojure
  [:div {:class (<class my-group :a-component)}]
  ```
  Since version `0.10.0` this macro is less useful than it once was due to how
  arguments are handled (every function is grouped), but keeping for backward
  compatibility.

  The generated function throws `ExceptionInfo` when the requested component
  key is not present in the group map."
  [n c]
  ;; Expands to a function that looks up the component's style map and tags it
  ;; with a :hint (the component name) in its metadata, used for readable
  ;; generated class names. Extra args are accepted for <class/<id arity
  ;; compatibility but unused.
  `(defn ~n [~'component & ~'args]
     (if-let [style# (get ~c ~'component)]
       (vary-meta
        style# assoc
        :hint (name ~'component))
       ;; BUGFIX: `throw` requires a Throwable; the previous `(throw (str ...))`
       ;; raised ClassCastException instead of the intended error. Wrap the
       ;; same message in ex-info so callers get a catchable ExceptionInfo.
       (throw (ex-info (str "Herb error: failed to get component: "
                            ~'component " in stylegroup: " '~n)
                       {:component ~'component
                        :stylegroup '~n})))))
(defn- dispatch
  "Shared expansion helper for the <style/<id/<class macros: returns the
  expansion form (a `cond`) that, at the call site, validates `style-fn`
  and its return value, then delegates to `herb.impl/with-style!` tagged
  with `kind` (:style, :id or :class)."
  [style-fn kind args]
  ;; Syntax-quote inside a plain fn: `f` becomes a quoted form of the
  ;; caller's expression (for error reporting), `n` the calling namespace
  ;; captured at expansion time.
  (let [f `'~style-fn
        n (name (ns-name *ns*))]
    `(cond
       (not (fn? ~style-fn))
       (throw (ex-info (str "herb error in \"" ~n "\", the first argument to " "<" (name ~kind) " must be a function.")
                       {:input ~f
                        :namespace ~n}))
       (not (map? (~style-fn ~@args)))
       (throw (ex-info (str "herb error: style function \"" ~n "/" ~f "\" needs to return a map.")
                       {:function ~f
                        :namespace ~n
                        :return-value (~style-fn ~@args)}))
       :else (herb.impl/with-style! ~kind ~f ~n ~style-fn ~@args))))
(defmacro <style
  "Takes a function `style-fn` that returns a map. Arguments `args` can be passed
  along with the function as additional arguments to <style i.e
  `(<style some-fn arg1 arg2)`.
  Returns a CSS string that is the result of calling passed function
  **example:**
  ```clojure
  (defn style-fn
    [color]
    {:background color})
  [:div {:style (<style style-fn \"red\")}]
  ```
  "
  [style-fn & args]
  (dispatch style-fn :style args))
(defmacro <id
  "Takes a function `style-fn` that returns a map. Arguments `args` can be passed
  along with the function as additional arguments to <id i.e
  `(<id some-fn arg1 arg2)`. Returns a unique id based on the fully qualified
  name and a hash of the arguments from the passed function
  **example:**
  ```clojure
  (defn style-fn
    [color]
    {:background color})
  [:div {:id (<id style-fn \"red\")}]
  ```
  "
  [style-fn & args]
  ;; Delegates all validation and style injection to the shared dispatch
  ;; helper, tagged :id so the runtime emits an #id selector.
  (dispatch style-fn :id args))
(defmacro <class
  "Takes a function `style-fn` that returns a map. Arguments `args` can be passed
  along with the function as additional arguments to <class i.e
  `(<class some-fn arg1 arg2)`. Returns a unique class based on the fully
  qualified name and a hash of the arguments from the passed function
  **example:**
  ```clojure
  (defn style-fn
    [color]
    {:background color})
  [:div {:class (<class style-fn \"red\")}]
  ```"
  [style-fn & args]
  ;; Delegates all validation and style injection to the shared dispatch
  ;; helper, tagged :class so the runtime emits a .class selector.
  (dispatch style-fn :class args))
| null | https://raw.githubusercontent.com/roosta/herb/64afb133a7bf51d7171a3c5260584c09dbe4e504/src/herb/core.clj | clojure | (ns herb.core
(:require [herb.spec]
[garden.types])
(:import [garden.types CSSAtRule])
)
(defmacro defkeyframes
"Define a CSS @keyframes animation:
```clojure
(defkeyframes my-animation
[:from
{:background \"red\"}]
[:to
{:background \"yellow\"}])
```
CLJS: the keyframes CSS gets injected into head under data-herb=\"keyframes\"
CLJ: Use `<keyframes` macro with the defined keyframes returns a CSS string
containing the animation"
[sym & frames]
(let [value {:identifier `(str '~sym)
:frames `(list ~@frames)}
s# `'~sym
n# (name (ns-name *ns*))
obj `(CSSAtRule. :keyframes ~value)]
`(do
(herb.runtime/inject-obj! (str ~n# "/" ~s#) :keyframes ~obj)
(def ~sym ~obj))))
(defmacro defglobal
"Define global CSS:
```clojure
(defglobal some-global-style
[:body {:box-sizing \"border-box\"
:font-size (px 14)
[:button {:border \"none\"}])
```
The CSS output of garden style vectors gets appended to head under
data-herb=\"global\"
"
[sym & styles]
(let [styles# `(list ~@styles)
s# `'~sym
n# (name (ns-name *ns*))]
`(do
(herb.runtime/inject-obj! (str ~n# "/" ~s# ) :global ~styles#)
(def ~sym ~styles#))))
(defmacro <keyframes
"Returns a CSS string from defined keyframes using the defkeyframes macro.
```clojure
(defkeyframes pulse
[:from {:opacity 1}]
[:to {:opacity 0}])
user=> (<keyframes pulse)
@keyframes anime {
from {
}
to {
}
}
```"
[sym]
(let [s# `'~sym
n# (name (ns-name *ns*))]
`(-> @herb.runtime/injected-keyframes
(get (str ~n# "/" ~s#))
:css)))
(defmacro defgroup
"Define a style group, takes a name and a map of styles in the form:
```clojure
(defgroup my-group
{:a-component {:color \"red\"}})
```
To use a group, use one of `<class` or `<id` macro, where the first argument is
the key for whatever component stylesheet you want:
```clojure
[:div {:class (<class my-group :a-component)}]
```
Since version `0.10.0` this macro is less useful than it once was due to how
arguments are handled (every function is grouped), but keeping for backward
compatibility.
"
[n c]
`(defn ~n [~'component & ~'args]
(if-let [style# (get ~c ~'component)]
(vary-meta
style# assoc
:hint (name ~'component))
(throw (str "Herb error: failed to get component: " ~'component " in stylegroup: " '~n)))))
(defn- dispatch
[style-fn kind args]
(let [f `'~style-fn
n (name (ns-name *ns*))]
`(cond
(not (fn? ~style-fn))
(throw (ex-info (str "herb error in \"" ~n "\", the first argument to " "<" (name ~kind) " must be a function.")
{:input ~f
:namespace ~n}))
(not (map? (~style-fn ~@args)))
(throw (ex-info (str "herb error: style function \"" ~n "/" ~f "\" needs to return a map.")
{:function ~f
:namespace ~n
:return-value (~style-fn ~@args)}))
:else (herb.impl/with-style! ~kind ~f ~n ~style-fn ~@args))))
(defmacro <style
"Takes a function `style-fn` that returns a map. Arguments `args` can be passed
along with the function as additional arguments to <style i.e
`(<style some-fn arg1 arg2)`.
Returns a CSS string that is the result of calling passed function
**example:**
```clojure
(defn style-fn
[color]
{:background color})
[:div {:class (<class style-fn \"red\")}]
```
"
[style-fn & args]
(dispatch style-fn :style args))
(defmacro <id
"Takes a function `style-fn` that returns a map. Arguments `args` can be passed
along with the function as additional arguments to <id i.e
`(<id some-fn arg1 arg2)`. Returns a unique id based on the fully qualified
name and a hash of the arguments from the passed function
**example:**
```clojure
(defn style-fn
[color]
{:background color})
[:div {:id (<id style-fn \"red\")}]
```
"
[style-fn & args]
(dispatch style-fn :id args))
(defmacro <class
"Takes a function `style-fn` that returns a map. Arguments `args` can be passed
along with the function as additional arguments to <class i.e
`(<class some-fn arg1 arg2)`. Returns a unique class based on the fully
qualified name and a hash of the arguments from the passed function
**example:**
```clojure
(defn style-fn
[color]
{:background color})
[:div {:class (<class style-fn \"red\")}]
```"
[style-fn & args]
(dispatch style-fn :class args))
|
|
9a45b6e865c118ebfd026ca317a9ba57ee35b5d516ce1f34db8b12f0131001be | yakaz/yamerl | unsupported_yaml_version_13.erl | -module('unsupported_yaml_version_13').
-include_lib("eunit/include/eunit.hrl").
-define(FILENAME, "test/parsing/" ?MODULE_STRING ".yaml").
single_test_() ->
?_assertMatch(
{yamerl_parser,
{file,?FILENAME},
[{io_blocksize, 1}],
<<>>,
19,
true,
[],
0,
20,
3,
1,
false,
2,
9,
utf8,
false,
undefined,
_,
_,
[],
{bcoll,root,0,-1,1,1,-1,1,1},
false,
false,
false,
[{impl_key,false,false,undefined,undefined,1,1}],
false,
false,
_,
[],
0,
7,
6,
undefined,
undefined,
_,
false,
[
{yamerl_parsing_error,warning,
"Version 1.3 not supported (maximum version 1.2); parsing may fail",
2,1,version_not_supported,
{yamerl_doc_start,2,1,{1,3},_},
[]}
],
[
{yamerl_stream_end,2,9},
{yamerl_doc_end,2,9},
{yamerl_scalar,2,1,
{yamerl_tag,2,1,{non_specific,"?"}},
flow,plain,"Document"},
{yamerl_doc_start,2,1,{1,3},_},
{yamerl_yaml_directive,1,1,{1,3}},
{yamerl_stream_start,1,1,utf8}
]
},
yamerl_parser:file(?FILENAME, [{io_blocksize, 1}])
).
| null | https://raw.githubusercontent.com/yakaz/yamerl/0032607a7b27fa2b548fc9a02d7ae6b53469c0c5/test/parsing/unsupported_yaml_version_13.erl | erlang | -module('unsupported_yaml_version_13').
-include_lib("eunit/include/eunit.hrl").
-define(FILENAME, "test/parsing/" ?MODULE_STRING ".yaml").
single_test_() ->
?_assertMatch(
{yamerl_parser,
{file,?FILENAME},
[{io_blocksize, 1}],
<<>>,
19,
true,
[],
0,
20,
3,
1,
false,
2,
9,
utf8,
false,
undefined,
_,
_,
[],
{bcoll,root,0,-1,1,1,-1,1,1},
false,
false,
false,
[{impl_key,false,false,undefined,undefined,1,1}],
false,
false,
_,
[],
0,
7,
6,
undefined,
undefined,
_,
false,
[
{yamerl_parsing_error,warning,
"Version 1.3 not supported (maximum version 1.2); parsing may fail",
2,1,version_not_supported,
{yamerl_doc_start,2,1,{1,3},_},
[]}
],
[
{yamerl_stream_end,2,9},
{yamerl_doc_end,2,9},
{yamerl_scalar,2,1,
{yamerl_tag,2,1,{non_specific,"?"}},
flow,plain,"Document"},
{yamerl_doc_start,2,1,{1,3},_},
{yamerl_yaml_directive,1,1,{1,3}},
{yamerl_stream_start,1,1,utf8}
]
},
yamerl_parser:file(?FILENAME, [{io_blocksize, 1}])
).
|
|
ead7b2721f758d5d85d94013abf1edf858ea268ae31497f1fc6982d465d25c4a | larrychristensen/orcpub | main.cljs | (ns env.main
(:require [orcpub.core :as core]))
(core/init)
| null | https://raw.githubusercontent.com/larrychristensen/orcpub/e83995857f7e64af1798009a45a0b03abcd3a4be/env/prod/env/main.cljs | clojure | (ns env.main
(:require [orcpub.core :as core]))
(core/init)
|
|
595bc59e9db2fc363ed4d326c90383da316d1e03c43ba217bf26b36df212fb05 | jwiegley/notes | Printf.hs | # LANGUAGE DataKinds #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeFamilies #
# LANGUAGE TypeOperators #
module Printf where
import Data.Monoid
import Data.Text
import Data.Singletons.Prelude
import Data.Type.Equality
import GHC.TypeLits
Every function of multiple arguments is isomorphic to its " uncurried " form of
a function in one argument taking an N - tuple .
A " varargs function " is then equivalent to a function taking an N - tuple whose
size is determined at runtime . To model this , we use a type level list , which
means we only need a way to construct such a list .
Every function of multiple arguments is isomorphic to its "uncurried" form of
a function in one argument taking an N-tuple.
A "varargs function" is then equivalent to a function taking an N-tuple whose
size is determined at runtime. To model this, we use a type level list, which
means we only need a way to construct such a list.
-}
data List :: [*] -> * where
Nil :: List '[]
Cons :: x -> List xs -> List (x ': xs)
data Path :: * -> [*] -> * where
Head :: Path x (x ': xs)
Tail :: Path x xs -> Path x (x' ': xs)
data Format :: [*] -> [*] -> * where
End :: Format fs xs
Str :: Text -> Format fs xs -> Format fs xs
Hole :: Show x => Path x xs -> Format fs xs -> Format (f ': fs) xs
getElement :: Path x xs -> List xs -> x
getElement _ Nil = error "Empty list in getElement"
getElement Head (Cons y _) = y
getElement (Tail xs) (Cons _ ys) = getElement xs ys
printf :: Format fs xs -> List xs -> Text
printf End _ = ""
printf (Str t fmt) args = t <> printf fmt args
printf (Hole p fmt) args = pack (show (getElement p args)) <> printf fmt args
head :: ((n :> 1) ~ True) => Vector n (x ': xs) -> x
head (VCons x _) = x
main :: IO ()
main = do
print $ Printf.head (VCons 10 (VCons "Hello" VNil))
print $ Printf.head (VCons "Hello" VNil)
print $ Printf.head VNil
print $ printf (Str "Hello "
(Hole (Head :: Path String '[String, Int])
(Hole (Tail (Head :: Path Int '[Int]) :: Path Int '[String, Int])
(Str "!" End))))
(Cons "John" (Cons 42 Nil))
data Vector :: Nat -> [*] -> * where
VNil :: Vector 0 '[]
VCons :: x -> Vector n xs -> Vector (n + 1) (x ': xs)
| null | https://raw.githubusercontent.com/jwiegley/notes/24574b02bfd869845faa1521854f90e4e8bf5e9a/haskell/Printf.hs | haskell | # LANGUAGE OverloadedStrings # | # LANGUAGE DataKinds #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeFamilies #
# LANGUAGE TypeOperators #
module Printf where
import Data.Monoid
import Data.Text
import Data.Singletons.Prelude
import Data.Type.Equality
import GHC.TypeLits
Every function of multiple arguments is isomorphic to its " uncurried " form of
a function in one argument taking an N - tuple .
A " varargs function " is then equivalent to a function taking an N - tuple whose
size is determined at runtime . To model this , we use a type level list , which
means we only need a way to construct such a list .
Every function of multiple arguments is isomorphic to its "uncurried" form of
a function in one argument taking an N-tuple.
A "varargs function" is then equivalent to a function taking an N-tuple whose
size is determined at runtime. To model this, we use a type level list, which
means we only need a way to construct such a list.
-}
data List :: [*] -> * where
Nil :: List '[]
Cons :: x -> List xs -> List (x ': xs)
data Path :: * -> [*] -> * where
Head :: Path x (x ': xs)
Tail :: Path x xs -> Path x (x' ': xs)
data Format :: [*] -> [*] -> * where
End :: Format fs xs
Str :: Text -> Format fs xs -> Format fs xs
Hole :: Show x => Path x xs -> Format fs xs -> Format (f ': fs) xs
getElement :: Path x xs -> List xs -> x
getElement _ Nil = error "Empty list in getElement"
getElement Head (Cons y _) = y
getElement (Tail xs) (Cons _ ys) = getElement xs ys
printf :: Format fs xs -> List xs -> Text
printf End _ = ""
printf (Str t fmt) args = t <> printf fmt args
printf (Hole p fmt) args = pack (show (getElement p args)) <> printf fmt args
head :: ((n :> 1) ~ True) => Vector n (x ': xs) -> x
head (VCons x _) = x
main :: IO ()
main = do
print $ Printf.head (VCons 10 (VCons "Hello" VNil))
print $ Printf.head (VCons "Hello" VNil)
print $ Printf.head VNil
print $ printf (Str "Hello "
(Hole (Head :: Path String '[String, Int])
(Hole (Tail (Head :: Path Int '[Int]) :: Path Int '[String, Int])
(Str "!" End))))
(Cons "John" (Cons 42 Nil))
data Vector :: Nat -> [*] -> * where
VNil :: Vector 0 '[]
VCons :: x -> Vector n xs -> Vector (n + 1) (x ': xs)
|
3885d88cfda2339c02e274df0dc3d9296ea2706be630e9572fa6ba655bbbcc0a | MinaProtocol/mina | processor.ml | * This module contains the transition processor . The transition processor is
* the thread in which transitions are attached the to the transition frontier .
*
* Two types of data are handled by the transition processor : validated external transitions
* with precomputed state hashes ( via the block producer and validator pipes )
* and breadcrumb rose trees ( via the catchup pipe ) .
* the thread in which transitions are attached the to the transition frontier.
*
* Two types of data are handled by the transition processor: validated external transitions
* with precomputed state hashes (via the block producer and validator pipes)
* and breadcrumb rose trees (via the catchup pipe).
*)
(* Only show stdout for failed inline tests. *)
open Inline_test_quiet_logs
open Core_kernel
open Async_kernel
open Pipe_lib.Strict_pipe
open Mina_base
open Mina_state
open Cache_lib
open Mina_block
open Network_peer
module type CONTEXT = sig
val logger : Logger.t
val precomputed_values : Precomputed_values.t
val constraint_constants : Genesis_constants.Constraint_constants.t
val consensus_constants : Consensus.Constants.t
end
(* TODO: calculate a sensible value from postake consensus arguments *)
let catchup_timeout_duration (precomputed_values : Precomputed_values.t) =
Block_time.Span.of_ms
( (precomputed_values.genesis_constants.protocol.delta + 1)
* precomputed_values.constraint_constants.block_window_duration_ms
|> Int64.of_int )
|> Block_time.Span.min (Block_time.Span.of_ms (Int64.of_int 5000))
let cached_transform_deferred_result ~transform_cached ~transform_result cached
=
Cached.transform cached ~f:transform_cached
|> Cached.sequence_deferred
>>= Fn.compose transform_result Cached.sequence_result
(* add a breadcrumb and perform post processing *)
let add_and_finalize ~logger ~frontier ~catchup_scheduler
~processed_transition_writer ~only_if_present ~time_controller ~source
~valid_cb cached_breadcrumb ~(precomputed_values : Precomputed_values.t) =
let breadcrumb =
if Cached.is_pure cached_breadcrumb then Cached.peek cached_breadcrumb
else Cached.invalidate_with_success cached_breadcrumb
in
let consensus_constants = precomputed_values.consensus_constants in
let transition =
Transition_frontier.Breadcrumb.validated_transition breadcrumb
in
[%log debug] "add_and_finalize $state_hash %s callback"
~metadata:
[ ( "state_hash"
, Transition_frontier.Breadcrumb.state_hash breadcrumb
|> State_hash.to_yojson )
]
(Option.value_map valid_cb ~default:"without" ~f:(const "with")) ;
let%map () =
if only_if_present then (
let parent_hash = Transition_frontier.Breadcrumb.parent_hash breadcrumb in
match Transition_frontier.find frontier parent_hash with
| Some _ ->
Transition_frontier.add_breadcrumb_exn frontier breadcrumb
| None ->
[%log warn]
!"When trying to add breadcrumb, its parent had been removed from \
transition frontier: %{sexp: State_hash.t}"
parent_hash ;
Deferred.unit )
else Transition_frontier.add_breadcrumb_exn frontier breadcrumb
in
( match source with
| `Internal ->
()
| _ ->
let transition_time =
transition |> Mina_block.Validated.header
|> Mina_block.Header.protocol_state |> Protocol_state.consensus_state
|> Consensus.Data.Consensus_state.consensus_time
in
let time_elapsed =
Block_time.diff
(Block_time.now time_controller)
(Consensus.Data.Consensus_time.to_time ~constants:consensus_constants
transition_time )
in
Mina_metrics.Block_latency.Inclusion_time.update
(Block_time.Span.to_time_span time_elapsed) ) ;
Writer.write processed_transition_writer
(`Transition transition, `Source source, `Valid_cb valid_cb) ;
Catchup_scheduler.notify catchup_scheduler
~hash:(Mina_block.Validated.state_hash transition)
let process_transition ~context:(module Context : CONTEXT) ~trust_system
~verifier ~frontier ~catchup_scheduler ~processed_transition_writer
~time_controller ~transition:cached_initially_validated_transition ~valid_cb
=
let open Context in
let enveloped_initially_validated_transition =
Cached.peek cached_initially_validated_transition
in
let transition_receipt_time =
Some
(Envelope.Incoming.received_at enveloped_initially_validated_transition)
in
let sender =
Envelope.Incoming.sender enveloped_initially_validated_transition
in
let initially_validated_transition =
Envelope.Incoming.data enveloped_initially_validated_transition
in
let transition_hash, transition =
let t, _ = initially_validated_transition in
(State_hash.With_state_hashes.state_hash t, With_hash.data t)
in
let metadata = [ ("state_hash", State_hash.to_yojson transition_hash) ] in
Deferred.map ~f:(Fn.const ())
(let open Deferred.Result.Let_syntax in
let%bind mostly_validated_transition =
let open Deferred.Let_syntax in
match
Mina_block.Validation.validate_frontier_dependencies
~context:(module Context)
~root_block:
Transition_frontier.(Breadcrumb.block_with_hash @@ root frontier)
~get_block_by_hash:
Transition_frontier.(
Fn.compose (Option.map ~f:Breadcrumb.block_with_hash)
@@ find frontier)
initially_validated_transition
with
| Ok t ->
return (Ok t)
| Error `Not_selected_over_frontier_root ->
let%map () =
Trust_system.record_envelope_sender trust_system logger sender
( Trust_system.Actions.Gossiped_invalid_transition
, Some
( "The transition with hash $state_hash was not selected \
over the transition frontier root"
, metadata ) )
in
let (_ : Mina_block.initial_valid_block Envelope.Incoming.t) =
Cached.invalidate_with_failure cached_initially_validated_transition
in
Error ()
| Error `Already_in_frontier ->
[%log warn] ~metadata
"Refusing to process the transition with hash $state_hash because \
is is already in the transition frontier" ;
let (_ : Mina_block.initial_valid_block Envelope.Incoming.t) =
Cached.invalidate_with_failure cached_initially_validated_transition
in
return (Error ())
| Error `Parent_missing_from_frontier -> (
let _, validation =
Cached.peek cached_initially_validated_transition
|> Envelope.Incoming.data
in
match validation with
| ( _
, _
, _
, (`Delta_block_chain, Truth.True delta_state_hashes)
, _
, _
, _ ) ->
let timeout_duration =
Option.fold
(Transition_frontier.find frontier
(Mina_stdlib.Nonempty_list.head delta_state_hashes) )
~init:(Block_time.Span.of_ms 0L)
~f:(fun _ _ -> catchup_timeout_duration precomputed_values)
in
Catchup_scheduler.watch catchup_scheduler ~timeout_duration
~cached_transition:cached_initially_validated_transition
~valid_cb ;
return (Error ()) )
in
TODO : only access parent in transition frontier once ( already done in call to validate dependencies ) # 2485
let parent_hash =
Protocol_state.previous_state_hash
(Header.protocol_state @@ Mina_block.header transition)
in
let parent_breadcrumb = Transition_frontier.find_exn frontier parent_hash in
let%bind breadcrumb =
cached_transform_deferred_result cached_initially_validated_transition
~transform_cached:(fun _ ->
Transition_frontier.Breadcrumb.build ~logger ~precomputed_values
~verifier ~trust_system ~transition_receipt_time
~sender:(Some sender) ~parent:parent_breadcrumb
~transition:mostly_validated_transition
(* TODO: Can we skip here? *) () )
~transform_result:(function
| Error (`Invalid_staged_ledger_hash error)
| Error (`Invalid_staged_ledger_diff error) ->
[%log error]
~metadata:
(metadata @ [ ("error", Error_json.error_to_yojson error) ])
"Error while building breadcrumb in the transition handler \
processor: $error" ;
Deferred.return (Error ())
| Error (`Fatal_error exn) ->
raise exn
| Ok breadcrumb ->
Deferred.return (Ok breadcrumb) )
in
Mina_metrics.(
Counter.inc_one
Transition_frontier_controller.breadcrumbs_built_by_processor) ;
Deferred.map ~f:Result.return
(add_and_finalize ~logger ~frontier ~catchup_scheduler
~processed_transition_writer ~only_if_present:false ~time_controller
~source:`Gossip breadcrumb ~precomputed_values ~valid_cb ))
let run ~context:(module Context : CONTEXT) ~verifier ~trust_system
~time_controller ~frontier
~(primary_transition_reader :
( [ `Block of
( Mina_block.initial_valid_block Envelope.Incoming.t
, State_hash.t )
Cached.t ]
* [ `Valid_cb of Mina_net2.Validation_callback.t option ] )
Reader.t )
~(producer_transition_reader : Transition_frontier.Breadcrumb.t Reader.t)
~(clean_up_catchup_scheduler : unit Ivar.t) ~catchup_job_writer
~(catchup_breadcrumbs_reader :
( ( (Transition_frontier.Breadcrumb.t, State_hash.t) Cached.t
* Mina_net2.Validation_callback.t option )
Rose_tree.t
list
* [ `Ledger_catchup of unit Ivar.t | `Catchup_scheduler ] )
Reader.t )
~(catchup_breadcrumbs_writer :
( ( (Transition_frontier.Breadcrumb.t, State_hash.t) Cached.t
* Mina_net2.Validation_callback.t option )
Rose_tree.t
list
* [ `Ledger_catchup of unit Ivar.t | `Catchup_scheduler ]
, crash buffered
, unit )
Writer.t ) ~processed_transition_writer =
let open Context in
let catchup_scheduler =
Catchup_scheduler.create ~logger ~precomputed_values ~verifier ~trust_system
~frontier ~time_controller ~catchup_job_writer ~catchup_breadcrumbs_writer
~clean_up_signal:clean_up_catchup_scheduler
in
let add_and_finalize =
add_and_finalize ~frontier ~catchup_scheduler ~processed_transition_writer
~time_controller ~precomputed_values
in
let process_transition =
process_transition
~context:(module Context)
~trust_system ~verifier ~frontier ~catchup_scheduler
~processed_transition_writer ~time_controller
in
O1trace.background_thread "process_blocks" (fun () ->
Reader.Merge.iter
(* It is fine to skip the cache layer on blocks produced by this node
* because it is extraordinarily unlikely we would write an internal bug
* triggering this case, and the external case (where we received an
* identical external transition from the network) can happen iff there
* is another node with the exact same private key and view of the
* transaction pool. *)
[ Reader.map producer_transition_reader ~f:(fun breadcrumb ->
Mina_metrics.(
Gauge.inc_one
Transition_frontier_controller.transitions_being_processed) ;
`Local_breadcrumb (Cached.pure breadcrumb) )
; Reader.map catchup_breadcrumbs_reader
~f:(fun (cb, catchup_breadcrumbs_callback) ->
`Catchup_breadcrumbs (cb, catchup_breadcrumbs_callback) )
; Reader.map primary_transition_reader ~f:(fun vt ->
`Partially_valid_transition vt )
]
~f:(fun msg ->
let open Deferred.Let_syntax in
O1trace.thread "transition_handler_processor" (fun () ->
match msg with
| `Catchup_breadcrumbs
(breadcrumb_subtrees, subsequent_callback_action) -> (
( match%map
Deferred.Or_error.List.iter breadcrumb_subtrees
~f:(fun subtree ->
Rose_tree.Deferred.Or_error.iter
subtree
(* It could be the case that by the time we try and
* add the breadcrumb, it's no longer relevant when
* we're catching up *) ~f:(fun (b, valid_cb) ->
add_and_finalize ~logger ~only_if_present:true
~source:`Catchup ~valid_cb b ) )
with
| Ok () ->
()
| Error err ->
List.iter breadcrumb_subtrees ~f:(fun tree ->
Rose_tree.iter tree
~f:(fun (cached_breadcrumb, _vc) ->
let (_ : Transition_frontier.Breadcrumb.t) =
Cached.invalidate_with_failure cached_breadcrumb
in
() ) ) ;
[%log error]
"Error, failed to attach all catchup breadcrumbs to \
transition frontier: $error"
~metadata:[ ("error", Error_json.error_to_yojson err) ]
)
>>| fun () ->
match subsequent_callback_action with
| `Ledger_catchup decrement_signal ->
if Ivar.is_full decrement_signal then
[%log error] "Ivar.fill bug is here!" ;
Ivar.fill decrement_signal ()
| `Catchup_scheduler ->
() )
| `Local_breadcrumb breadcrumb ->
let transition_time =
Transition_frontier.Breadcrumb.validated_transition
(Cached.peek breadcrumb)
|> Mina_block.Validated.header
|> Mina_block.Header.protocol_state
|> Protocol_state.blockchain_state
|> Blockchain_state.timestamp |> Block_time.to_time_exn
in
Perf_histograms.add_span
~name:"accepted_transition_local_latency"
(Core_kernel.Time.diff
Block_time.(now time_controller |> to_time_exn)
transition_time ) ;
let%map () =
match%map
add_and_finalize ~logger ~only_if_present:false
~source:`Internal breadcrumb ~valid_cb:None
with
| Ok () ->
()
| Error err ->
[%log error]
~metadata:
[ ("error", Error_json.error_to_yojson err) ]
"Error, failed to attach produced breadcrumb to \
transition frontier: $error" ;
let (_ : Transition_frontier.Breadcrumb.t) =
Cached.invalidate_with_failure breadcrumb
in
()
in
Mina_metrics.(
Gauge.dec_one
Transition_frontier_controller.transitions_being_processed)
| `Partially_valid_transition
(`Block transition, `Valid_cb valid_cb) ->
process_transition ~transition ~valid_cb ) ) )
let%test_module "Transition_handler.Processor tests" =
( module struct
open Async
open Pipe_lib
let () =
Backtrace.elide := false ;
Printexc.record_backtrace true ;
Async.Scheduler.set_record_backtraces true
let logger = Logger.create ()
let precomputed_values = Lazy.force Precomputed_values.for_unit_tests
let proof_level = precomputed_values.proof_level
let constraint_constants = precomputed_values.constraint_constants
let time_controller = Block_time.Controller.basic ~logger
let trust_system = Trust_system.null ()
let verifier =
Async.Thread_safe.block_on_async_exn (fun () ->
Verifier.create ~logger ~proof_level ~constraint_constants
~conf_dir:None
~pids:(Child_processes.Termination.create_pid_table ()) )
module Context = struct
let logger = logger
let precomputed_values = precomputed_values
let constraint_constants = constraint_constants
let consensus_constants = precomputed_values.consensus_constants
end
let downcast_breadcrumb breadcrumb =
let transition =
Transition_frontier.Breadcrumb.validated_transition breadcrumb
|> Mina_block.Validated.remember
|> Mina_block.Validation.reset_frontier_dependencies_validation
|> Mina_block.Validation.reset_staged_ledger_diff_validation
in
Envelope.Incoming.wrap ~data:transition ~sender:Envelope.Sender.Local
let%test_unit "adding transitions whose parents are in the frontier" =
let frontier_size = 1 in
let branch_size = 10 in
let max_length = frontier_size + branch_size in
Quickcheck.test ~trials:4
(Transition_frontier.For_tests.gen_with_branch ~precomputed_values
~verifier ~max_length ~frontier_size ~branch_size () )
~f:(fun (frontier, branch) ->
assert (
Thread_safe.block_on_async_exn (fun () ->
let valid_transition_reader, valid_transition_writer =
Strict_pipe.create
(Buffered
(`Capacity branch_size, `Overflow (Drop_head ignore)) )
in
let producer_transition_reader, _ =
Strict_pipe.create
(Buffered
(`Capacity branch_size, `Overflow (Drop_head ignore)) )
in
let _, catchup_job_writer =
Strict_pipe.create (Buffered (`Capacity 1, `Overflow Crash))
in
let catchup_breadcrumbs_reader, catchup_breadcrumbs_writer =
Strict_pipe.create (Buffered (`Capacity 1, `Overflow Crash))
in
let processed_transition_reader, processed_transition_writer =
Strict_pipe.create
(Buffered
(`Capacity branch_size, `Overflow (Drop_head ignore)) )
in
let clean_up_catchup_scheduler = Ivar.create () in
let cache = Unprocessed_transition_cache.create ~logger in
run
~context:(module Context)
~time_controller ~verifier ~trust_system
~clean_up_catchup_scheduler ~frontier
~primary_transition_reader:valid_transition_reader
~producer_transition_reader ~catchup_job_writer
~catchup_breadcrumbs_reader ~catchup_breadcrumbs_writer
~processed_transition_writer ;
List.iter branch ~f:(fun breadcrumb ->
let b =
downcast_breadcrumb breadcrumb
|> Unprocessed_transition_cache.register_exn cache
in
Strict_pipe.Writer.write valid_transition_writer
(`Block b, `Valid_cb None) ) ;
match%map
Block_time.Timeout.await
~timeout_duration:(Block_time.Span.of_ms 30000L)
time_controller
(Strict_pipe.Reader.fold_until processed_transition_reader
~init:branch
~f:(fun
remaining_breadcrumbs
(`Transition newly_added_transition, _, _)
->
Deferred.return
( match remaining_breadcrumbs with
| next_expected_breadcrumb :: tail ->
[%test_eq: State_hash.t]
(Transition_frontier.Breadcrumb.state_hash
next_expected_breadcrumb )
(Mina_block.Validated.state_hash
newly_added_transition ) ;
[%log info]
~metadata:
[ ( "height"
, `Int
( newly_added_transition
|> Mina_block.Validated.forget
|> With_hash.data |> Mina_block.header
|> Mina_block.Header.protocol_state
|> Protocol_state.consensus_state
|> Consensus.Data.Consensus_state
.blockchain_length
|> Mina_numbers.Length.to_uint32
|> Unsigned.UInt32.to_int ) )
]
"transition of $height passed processor" ;
if List.is_empty tail then `Stop true
else `Continue tail
| [] ->
`Stop false ) ) )
with
| `Timeout ->
failwith "test timed out"
| `Ok (`Eof _) ->
failwith "pipe closed unexpectedly"
| `Ok (`Terminated x) ->
x ) ) )
end )
| null | https://raw.githubusercontent.com/MinaProtocol/mina/f3f7d625ce45a0820a56ebc8f4f1c967865a3e57/src/lib/transition_handler/processor.ml | ocaml | Only show stdout for failed inline tests.
TODO: calculate a sensible value from postake consensus arguments
add a breadcrumb and perform post processing
TODO: Can we skip here?
It is fine to skip the cache layer on blocks produced by this node
* because it is extraordinarily unlikely we would write an internal bug
* triggering this case, and the external case (where we received an
* identical external transition from the network) can happen iff there
* is another node with the exact same private key and view of the
* transaction pool.
It could be the case that by the time we try and
* add the breadcrumb, it's no longer relevant when
* we're catching up | * This module contains the transition processor . The transition processor is
* the thread in which transitions are attached the to the transition frontier .
*
* Two types of data are handled by the transition processor : validated external transitions
* with precomputed state hashes ( via the block producer and validator pipes )
* and breadcrumb rose trees ( via the catchup pipe ) .
* the thread in which transitions are attached the to the transition frontier.
*
* Two types of data are handled by the transition processor: validated external transitions
* with precomputed state hashes (via the block producer and validator pipes)
* and breadcrumb rose trees (via the catchup pipe).
*)
open Inline_test_quiet_logs
open Core_kernel
open Async_kernel
open Pipe_lib.Strict_pipe
open Mina_base
open Mina_state
open Cache_lib
open Mina_block
open Network_peer
module type CONTEXT = sig
val logger : Logger.t
val precomputed_values : Precomputed_values.t
val constraint_constants : Genesis_constants.Constraint_constants.t
val consensus_constants : Consensus.Constants.t
end
let catchup_timeout_duration (precomputed_values : Precomputed_values.t) =
Block_time.Span.of_ms
( (precomputed_values.genesis_constants.protocol.delta + 1)
* precomputed_values.constraint_constants.block_window_duration_ms
|> Int64.of_int )
|> Block_time.Span.min (Block_time.Span.of_ms (Int64.of_int 5000))
let cached_transform_deferred_result ~transform_cached ~transform_result cached
=
Cached.transform cached ~f:transform_cached
|> Cached.sequence_deferred
>>= Fn.compose transform_result Cached.sequence_result
let add_and_finalize ~logger ~frontier ~catchup_scheduler
~processed_transition_writer ~only_if_present ~time_controller ~source
~valid_cb cached_breadcrumb ~(precomputed_values : Precomputed_values.t) =
let breadcrumb =
if Cached.is_pure cached_breadcrumb then Cached.peek cached_breadcrumb
else Cached.invalidate_with_success cached_breadcrumb
in
let consensus_constants = precomputed_values.consensus_constants in
let transition =
Transition_frontier.Breadcrumb.validated_transition breadcrumb
in
[%log debug] "add_and_finalize $state_hash %s callback"
~metadata:
[ ( "state_hash"
, Transition_frontier.Breadcrumb.state_hash breadcrumb
|> State_hash.to_yojson )
]
(Option.value_map valid_cb ~default:"without" ~f:(const "with")) ;
let%map () =
if only_if_present then (
let parent_hash = Transition_frontier.Breadcrumb.parent_hash breadcrumb in
match Transition_frontier.find frontier parent_hash with
| Some _ ->
Transition_frontier.add_breadcrumb_exn frontier breadcrumb
| None ->
[%log warn]
!"When trying to add breadcrumb, its parent had been removed from \
transition frontier: %{sexp: State_hash.t}"
parent_hash ;
Deferred.unit )
else Transition_frontier.add_breadcrumb_exn frontier breadcrumb
in
( match source with
| `Internal ->
()
| _ ->
let transition_time =
transition |> Mina_block.Validated.header
|> Mina_block.Header.protocol_state |> Protocol_state.consensus_state
|> Consensus.Data.Consensus_state.consensus_time
in
let time_elapsed =
Block_time.diff
(Block_time.now time_controller)
(Consensus.Data.Consensus_time.to_time ~constants:consensus_constants
transition_time )
in
Mina_metrics.Block_latency.Inclusion_time.update
(Block_time.Span.to_time_span time_elapsed) ) ;
Writer.write processed_transition_writer
(`Transition transition, `Source source, `Valid_cb valid_cb) ;
Catchup_scheduler.notify catchup_scheduler
~hash:(Mina_block.Validated.state_hash transition)
let process_transition ~context:(module Context : CONTEXT) ~trust_system
~verifier ~frontier ~catchup_scheduler ~processed_transition_writer
~time_controller ~transition:cached_initially_validated_transition ~valid_cb
=
let open Context in
let enveloped_initially_validated_transition =
Cached.peek cached_initially_validated_transition
in
let transition_receipt_time =
Some
(Envelope.Incoming.received_at enveloped_initially_validated_transition)
in
let sender =
Envelope.Incoming.sender enveloped_initially_validated_transition
in
let initially_validated_transition =
Envelope.Incoming.data enveloped_initially_validated_transition
in
let transition_hash, transition =
let t, _ = initially_validated_transition in
(State_hash.With_state_hashes.state_hash t, With_hash.data t)
in
let metadata = [ ("state_hash", State_hash.to_yojson transition_hash) ] in
Deferred.map ~f:(Fn.const ())
(let open Deferred.Result.Let_syntax in
let%bind mostly_validated_transition =
let open Deferred.Let_syntax in
match
Mina_block.Validation.validate_frontier_dependencies
~context:(module Context)
~root_block:
Transition_frontier.(Breadcrumb.block_with_hash @@ root frontier)
~get_block_by_hash:
Transition_frontier.(
Fn.compose (Option.map ~f:Breadcrumb.block_with_hash)
@@ find frontier)
initially_validated_transition
with
| Ok t ->
return (Ok t)
| Error `Not_selected_over_frontier_root ->
let%map () =
Trust_system.record_envelope_sender trust_system logger sender
( Trust_system.Actions.Gossiped_invalid_transition
, Some
( "The transition with hash $state_hash was not selected \
over the transition frontier root"
, metadata ) )
in
let (_ : Mina_block.initial_valid_block Envelope.Incoming.t) =
Cached.invalidate_with_failure cached_initially_validated_transition
in
Error ()
| Error `Already_in_frontier ->
[%log warn] ~metadata
"Refusing to process the transition with hash $state_hash because \
is is already in the transition frontier" ;
let (_ : Mina_block.initial_valid_block Envelope.Incoming.t) =
Cached.invalidate_with_failure cached_initially_validated_transition
in
return (Error ())
| Error `Parent_missing_from_frontier -> (
let _, validation =
Cached.peek cached_initially_validated_transition
|> Envelope.Incoming.data
in
match validation with
| ( _
, _
, _
, (`Delta_block_chain, Truth.True delta_state_hashes)
, _
, _
, _ ) ->
let timeout_duration =
Option.fold
(Transition_frontier.find frontier
(Mina_stdlib.Nonempty_list.head delta_state_hashes) )
~init:(Block_time.Span.of_ms 0L)
~f:(fun _ _ -> catchup_timeout_duration precomputed_values)
in
Catchup_scheduler.watch catchup_scheduler ~timeout_duration
~cached_transition:cached_initially_validated_transition
~valid_cb ;
return (Error ()) )
in
TODO : only access parent in transition frontier once ( already done in call to validate dependencies ) # 2485
let parent_hash =
Protocol_state.previous_state_hash
(Header.protocol_state @@ Mina_block.header transition)
in
let parent_breadcrumb = Transition_frontier.find_exn frontier parent_hash in
let%bind breadcrumb =
cached_transform_deferred_result cached_initially_validated_transition
~transform_cached:(fun _ ->
Transition_frontier.Breadcrumb.build ~logger ~precomputed_values
~verifier ~trust_system ~transition_receipt_time
~sender:(Some sender) ~parent:parent_breadcrumb
~transition:mostly_validated_transition
~transform_result:(function
| Error (`Invalid_staged_ledger_hash error)
| Error (`Invalid_staged_ledger_diff error) ->
[%log error]
~metadata:
(metadata @ [ ("error", Error_json.error_to_yojson error) ])
"Error while building breadcrumb in the transition handler \
processor: $error" ;
Deferred.return (Error ())
| Error (`Fatal_error exn) ->
raise exn
| Ok breadcrumb ->
Deferred.return (Ok breadcrumb) )
in
Mina_metrics.(
Counter.inc_one
Transition_frontier_controller.breadcrumbs_built_by_processor) ;
Deferred.map ~f:Result.return
(add_and_finalize ~logger ~frontier ~catchup_scheduler
~processed_transition_writer ~only_if_present:false ~time_controller
~source:`Gossip breadcrumb ~precomputed_values ~valid_cb ))
let run ~context:(module Context : CONTEXT) ~verifier ~trust_system
~time_controller ~frontier
~(primary_transition_reader :
( [ `Block of
( Mina_block.initial_valid_block Envelope.Incoming.t
, State_hash.t )
Cached.t ]
* [ `Valid_cb of Mina_net2.Validation_callback.t option ] )
Reader.t )
~(producer_transition_reader : Transition_frontier.Breadcrumb.t Reader.t)
~(clean_up_catchup_scheduler : unit Ivar.t) ~catchup_job_writer
~(catchup_breadcrumbs_reader :
( ( (Transition_frontier.Breadcrumb.t, State_hash.t) Cached.t
* Mina_net2.Validation_callback.t option )
Rose_tree.t
list
* [ `Ledger_catchup of unit Ivar.t | `Catchup_scheduler ] )
Reader.t )
~(catchup_breadcrumbs_writer :
( ( (Transition_frontier.Breadcrumb.t, State_hash.t) Cached.t
* Mina_net2.Validation_callback.t option )
Rose_tree.t
list
* [ `Ledger_catchup of unit Ivar.t | `Catchup_scheduler ]
, crash buffered
, unit )
Writer.t ) ~processed_transition_writer =
let open Context in
let catchup_scheduler =
Catchup_scheduler.create ~logger ~precomputed_values ~verifier ~trust_system
~frontier ~time_controller ~catchup_job_writer ~catchup_breadcrumbs_writer
~clean_up_signal:clean_up_catchup_scheduler
in
let add_and_finalize =
add_and_finalize ~frontier ~catchup_scheduler ~processed_transition_writer
~time_controller ~precomputed_values
in
let process_transition =
process_transition
~context:(module Context)
~trust_system ~verifier ~frontier ~catchup_scheduler
~processed_transition_writer ~time_controller
in
O1trace.background_thread "process_blocks" (fun () ->
Reader.Merge.iter
[ Reader.map producer_transition_reader ~f:(fun breadcrumb ->
Mina_metrics.(
Gauge.inc_one
Transition_frontier_controller.transitions_being_processed) ;
`Local_breadcrumb (Cached.pure breadcrumb) )
; Reader.map catchup_breadcrumbs_reader
~f:(fun (cb, catchup_breadcrumbs_callback) ->
`Catchup_breadcrumbs (cb, catchup_breadcrumbs_callback) )
; Reader.map primary_transition_reader ~f:(fun vt ->
`Partially_valid_transition vt )
]
~f:(fun msg ->
let open Deferred.Let_syntax in
O1trace.thread "transition_handler_processor" (fun () ->
match msg with
| `Catchup_breadcrumbs
(breadcrumb_subtrees, subsequent_callback_action) -> (
( match%map
Deferred.Or_error.List.iter breadcrumb_subtrees
~f:(fun subtree ->
Rose_tree.Deferred.Or_error.iter
subtree
add_and_finalize ~logger ~only_if_present:true
~source:`Catchup ~valid_cb b ) )
with
| Ok () ->
()
| Error err ->
List.iter breadcrumb_subtrees ~f:(fun tree ->
Rose_tree.iter tree
~f:(fun (cached_breadcrumb, _vc) ->
let (_ : Transition_frontier.Breadcrumb.t) =
Cached.invalidate_with_failure cached_breadcrumb
in
() ) ) ;
[%log error]
"Error, failed to attach all catchup breadcrumbs to \
transition frontier: $error"
~metadata:[ ("error", Error_json.error_to_yojson err) ]
)
>>| fun () ->
match subsequent_callback_action with
| `Ledger_catchup decrement_signal ->
if Ivar.is_full decrement_signal then
[%log error] "Ivar.fill bug is here!" ;
Ivar.fill decrement_signal ()
| `Catchup_scheduler ->
() )
| `Local_breadcrumb breadcrumb ->
let transition_time =
Transition_frontier.Breadcrumb.validated_transition
(Cached.peek breadcrumb)
|> Mina_block.Validated.header
|> Mina_block.Header.protocol_state
|> Protocol_state.blockchain_state
|> Blockchain_state.timestamp |> Block_time.to_time_exn
in
Perf_histograms.add_span
~name:"accepted_transition_local_latency"
(Core_kernel.Time.diff
Block_time.(now time_controller |> to_time_exn)
transition_time ) ;
let%map () =
match%map
add_and_finalize ~logger ~only_if_present:false
~source:`Internal breadcrumb ~valid_cb:None
with
| Ok () ->
()
| Error err ->
[%log error]
~metadata:
[ ("error", Error_json.error_to_yojson err) ]
"Error, failed to attach produced breadcrumb to \
transition frontier: $error" ;
let (_ : Transition_frontier.Breadcrumb.t) =
Cached.invalidate_with_failure breadcrumb
in
()
in
Mina_metrics.(
Gauge.dec_one
Transition_frontier_controller.transitions_being_processed)
| `Partially_valid_transition
(`Block transition, `Valid_cb valid_cb) ->
process_transition ~transition ~valid_cb ) ) )
let%test_module "Transition_handler.Processor tests" =
( module struct
open Async
open Pipe_lib
let () =
Backtrace.elide := false ;
Printexc.record_backtrace true ;
Async.Scheduler.set_record_backtraces true
let logger = Logger.create ()
let precomputed_values = Lazy.force Precomputed_values.for_unit_tests
let proof_level = precomputed_values.proof_level
let constraint_constants = precomputed_values.constraint_constants
let time_controller = Block_time.Controller.basic ~logger
let trust_system = Trust_system.null ()
let verifier =
Async.Thread_safe.block_on_async_exn (fun () ->
Verifier.create ~logger ~proof_level ~constraint_constants
~conf_dir:None
~pids:(Child_processes.Termination.create_pid_table ()) )
module Context = struct
let logger = logger
let precomputed_values = precomputed_values
let constraint_constants = constraint_constants
let consensus_constants = precomputed_values.consensus_constants
end
let downcast_breadcrumb breadcrumb =
let transition =
Transition_frontier.Breadcrumb.validated_transition breadcrumb
|> Mina_block.Validated.remember
|> Mina_block.Validation.reset_frontier_dependencies_validation
|> Mina_block.Validation.reset_staged_ledger_diff_validation
in
Envelope.Incoming.wrap ~data:transition ~sender:Envelope.Sender.Local
let%test_unit "adding transitions whose parents are in the frontier" =
let frontier_size = 1 in
let branch_size = 10 in
let max_length = frontier_size + branch_size in
Quickcheck.test ~trials:4
(Transition_frontier.For_tests.gen_with_branch ~precomputed_values
~verifier ~max_length ~frontier_size ~branch_size () )
~f:(fun (frontier, branch) ->
assert (
Thread_safe.block_on_async_exn (fun () ->
let valid_transition_reader, valid_transition_writer =
Strict_pipe.create
(Buffered
(`Capacity branch_size, `Overflow (Drop_head ignore)) )
in
let producer_transition_reader, _ =
Strict_pipe.create
(Buffered
(`Capacity branch_size, `Overflow (Drop_head ignore)) )
in
let _, catchup_job_writer =
Strict_pipe.create (Buffered (`Capacity 1, `Overflow Crash))
in
let catchup_breadcrumbs_reader, catchup_breadcrumbs_writer =
Strict_pipe.create (Buffered (`Capacity 1, `Overflow Crash))
in
let processed_transition_reader, processed_transition_writer =
Strict_pipe.create
(Buffered
(`Capacity branch_size, `Overflow (Drop_head ignore)) )
in
let clean_up_catchup_scheduler = Ivar.create () in
let cache = Unprocessed_transition_cache.create ~logger in
run
~context:(module Context)
~time_controller ~verifier ~trust_system
~clean_up_catchup_scheduler ~frontier
~primary_transition_reader:valid_transition_reader
~producer_transition_reader ~catchup_job_writer
~catchup_breadcrumbs_reader ~catchup_breadcrumbs_writer
~processed_transition_writer ;
List.iter branch ~f:(fun breadcrumb ->
let b =
downcast_breadcrumb breadcrumb
|> Unprocessed_transition_cache.register_exn cache
in
Strict_pipe.Writer.write valid_transition_writer
(`Block b, `Valid_cb None) ) ;
match%map
Block_time.Timeout.await
~timeout_duration:(Block_time.Span.of_ms 30000L)
time_controller
(Strict_pipe.Reader.fold_until processed_transition_reader
~init:branch
~f:(fun
remaining_breadcrumbs
(`Transition newly_added_transition, _, _)
->
Deferred.return
( match remaining_breadcrumbs with
| next_expected_breadcrumb :: tail ->
[%test_eq: State_hash.t]
(Transition_frontier.Breadcrumb.state_hash
next_expected_breadcrumb )
(Mina_block.Validated.state_hash
newly_added_transition ) ;
[%log info]
~metadata:
[ ( "height"
, `Int
( newly_added_transition
|> Mina_block.Validated.forget
|> With_hash.data |> Mina_block.header
|> Mina_block.Header.protocol_state
|> Protocol_state.consensus_state
|> Consensus.Data.Consensus_state
.blockchain_length
|> Mina_numbers.Length.to_uint32
|> Unsigned.UInt32.to_int ) )
]
"transition of $height passed processor" ;
if List.is_empty tail then `Stop true
else `Continue tail
| [] ->
`Stop false ) ) )
with
| `Timeout ->
failwith "test timed out"
| `Ok (`Eof _) ->
failwith "pipe closed unexpectedly"
| `Ok (`Terminated x) ->
x ) ) )
end )
|
eeaacc94ac900fa93c7cc7a70d3dc6694d1640c2d86a2f7744efc0a123b22515 | ocurrent/opam-repo-ci | test.ml | let () =
Lwt_main.run
@@ Alcotest_lwt.run "opam-repo-ci"
[ ("index", Test_index.tests) ]
| null | https://raw.githubusercontent.com/ocurrent/opam-repo-ci/58cd38ddcf8bb24db5c2a7d8c233770e3e1e08cf/test/test.ml | ocaml | let () =
Lwt_main.run
@@ Alcotest_lwt.run "opam-repo-ci"
[ ("index", Test_index.tests) ]
|
|
2621651dd4a8ffce4e169899841b736f0fe2301afa158cad8290f56793d64936 | metosin/sieppari | project.clj | (defproject metosin/sieppari "0.0.0-alpha13"
:description "Small, fast, and complete interceptor library."
:url ""
:license {:name "Eclipse Public License", :url "-2.0/"}
:deploy-repositories [["releases" :clojars]]
:lein-release {:deploy-via :clojars}
:dependencies []
:test-paths ["test/clj" "test/cljs" "test/cljc"]
:profiles {:dev {:source-paths ["dev"]
:dependencies [[org.clojure/clojure "1.10.1" :scope "provided"]
[org.clojure/clojurescript "1.10.758"]
;; Add-ons:
[org.clojure/core.async "1.2.603"]
[manifold "0.1.8"]
[funcool/promesa "5.1.0"]
;; Testing:
[metosin/testit "0.4.0"]
[lambdaisland/kaocha "1.0.632"]
[lambdaisland/kaocha-cljs "0.0-71"]
;; Dev:
[org.clojure/tools.namespace "1.0.0"]
;; Perf testing:
[criterium "0.4.5"]
[com.clojure-goes-fast/clj-async-profiler "0.5.0-SNAPSHOT"]
[io.pedestal/pedestal.interceptor "0.5.7"]
[org.slf4j/slf4j-nop "1.7.30"]]}
needed because of -cljs#known-issues
:test-cljs {:source-paths ["test/cljc" "test/cljs"]}
:examples {:source-paths ["examples"]}
:perf {:jvm-opts ^:replace ["-server" "-Xms4096m" "-Xmx4096m" "-Dclojure.compiler.direct-linking=true"]}}
:aliases {"kaocha" ["with-profile" "+dev-deps,+test-cljs" "run" "-m" "kaocha.runner" "--reporter" "kaocha.report/documentation"]
"perf" ["with-profile" "default,dev,examples,perf"]
"perf-test" ["perf" "run" "-m" "example.perf-testing"]})
| null | https://raw.githubusercontent.com/metosin/sieppari/7a97d55d8063f30359e902b0678c6ebca952f9fd/project.clj | clojure | Add-ons:
Testing:
Dev:
Perf testing: | (defproject metosin/sieppari "0.0.0-alpha13"
:description "Small, fast, and complete interceptor library."
:url ""
:license {:name "Eclipse Public License", :url "-2.0/"}
:deploy-repositories [["releases" :clojars]]
:lein-release {:deploy-via :clojars}
:dependencies []
:test-paths ["test/clj" "test/cljs" "test/cljc"]
:profiles {:dev {:source-paths ["dev"]
:dependencies [[org.clojure/clojure "1.10.1" :scope "provided"]
[org.clojure/clojurescript "1.10.758"]
[org.clojure/core.async "1.2.603"]
[manifold "0.1.8"]
[funcool/promesa "5.1.0"]
[metosin/testit "0.4.0"]
[lambdaisland/kaocha "1.0.632"]
[lambdaisland/kaocha-cljs "0.0-71"]
[org.clojure/tools.namespace "1.0.0"]
[criterium "0.4.5"]
[com.clojure-goes-fast/clj-async-profiler "0.5.0-SNAPSHOT"]
[io.pedestal/pedestal.interceptor "0.5.7"]
[org.slf4j/slf4j-nop "1.7.30"]]}
needed because of -cljs#known-issues
:test-cljs {:source-paths ["test/cljc" "test/cljs"]}
:examples {:source-paths ["examples"]}
:perf {:jvm-opts ^:replace ["-server" "-Xms4096m" "-Xmx4096m" "-Dclojure.compiler.direct-linking=true"]}}
:aliases {"kaocha" ["with-profile" "+dev-deps,+test-cljs" "run" "-m" "kaocha.runner" "--reporter" "kaocha.report/documentation"]
"perf" ["with-profile" "default,dev,examples,perf"]
"perf-test" ["perf" "run" "-m" "example.perf-testing"]})
|
8d505652975dca27ff1afc41bd5780b3994a88cf1dd287956b08324965a146d1 | ropas/sparrow | taintDom.ml | open BasicDom
open InterCfg
module IntOverflow =
struct
type t = Bot | Top
let to_string = function
| Bot -> "No Overflow"
| Top -> "May Overflow"
let compare = compare
let bot = Bot
let top = Top
let le x y =
match x, y with
| Bot, _ -> true
| Top, Bot -> false
| Top, Top -> true
let eq x y =
match x, y with
| Bot, Bot -> true
| Top, Top -> true
| _ -> false
let join x y =
match x, y with
| Bot, Bot -> Bot
| Top, _ | _, Top -> Top
let meet x y =
match x, y with
| Bot, _ -> Bot
| _, Bot -> Bot
| _ -> Top
let is_bot x = x = Bot
let widen = join
let narrow = meet
let pp fmt x = Format.fprintf fmt "%s" (to_string x)
end
module UserInput =
struct
module Source =
struct
type t = Node.t * Cil.location
let to_string (node, loc) =
Node.to_string node ^ "@" ^ CilHelper.s_location loc
let compare x y = Node.compare (fst x) (fst y)
let pp fmt x = Format.fprintf fmt "%s" (to_string x)
end
include PowDom.MakeLAT(Source)
let make node loc = singleton (node, loc)
let is_bot = is_empty
let is_taint x = not (is_bot x)
end
module Val =
struct
type t = {
int_overflow : IntOverflow.t;
user_input : UserInput.t;
}
let int_overflow x = x.int_overflow
let user_input x = x.user_input
let to_string t =
"{ int_overflow: " ^ IntOverflow.to_string t.int_overflow
^ ", user_input: " ^ UserInput.to_string t.user_input ^ " }"
let compare = compare
let bot = { int_overflow = IntOverflow.bot; user_input = UserInput.bot }
let top = { int_overflow = IntOverflow.top; user_input = UserInput.top }
let input_value node loc = { top with user_input = UserInput.make node loc }
let le x y =
(IntOverflow.le x.int_overflow y.int_overflow)
&& (UserInput.le x.user_input y.user_input)
let eq x y =
(IntOverflow.eq x.int_overflow y.int_overflow)
&& (UserInput.eq x.user_input y.user_input)
let join x y =
{ int_overflow = IntOverflow.join x.int_overflow y.int_overflow;
user_input = UserInput.join x.user_input y.user_input }
let meet x y =
{ int_overflow = IntOverflow.meet x.int_overflow y.int_overflow;
user_input = UserInput.meet x.user_input y.user_input }
let is_bot x = x = bot
let widen = join
let narrow = meet
let pp fmt x =
Format.fprintf fmt "{ int_overflow: %a, user_input: %a }"
IntOverflow.pp x.int_overflow UserInput.pp x.user_input
end
module Mem =
struct
include InstrumentedMem.Make(MapDom.MakeCPO (Loc) (Val))
let lookup : PowLoc.t -> t -> Val.t = fun locs mem ->
if eq mem bot then Val.bot
else
let find_join loc acc = Val.join acc (find loc mem) in
PowLoc.fold find_join locs Val.bot
let strong_update : PowLoc.t -> Val.t -> t -> t
= fun locs v mem ->
PowLoc.fold (fun x -> add x v) locs mem
let weak_update : PowLoc.t -> Val.t -> t -> t
= fun locs v mem ->
PowLoc.fold (fun x -> weak_add x v) locs mem
end
module Table = MapDom.MakeCPO (Node) (Mem)
| null | https://raw.githubusercontent.com/ropas/sparrow/3ec055b8c87b5c8340ef3ed6cde34f5835865b31/src/domain/taintDom.ml | ocaml | open BasicDom
open InterCfg
module IntOverflow =
struct
type t = Bot | Top
let to_string = function
| Bot -> "No Overflow"
| Top -> "May Overflow"
let compare = compare
let bot = Bot
let top = Top
let le x y =
match x, y with
| Bot, _ -> true
| Top, Bot -> false
| Top, Top -> true
let eq x y =
match x, y with
| Bot, Bot -> true
| Top, Top -> true
| _ -> false
let join x y =
match x, y with
| Bot, Bot -> Bot
| Top, _ | _, Top -> Top
let meet x y =
match x, y with
| Bot, _ -> Bot
| _, Bot -> Bot
| _ -> Top
let is_bot x = x = Bot
let widen = join
let narrow = meet
let pp fmt x = Format.fprintf fmt "%s" (to_string x)
end
module UserInput =
struct
module Source =
struct
type t = Node.t * Cil.location
let to_string (node, loc) =
Node.to_string node ^ "@" ^ CilHelper.s_location loc
let compare x y = Node.compare (fst x) (fst y)
let pp fmt x = Format.fprintf fmt "%s" (to_string x)
end
include PowDom.MakeLAT(Source)
let make node loc = singleton (node, loc)
let is_bot = is_empty
let is_taint x = not (is_bot x)
end
module Val =
struct
type t = {
int_overflow : IntOverflow.t;
user_input : UserInput.t;
}
let int_overflow x = x.int_overflow
let user_input x = x.user_input
let to_string t =
"{ int_overflow: " ^ IntOverflow.to_string t.int_overflow
^ ", user_input: " ^ UserInput.to_string t.user_input ^ " }"
let compare = compare
let bot = { int_overflow = IntOverflow.bot; user_input = UserInput.bot }
let top = { int_overflow = IntOverflow.top; user_input = UserInput.top }
let input_value node loc = { top with user_input = UserInput.make node loc }
let le x y =
(IntOverflow.le x.int_overflow y.int_overflow)
&& (UserInput.le x.user_input y.user_input)
let eq x y =
(IntOverflow.eq x.int_overflow y.int_overflow)
&& (UserInput.eq x.user_input y.user_input)
let join x y =
{ int_overflow = IntOverflow.join x.int_overflow y.int_overflow;
user_input = UserInput.join x.user_input y.user_input }
let meet x y =
{ int_overflow = IntOverflow.meet x.int_overflow y.int_overflow;
user_input = UserInput.meet x.user_input y.user_input }
let is_bot x = x = bot
let widen = join
let narrow = meet
let pp fmt x =
Format.fprintf fmt "{ int_overflow: %a, user_input: %a }"
IntOverflow.pp x.int_overflow UserInput.pp x.user_input
end
module Mem =
struct
include InstrumentedMem.Make(MapDom.MakeCPO (Loc) (Val))
let lookup : PowLoc.t -> t -> Val.t = fun locs mem ->
if eq mem bot then Val.bot
else
let find_join loc acc = Val.join acc (find loc mem) in
PowLoc.fold find_join locs Val.bot
let strong_update : PowLoc.t -> Val.t -> t -> t
= fun locs v mem ->
PowLoc.fold (fun x -> add x v) locs mem
let weak_update : PowLoc.t -> Val.t -> t -> t
= fun locs v mem ->
PowLoc.fold (fun x -> weak_add x v) locs mem
end
module Table = MapDom.MakeCPO (Node) (Mem)
|
|
d073a92303e56a7f7a76e4777bbc8ec1fc8ff956ea30e3c9be605d9b3b3f4d7d | blancas/kern | custom_lexer.clj | (ns custom-lexer
(:use [blancas.kern.core]
[clojure.string :only (upper-case)])
(:require [blancas.kern.lexer :as lex]))
;; To customize the lexer, change 'basic-def' fields as needed.
(def hoc-style
(assoc lex/basic-def
:comment-start "(*"
:comment-end "*)"
:nested-comments true
:identifier-letter (<|> alpha-num (one-of* "_-."))
:reserved-names ["while" "if" "else" "read" "print" "return" "fun" "proc"]
:case-sensitive false
:trim-newline false))
;; Then make the customized parsers.
(def- rec (lex/make-parsers hoc-style))
;; For easy access, store the parsers in vars.
(def trim (:trim rec))
(def lexeme (:lexeme rec))
(def sym (:sym rec))
(def new-line (:new-line rec))
(def one-of (:one-of rec))
(def none-of (:none-of rec))
(def token (:token rec))
(def word (:word rec))
(def identifier (<$> upper-case (:identifier rec)))
(def field (:field rec))
(def char-lit (:char-lit rec))
(def string-lit (:string-lit rec))
(def dec-lit (:dec-lit rec))
(def oct-lit (:oct-lit rec))
(def hex-lit (:hex-lit rec))
(def float-lit (:float-lit rec))
(def bool-lit (:bool-lit rec))
(def nil-lit (:nil-lit rec))
(def parens (:parens rec))
(def braces (:braces rec))
(def angles (:angles rec))
(def brackets (:brackets rec))
(def semi (:semi rec))
(def comma (:comma rec))
(def colon (:colon rec))
(def dot (:dot rec))
(def semi-sep (:semi-sep rec))
(def semi-sep1 (:semi-sep1 rec))
(def comma-sep (:comma-sep rec))
(def comma-sep1 (:comma-sep1 rec))
| null | https://raw.githubusercontent.com/blancas/kern/3ef65e559658c06a321a9ca7c85a541edc7b9ff2/src/main/resources/custom_lexer.clj | clojure | To customize the lexer, change 'basic-def' fields as needed.
Then make the customized parsers.
For easy access, store the parsers in vars. | (ns custom-lexer
(:use [blancas.kern.core]
[clojure.string :only (upper-case)])
(:require [blancas.kern.lexer :as lex]))
(def hoc-style
(assoc lex/basic-def
:comment-start "(*"
:comment-end "*)"
:nested-comments true
:identifier-letter (<|> alpha-num (one-of* "_-."))
:reserved-names ["while" "if" "else" "read" "print" "return" "fun" "proc"]
:case-sensitive false
:trim-newline false))
(def- rec (lex/make-parsers hoc-style))
(def trim (:trim rec))
(def lexeme (:lexeme rec))
(def sym (:sym rec))
(def new-line (:new-line rec))
(def one-of (:one-of rec))
(def none-of (:none-of rec))
(def token (:token rec))
(def word (:word rec))
(def identifier (<$> upper-case (:identifier rec)))
(def field (:field rec))
(def char-lit (:char-lit rec))
(def string-lit (:string-lit rec))
(def dec-lit (:dec-lit rec))
(def oct-lit (:oct-lit rec))
(def hex-lit (:hex-lit rec))
(def float-lit (:float-lit rec))
(def bool-lit (:bool-lit rec))
(def nil-lit (:nil-lit rec))
(def parens (:parens rec))
(def braces (:braces rec))
(def angles (:angles rec))
(def brackets (:brackets rec))
(def semi (:semi rec))
(def comma (:comma rec))
(def colon (:colon rec))
(def dot (:dot rec))
(def semi-sep (:semi-sep rec))
(def semi-sep1 (:semi-sep1 rec))
(def comma-sep (:comma-sep rec))
(def comma-sep1 (:comma-sep1 rec))
|
2c3fe287aacafda6688f7e3a3336577ea1ea594312ebc73fbbf502ea95e15853 | ghuysmans/api-cohttp-lwt | api_cohttp_lwt_server.ml | let of_router router =
let callback _conn req body =
let uri = Cohttp.Request.uri req in
let meth = Cohttp.Request.meth req in
let headers = Cohttp.Request.headers req in
let%lwt body = Cohttp_lwt.Body.to_string body in
Api_cohttp_lwt_mock.Client.ctx_of_router router ~headers ~body meth uri
in
Cohttp_lwt_unix.Server.make ~callback ()
| null | https://raw.githubusercontent.com/ghuysmans/api-cohttp-lwt/4edfcb4d24deeeaca5786cb0e440fe295ef05b6f/server/api_cohttp_lwt_server.ml | ocaml | let of_router router =
let callback _conn req body =
let uri = Cohttp.Request.uri req in
let meth = Cohttp.Request.meth req in
let headers = Cohttp.Request.headers req in
let%lwt body = Cohttp_lwt.Body.to_string body in
Api_cohttp_lwt_mock.Client.ctx_of_router router ~headers ~body meth uri
in
Cohttp_lwt_unix.Server.make ~callback ()
|
|
06d70ffe7944519efee916781450760559f2ad8e35992af3278c1b73e3187992 | Oblosys/proxima | CodeSyntaxDump.hs |
UUAGC 0.9.10 ( CodeSyntaxDump.ag )
module CodeSyntaxDump where
import Data.List
import qualified Data.Map as Map
import Pretty
import PPUtil
import CodeSyntax
import Patterns
import CommonTypes
import Data.Map(Map)
import Data.Set(Set)
-- Patterns.ag imports
import UU.Scanner.Position(Pos)
import CommonTypes (ConstructorIdent,Identifier)
ppChild :: (Identifier,Type,Bool) -> PP_Doc
ppChild (nm,tp,b)
= pp nm >#< "::" >#< pp (show tp) >#< "<" >|< ppBool b >|< ">"
ppVertexMap :: Map Int (Identifier,Identifier,Maybe Type) -> PP_Doc
ppVertexMap m
= ppVList [ ppF (show k) $ ppAttr v | (k,v) <- Map.toList m ]
ppAttr :: (Identifier,Identifier,Maybe Type) -> PP_Doc
ppAttr (fld,nm,mTp)
= pp fld >|< "." >|< pp nm >#<
case mTp of
Just tp -> pp "::" >#< show tp
Nothing -> empty
ppBool :: Bool -> PP_Doc
ppBool True = pp "T"
ppBool False = pp "F"
ppMaybeShow :: Show a => Maybe a -> PP_Doc
ppMaybeShow (Just x) = pp (show x)
ppMaybeShow Nothing = pp "_"
ppStrings :: [String] -> PP_Doc
ppStrings = vlist
-- CGrammar ----------------------------------------------------
visit 0 :
synthesized attribute :
pp : PP_Doc
alternatives :
alternative CGrammar :
child typeSyns : { TypeSyns }
child derivings : { Derivings }
child wrappers : { Set NontermIdent }
child : CNonterminals
child pragmas : { PragmaMap }
child paramMap : { ParamMap }
child : { ContextMap }
visit 0:
synthesized attribute:
pp : PP_Doc
alternatives:
alternative CGrammar:
child typeSyns : {TypeSyns}
child derivings : {Derivings}
child wrappers : {Set NontermIdent}
child nonts : CNonterminals
child pragmas : {PragmaMap}
child paramMap : {ParamMap}
child contextMap : {ContextMap}
-}
-- cata
sem_CGrammar :: CGrammar ->
T_CGrammar
sem_CGrammar (CGrammar _typeSyns _derivings _wrappers _nonts _pragmas _paramMap _contextMap ) =
(sem_CGrammar_CGrammar _typeSyns _derivings _wrappers (sem_CNonterminals _nonts ) _pragmas _paramMap _contextMap )
-- semantic domain
newtype T_CGrammar = T_CGrammar (( PP_Doc))
data Inh_CGrammar = Inh_CGrammar {}
data Syn_CGrammar = Syn_CGrammar {pp_Syn_CGrammar :: PP_Doc}
wrap_CGrammar :: T_CGrammar ->
Inh_CGrammar ->
Syn_CGrammar
wrap_CGrammar (T_CGrammar sem ) (Inh_CGrammar ) =
(let ( _lhsOpp) =
(sem )
in (Syn_CGrammar _lhsOpp ))
sem_CGrammar_CGrammar :: TypeSyns ->
Derivings ->
(Set NontermIdent) ->
T_CNonterminals ->
PragmaMap ->
ParamMap ->
ContextMap ->
T_CGrammar
sem_CGrammar_CGrammar typeSyns_ derivings_ wrappers_ (T_CNonterminals nonts_ ) pragmas_ paramMap_ contextMap_ =
(T_CGrammar (let _lhsOpp :: PP_Doc
_nontsIpp :: PP_Doc
_nontsIppL :: ([PP_Doc])
" CodeSyntaxDump.ag"(line 47 , column 21 )
_lhsOpp =
ppNestInfo ["CGrammar","CGrammar"] []
[ ppF "typeSyns" $ ppAssocL typeSyns_
, ppF "derivings" $ ppMap $ derivings_
, ppF "nonts" $ ppVList _nontsIppL
] []
( _nontsIpp,_nontsIppL) =
(nonts_ )
in ( _lhsOpp)) )
CInterface --------------------------------------------------
visit 0 :
synthesized attribute :
pp : PP_Doc
alternatives :
alternative CInterface :
child seg : CSegments
visit 0:
synthesized attribute:
pp : PP_Doc
alternatives:
alternative CInterface:
child seg : CSegments
-}
-- cata
sem_CInterface :: CInterface ->
T_CInterface
sem_CInterface (CInterface _seg ) =
(sem_CInterface_CInterface (sem_CSegments _seg ) )
-- semantic domain
newtype T_CInterface = T_CInterface (( PP_Doc))
data Inh_CInterface = Inh_CInterface {}
data Syn_CInterface = Syn_CInterface {pp_Syn_CInterface :: PP_Doc}
wrap_CInterface :: T_CInterface ->
Inh_CInterface ->
Syn_CInterface
wrap_CInterface (T_CInterface sem ) (Inh_CInterface ) =
(let ( _lhsOpp) =
(sem )
in (Syn_CInterface _lhsOpp ))
sem_CInterface_CInterface :: T_CSegments ->
T_CInterface
sem_CInterface_CInterface (T_CSegments seg_ ) =
(T_CInterface (let _lhsOpp :: PP_Doc
_segIpp :: PP_Doc
_segIppL :: ([PP_Doc])
" CodeSyntaxDump.ag"(line 57 , column 21 )
_lhsOpp =
ppNestInfo ["CInterface","CInterface"] [] [ppF "seg" $ ppVList _segIppL] []
( _segIpp,_segIppL) =
(seg_ )
in ( _lhsOpp)) )
-- CNonterminal ------------------------------------------------
{-
visit 0:
synthesized attribute:
pp : PP_Doc
alternatives:
alternative CNonterminal:
child nt : {NontermIdent}
child params : {[Identifier]}
child inh : {Attributes}
child syn : {Attributes}
child prods : CProductions
child inter : CInterface
-}
-- cata
sem_CNonterminal :: CNonterminal ->
T_CNonterminal
sem_CNonterminal (CNonterminal _nt _params _inh _syn _prods _inter ) =
(sem_CNonterminal_CNonterminal _nt _params _inh _syn (sem_CProductions _prods ) (sem_CInterface _inter ) )
-- semantic domain
newtype T_CNonterminal = T_CNonterminal (( PP_Doc))
data Inh_CNonterminal = Inh_CNonterminal {}
data Syn_CNonterminal = Syn_CNonterminal {pp_Syn_CNonterminal :: PP_Doc}
wrap_CNonterminal :: T_CNonterminal ->
Inh_CNonterminal ->
Syn_CNonterminal
wrap_CNonterminal (T_CNonterminal sem ) (Inh_CNonterminal ) =
(let ( _lhsOpp) =
(sem )
in (Syn_CNonterminal _lhsOpp ))
sem_CNonterminal_CNonterminal :: NontermIdent ->
([Identifier]) ->
Attributes ->
Attributes ->
T_CProductions ->
T_CInterface ->
T_CNonterminal
sem_CNonterminal_CNonterminal nt_ params_ inh_ syn_ (T_CProductions prods_ ) (T_CInterface inter_ ) =
(T_CNonterminal (let _lhsOpp :: PP_Doc
_prodsIpp :: PP_Doc
_prodsIppL :: ([PP_Doc])
_interIpp :: PP_Doc
" CodeSyntaxDump.ag"(line 54 , column 33 )
_lhsOpp =
ppNestInfo ["CNonterminal","CNonterminal"] (pp nt_ : map pp params_) [ppF "inh" $ ppMap inh_, ppF "syn" $ ppMap syn_, ppF "prods" $ ppVList _prodsIppL, ppF "inter" _interIpp] []
( _prodsIpp,_prodsIppL) =
(prods_ )
( _interIpp) =
(inter_ )
in ( _lhsOpp)) )
-- CNonterminals -----------------------------------------------
visit 0 :
synthesized attributes :
pp : PP_Doc
ppL : [ PP_Doc ]
alternatives :
alternative Cons :
child hd : CNonterminal
child tl : CNonterminals
alternative :
visit 0:
synthesized attributes:
pp : PP_Doc
ppL : [PP_Doc]
alternatives:
alternative Cons:
child hd : CNonterminal
child tl : CNonterminals
alternative Nil:
-}
-- cata
sem_CNonterminals :: CNonterminals ->
T_CNonterminals
sem_CNonterminals list =
(Prelude.foldr sem_CNonterminals_Cons sem_CNonterminals_Nil (Prelude.map sem_CNonterminal list) )
-- semantic domain
newtype T_CNonterminals = T_CNonterminals (( PP_Doc,([PP_Doc])))
data Inh_CNonterminals = Inh_CNonterminals {}
data Syn_CNonterminals = Syn_CNonterminals {pp_Syn_CNonterminals :: PP_Doc,ppL_Syn_CNonterminals :: [PP_Doc]}
wrap_CNonterminals :: T_CNonterminals ->
Inh_CNonterminals ->
Syn_CNonterminals
wrap_CNonterminals (T_CNonterminals sem ) (Inh_CNonterminals ) =
(let ( _lhsOpp,_lhsOppL) =
(sem )
in (Syn_CNonterminals _lhsOpp _lhsOppL ))
sem_CNonterminals_Cons :: T_CNonterminal ->
T_CNonterminals ->
T_CNonterminals
sem_CNonterminals_Cons (T_CNonterminal hd_ ) (T_CNonterminals tl_ ) =
(T_CNonterminals (let _lhsOppL :: ([PP_Doc])
_lhsOpp :: PP_Doc
_hdIpp :: PP_Doc
_tlIpp :: PP_Doc
_tlIppL :: ([PP_Doc])
" CodeSyntaxDump.ag"(line 102 , column 33 )
_lhsOppL =
_hdIpp : _tlIppL
use rule " CodeSyntaxDump.ag"(line 44 , column 40 )
_lhsOpp =
_hdIpp >-< _tlIpp
( _hdIpp) =
(hd_ )
( _tlIpp,_tlIppL) =
(tl_ )
in ( _lhsOpp,_lhsOppL)) )
sem_CNonterminals_Nil :: T_CNonterminals
sem_CNonterminals_Nil =
(T_CNonterminals (let _lhsOppL :: ([PP_Doc])
_lhsOpp :: PP_Doc
" CodeSyntaxDump.ag"(line 103 , column 33 )
_lhsOppL =
[]
use rule " CodeSyntaxDump.ag"(line 44 , column 40 )
_lhsOpp =
empty
in ( _lhsOpp,_lhsOppL)) )
-- CProduction -------------------------------------------------
visit 0 :
synthesized attribute :
pp : PP_Doc
alternatives :
alternative CProduction :
child con : { ConstructorIdent }
child visits : CVisits
child children : { [ ( Identifier , Type , ) ] }
child terminals : { [ Identifier ] }
visit 0:
synthesized attribute:
pp : PP_Doc
alternatives:
alternative CProduction:
child con : {ConstructorIdent}
child visits : CVisits
child children : {[(Identifier,Type,Bool)]}
child terminals : {[Identifier]}
-}
-- cata
sem_CProduction :: CProduction ->
T_CProduction
sem_CProduction (CProduction _con _visits _children _terminals ) =
(sem_CProduction_CProduction _con (sem_CVisits _visits ) _children _terminals )
-- semantic domain
newtype T_CProduction = T_CProduction (( PP_Doc))
data Inh_CProduction = Inh_CProduction {}
data Syn_CProduction = Syn_CProduction {pp_Syn_CProduction :: PP_Doc}
wrap_CProduction :: T_CProduction ->
Inh_CProduction ->
Syn_CProduction
wrap_CProduction (T_CProduction sem ) (Inh_CProduction ) =
(let ( _lhsOpp) =
(sem )
in (Syn_CProduction _lhsOpp ))
sem_CProduction_CProduction :: ConstructorIdent ->
T_CVisits ->
([(Identifier,Type,Bool)]) ->
([Identifier]) ->
T_CProduction
sem_CProduction_CProduction con_ (T_CVisits visits_ ) children_ terminals_ =
(T_CProduction (let _lhsOpp :: PP_Doc
_visitsIpp :: PP_Doc
_visitsIppL :: ([PP_Doc])
" CodeSyntaxDump.ag"(line 63 , column 17 )
_lhsOpp =
ppNestInfo ["CProduction","CProduction"] [pp con_] [ppF "visits" $ ppVList _visitsIppL, ppF "children" $ ppVList (map ppChild children_),ppF "terminals" $ ppVList (map ppShow terminals_)] []
( _visitsIpp,_visitsIppL) =
(visits_ )
in ( _lhsOpp)) )
-- CProductions ------------------------------------------------
visit 0 :
synthesized attributes :
pp : PP_Doc
ppL : [ PP_Doc ]
alternatives :
alternative Cons :
child hd : CProduction
child tl : CProductions
alternative :
visit 0:
synthesized attributes:
pp : PP_Doc
ppL : [PP_Doc]
alternatives:
alternative Cons:
child hd : CProduction
child tl : CProductions
alternative Nil:
-}
-- cata
sem_CProductions :: CProductions ->
T_CProductions
sem_CProductions list =
(Prelude.foldr sem_CProductions_Cons sem_CProductions_Nil (Prelude.map sem_CProduction list) )
-- semantic domain
newtype T_CProductions = T_CProductions (( PP_Doc,([PP_Doc])))
data Inh_CProductions = Inh_CProductions {}
data Syn_CProductions = Syn_CProductions {pp_Syn_CProductions :: PP_Doc,ppL_Syn_CProductions :: [PP_Doc]}
wrap_CProductions :: T_CProductions ->
Inh_CProductions ->
Syn_CProductions
wrap_CProductions (T_CProductions sem ) (Inh_CProductions ) =
(let ( _lhsOpp,_lhsOppL) =
(sem )
in (Syn_CProductions _lhsOpp _lhsOppL ))
sem_CProductions_Cons :: T_CProduction ->
T_CProductions ->
T_CProductions
sem_CProductions_Cons (T_CProduction hd_ ) (T_CProductions tl_ ) =
(T_CProductions (let _lhsOppL :: ([PP_Doc])
_lhsOpp :: PP_Doc
_hdIpp :: PP_Doc
_tlIpp :: PP_Doc
_tlIppL :: ([PP_Doc])
" CodeSyntaxDump.ag"(line 94 , column 33 )
_lhsOppL =
_hdIpp : _tlIppL
use rule " CodeSyntaxDump.ag"(line 44 , column 40 )
_lhsOpp =
_hdIpp >-< _tlIpp
( _hdIpp) =
(hd_ )
( _tlIpp,_tlIppL) =
(tl_ )
in ( _lhsOpp,_lhsOppL)) )
sem_CProductions_Nil :: T_CProductions
sem_CProductions_Nil =
(T_CProductions (let _lhsOppL :: ([PP_Doc])
_lhsOpp :: PP_Doc
" CodeSyntaxDump.ag"(line 95 , column 33 )
_lhsOppL =
[]
use rule " CodeSyntaxDump.ag"(line 44 , column 40 )
_lhsOpp =
empty
in ( _lhsOpp,_lhsOppL)) )
CRule -------------------------------------------------------
visit 0 :
synthesized attribute :
pp : PP_Doc
alternatives :
alternative :
child name : { Identifier }
child nt : { NontermIdent }
child nr : { Int }
child inh : { Attributes }
child syn : { Attributes }
child isLast : { Bool }
alternative CRule :
child name : { Identifier }
child isIn : { Bool }
child hasCode : { Bool }
child nt : { NontermIdent }
child con : { ConstructorIdent }
child field : { Identifier }
child childnt : { Maybe NontermIdent }
child tp : { Maybe Type }
child pattern : Pattern
child rhs : { [ String ] }
child defines : { Map Int ( Identifier , Identifier , Maybe Type ) }
child owrt : { Bool }
child origin : { String }
child uses : { Set ( Identifier , Identifier ) }
visit 0:
synthesized attribute:
pp : PP_Doc
alternatives:
alternative CChildVisit:
child name : {Identifier}
child nt : {NontermIdent}
child nr : {Int}
child inh : {Attributes}
child syn : {Attributes}
child isLast : {Bool}
alternative CRule:
child name : {Identifier}
child isIn : {Bool}
child hasCode : {Bool}
child nt : {NontermIdent}
child con : {ConstructorIdent}
child field : {Identifier}
child childnt : {Maybe NontermIdent}
child tp : {Maybe Type}
child pattern : Pattern
child rhs : {[String]}
child defines : {Map Int (Identifier,Identifier,Maybe Type)}
child owrt : {Bool}
child origin : {String}
child uses : {Set (Identifier, Identifier)}
-}
-- cata
sem_CRule :: CRule ->
T_CRule
sem_CRule (CChildVisit _name _nt _nr _inh _syn _isLast ) =
(sem_CRule_CChildVisit _name _nt _nr _inh _syn _isLast )
sem_CRule (CRule _name _isIn _hasCode _nt _con _field _childnt _tp _pattern _rhs _defines _owrt _origin _uses ) =
(sem_CRule_CRule _name _isIn _hasCode _nt _con _field _childnt _tp (sem_Pattern _pattern ) _rhs _defines _owrt _origin _uses )
-- semantic domain
newtype T_CRule = T_CRule (( PP_Doc))
data Inh_CRule = Inh_CRule {}
data Syn_CRule = Syn_CRule {pp_Syn_CRule :: PP_Doc}
wrap_CRule :: T_CRule ->
Inh_CRule ->
Syn_CRule
wrap_CRule (T_CRule sem ) (Inh_CRule ) =
(let ( _lhsOpp) =
(sem )
in (Syn_CRule _lhsOpp ))
sem_CRule_CChildVisit :: Identifier ->
NontermIdent ->
Int ->
Attributes ->
Attributes ->
Bool ->
T_CRule
sem_CRule_CChildVisit name_ nt_ nr_ inh_ syn_ isLast_ =
(T_CRule (let _lhsOpp :: PP_Doc
" CodeSyntaxDump.ag"(line 70 , column 21 )
_lhsOpp =
ppNestInfo ["CRule","CChildVisit"] [pp name_] [ppF "nt" $ pp nt_, ppF "nr" $ ppShow nr_, ppF "inh" $ ppMap inh_, ppF "syn" $ ppMap syn_, ppF "last" $ ppBool isLast_] []
in ( _lhsOpp)) )
sem_CRule_CRule :: Identifier ->
Bool ->
Bool ->
NontermIdent ->
ConstructorIdent ->
Identifier ->
(Maybe NontermIdent) ->
(Maybe Type) ->
T_Pattern ->
([String]) ->
(Map Int (Identifier,Identifier,Maybe Type)) ->
Bool ->
String ->
(Set (Identifier, Identifier)) ->
T_CRule
sem_CRule_CRule name_ isIn_ hasCode_ nt_ con_ field_ childnt_ tp_ (T_Pattern pattern_ ) rhs_ defines_ owrt_ origin_ uses_ =
(T_CRule (let _lhsOpp :: PP_Doc
_patternIcopy :: Pattern
_patternIpp :: PP_Doc
" CodeSyntaxDump.ag"(line 69 , column 33 )
_lhsOpp =
ppNestInfo ["CRule","CRule"] [pp name_] [ppF "isIn" $ ppBool isIn_, ppF "hasCode" $ ppBool hasCode_, ppF "nt" $ pp nt_, ppF "con" $ pp con_, ppF "field" $ pp field_, ppF "childnt" $ ppMaybeShow childnt_, ppF "tp" $ ppMaybeShow tp_, ppF "pattern" $ if isIn_ then pp "<no pat because In>" else _patternIpp, ppF "rhs" $ ppStrings rhs_, ppF "defines" $ ppVertexMap defines_, ppF "owrt" $ ppBool owrt_, ppF "origin" $ pp origin_] []
( _patternIcopy,_patternIpp) =
(pattern_ )
in ( _lhsOpp)) )
CSegment ----------------------------------------------------
visit 0 :
synthesized attribute :
pp : PP_Doc
alternatives :
alternative CSegment :
child inh : { Attributes }
child syn : { Attributes }
visit 0:
synthesized attribute:
pp : PP_Doc
alternatives:
alternative CSegment:
child inh : {Attributes}
child syn : {Attributes}
-}
-- cata
sem_CSegment :: CSegment ->
T_CSegment
sem_CSegment (CSegment _inh _syn ) =
(sem_CSegment_CSegment _inh _syn )
-- semantic domain
newtype T_CSegment = T_CSegment (( PP_Doc))
data Inh_CSegment = Inh_CSegment {}
data Syn_CSegment = Syn_CSegment {pp_Syn_CSegment :: PP_Doc}
wrap_CSegment :: T_CSegment ->
Inh_CSegment ->
Syn_CSegment
wrap_CSegment (T_CSegment sem ) (Inh_CSegment ) =
(let ( _lhsOpp) =
(sem )
in (Syn_CSegment _lhsOpp ))
sem_CSegment_CSegment :: Attributes ->
Attributes ->
T_CSegment
sem_CSegment_CSegment inh_ syn_ =
(T_CSegment (let _lhsOpp :: PP_Doc
" CodeSyntaxDump.ag"(line 60 , column 21 )
_lhsOpp =
ppNestInfo ["CSegment","CSegment"] [] [ppF "inh" $ ppMap inh_, ppF "syn" $ ppMap syn_] []
in ( _lhsOpp)) )
CSegments ---------------------------------------------------
visit 0 :
synthesized attributes :
pp : PP_Doc
ppL : [ PP_Doc ]
alternatives :
alternative Cons :
child hd : CSegment
child tl : CSegments
alternative :
visit 0:
synthesized attributes:
pp : PP_Doc
ppL : [PP_Doc]
alternatives:
alternative Cons:
child hd : CSegment
child tl : CSegments
alternative Nil:
-}
-- cata
sem_CSegments :: CSegments ->
T_CSegments
sem_CSegments list =
(Prelude.foldr sem_CSegments_Cons sem_CSegments_Nil (Prelude.map sem_CSegment list) )
-- semantic domain
newtype T_CSegments = T_CSegments (( PP_Doc,([PP_Doc])))
data Inh_CSegments = Inh_CSegments {}
data Syn_CSegments = Syn_CSegments {pp_Syn_CSegments :: PP_Doc,ppL_Syn_CSegments :: [PP_Doc]}
wrap_CSegments :: T_CSegments ->
Inh_CSegments ->
Syn_CSegments
wrap_CSegments (T_CSegments sem ) (Inh_CSegments ) =
(let ( _lhsOpp,_lhsOppL) =
(sem )
in (Syn_CSegments _lhsOpp _lhsOppL ))
sem_CSegments_Cons :: T_CSegment ->
T_CSegments ->
T_CSegments
sem_CSegments_Cons (T_CSegment hd_ ) (T_CSegments tl_ ) =
(T_CSegments (let _lhsOppL :: ([PP_Doc])
_lhsOpp :: PP_Doc
_hdIpp :: PP_Doc
_tlIpp :: PP_Doc
_tlIppL :: ([PP_Doc])
" CodeSyntaxDump.ag"(line 98 , column 33 )
_lhsOppL =
_hdIpp : _tlIppL
use rule " CodeSyntaxDump.ag"(line 44 , column 40 )
_lhsOpp =
_hdIpp >-< _tlIpp
( _hdIpp) =
(hd_ )
( _tlIpp,_tlIppL) =
(tl_ )
in ( _lhsOpp,_lhsOppL)) )
sem_CSegments_Nil :: T_CSegments
sem_CSegments_Nil =
(T_CSegments (let _lhsOppL :: ([PP_Doc])
_lhsOpp :: PP_Doc
" CodeSyntaxDump.ag"(line 99 , column 33 )
_lhsOppL =
[]
use rule " CodeSyntaxDump.ag"(line 44 , column 40 )
_lhsOpp =
empty
in ( _lhsOpp,_lhsOppL)) )
-- CVisit ------------------------------------------------------
{-
visit 0:
synthesized attribute:
pp : PP_Doc
alternatives:
alternative CVisit:
child inh : {Attributes}
child syn : {Attributes}
child vss : Sequence
child intra : Sequence
child ordered : {Bool}
-}
-- cata
sem_CVisit :: CVisit ->
T_CVisit
sem_CVisit (CVisit _inh _syn _vss _intra _ordered ) =
(sem_CVisit_CVisit _inh _syn (sem_Sequence _vss ) (sem_Sequence _intra ) _ordered )
-- semantic domain
newtype T_CVisit = T_CVisit (( PP_Doc))
data Inh_CVisit = Inh_CVisit {}
data Syn_CVisit = Syn_CVisit {pp_Syn_CVisit :: PP_Doc}
wrap_CVisit :: T_CVisit ->
Inh_CVisit ->
Syn_CVisit
wrap_CVisit (T_CVisit sem ) (Inh_CVisit ) =
(let ( _lhsOpp) =
(sem )
in (Syn_CVisit _lhsOpp ))
sem_CVisit_CVisit :: Attributes ->
Attributes ->
T_Sequence ->
T_Sequence ->
Bool ->
T_CVisit
sem_CVisit_CVisit inh_ syn_ (T_Sequence vss_ ) (T_Sequence intra_ ) ordered_ =
(T_CVisit (let _lhsOpp :: PP_Doc
_vssIppL :: ([PP_Doc])
_intraIppL :: ([PP_Doc])
" CodeSyntaxDump.ag"(line 66 , column 21 )
_lhsOpp =
ppNestInfo ["CVisit","CVisit"] [] [ppF "inh" $ ppMap inh_, ppF "syn" $ ppMap syn_, ppF "sequence" $ ppVList _vssIppL, ppF "intra" $ ppVList _intraIppL, ppF "ordered" $ ppBool ordered_] []
( _vssIppL) =
(vss_ )
( _intraIppL) =
(intra_ )
in ( _lhsOpp)) )
-- CVisits -----------------------------------------------------
visit 0 :
synthesized attributes :
pp : PP_Doc
ppL : [ PP_Doc ]
alternatives :
alternative Cons :
child hd : CVisit
child tl : CVisits
alternative :
visit 0:
synthesized attributes:
pp : PP_Doc
ppL : [PP_Doc]
alternatives:
alternative Cons:
child hd : CVisit
child tl : CVisits
alternative Nil:
-}
-- cata
sem_CVisits :: CVisits ->
T_CVisits
sem_CVisits list =
(Prelude.foldr sem_CVisits_Cons sem_CVisits_Nil (Prelude.map sem_CVisit list) )
-- semantic domain
newtype T_CVisits = T_CVisits (( PP_Doc,([PP_Doc])))
data Inh_CVisits = Inh_CVisits {}
data Syn_CVisits = Syn_CVisits {pp_Syn_CVisits :: PP_Doc,ppL_Syn_CVisits :: [PP_Doc]}
wrap_CVisits :: T_CVisits ->
Inh_CVisits ->
Syn_CVisits
wrap_CVisits (T_CVisits sem ) (Inh_CVisits ) =
(let ( _lhsOpp,_lhsOppL) =
(sem )
in (Syn_CVisits _lhsOpp _lhsOppL ))
sem_CVisits_Cons :: T_CVisit ->
T_CVisits ->
T_CVisits
sem_CVisits_Cons (T_CVisit hd_ ) (T_CVisits tl_ ) =
(T_CVisits (let _lhsOppL :: ([PP_Doc])
_lhsOpp :: PP_Doc
_hdIpp :: PP_Doc
_tlIpp :: PP_Doc
_tlIppL :: ([PP_Doc])
" CodeSyntaxDump.ag"(line 90 , column 33 )
_lhsOppL =
_hdIpp : _tlIppL
use rule " CodeSyntaxDump.ag"(line 44 , column 40 )
_lhsOpp =
_hdIpp >-< _tlIpp
( _hdIpp) =
(hd_ )
( _tlIpp,_tlIppL) =
(tl_ )
in ( _lhsOpp,_lhsOppL)) )
sem_CVisits_Nil :: T_CVisits
sem_CVisits_Nil =
(T_CVisits (let _lhsOppL :: ([PP_Doc])
_lhsOpp :: PP_Doc
" CodeSyntaxDump.ag"(line 91 , column 33 )
_lhsOppL =
[]
use rule " CodeSyntaxDump.ag"(line 44 , column 40 )
_lhsOpp =
empty
in ( _lhsOpp,_lhsOppL)) )
-- Pattern -----------------------------------------------------
visit 0 :
synthesized attributes :
copy : SELF
pp : PP_Doc
alternatives :
alternative :
child field : { Identifier }
child attr : { Identifier }
child pat : Pattern
child parts : Patterns
visit 0 :
local copy : _
alternative Constr :
child name : { ConstructorIdent }
child pats : Patterns
visit 0 :
local copy : _
alternative Irrefutable :
child pat : Pattern
visit 0 :
local copy : _
alternative Product :
child pos : { Pos }
child pats : Patterns
visit 0 :
local copy : _
alternative Underscore :
child pos : { Pos }
visit 0 :
local copy : _
visit 0:
synthesized attributes:
copy : SELF
pp : PP_Doc
alternatives:
alternative Alias:
child field : {Identifier}
child attr : {Identifier}
child pat : Pattern
child parts : Patterns
visit 0:
local copy : _
alternative Constr:
child name : {ConstructorIdent}
child pats : Patterns
visit 0:
local copy : _
alternative Irrefutable:
child pat : Pattern
visit 0:
local copy : _
alternative Product:
child pos : {Pos}
child pats : Patterns
visit 0:
local copy : _
alternative Underscore:
child pos : {Pos}
visit 0:
local copy : _
-}
-- cata
sem_Pattern :: Pattern ->
T_Pattern
sem_Pattern (Alias _field _attr _pat _parts ) =
(sem_Pattern_Alias _field _attr (sem_Pattern _pat ) (sem_Patterns _parts ) )
sem_Pattern (Constr _name _pats ) =
(sem_Pattern_Constr _name (sem_Patterns _pats ) )
sem_Pattern (Irrefutable _pat ) =
(sem_Pattern_Irrefutable (sem_Pattern _pat ) )
sem_Pattern (Product _pos _pats ) =
(sem_Pattern_Product _pos (sem_Patterns _pats ) )
sem_Pattern (Underscore _pos ) =
(sem_Pattern_Underscore _pos )
-- semantic domain
newtype T_Pattern = T_Pattern (( Pattern,PP_Doc))
data Inh_Pattern = Inh_Pattern {}
data Syn_Pattern = Syn_Pattern {copy_Syn_Pattern :: Pattern,pp_Syn_Pattern :: PP_Doc}
wrap_Pattern :: T_Pattern ->
Inh_Pattern ->
Syn_Pattern
wrap_Pattern (T_Pattern sem ) (Inh_Pattern ) =
(let ( _lhsOcopy,_lhsOpp) =
(sem )
in (Syn_Pattern _lhsOcopy _lhsOpp ))
sem_Pattern_Alias :: Identifier ->
Identifier ->
T_Pattern ->
T_Patterns ->
T_Pattern
sem_Pattern_Alias field_ attr_ (T_Pattern pat_ ) (T_Patterns parts_ ) =
(T_Pattern (let _lhsOpp :: PP_Doc
_lhsOcopy :: Pattern
_patIcopy :: Pattern
_patIpp :: PP_Doc
_partsIcopy :: Patterns
_partsIpp :: PP_Doc
_partsIppL :: ([PP_Doc])
" CodeSyntaxDump.ag"(line 75 , column 33 )
_lhsOpp =
ppNestInfo ["Pattern","Alias"] [pp field_, pp attr_] [ppF "pat" $ _patIpp] []
-- self rule
_copy =
Alias field_ attr_ _patIcopy _partsIcopy
-- self rule
_lhsOcopy =
_copy
( _patIcopy,_patIpp) =
(pat_ )
( _partsIcopy,_partsIpp,_partsIppL) =
(parts_ )
in ( _lhsOcopy,_lhsOpp)) )
sem_Pattern_Constr :: ConstructorIdent ->
T_Patterns ->
T_Pattern
sem_Pattern_Constr name_ (T_Patterns pats_ ) =
(T_Pattern (let _lhsOpp :: PP_Doc
_lhsOcopy :: Pattern
_patsIcopy :: Patterns
_patsIpp :: PP_Doc
_patsIppL :: ([PP_Doc])
" CodeSyntaxDump.ag"(line 73 , column 33 )
_lhsOpp =
ppNestInfo ["Pattern","Constr"] [pp name_] [ppF "pats" $ ppVList _patsIppL] []
-- self rule
_copy =
Constr name_ _patsIcopy
-- self rule
_lhsOcopy =
_copy
( _patsIcopy,_patsIpp,_patsIppL) =
(pats_ )
in ( _lhsOcopy,_lhsOpp)) )
sem_Pattern_Irrefutable :: T_Pattern ->
T_Pattern
sem_Pattern_Irrefutable (T_Pattern pat_ ) =
(T_Pattern (let _lhsOpp :: PP_Doc
_lhsOcopy :: Pattern
_patIcopy :: Pattern
_patIpp :: PP_Doc
use rule " CodeSyntaxDump.ag"(line 44 , column 40 )
_lhsOpp =
_patIpp
-- self rule
_copy =
Irrefutable _patIcopy
-- self rule
_lhsOcopy =
_copy
( _patIcopy,_patIpp) =
(pat_ )
in ( _lhsOcopy,_lhsOpp)) )
sem_Pattern_Product :: Pos ->
T_Patterns ->
T_Pattern
sem_Pattern_Product pos_ (T_Patterns pats_ ) =
(T_Pattern (let _lhsOpp :: PP_Doc
_lhsOcopy :: Pattern
_patsIcopy :: Patterns
_patsIpp :: PP_Doc
_patsIppL :: ([PP_Doc])
" CodeSyntaxDump.ag"(line 74 , column 33 )
_lhsOpp =
ppNestInfo ["Pattern","Product"] [ppShow pos_] [ppF "pats" $ ppVList _patsIppL] []
-- self rule
_copy =
Product pos_ _patsIcopy
-- self rule
_lhsOcopy =
_copy
( _patsIcopy,_patsIpp,_patsIppL) =
(pats_ )
in ( _lhsOcopy,_lhsOpp)) )
sem_Pattern_Underscore :: Pos ->
T_Pattern
sem_Pattern_Underscore pos_ =
(T_Pattern (let _lhsOpp :: PP_Doc
_lhsOcopy :: Pattern
" CodeSyntaxDump.ag"(line 76 , column 25 )
_lhsOpp =
ppNestInfo ["Pattern","Underscore"] [ppShow pos_] [] []
-- self rule
_copy =
Underscore pos_
-- self rule
_lhsOcopy =
_copy
in ( _lhsOcopy,_lhsOpp)) )
-- Patterns ----------------------------------------------------
visit 0 :
synthesized attributes :
copy : SELF
pp : PP_Doc
ppL : [ PP_Doc ]
alternatives :
alternative Cons :
child hd : Pattern
child tl : Patterns
visit 0 :
local copy : _
alternative :
visit 0 :
local copy : _
visit 0:
synthesized attributes:
copy : SELF
pp : PP_Doc
ppL : [PP_Doc]
alternatives:
alternative Cons:
child hd : Pattern
child tl : Patterns
visit 0:
local copy : _
alternative Nil:
visit 0:
local copy : _
-}
-- cata
sem_Patterns :: Patterns ->
T_Patterns
sem_Patterns list =
(Prelude.foldr sem_Patterns_Cons sem_Patterns_Nil (Prelude.map sem_Pattern list) )
-- semantic domain
newtype T_Patterns = T_Patterns (( Patterns,PP_Doc,([PP_Doc])))
data Inh_Patterns = Inh_Patterns {}
data Syn_Patterns = Syn_Patterns {copy_Syn_Patterns :: Patterns,pp_Syn_Patterns :: PP_Doc,ppL_Syn_Patterns :: [PP_Doc]}
wrap_Patterns :: T_Patterns ->
Inh_Patterns ->
Syn_Patterns
wrap_Patterns (T_Patterns sem ) (Inh_Patterns ) =
(let ( _lhsOcopy,_lhsOpp,_lhsOppL) =
(sem )
in (Syn_Patterns _lhsOcopy _lhsOpp _lhsOppL ))
sem_Patterns_Cons :: T_Pattern ->
T_Patterns ->
T_Patterns
sem_Patterns_Cons (T_Pattern hd_ ) (T_Patterns tl_ ) =
(T_Patterns (let _lhsOppL :: ([PP_Doc])
_lhsOpp :: PP_Doc
_lhsOcopy :: Patterns
_hdIcopy :: Pattern
_hdIpp :: PP_Doc
_tlIcopy :: Patterns
_tlIpp :: PP_Doc
_tlIppL :: ([PP_Doc])
" CodeSyntaxDump.ag"(line 82 , column 33 )
_lhsOppL =
_hdIpp : _tlIppL
use rule " CodeSyntaxDump.ag"(line 44 , column 40 )
_lhsOpp =
_hdIpp >-< _tlIpp
-- self rule
_copy =
(:) _hdIcopy _tlIcopy
-- self rule
_lhsOcopy =
_copy
( _hdIcopy,_hdIpp) =
(hd_ )
( _tlIcopy,_tlIpp,_tlIppL) =
(tl_ )
in ( _lhsOcopy,_lhsOpp,_lhsOppL)) )
sem_Patterns_Nil :: T_Patterns
sem_Patterns_Nil =
(T_Patterns (let _lhsOppL :: ([PP_Doc])
_lhsOpp :: PP_Doc
_lhsOcopy :: Patterns
" CodeSyntaxDump.ag"(line 83 , column 33 )
_lhsOppL =
[]
use rule " CodeSyntaxDump.ag"(line 44 , column 40 )
_lhsOpp =
empty
-- self rule
_copy =
[]
-- self rule
_lhsOcopy =
_copy
in ( _lhsOcopy,_lhsOpp,_lhsOppL)) )
-- Sequence ----------------------------------------------------
visit 0 :
synthesized attribute :
ppL : [ PP_Doc ]
alternatives :
alternative Cons :
child hd : CRule
child tl : Sequence
alternative :
visit 0:
synthesized attribute:
ppL : [PP_Doc]
alternatives:
alternative Cons:
child hd : CRule
child tl : Sequence
alternative Nil:
-}
-- cata
sem_Sequence :: Sequence ->
T_Sequence
sem_Sequence list =
(Prelude.foldr sem_Sequence_Cons sem_Sequence_Nil (Prelude.map sem_CRule list) )
-- semantic domain
newtype T_Sequence = T_Sequence (( ([PP_Doc])))
data Inh_Sequence = Inh_Sequence {}
data Syn_Sequence = Syn_Sequence {ppL_Syn_Sequence :: [PP_Doc]}
wrap_Sequence :: T_Sequence ->
Inh_Sequence ->
Syn_Sequence
wrap_Sequence (T_Sequence sem ) (Inh_Sequence ) =
(let ( _lhsOppL) =
(sem )
in (Syn_Sequence _lhsOppL ))
sem_Sequence_Cons :: T_CRule ->
T_Sequence ->
T_Sequence
sem_Sequence_Cons (T_CRule hd_ ) (T_Sequence tl_ ) =
(T_Sequence (let _lhsOppL :: ([PP_Doc])
_hdIpp :: PP_Doc
_tlIppL :: ([PP_Doc])
" CodeSyntaxDump.ag"(line 86 , column 33 )
_lhsOppL =
_hdIpp : _tlIppL
( _hdIpp) =
(hd_ )
( _tlIppL) =
(tl_ )
in ( _lhsOppL)) )
sem_Sequence_Nil :: T_Sequence
sem_Sequence_Nil =
(T_Sequence (let _lhsOppL :: ([PP_Doc])
" CodeSyntaxDump.ag"(line 87 , column 33 )
_lhsOppL =
[]
in ( _lhsOppL)) ) | null | https://raw.githubusercontent.com/Oblosys/proxima/f154dff2ccb8afe00eeb325d9d06f5e2a5ee7589/uuagc/src-derived/CodeSyntaxDump.hs | haskell | Patterns.ag imports
CGrammar ----------------------------------------------------
cata
semantic domain
------------------------------------------------
cata
semantic domain
CNonterminal ------------------------------------------------
visit 0:
synthesized attribute:
pp : PP_Doc
alternatives:
alternative CNonterminal:
child nt : {NontermIdent}
child params : {[Identifier]}
child inh : {Attributes}
child syn : {Attributes}
child prods : CProductions
child inter : CInterface
cata
semantic domain
CNonterminals -----------------------------------------------
cata
semantic domain
CProduction -------------------------------------------------
cata
semantic domain
CProductions ------------------------------------------------
cata
semantic domain
-----------------------------------------------------
cata
semantic domain
--------------------------------------------------
cata
semantic domain
-------------------------------------------------
cata
semantic domain
CVisit ------------------------------------------------------
visit 0:
synthesized attribute:
pp : PP_Doc
alternatives:
alternative CVisit:
child inh : {Attributes}
child syn : {Attributes}
child vss : Sequence
child intra : Sequence
child ordered : {Bool}
cata
semantic domain
CVisits -----------------------------------------------------
cata
semantic domain
Pattern -----------------------------------------------------
cata
semantic domain
self rule
self rule
self rule
self rule
self rule
self rule
self rule
self rule
self rule
self rule
Patterns ----------------------------------------------------
cata
semantic domain
self rule
self rule
self rule
self rule
Sequence ----------------------------------------------------
cata
semantic domain |
-- UUAGC 0.9.10 (CodeSyntaxDump.ag)
module CodeSyntaxDump where
import Data.List
import qualified Data.Map as Map
import Pretty
import PPUtil
import CodeSyntax
import Patterns
import CommonTypes
import Data.Map(Map)
import Data.Set(Set)
import UU.Scanner.Position(Pos)
import CommonTypes (ConstructorIdent,Identifier)
-- | Render one child declaration as @name :: type <flag>@; the Bool is
--   shown between angle brackets as T/F (see 'ppBool').
ppChild :: (Identifier,Type,Bool) -> PP_Doc
ppChild (childNm, childTp, childFlag) =
  let flag = ppBool childFlag
  in  pp childNm >#< "::" >#< pp (show childTp) >#< "<" >|< flag >|< ">"
-- | Render a vertex map as a vertical list, one @vertexNr@ entry per key,
--   each labelled with the key and showing the attribute via 'ppAttr'.
ppVertexMap :: Map Int (Identifier,Identifier,Maybe Type) -> PP_Doc
ppVertexMap vertexMap =
  ppVList (map entry (Map.toList vertexMap))
  where
    entry (vertex, attr) = ppF (show vertex) (ppAttr attr)
-- | Render an attribute occurrence as @field.name@, followed by
--   @:: type@ when a type is known, or nothing otherwise.
ppAttr :: (Identifier,Identifier,Maybe Type) -> PP_Doc
ppAttr (fld, nm, mbTp) =
  pp fld >|< "." >|< pp nm >#< tpPart
  where
    tpPart =
      case mbTp of
        Nothing -> empty
        Just tp -> pp "::" >#< show tp
-- | Render a boolean as a one-letter flag: \"T\" for True, \"F\" for False.
ppBool :: Bool -> PP_Doc
ppBool b = pp (if b then "T" else "F")
-- | Show the payload of a 'Just', or render \"_\" for 'Nothing'.
ppMaybeShow :: Show a => Maybe a -> PP_Doc
ppMaybeShow = maybe (pp "_") (pp . show)
-- | Lay out a list of strings vertically, one string per line.
ppStrings :: [String] -> PP_Doc
ppStrings = vlist
-- CGrammar ----------------------------------------------------
{-
   visit 0:
      synthesized attribute:
         pp : PP_Doc
   alternatives:
      alternative CGrammar:
         child typeSyns : {TypeSyns}
         child derivings : {Derivings}
         child wrappers : {Set NontermIdent}
         child nonts : CNonterminals
         child pragmas : {PragmaMap}
         child paramMap : {ParamMap}
         child contextMap : {ContextMap}
-}
-- cata
-- | Fold a 'CGrammar' tree into its semantic function, recursing into
--   the nonterminal list.
sem_CGrammar :: CGrammar ->
                T_CGrammar
sem_CGrammar (CGrammar _typeSyns _derivings _wrappers _nonts _pragmas _paramMap _contextMap ) =
    (sem_CGrammar_CGrammar _typeSyns _derivings _wrappers (sem_CNonterminals _nonts ) _pragmas _paramMap _contextMap )
-- semantic domain
newtype T_CGrammar = T_CGrammar (( PP_Doc))
data Inh_CGrammar = Inh_CGrammar {}
data Syn_CGrammar = Syn_CGrammar {pp_Syn_CGrammar :: PP_Doc}
-- | Run a grammar semantic function (no inherited attributes) and
--   package the synthesized pretty-print document.
wrap_CGrammar :: T_CGrammar ->
                 Inh_CGrammar ->
                 Syn_CGrammar
wrap_CGrammar (T_CGrammar sem ) (Inh_CGrammar ) =
    (let ( _lhsOpp) =
             (sem )
     in  (Syn_CGrammar _lhsOpp ))
-- | Dump a grammar: its type synonyms, derivings and nonterminals.
--   The wrappers, pragmas, paramMap and contextMap arguments are
--   accepted but not rendered.
sem_CGrammar_CGrammar :: TypeSyns ->
                         Derivings ->
                         (Set NontermIdent) ->
                         T_CNonterminals ->
                         PragmaMap ->
                         ParamMap ->
                         ContextMap ->
                         T_CGrammar
sem_CGrammar_CGrammar typeSyns_ derivings_ wrappers_ (T_CNonterminals nonts_ ) pragmas_ paramMap_ contextMap_ =
    (T_CGrammar (let _lhsOpp :: PP_Doc
                     _nontsIpp :: PP_Doc
                     _nontsIppL :: ([PP_Doc])
                     -- "CodeSyntaxDump.ag"(line 47, column 21)
                     _lhsOpp =
                         ppNestInfo ["CGrammar","CGrammar"] []
                            [ ppF "typeSyns" $ ppAssocL typeSyns_
                            , ppF "derivings" $ ppMap $ derivings_
                            , ppF "nonts" $ ppVList _nontsIppL
                            ] []
                     ( _nontsIpp,_nontsIppL) =
                         (nonts_ )
                 in  ( _lhsOpp)) )
-- CInterface --------------------------------------------------
{-
   visit 0:
      synthesized attribute:
         pp : PP_Doc
   alternatives:
      alternative CInterface:
         child seg : CSegments
-}
-- cata
-- | Fold a 'CInterface' into its semantic function, recursing into the
--   segment list.
sem_CInterface :: CInterface ->
                  T_CInterface
sem_CInterface (CInterface _seg ) =
    (sem_CInterface_CInterface (sem_CSegments _seg ) )
-- semantic domain
newtype T_CInterface = T_CInterface (( PP_Doc))
data Inh_CInterface = Inh_CInterface {}
data Syn_CInterface = Syn_CInterface {pp_Syn_CInterface :: PP_Doc}
-- | Run an interface semantic function and package its pretty-print.
wrap_CInterface :: T_CInterface ->
                   Inh_CInterface ->
                   Syn_CInterface
wrap_CInterface (T_CInterface sem ) (Inh_CInterface ) =
    (let ( _lhsOpp) =
             (sem )
     in  (Syn_CInterface _lhsOpp ))
-- | Dump an interface as a nested node holding the vertical list of
--   segment dumps.
sem_CInterface_CInterface :: T_CSegments ->
                             T_CInterface
sem_CInterface_CInterface (T_CSegments seg_ ) =
    (T_CInterface (let _lhsOpp :: PP_Doc
                       _segIpp :: PP_Doc
                       _segIppL :: ([PP_Doc])
                       -- "CodeSyntaxDump.ag"(line 57, column 21)
                       _lhsOpp =
                           ppNestInfo ["CInterface","CInterface"] [] [ppF "seg" $ ppVList _segIppL] []
                       ( _segIpp,_segIppL) =
                           (seg_ )
                   in  ( _lhsOpp)) )
-- cata
-- | Fold a 'CNonterminal' into its semantic function, recursing into
--   its productions and interface.
sem_CNonterminal :: CNonterminal ->
                    T_CNonterminal
sem_CNonterminal (CNonterminal _nt _params _inh _syn _prods _inter ) =
    (sem_CNonterminal_CNonterminal _nt _params _inh _syn (sem_CProductions _prods ) (sem_CInterface _inter ) )
-- semantic domain
newtype T_CNonterminal = T_CNonterminal (( PP_Doc))
data Inh_CNonterminal = Inh_CNonterminal {}
data Syn_CNonterminal = Syn_CNonterminal {pp_Syn_CNonterminal :: PP_Doc}
-- | Run a nonterminal semantic function and package its pretty-print.
wrap_CNonterminal :: T_CNonterminal ->
                     Inh_CNonterminal ->
                     Syn_CNonterminal
wrap_CNonterminal (T_CNonterminal sem ) (Inh_CNonterminal ) =
    (let ( _lhsOpp) =
             (sem )
     in  (Syn_CNonterminal _lhsOpp ))
-- | Dump a nonterminal: its name and parameters, inherited and
--   synthesized attribute maps, production dumps and interface dump.
sem_CNonterminal_CNonterminal :: NontermIdent ->
                                 ([Identifier]) ->
                                 Attributes ->
                                 Attributes ->
                                 T_CProductions ->
                                 T_CInterface ->
                                 T_CNonterminal
sem_CNonterminal_CNonterminal nt_ params_ inh_ syn_ (T_CProductions prods_ ) (T_CInterface inter_ ) =
    (T_CNonterminal (let _lhsOpp :: PP_Doc
                         _prodsIpp :: PP_Doc
                         _prodsIppL :: ([PP_Doc])
                         _interIpp :: PP_Doc
                         -- "CodeSyntaxDump.ag"(line 54, column 33)
                         _lhsOpp =
                             ppNestInfo ["CNonterminal","CNonterminal"] (pp nt_ : map pp params_) [ppF "inh" $ ppMap inh_, ppF "syn" $ ppMap syn_, ppF "prods" $ ppVList _prodsIppL, ppF "inter" _interIpp] []
                         ( _prodsIpp,_prodsIppL) =
                             (prods_ )
                         ( _interIpp) =
                             (inter_ )
                     in  ( _lhsOpp)) )
-- CNonterminals -----------------------------------------------
{-
   visit 0:
      synthesized attributes:
         pp : PP_Doc
         ppL : [PP_Doc]
   alternatives:
      alternative Cons:
         child hd : CNonterminal
         child tl : CNonterminals
      alternative Nil:
-}
-- cata
-- | Fold a list of nonterminals into its semantic function via
--   Cons/Nil list algebra.
sem_CNonterminals :: CNonterminals ->
                     T_CNonterminals
sem_CNonterminals list =
    (Prelude.foldr sem_CNonterminals_Cons sem_CNonterminals_Nil (Prelude.map sem_CNonterminal list) )
-- semantic domain
newtype T_CNonterminals = T_CNonterminals (( PP_Doc,([PP_Doc])))
data Inh_CNonterminals = Inh_CNonterminals {}
data Syn_CNonterminals = Syn_CNonterminals {pp_Syn_CNonterminals :: PP_Doc,ppL_Syn_CNonterminals :: [PP_Doc]}
-- | Run a nonterminal-list semantic function and package its combined
--   document plus the per-element document list.
wrap_CNonterminals :: T_CNonterminals ->
                      Inh_CNonterminals ->
                      Syn_CNonterminals
wrap_CNonterminals (T_CNonterminals sem ) (Inh_CNonterminals ) =
    (let ( _lhsOpp,_lhsOppL) =
             (sem )
     in  (Syn_CNonterminals _lhsOpp _lhsOppL ))
-- | Cons case: prepend the head's document to the tail's list; the
--   combined document stacks head above tail.
sem_CNonterminals_Cons :: T_CNonterminal ->
                          T_CNonterminals ->
                          T_CNonterminals
sem_CNonterminals_Cons (T_CNonterminal hd_ ) (T_CNonterminals tl_ ) =
    (T_CNonterminals (let _lhsOppL :: ([PP_Doc])
                          _lhsOpp :: PP_Doc
                          _hdIpp :: PP_Doc
                          _tlIpp :: PP_Doc
                          _tlIppL :: ([PP_Doc])
                          -- "CodeSyntaxDump.ag"(line 102, column 33)
                          _lhsOppL =
                              _hdIpp : _tlIppL
                          -- use rule "CodeSyntaxDump.ag"(line 44, column 40)
                          _lhsOpp =
                              _hdIpp >-< _tlIpp
                          ( _hdIpp) =
                              (hd_ )
                          ( _tlIpp,_tlIppL) =
                              (tl_ )
                      in  ( _lhsOpp,_lhsOppL)) )
-- | Nil case: empty document list and an empty combined document.
sem_CNonterminals_Nil :: T_CNonterminals
sem_CNonterminals_Nil =
    (T_CNonterminals (let _lhsOppL :: ([PP_Doc])
                          _lhsOpp :: PP_Doc
                          -- "CodeSyntaxDump.ag"(line 103, column 33)
                          _lhsOppL =
                              []
                          -- use rule "CodeSyntaxDump.ag"(line 44, column 40)
                          _lhsOpp =
                              empty
                      in  ( _lhsOpp,_lhsOppL)) )
-- CProduction -------------------------------------------------
{-
   visit 0:
      synthesized attribute:
         pp : PP_Doc
   alternatives:
      alternative CProduction:
         child con : {ConstructorIdent}
         child visits : CVisits
         child children : {[(Identifier,Type,Bool)]}
         child terminals : {[Identifier]}
-}
-- cata
-- | Fold a 'CProduction' into its semantic function, recursing into
--   its visits.
sem_CProduction :: CProduction ->
                   T_CProduction
sem_CProduction (CProduction _con _visits _children _terminals ) =
    (sem_CProduction_CProduction _con (sem_CVisits _visits ) _children _terminals )
-- semantic domain
newtype T_CProduction = T_CProduction (( PP_Doc))
data Inh_CProduction = Inh_CProduction {}
data Syn_CProduction = Syn_CProduction {pp_Syn_CProduction :: PP_Doc}
-- | Run a production semantic function and package its pretty-print.
wrap_CProduction :: T_CProduction ->
                    Inh_CProduction ->
                    Syn_CProduction
wrap_CProduction (T_CProduction sem ) (Inh_CProduction ) =
    (let ( _lhsOpp) =
             (sem )
     in  (Syn_CProduction _lhsOpp ))
-- | Dump a production: constructor name, visit dumps, children
--   (rendered via 'ppChild') and terminal identifiers.
sem_CProduction_CProduction :: ConstructorIdent ->
                               T_CVisits ->
                               ([(Identifier,Type,Bool)]) ->
                               ([Identifier]) ->
                               T_CProduction
sem_CProduction_CProduction con_ (T_CVisits visits_ ) children_ terminals_ =
    (T_CProduction (let _lhsOpp :: PP_Doc
                        _visitsIpp :: PP_Doc
                        _visitsIppL :: ([PP_Doc])
                        -- "CodeSyntaxDump.ag"(line 63, column 17)
                        _lhsOpp =
                            ppNestInfo ["CProduction","CProduction"] [pp con_] [ppF "visits" $ ppVList _visitsIppL, ppF "children" $ ppVList (map ppChild children_),ppF "terminals" $ ppVList (map ppShow terminals_)] []
                        ( _visitsIpp,_visitsIppL) =
                            (visits_ )
                    in  ( _lhsOpp)) )
-- CProductions ------------------------------------------------
{-
   visit 0:
      synthesized attributes:
         pp : PP_Doc
         ppL : [PP_Doc]
   alternatives:
      alternative Cons:
         child hd : CProduction
         child tl : CProductions
      alternative Nil:
-}
-- cata
-- | Fold a list of productions into its semantic function via
--   Cons/Nil list algebra.
sem_CProductions :: CProductions ->
                    T_CProductions
sem_CProductions list =
    (Prelude.foldr sem_CProductions_Cons sem_CProductions_Nil (Prelude.map sem_CProduction list) )
-- semantic domain
newtype T_CProductions = T_CProductions (( PP_Doc,([PP_Doc])))
data Inh_CProductions = Inh_CProductions {}
data Syn_CProductions = Syn_CProductions {pp_Syn_CProductions :: PP_Doc,ppL_Syn_CProductions :: [PP_Doc]}
-- | Run a production-list semantic function and package its combined
--   document plus the per-element document list.
wrap_CProductions :: T_CProductions ->
                     Inh_CProductions ->
                     Syn_CProductions
wrap_CProductions (T_CProductions sem ) (Inh_CProductions ) =
    (let ( _lhsOpp,_lhsOppL) =
             (sem )
     in  (Syn_CProductions _lhsOpp _lhsOppL ))
-- | Cons case: prepend the head's document to the tail's list; the
--   combined document stacks head above tail.
sem_CProductions_Cons :: T_CProduction ->
                         T_CProductions ->
                         T_CProductions
sem_CProductions_Cons (T_CProduction hd_ ) (T_CProductions tl_ ) =
    (T_CProductions (let _lhsOppL :: ([PP_Doc])
                         _lhsOpp :: PP_Doc
                         _hdIpp :: PP_Doc
                         _tlIpp :: PP_Doc
                         _tlIppL :: ([PP_Doc])
                         -- "CodeSyntaxDump.ag"(line 94, column 33)
                         _lhsOppL =
                             _hdIpp : _tlIppL
                         -- use rule "CodeSyntaxDump.ag"(line 44, column 40)
                         _lhsOpp =
                             _hdIpp >-< _tlIpp
                         ( _hdIpp) =
                             (hd_ )
                         ( _tlIpp,_tlIppL) =
                             (tl_ )
                     in  ( _lhsOpp,_lhsOppL)) )
-- | Nil case: empty document list and an empty combined document.
sem_CProductions_Nil :: T_CProductions
sem_CProductions_Nil =
    (T_CProductions (let _lhsOppL :: ([PP_Doc])
                         _lhsOpp :: PP_Doc
                         -- "CodeSyntaxDump.ag"(line 95, column 33)
                         _lhsOppL =
                             []
                         -- use rule "CodeSyntaxDump.ag"(line 44, column 40)
                         _lhsOpp =
                             empty
                     in  ( _lhsOpp,_lhsOppL)) )
visit 0 :
synthesized attribute :
pp : PP_Doc
alternatives :
alternative :
child name : { Identifier }
child nt : { NontermIdent }
child nr : { Int }
child inh : { Attributes }
child syn : { Attributes }
child isLast : { Bool }
alternative CRule :
child name : { Identifier }
child isIn : { Bool }
child hasCode : { Bool }
child nt : { NontermIdent }
child con : { ConstructorIdent }
child field : { Identifier }
child childnt : { Maybe NontermIdent }
child tp : { Maybe Type }
child pattern : Pattern
child rhs : { [ String ] }
child defines : { Map Int ( Identifier , Identifier , Maybe Type ) }
child owrt : { Bool }
child origin : { String }
child uses : { Set ( Identifier , Identifier ) }
visit 0:
synthesized attribute:
pp : PP_Doc
alternatives:
alternative CChildVisit:
child name : {Identifier}
child nt : {NontermIdent}
child nr : {Int}
child inh : {Attributes}
child syn : {Attributes}
child isLast : {Bool}
alternative CRule:
child name : {Identifier}
child isIn : {Bool}
child hasCode : {Bool}
child nt : {NontermIdent}
child con : {ConstructorIdent}
child field : {Identifier}
child childnt : {Maybe NontermIdent}
child tp : {Maybe Type}
child pattern : Pattern
child rhs : {[String]}
child defines : {Map Int (Identifier,Identifier,Maybe Type)}
child owrt : {Bool}
child origin : {String}
child uses : {Set (Identifier, Identifier)}
-}
sem_CRule :: CRule ->
T_CRule
sem_CRule (CChildVisit _name _nt _nr _inh _syn _isLast ) =
(sem_CRule_CChildVisit _name _nt _nr _inh _syn _isLast )
sem_CRule (CRule _name _isIn _hasCode _nt _con _field _childnt _tp _pattern _rhs _defines _owrt _origin _uses ) =
(sem_CRule_CRule _name _isIn _hasCode _nt _con _field _childnt _tp (sem_Pattern _pattern ) _rhs _defines _owrt _origin _uses )
newtype T_CRule = T_CRule (( PP_Doc))
data Inh_CRule = Inh_CRule {}
data Syn_CRule = Syn_CRule {pp_Syn_CRule :: PP_Doc}
wrap_CRule :: T_CRule ->
Inh_CRule ->
Syn_CRule
wrap_CRule (T_CRule sem ) (Inh_CRule ) =
(let ( _lhsOpp) =
(sem )
in (Syn_CRule _lhsOpp ))
sem_CRule_CChildVisit :: Identifier ->
NontermIdent ->
Int ->
Attributes ->
Attributes ->
Bool ->
T_CRule
sem_CRule_CChildVisit name_ nt_ nr_ inh_ syn_ isLast_ =
(T_CRule (let _lhsOpp :: PP_Doc
" CodeSyntaxDump.ag"(line 70 , column 21 )
_lhsOpp =
ppNestInfo ["CRule","CChildVisit"] [pp name_] [ppF "nt" $ pp nt_, ppF "nr" $ ppShow nr_, ppF "inh" $ ppMap inh_, ppF "syn" $ ppMap syn_, ppF "last" $ ppBool isLast_] []
in ( _lhsOpp)) )
sem_CRule_CRule :: Identifier ->
Bool ->
Bool ->
NontermIdent ->
ConstructorIdent ->
Identifier ->
(Maybe NontermIdent) ->
(Maybe Type) ->
T_Pattern ->
([String]) ->
(Map Int (Identifier,Identifier,Maybe Type)) ->
Bool ->
String ->
(Set (Identifier, Identifier)) ->
T_CRule
sem_CRule_CRule name_ isIn_ hasCode_ nt_ con_ field_ childnt_ tp_ (T_Pattern pattern_ ) rhs_ defines_ owrt_ origin_ uses_ =
(T_CRule (let _lhsOpp :: PP_Doc
_patternIcopy :: Pattern
_patternIpp :: PP_Doc
" CodeSyntaxDump.ag"(line 69 , column 33 )
_lhsOpp =
ppNestInfo ["CRule","CRule"] [pp name_] [ppF "isIn" $ ppBool isIn_, ppF "hasCode" $ ppBool hasCode_, ppF "nt" $ pp nt_, ppF "con" $ pp con_, ppF "field" $ pp field_, ppF "childnt" $ ppMaybeShow childnt_, ppF "tp" $ ppMaybeShow tp_, ppF "pattern" $ if isIn_ then pp "<no pat because In>" else _patternIpp, ppF "rhs" $ ppStrings rhs_, ppF "defines" $ ppVertexMap defines_, ppF "owrt" $ ppBool owrt_, ppF "origin" $ pp origin_] []
( _patternIcopy,_patternIpp) =
(pattern_ )
in ( _lhsOpp)) )
visit 0 :
synthesized attribute :
pp : PP_Doc
alternatives :
alternative CSegment :
child inh : { Attributes }
child syn : { Attributes }
visit 0:
synthesized attribute:
pp : PP_Doc
alternatives:
alternative CSegment:
child inh : {Attributes}
child syn : {Attributes}
-}
sem_CSegment :: CSegment ->
T_CSegment
sem_CSegment (CSegment _inh _syn ) =
(sem_CSegment_CSegment _inh _syn )
newtype T_CSegment = T_CSegment (( PP_Doc))
data Inh_CSegment = Inh_CSegment {}
data Syn_CSegment = Syn_CSegment {pp_Syn_CSegment :: PP_Doc}
wrap_CSegment :: T_CSegment ->
Inh_CSegment ->
Syn_CSegment
wrap_CSegment (T_CSegment sem ) (Inh_CSegment ) =
(let ( _lhsOpp) =
(sem )
in (Syn_CSegment _lhsOpp ))
sem_CSegment_CSegment :: Attributes ->
Attributes ->
T_CSegment
sem_CSegment_CSegment inh_ syn_ =
(T_CSegment (let _lhsOpp :: PP_Doc
" CodeSyntaxDump.ag"(line 60 , column 21 )
_lhsOpp =
ppNestInfo ["CSegment","CSegment"] [] [ppF "inh" $ ppMap inh_, ppF "syn" $ ppMap syn_] []
in ( _lhsOpp)) )
visit 0 :
synthesized attributes :
pp : PP_Doc
ppL : [ PP_Doc ]
alternatives :
alternative Cons :
child hd : CSegment
child tl : CSegments
alternative :
visit 0:
synthesized attributes:
pp : PP_Doc
ppL : [PP_Doc]
alternatives:
alternative Cons:
child hd : CSegment
child tl : CSegments
alternative Nil:
-}
sem_CSegments :: CSegments ->
T_CSegments
sem_CSegments list =
(Prelude.foldr sem_CSegments_Cons sem_CSegments_Nil (Prelude.map sem_CSegment list) )
newtype T_CSegments = T_CSegments (( PP_Doc,([PP_Doc])))
data Inh_CSegments = Inh_CSegments {}
data Syn_CSegments = Syn_CSegments {pp_Syn_CSegments :: PP_Doc,ppL_Syn_CSegments :: [PP_Doc]}
wrap_CSegments :: T_CSegments ->
Inh_CSegments ->
Syn_CSegments
wrap_CSegments (T_CSegments sem ) (Inh_CSegments ) =
(let ( _lhsOpp,_lhsOppL) =
(sem )
in (Syn_CSegments _lhsOpp _lhsOppL ))
sem_CSegments_Cons :: T_CSegment ->
T_CSegments ->
T_CSegments
sem_CSegments_Cons (T_CSegment hd_ ) (T_CSegments tl_ ) =
(T_CSegments (let _lhsOppL :: ([PP_Doc])
_lhsOpp :: PP_Doc
_hdIpp :: PP_Doc
_tlIpp :: PP_Doc
_tlIppL :: ([PP_Doc])
" CodeSyntaxDump.ag"(line 98 , column 33 )
_lhsOppL =
_hdIpp : _tlIppL
use rule " CodeSyntaxDump.ag"(line 44 , column 40 )
_lhsOpp =
_hdIpp >-< _tlIpp
( _hdIpp) =
(hd_ )
( _tlIpp,_tlIppL) =
(tl_ )
in ( _lhsOpp,_lhsOppL)) )
sem_CSegments_Nil :: T_CSegments
sem_CSegments_Nil =
(T_CSegments (let _lhsOppL :: ([PP_Doc])
_lhsOpp :: PP_Doc
" CodeSyntaxDump.ag"(line 99 , column 33 )
_lhsOppL =
[]
use rule " CodeSyntaxDump.ag"(line 44 , column 40 )
_lhsOpp =
empty
in ( _lhsOpp,_lhsOppL)) )
sem_CVisit :: CVisit ->
T_CVisit
sem_CVisit (CVisit _inh _syn _vss _intra _ordered ) =
(sem_CVisit_CVisit _inh _syn (sem_Sequence _vss ) (sem_Sequence _intra ) _ordered )
newtype T_CVisit = T_CVisit (( PP_Doc))
data Inh_CVisit = Inh_CVisit {}
data Syn_CVisit = Syn_CVisit {pp_Syn_CVisit :: PP_Doc}
wrap_CVisit :: T_CVisit ->
Inh_CVisit ->
Syn_CVisit
wrap_CVisit (T_CVisit sem ) (Inh_CVisit ) =
(let ( _lhsOpp) =
(sem )
in (Syn_CVisit _lhsOpp ))
sem_CVisit_CVisit :: Attributes ->
Attributes ->
T_Sequence ->
T_Sequence ->
Bool ->
T_CVisit
sem_CVisit_CVisit inh_ syn_ (T_Sequence vss_ ) (T_Sequence intra_ ) ordered_ =
(T_CVisit (let _lhsOpp :: PP_Doc
_vssIppL :: ([PP_Doc])
_intraIppL :: ([PP_Doc])
" CodeSyntaxDump.ag"(line 66 , column 21 )
_lhsOpp =
ppNestInfo ["CVisit","CVisit"] [] [ppF "inh" $ ppMap inh_, ppF "syn" $ ppMap syn_, ppF "sequence" $ ppVList _vssIppL, ppF "intra" $ ppVList _intraIppL, ppF "ordered" $ ppBool ordered_] []
( _vssIppL) =
(vss_ )
( _intraIppL) =
(intra_ )
in ( _lhsOpp)) )
visit 0 :
synthesized attributes :
pp : PP_Doc
ppL : [ PP_Doc ]
alternatives :
alternative Cons :
child hd : CVisit
child tl : CVisits
alternative :
visit 0:
synthesized attributes:
pp : PP_Doc
ppL : [PP_Doc]
alternatives:
alternative Cons:
child hd : CVisit
child tl : CVisits
alternative Nil:
-}
sem_CVisits :: CVisits ->
T_CVisits
sem_CVisits list =
(Prelude.foldr sem_CVisits_Cons sem_CVisits_Nil (Prelude.map sem_CVisit list) )
newtype T_CVisits = T_CVisits (( PP_Doc,([PP_Doc])))
data Inh_CVisits = Inh_CVisits {}
data Syn_CVisits = Syn_CVisits {pp_Syn_CVisits :: PP_Doc,ppL_Syn_CVisits :: [PP_Doc]}
wrap_CVisits :: T_CVisits ->
Inh_CVisits ->
Syn_CVisits
wrap_CVisits (T_CVisits sem ) (Inh_CVisits ) =
(let ( _lhsOpp,_lhsOppL) =
(sem )
in (Syn_CVisits _lhsOpp _lhsOppL ))
sem_CVisits_Cons :: T_CVisit ->
T_CVisits ->
T_CVisits
sem_CVisits_Cons (T_CVisit hd_ ) (T_CVisits tl_ ) =
(T_CVisits (let _lhsOppL :: ([PP_Doc])
_lhsOpp :: PP_Doc
_hdIpp :: PP_Doc
_tlIpp :: PP_Doc
_tlIppL :: ([PP_Doc])
" CodeSyntaxDump.ag"(line 90 , column 33 )
_lhsOppL =
_hdIpp : _tlIppL
use rule " CodeSyntaxDump.ag"(line 44 , column 40 )
_lhsOpp =
_hdIpp >-< _tlIpp
( _hdIpp) =
(hd_ )
( _tlIpp,_tlIppL) =
(tl_ )
in ( _lhsOpp,_lhsOppL)) )
sem_CVisits_Nil :: T_CVisits
sem_CVisits_Nil =
(T_CVisits (let _lhsOppL :: ([PP_Doc])
_lhsOpp :: PP_Doc
" CodeSyntaxDump.ag"(line 91 , column 33 )
_lhsOppL =
[]
use rule " CodeSyntaxDump.ag"(line 44 , column 40 )
_lhsOpp =
empty
in ( _lhsOpp,_lhsOppL)) )
visit 0 :
synthesized attributes :
copy : SELF
pp : PP_Doc
alternatives :
alternative :
child field : { Identifier }
child attr : { Identifier }
child pat : Pattern
child parts : Patterns
visit 0 :
local copy : _
alternative Constr :
child name : { ConstructorIdent }
child pats : Patterns
visit 0 :
local copy : _
alternative Irrefutable :
child pat : Pattern
visit 0 :
local copy : _
alternative Product :
child pos : { Pos }
child pats : Patterns
visit 0 :
local copy : _
alternative Underscore :
child pos : { Pos }
visit 0 :
local copy : _
visit 0:
synthesized attributes:
copy : SELF
pp : PP_Doc
alternatives:
alternative Alias:
child field : {Identifier}
child attr : {Identifier}
child pat : Pattern
child parts : Patterns
visit 0:
local copy : _
alternative Constr:
child name : {ConstructorIdent}
child pats : Patterns
visit 0:
local copy : _
alternative Irrefutable:
child pat : Pattern
visit 0:
local copy : _
alternative Product:
child pos : {Pos}
child pats : Patterns
visit 0:
local copy : _
alternative Underscore:
child pos : {Pos}
visit 0:
local copy : _
-}
sem_Pattern :: Pattern ->
T_Pattern
sem_Pattern (Alias _field _attr _pat _parts ) =
(sem_Pattern_Alias _field _attr (sem_Pattern _pat ) (sem_Patterns _parts ) )
sem_Pattern (Constr _name _pats ) =
(sem_Pattern_Constr _name (sem_Patterns _pats ) )
sem_Pattern (Irrefutable _pat ) =
(sem_Pattern_Irrefutable (sem_Pattern _pat ) )
sem_Pattern (Product _pos _pats ) =
(sem_Pattern_Product _pos (sem_Patterns _pats ) )
sem_Pattern (Underscore _pos ) =
(sem_Pattern_Underscore _pos )
newtype T_Pattern = T_Pattern (( Pattern,PP_Doc))
data Inh_Pattern = Inh_Pattern {}
data Syn_Pattern = Syn_Pattern {copy_Syn_Pattern :: Pattern,pp_Syn_Pattern :: PP_Doc}
wrap_Pattern :: T_Pattern ->
Inh_Pattern ->
Syn_Pattern
wrap_Pattern (T_Pattern sem ) (Inh_Pattern ) =
(let ( _lhsOcopy,_lhsOpp) =
(sem )
in (Syn_Pattern _lhsOcopy _lhsOpp ))
sem_Pattern_Alias :: Identifier ->
Identifier ->
T_Pattern ->
T_Patterns ->
T_Pattern
sem_Pattern_Alias field_ attr_ (T_Pattern pat_ ) (T_Patterns parts_ ) =
(T_Pattern (let _lhsOpp :: PP_Doc
_lhsOcopy :: Pattern
_patIcopy :: Pattern
_patIpp :: PP_Doc
_partsIcopy :: Patterns
_partsIpp :: PP_Doc
_partsIppL :: ([PP_Doc])
" CodeSyntaxDump.ag"(line 75 , column 33 )
_lhsOpp =
ppNestInfo ["Pattern","Alias"] [pp field_, pp attr_] [ppF "pat" $ _patIpp] []
_copy =
Alias field_ attr_ _patIcopy _partsIcopy
_lhsOcopy =
_copy
( _patIcopy,_patIpp) =
(pat_ )
( _partsIcopy,_partsIpp,_partsIppL) =
(parts_ )
in ( _lhsOcopy,_lhsOpp)) )
sem_Pattern_Constr :: ConstructorIdent ->
T_Patterns ->
T_Pattern
sem_Pattern_Constr name_ (T_Patterns pats_ ) =
(T_Pattern (let _lhsOpp :: PP_Doc
_lhsOcopy :: Pattern
_patsIcopy :: Patterns
_patsIpp :: PP_Doc
_patsIppL :: ([PP_Doc])
" CodeSyntaxDump.ag"(line 73 , column 33 )
_lhsOpp =
ppNestInfo ["Pattern","Constr"] [pp name_] [ppF "pats" $ ppVList _patsIppL] []
_copy =
Constr name_ _patsIcopy
_lhsOcopy =
_copy
( _patsIcopy,_patsIpp,_patsIppL) =
(pats_ )
in ( _lhsOcopy,_lhsOpp)) )
sem_Pattern_Irrefutable :: T_Pattern ->
T_Pattern
sem_Pattern_Irrefutable (T_Pattern pat_ ) =
(T_Pattern (let _lhsOpp :: PP_Doc
_lhsOcopy :: Pattern
_patIcopy :: Pattern
_patIpp :: PP_Doc
use rule " CodeSyntaxDump.ag"(line 44 , column 40 )
_lhsOpp =
_patIpp
_copy =
Irrefutable _patIcopy
_lhsOcopy =
_copy
( _patIcopy,_patIpp) =
(pat_ )
in ( _lhsOcopy,_lhsOpp)) )
sem_Pattern_Product :: Pos ->
T_Patterns ->
T_Pattern
sem_Pattern_Product pos_ (T_Patterns pats_ ) =
(T_Pattern (let _lhsOpp :: PP_Doc
_lhsOcopy :: Pattern
_patsIcopy :: Patterns
_patsIpp :: PP_Doc
_patsIppL :: ([PP_Doc])
" CodeSyntaxDump.ag"(line 74 , column 33 )
_lhsOpp =
ppNestInfo ["Pattern","Product"] [ppShow pos_] [ppF "pats" $ ppVList _patsIppL] []
_copy =
Product pos_ _patsIcopy
_lhsOcopy =
_copy
( _patsIcopy,_patsIpp,_patsIppL) =
(pats_ )
in ( _lhsOcopy,_lhsOpp)) )
sem_Pattern_Underscore :: Pos ->
T_Pattern
sem_Pattern_Underscore pos_ =
(T_Pattern (let _lhsOpp :: PP_Doc
_lhsOcopy :: Pattern
" CodeSyntaxDump.ag"(line 76 , column 25 )
_lhsOpp =
ppNestInfo ["Pattern","Underscore"] [ppShow pos_] [] []
_copy =
Underscore pos_
_lhsOcopy =
_copy
in ( _lhsOcopy,_lhsOpp)) )
visit 0 :
synthesized attributes :
copy : SELF
pp : PP_Doc
ppL : [ PP_Doc ]
alternatives :
alternative Cons :
child hd : Pattern
child tl : Patterns
visit 0 :
local copy : _
alternative :
visit 0 :
local copy : _
visit 0:
synthesized attributes:
copy : SELF
pp : PP_Doc
ppL : [PP_Doc]
alternatives:
alternative Cons:
child hd : Pattern
child tl : Patterns
visit 0:
local copy : _
alternative Nil:
visit 0:
local copy : _
-}
sem_Patterns :: Patterns ->
T_Patterns
sem_Patterns list =
(Prelude.foldr sem_Patterns_Cons sem_Patterns_Nil (Prelude.map sem_Pattern list) )
newtype T_Patterns = T_Patterns (( Patterns,PP_Doc,([PP_Doc])))
data Inh_Patterns = Inh_Patterns {}
data Syn_Patterns = Syn_Patterns {copy_Syn_Patterns :: Patterns,pp_Syn_Patterns :: PP_Doc,ppL_Syn_Patterns :: [PP_Doc]}
wrap_Patterns :: T_Patterns ->
Inh_Patterns ->
Syn_Patterns
wrap_Patterns (T_Patterns sem ) (Inh_Patterns ) =
(let ( _lhsOcopy,_lhsOpp,_lhsOppL) =
(sem )
in (Syn_Patterns _lhsOcopy _lhsOpp _lhsOppL ))
sem_Patterns_Cons :: T_Pattern ->
T_Patterns ->
T_Patterns
sem_Patterns_Cons (T_Pattern hd_ ) (T_Patterns tl_ ) =
(T_Patterns (let _lhsOppL :: ([PP_Doc])
_lhsOpp :: PP_Doc
_lhsOcopy :: Patterns
_hdIcopy :: Pattern
_hdIpp :: PP_Doc
_tlIcopy :: Patterns
_tlIpp :: PP_Doc
_tlIppL :: ([PP_Doc])
" CodeSyntaxDump.ag"(line 82 , column 33 )
_lhsOppL =
_hdIpp : _tlIppL
use rule " CodeSyntaxDump.ag"(line 44 , column 40 )
_lhsOpp =
_hdIpp >-< _tlIpp
_copy =
(:) _hdIcopy _tlIcopy
_lhsOcopy =
_copy
( _hdIcopy,_hdIpp) =
(hd_ )
( _tlIcopy,_tlIpp,_tlIppL) =
(tl_ )
in ( _lhsOcopy,_lhsOpp,_lhsOppL)) )
sem_Patterns_Nil :: T_Patterns
sem_Patterns_Nil =
(T_Patterns (let _lhsOppL :: ([PP_Doc])
_lhsOpp :: PP_Doc
_lhsOcopy :: Patterns
" CodeSyntaxDump.ag"(line 83 , column 33 )
_lhsOppL =
[]
use rule " CodeSyntaxDump.ag"(line 44 , column 40 )
_lhsOpp =
empty
_copy =
[]
_lhsOcopy =
_copy
in ( _lhsOcopy,_lhsOpp,_lhsOppL)) )
visit 0 :
synthesized attribute :
ppL : [ PP_Doc ]
alternatives :
alternative Cons :
child hd : CRule
child tl : Sequence
alternative :
visit 0:
synthesized attribute:
ppL : [PP_Doc]
alternatives:
alternative Cons:
child hd : CRule
child tl : Sequence
alternative Nil:
-}
sem_Sequence :: Sequence ->
T_Sequence
sem_Sequence list =
(Prelude.foldr sem_Sequence_Cons sem_Sequence_Nil (Prelude.map sem_CRule list) )
newtype T_Sequence = T_Sequence (( ([PP_Doc])))
data Inh_Sequence = Inh_Sequence {}
data Syn_Sequence = Syn_Sequence {ppL_Syn_Sequence :: [PP_Doc]}
wrap_Sequence :: T_Sequence ->
Inh_Sequence ->
Syn_Sequence
wrap_Sequence (T_Sequence sem ) (Inh_Sequence ) =
(let ( _lhsOppL) =
(sem )
in (Syn_Sequence _lhsOppL ))
sem_Sequence_Cons :: T_CRule ->
T_Sequence ->
T_Sequence
sem_Sequence_Cons (T_CRule hd_ ) (T_Sequence tl_ ) =
(T_Sequence (let _lhsOppL :: ([PP_Doc])
_hdIpp :: PP_Doc
_tlIppL :: ([PP_Doc])
" CodeSyntaxDump.ag"(line 86 , column 33 )
_lhsOppL =
_hdIpp : _tlIppL
( _hdIpp) =
(hd_ )
( _tlIppL) =
(tl_ )
in ( _lhsOppL)) )
sem_Sequence_Nil :: T_Sequence
sem_Sequence_Nil =
(T_Sequence (let _lhsOppL :: ([PP_Doc])
" CodeSyntaxDump.ag"(line 87 , column 33 )
_lhsOppL =
[]
in ( _lhsOppL)) ) |
7889fbceff0535974ae85f4f296b64d86840dbcc32f2d4e04d06680ccc2d0042 | parapluu/Concuerror | etsi_5.erl | -module(etsi_5).
-export([etsi_5/0]).
-export([scenarios/0]).
scenarios() -> [{?MODULE, inf, dpor}].
etsi_5() ->
Parent = self(),
ets:new(table, [public, named_table]),
ets:insert(table, {x, 0}),
ets:insert(table, {y, 0}),
ets:insert(table, {z, 0}),
ets:insert(table, {z5, 0}),
ets:insert(table, {xy, 0}),
P1 =
spawn(fun() ->
ets:insert(table, {y, 1}),
receive
ok -> Parent ! ok
end
end),
P2 =
spawn(fun() ->
ets:insert(table, {x, 1}),
receive
ok -> P1 ! ok
end
end),
P3 =
spawn(fun() ->
[{x,Y}] = ets:lookup(table, x),
case Y of
1 -> ok;
0 -> ets:insert(table, {z, 1})
end,
receive
ok -> P2 ! ok
end
end),
P4 =
spawn(fun() ->
[{x,X}] = ets:lookup(table, x),
[{y,Y}] = ets:lookup(table, y),
ets:insert(table, {xy, {X,Y}}),
receive
ok -> P3 ! ok
end
end),
spawn(fun() ->
[{z,Z}] = ets:lookup(table, z),
ets:insert(table, {z5, Z}),
P4 ! ok
end),
receive
ok -> ok
end,
P3D = ets:lookup(table, z),
P4D = ets:lookup(table, xy),
P5D = ets:lookup(table, z5),
throw(P3D++P4D++P5D).
| null | https://raw.githubusercontent.com/parapluu/Concuerror/152a5ccee0b6e97d8c3329c2167166435329d261/tests/suites/dpor_tests/src/etsi_5.erl | erlang | -module(etsi_5).
-export([etsi_5/0]).
-export([scenarios/0]).
scenarios() -> [{?MODULE, inf, dpor}].
etsi_5() ->
Parent = self(),
ets:new(table, [public, named_table]),
ets:insert(table, {x, 0}),
ets:insert(table, {y, 0}),
ets:insert(table, {z, 0}),
ets:insert(table, {z5, 0}),
ets:insert(table, {xy, 0}),
P1 =
spawn(fun() ->
ets:insert(table, {y, 1}),
receive
ok -> Parent ! ok
end
end),
P2 =
spawn(fun() ->
ets:insert(table, {x, 1}),
receive
ok -> P1 ! ok
end
end),
P3 =
spawn(fun() ->
[{x,Y}] = ets:lookup(table, x),
case Y of
1 -> ok;
0 -> ets:insert(table, {z, 1})
end,
receive
ok -> P2 ! ok
end
end),
P4 =
spawn(fun() ->
[{x,X}] = ets:lookup(table, x),
[{y,Y}] = ets:lookup(table, y),
ets:insert(table, {xy, {X,Y}}),
receive
ok -> P3 ! ok
end
end),
spawn(fun() ->
[{z,Z}] = ets:lookup(table, z),
ets:insert(table, {z5, Z}),
P4 ! ok
end),
receive
ok -> ok
end,
P3D = ets:lookup(table, z),
P4D = ets:lookup(table, xy),
P5D = ets:lookup(table, z5),
throw(P3D++P4D++P5D).
|
|
19d82be9d602366b53127ecaf67617f56491e49146cf6f1c0cf83845ce11224a | haroldcarr/learn-haskell-coq-ml-etc | Validation.hs |
Created : 2014 Jul 09 ( We d ) 13:18:07 by .
Last Modified : 2014 Jul 10 ( Thu ) by .
Created : 2014 Jul 09 (Wed) 13:18:07 by Harold Carr.
Last Modified : 2014 Jul 10 (Thu) 00:59:30 by Harold Carr.
-}
module Validation where
This contains the " Validating Credit Card Numbers " functions ( exact same thing in course ) .
import HW01_HC
import Data.List (intercalate, isPrefixOf)
import Data.List.Split (chunksOf)
import System.IO
import qualified Test.HUnit as T
import qualified Test.HUnit.Util as U
------------------------------------------------------------------------------
-- Reading and Showing Credit Card Numbers
Ex . 5 .
readCC :: String -> Integer
readCC = read . filter (/= ' ')
Ex . 6 .
showCC :: Integer -> String
showCC i =
let ds = show i
zs = replicate (16 - (length ds)) '0'
in intercalate " " (chunksOf 4 (zs ++ ds))
------------------------------------------------------------------------------
-- Identifying Credit Card Type
Ex . 7 .
rawData :: [String]
rawData = [ "34 15 American Express"
, "37 15 American Express"
, "560221 16 Bankcard"
, "6011 16 Discover Card"
, "65 16 Discover Card"
, "51 16 Master Card"
, "52 16 Master Card"
, "4 13 Visa"
, "4 16 Visa"
, "417500 16 Visa Electron"
]
lookupIssuer :: String -> String
lookupIssuer cardNum = lu cardNum rawData
where
lu _ [] = "Unknown"
lu cn (x:xs) = if check cn (words x) then x else lu cn xs
check cn (prefix:n:_) = isPrefixOf prefix cn && length cn == read n
Ex . 8 .
checkCC :: IO ()
checkCC = do
putStr "Enter credit card number: "
n <- getLine
putStrLn (lookupIssuer n)
checkCC
Ex . 9 .
toDigitsRevG :: (Integral a) => a -> a -> [a]
toDigitsRevG = undefined
------------------------------------------------------------------------------
t0 :: T.Test
t0 = T.TestList
[
U.teq "t000" (readCC "4012 8888 8888 1881") 4012888888881881
, U.teq "t001" (showCC 4012888888881881) "4012 8888 8888 1881"
, U.teq "t002" (lookupIssuer "4012888888881881") "4 16 Visa"
]
v :: IO T.Counts
v = T.runTestTT t0
-- End of file.
| null | https://raw.githubusercontent.com/haroldcarr/learn-haskell-coq-ml-etc/b4e83ec7c7af730de688b7376497b9f49dc24a0e/haskell/course/2014-07-utrecht/src/Validation.hs | haskell | ----------------------------------------------------------------------------
Reading and Showing Credit Card Numbers
----------------------------------------------------------------------------
Identifying Credit Card Type
----------------------------------------------------------------------------
End of file. |
Created : 2014 Jul 09 ( We d ) 13:18:07 by .
Last Modified : 2014 Jul 10 ( Thu ) by .
Created : 2014 Jul 09 (Wed) 13:18:07 by Harold Carr.
Last Modified : 2014 Jul 10 (Thu) 00:59:30 by Harold Carr.
-}
module Validation where
This contains the " Validating Credit Card Numbers " functions ( exact same thing in course ) .
import HW01_HC
import Data.List (intercalate, isPrefixOf)
import Data.List.Split (chunksOf)
import System.IO
import qualified Test.HUnit as T
import qualified Test.HUnit.Util as U
Ex . 5 .
readCC :: String -> Integer
readCC = read . filter (/= ' ')
Ex . 6 .
showCC :: Integer -> String
showCC i =
let ds = show i
zs = replicate (16 - (length ds)) '0'
in intercalate " " (chunksOf 4 (zs ++ ds))
Ex . 7 .
rawData :: [String]
rawData = [ "34 15 American Express"
, "37 15 American Express"
, "560221 16 Bankcard"
, "6011 16 Discover Card"
, "65 16 Discover Card"
, "51 16 Master Card"
, "52 16 Master Card"
, "4 13 Visa"
, "4 16 Visa"
, "417500 16 Visa Electron"
]
lookupIssuer :: String -> String
lookupIssuer cardNum = lu cardNum rawData
where
lu _ [] = "Unknown"
lu cn (x:xs) = if check cn (words x) then x else lu cn xs
check cn (prefix:n:_) = isPrefixOf prefix cn && length cn == read n
Ex . 8 .
checkCC :: IO ()
checkCC = do
putStr "Enter credit card number: "
n <- getLine
putStrLn (lookupIssuer n)
checkCC
Ex . 9 .
toDigitsRevG :: (Integral a) => a -> a -> [a]
toDigitsRevG = undefined
t0 :: T.Test
t0 = T.TestList
[
U.teq "t000" (readCC "4012 8888 8888 1881") 4012888888881881
, U.teq "t001" (showCC 4012888888881881) "4012 8888 8888 1881"
, U.teq "t002" (lookupIssuer "4012888888881881") "4 16 Visa"
]
v :: IO T.Counts
v = T.runTestTT t0
|
ccf20278eb82b6c1bee07205c3ead7c379548599f260dd5acc871cedc4cdf882 | facebookarchive/pfff | gzip.ml | (***********************************************************************)
(* *)
The library
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 2001 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the GNU Library General Public License , with
(* the special exception on linking described in file LICENSE. *)
(* *)
(***********************************************************************)
$ Id$
(* Module [Gzip]: reading and writing to/from [gzip] compressed files *)
exception Error of string
let buffer_size = 1024
type in_channel =
{ in_chan: Pervasives.in_channel;
in_buffer: string;
mutable in_pos: int;
mutable in_avail: int;
mutable in_eof: bool;
in_stream: Zlib.stream;
mutable in_size: int32;
mutable in_crc: int32 }
let open_in_chan ic =
(* Superficial parsing of header *)
begin try
let id1 = input_byte ic in
let id2 = input_byte ic in
if id1 <> 0x1F || id2 <> 0x8B then
raise(Error("bad magic number, not a gzip file"));
let cm = input_byte ic in
if cm <> 8 then
raise(Error("unknown compression method"));
let flags = input_byte ic in
if flags land 0xE0 <> 0 then
raise(Error("bad flags, not a gzip file"));
for i = 1 to 6 do ignore(input_byte ic) done;
if flags land 0x04 <> 0 then begin
(* Skip extra data *)
let len1 = input_byte ic in
let len2 = input_byte ic in
for i = 1 to len1 + len2 lsl 8 do ignore(input_byte ic) done
end;
if flags land 0x08 <> 0 then begin
(* Skip original file name *)
while input_byte ic <> 0 do () done
end;
if flags land 0x10 <> 0 then begin
(* Skip comment *)
while input_byte ic <> 0 do () done
end;
if flags land 0x02 <> 0 then begin
Skip header
ignore(input_byte ic); ignore(input_byte ic)
end
with End_of_file ->
raise(Error("premature end of file, not a gzip file"))
end;
{ in_chan = ic;
in_buffer = String.create buffer_size;
in_pos = 0;
in_avail = 0;
in_eof = false;
in_stream = Zlib.inflate_init false;
in_size = Int32.zero;
in_crc = Int32.zero }
let open_in filename =
open_in_chan (Pervasives.open_in_bin filename)
let read_byte iz =
if iz.in_avail = 0 then begin
let n = Pervasives.input iz.in_chan iz.in_buffer 0
(String.length iz.in_buffer) in
if n = 0 then raise End_of_file;
iz.in_pos <- 0;
iz.in_avail <- n
end;
let c = iz.in_buffer.[iz.in_pos] in
iz.in_pos <- iz.in_pos + 1;
iz.in_avail <- iz.in_avail - 1;
Char.code c
let read_int32 iz =
let b1 = read_byte iz in
let b2 = read_byte iz in
let b3 = read_byte iz in
let b4 = read_byte iz in
Int32.logor (Int32.of_int b1)
(Int32.logor (Int32.shift_left (Int32.of_int b2) 8)
(Int32.logor (Int32.shift_left (Int32.of_int b3) 16)
(Int32.shift_left (Int32.of_int b4) 24)))
let rec input iz buf pos len =
if pos < 0 || len < 0 || pos + len > String.length buf then
invalid_arg "Gzip.input";
if iz.in_eof then 0 else begin
if iz.in_avail = 0 then begin
let n = Pervasives.input iz.in_chan iz.in_buffer 0
(String.length iz.in_buffer) in
if n = 0 then raise(Error("truncated file"));
iz.in_pos <- 0;
iz.in_avail <- n
end;
let (finished, used_in, used_out) =
try
Zlib.inflate iz.in_stream iz.in_buffer iz.in_pos iz.in_avail
buf pos len Zlib.Z_SYNC_FLUSH
with Zlib.Error(_, _) ->
raise(Error("error during decompression")) in
iz.in_pos <- iz.in_pos + used_in;
iz.in_avail <- iz.in_avail - used_in;
iz.in_crc <- Zlib.update_crc iz.in_crc buf pos used_out;
iz.in_size <- Int32.add iz.in_size (Int32.of_int used_out);
if finished then begin
try
let crc = read_int32 iz in
let size = read_int32 iz in
if iz.in_crc <> crc then
raise(Error("CRC mismatch, data corrupted"));
if iz.in_size <> size then
raise(Error("size mismatch, data corrupted"));
iz.in_eof <- true;
used_out
with End_of_file ->
raise(Error("truncated file"))
end
else if used_out = 0 then
input iz buf pos len
else
used_out
end
let rec really_input iz buf pos len =
if len <= 0 then () else begin
let n = input iz buf pos len in
if n = 0 then raise End_of_file;
really_input iz buf (pos + n) (len - n)
end
let char_buffer = String.create 1
let input_char iz =
if input iz char_buffer 0 1 = 0 then raise End_of_file else char_buffer.[0]
let input_byte iz =
Char.code (input_char iz)
let dispose iz =
iz.in_eof <- true;
Zlib.inflate_end iz.in_stream
let close_in iz =
dispose iz;
Pervasives.close_in iz.in_chan
type out_channel =
{ out_chan: Pervasives.out_channel;
out_buffer: string;
mutable out_pos: int;
mutable out_avail: int;
out_stream: Zlib.stream;
mutable out_size: int32;
mutable out_crc: int32 }
let open_out_chan ?(level = 6) oc =
if level < 1 || level > 9 then invalid_arg "Gzip.open_out: bad level";
(* Write minimal header *)
output_byte oc 0x1F; (* ID1 *)
output_byte oc 0x8B; (* ID2 *)
output_byte oc 8; (* compression method *)
output_byte oc 0; (* flags *)
output_byte oc 0; (* xflags *)
output_byte oc 0xFF; (* OS (unknown) *)
{ out_chan = oc;
out_buffer = String.create buffer_size;
out_pos = 0;
out_avail = buffer_size;
out_stream = Zlib.deflate_init level false;
out_size = Int32.zero;
out_crc = Int32.zero }
let open_out ?(level = 6) filename =
open_out_chan ~level (Pervasives.open_out_bin filename)
let rec output oz buf pos len =
if pos < 0 || len < 0 || pos + len > String.length buf then
invalid_arg "Gzip.output";
(* If output buffer is full, flush it *)
if oz.out_avail = 0 then begin
Pervasives.output oz.out_chan oz.out_buffer 0 oz.out_pos;
oz.out_pos <- 0;
oz.out_avail <- String.length oz.out_buffer
end;
let (_, used_in, used_out) =
try
Zlib.deflate oz.out_stream buf pos len
oz.out_buffer oz.out_pos oz.out_avail
Zlib.Z_NO_FLUSH
with Zlib.Error(_, _) ->
raise (Error("error during compression")) in
oz.out_pos <- oz.out_pos + used_out;
oz.out_avail <- oz.out_avail - used_out;
oz.out_size <- Int32.add oz.out_size (Int32.of_int used_in);
oz.out_crc <- Zlib.update_crc oz.out_crc buf pos used_in;
if used_in < len then output oz buf (pos + used_in) (len - used_in)
let output_char oz c =
char_buffer.[0] <- c;
output oz char_buffer 0 1
let output_byte oz b =
output_char oz (Char.unsafe_chr b)
let write_int32 oc n =
let r = ref n in
for i = 1 to 4 do
Pervasives.output_byte oc (Int32.to_int !r);
r := Int32.shift_right_logical !r 8
done
let flush oz =
let rec do_flush () =
(* If output buffer is full, flush it *)
if oz.out_avail = 0 then begin
Pervasives.output oz.out_chan oz.out_buffer 0 oz.out_pos;
oz.out_pos <- 0;
oz.out_avail <- String.length oz.out_buffer
end;
let (finished, _, used_out) =
Zlib.deflate oz.out_stream oz.out_buffer 0 0
oz.out_buffer oz.out_pos oz.out_avail
Zlib.Z_FINISH in
oz.out_pos <- oz.out_pos + used_out;
oz.out_avail <- oz.out_avail - used_out;
if not finished then do_flush() in
do_flush();
(* Final data flush *)
if oz.out_pos > 0 then
Pervasives.output oz.out_chan oz.out_buffer 0 oz.out_pos;
Write CRC and size
write_int32 oz.out_chan oz.out_crc;
write_int32 oz.out_chan oz.out_size;
(* Dispose of stream *)
Zlib.deflate_end oz.out_stream
let close_out oz =
flush oz;
Pervasives.close_out oz.out_chan
| null | https://raw.githubusercontent.com/facebookarchive/pfff/ec21095ab7d445559576513a63314e794378c367/external/ocamlzip/gzip.ml | ocaml | *********************************************************************
the special exception on linking described in file LICENSE.
*********************************************************************
Module [Gzip]: reading and writing to/from [gzip] compressed files
Superficial parsing of header
Skip extra data
Skip original file name
Skip comment
Write minimal header
ID1
ID2
compression method
flags
xflags
OS (unknown)
If output buffer is full, flush it
If output buffer is full, flush it
Final data flush
Dispose of stream | The library
, projet Cristal , INRIA Rocquencourt
Copyright 2001 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the GNU Library General Public License , with
$ Id$
exception Error of string
let buffer_size = 1024
type in_channel =
{ in_chan: Pervasives.in_channel;
in_buffer: string;
mutable in_pos: int;
mutable in_avail: int;
mutable in_eof: bool;
in_stream: Zlib.stream;
mutable in_size: int32;
mutable in_crc: int32 }
let open_in_chan ic =
begin try
let id1 = input_byte ic in
let id2 = input_byte ic in
if id1 <> 0x1F || id2 <> 0x8B then
raise(Error("bad magic number, not a gzip file"));
let cm = input_byte ic in
if cm <> 8 then
raise(Error("unknown compression method"));
let flags = input_byte ic in
if flags land 0xE0 <> 0 then
raise(Error("bad flags, not a gzip file"));
for i = 1 to 6 do ignore(input_byte ic) done;
if flags land 0x04 <> 0 then begin
let len1 = input_byte ic in
let len2 = input_byte ic in
for i = 1 to len1 + len2 lsl 8 do ignore(input_byte ic) done
end;
if flags land 0x08 <> 0 then begin
while input_byte ic <> 0 do () done
end;
if flags land 0x10 <> 0 then begin
while input_byte ic <> 0 do () done
end;
if flags land 0x02 <> 0 then begin
Skip header
ignore(input_byte ic); ignore(input_byte ic)
end
with End_of_file ->
raise(Error("premature end of file, not a gzip file"))
end;
{ in_chan = ic;
in_buffer = String.create buffer_size;
in_pos = 0;
in_avail = 0;
in_eof = false;
in_stream = Zlib.inflate_init false;
in_size = Int32.zero;
in_crc = Int32.zero }
(* [open_in filename] opens the named gzip file (in binary mode) for
   decompression. *)
let open_in filename =
  open_in_chan (Pervasives.open_in_bin filename)

(* Read one raw (still compressed) byte, refilling the staging buffer
   from the underlying channel when it is empty.  Used to read the
   8-byte gzip trailer once the deflate stream has ended.
   Raises [End_of_file] when the underlying channel is exhausted. *)
let read_byte iz =
  if iz.in_avail = 0 then begin
    let n = Pervasives.input iz.in_chan iz.in_buffer 0
                             (String.length iz.in_buffer) in
    if n = 0 then raise End_of_file;
    iz.in_pos <- 0;
    iz.in_avail <- n
  end;
  let c = iz.in_buffer.[iz.in_pos] in
  iz.in_pos <- iz.in_pos + 1;
  iz.in_avail <- iz.in_avail - 1;
  Char.code c

(* Read a 32-bit integer in little-endian byte order, the layout used by
   the gzip trailer fields (CRC32 and ISIZE). *)
let read_int32 iz =
  let b1 = read_byte iz in
  let b2 = read_byte iz in
  let b3 = read_byte iz in
  let b4 = read_byte iz in
  Int32.logor (Int32.of_int b1)
    (Int32.logor (Int32.shift_left (Int32.of_int b2) 8)
      (Int32.logor (Int32.shift_left (Int32.of_int b3) 16)
        (Int32.shift_left (Int32.of_int b4) 24)))
(* [input iz buf pos len] reads up to [len] decompressed bytes into
   [buf] starting at [pos] and returns the number of bytes actually
   read, or 0 at end of stream.  When the deflate stream finishes, the
   trailer's CRC32 and ISIZE are checked against the running totals.
   Raises [Invalid_argument] on a bad [pos]/[len] range and [Error] on
   truncated or corrupted input. *)
let rec input iz buf pos len =
  if pos < 0 || len < 0 || pos + len > String.length buf then
    invalid_arg "Gzip.input";
  if iz.in_eof then 0 else begin
    (* Refill the staging buffer with compressed bytes if needed. *)
    if iz.in_avail = 0 then begin
      let n = Pervasives.input iz.in_chan iz.in_buffer 0
                               (String.length iz.in_buffer) in
      if n = 0 then raise(Error("truncated file"));
      iz.in_pos <- 0;
      iz.in_avail <- n
    end;
    let (finished, used_in, used_out) =
      try
        Zlib.inflate iz.in_stream iz.in_buffer iz.in_pos iz.in_avail
                     buf pos len Zlib.Z_SYNC_FLUSH
      with Zlib.Error(_, _) ->
        raise(Error("error during decompression")) in
    iz.in_pos <- iz.in_pos + used_in;
    iz.in_avail <- iz.in_avail - used_in;
    (* CRC and size accumulate over the *decompressed* output, for the
       trailer check below. *)
    iz.in_crc <- Zlib.update_crc iz.in_crc buf pos used_out;
    iz.in_size <- Int32.add iz.in_size (Int32.of_int used_out);
    if finished then begin
      try
        (* Validate the gzip trailer: CRC32 then uncompressed size. *)
        let crc = read_int32 iz in
        let size = read_int32 iz in
        if iz.in_crc <> crc then
          raise(Error("CRC mismatch, data corrupted"));
        if iz.in_size <> size then
          raise(Error("size mismatch, data corrupted"));
        iz.in_eof <- true;
        used_out
      with End_of_file ->
        raise(Error("truncated file"))
    end
    else if used_out = 0 then
      (* Input was consumed but no output produced yet: try again. *)
      input iz buf pos len
    else
      used_out
  end
(* [really_input iz buf pos len] reads exactly [len] decompressed bytes,
   looping over [input].  Raises [End_of_file] if the stream ends first. *)
let rec really_input iz buf pos len =
  if len <= 0 then () else begin
    let n = input iz buf pos len in
    if n = 0 then raise End_of_file;
    really_input iz buf (pos + n) (len - n)
  end

(* One-byte scratch buffer shared by [input_char] and [output_char].
   NOTE(review): shared mutable global state -- not thread-safe. *)
let char_buffer = String.create 1

(* Read a single decompressed character.  Raises [End_of_file]. *)
let input_char iz =
  if input iz char_buffer 0 1 = 0 then raise End_of_file else char_buffer.[0]

(* Read a single decompressed byte as an integer.  Raises [End_of_file]. *)
let input_byte iz =
  Char.code (input_char iz)

(* Release the zlib inflate state without closing the underlying channel. *)
let dispose iz =
  iz.in_eof <- true;
  Zlib.inflate_end iz.in_stream

(* Release the zlib state and close the underlying channel. *)
let close_in iz =
  dispose iz;
  Pervasives.close_in iz.in_chan
(* State of a compressing output channel. *)
type out_channel =
  { out_chan: Pervasives.out_channel;  (* underlying channel receiving compressed bytes *)
    out_buffer: string;                (* staging buffer of compressed output *)
    mutable out_pos: int;              (* write cursor inside [out_buffer] *)
    mutable out_avail: int;            (* free space left in [out_buffer] *)
    out_stream: Zlib.stream;           (* zlib deflate state *)
    mutable out_size: int32;           (* running count of uncompressed bytes *)
    mutable out_crc: int32 }           (* running CRC-32 of uncompressed data *)
(* [open_out_chan ?level oc] prepares [oc] to receive gzip-compressed
   data at compression [level] (1..9, default 6) and writes the
   mandatory 10-byte gzip member header (RFC 1952).  Without this header
   the reader side ([open_in_chan] above, which checks for the
   0x1F 0x8B magic) would reject the produced file as "not a gzip file".
   Raises [Invalid_argument] if [level] is out of range. *)
let open_out_chan ?(level = 6) oc =
  if level < 1 || level > 9 then invalid_arg "Gzip.open_out: bad level";
  (* Minimal header: magic 0x1F 0x8B, CM = 8 (deflate), FLG = 0,
     MTIME = 0 (4 bytes), XFL = 0, OS = 255 (unknown). *)
  Pervasives.output_string oc "\031\139";
  Pervasives.output_byte oc 8;
  Pervasives.output_byte oc 0;
  for i = 1 to 4 do Pervasives.output_byte oc 0 done;
  Pervasives.output_byte oc 0;
  Pervasives.output_byte oc 255;
  { out_chan = oc;
    out_buffer = String.create buffer_size;
    out_pos = 0;
    out_avail = buffer_size;
    out_stream = Zlib.deflate_init level false;  (* false: raw deflate stream *)
    out_size = Int32.zero;
    out_crc = Int32.zero }
(* [open_out ?level filename] creates (in binary mode) the named gzip
   file for compression at the given [level]. *)
let open_out ?(level = 6) filename =
  open_out_chan ~level (Pervasives.open_out_bin filename)
(* [output oz buf pos len] compresses [len] bytes of [buf] starting at
   [pos].  Compressed bytes are staged in [oz.out_buffer] and written to
   the underlying channel whenever the staging buffer fills.  Recurses
   until zlib has consumed the whole input slice.
   Raises [Invalid_argument] on a bad range and [Error] if zlib fails. *)
let rec output oz buf pos len =
  if pos < 0 || len < 0 || pos + len > String.length buf then
    invalid_arg "Gzip.output";
  (* Drain the staging buffer if it is full. *)
  if oz.out_avail = 0 then begin
    Pervasives.output oz.out_chan oz.out_buffer 0 oz.out_pos;
    oz.out_pos <- 0;
    oz.out_avail <- String.length oz.out_buffer
  end;
  let (_, used_in, used_out) =
    try
      Zlib.deflate oz.out_stream buf pos len
                   oz.out_buffer oz.out_pos oz.out_avail
                   Zlib.Z_NO_FLUSH
    with Zlib.Error(_, _) ->
      raise (Error("error during compression")) in
  oz.out_pos <- oz.out_pos + used_out;
  oz.out_avail <- oz.out_avail - used_out;
  (* CRC and size are computed over the *uncompressed* input, as required
     by the gzip trailer. *)
  oz.out_size <- Int32.add oz.out_size (Int32.of_int used_in);
  oz.out_crc <- Zlib.update_crc oz.out_crc buf pos used_in;
  if used_in < len then output oz buf (pos + used_in) (len - used_in)

(* Write a single character, going through the shared one-byte scratch
   buffer.  NOTE(review): [char_buffer] is global state -- not thread-safe. *)
let output_char oz c =
  char_buffer.[0] <- c;
  output oz char_buffer 0 1

(* Write a single byte (only the low 8 bits of [b] are used). *)
let output_byte oz b =
  output_char oz (Char.unsafe_chr b)
(* Write the 32-bit integer [n] to channel [oc] in little-endian byte
   order, the layout used by the gzip trailer fields (CRC32 and ISIZE).
   [Pervasives.output_byte] keeps only the low 8 bits of its argument. *)
let write_int32 oc n =
  let rec emit remaining v =
    if remaining > 0 then begin
      Pervasives.output_byte oc (Int32.to_int v);
      emit (remaining - 1) (Int32.shift_right_logical v 8)
    end
  in
  emit 4 n
(* [flush oz] terminates the compressed stream: drives zlib with
   Z_FINISH until it reports completion, drains the staging buffer, then
   appends the gzip trailer (CRC32 followed by uncompressed size, both
   little-endian) and releases the deflate state.  Despite the name this
   finalizes the stream; it is called by [close_out]. *)
let flush oz =
  let rec do_flush () =
    (* Drain the staging buffer if full, then ask zlib for more output.
       Note: input length is 0 -- we are only finishing the stream. *)
    if oz.out_avail = 0 then begin
      Pervasives.output oz.out_chan oz.out_buffer 0 oz.out_pos;
      oz.out_pos <- 0;
      oz.out_avail <- String.length oz.out_buffer
    end;
    let (finished, _, used_out) =
      Zlib.deflate oz.out_stream oz.out_buffer 0 0
                   oz.out_buffer oz.out_pos oz.out_avail
                   Zlib.Z_FINISH in
    oz.out_pos <- oz.out_pos + used_out;
    oz.out_avail <- oz.out_avail - used_out;
    if not finished then do_flush() in
  do_flush();
  (* Write out whatever is left in the staging buffer. *)
  if oz.out_pos > 0 then
    Pervasives.output oz.out_chan oz.out_buffer 0 oz.out_pos;
  (* Write the gzip trailer: CRC32 and uncompressed size. *)
  write_int32 oz.out_chan oz.out_crc;
  write_int32 oz.out_chan oz.out_size;
  Zlib.deflate_end oz.out_stream

(* Finish the compressed stream and close the underlying channel. *)
let close_out oz =
  flush oz;
  Pervasives.close_out oz.out_chan
|
6cfe3b1d7cd5e6a68e073f533d1df15a011033572ad6ac09fe9ea217114e9b51 | ekmett/bifunctors | TH.hs | # LANGUAGE CPP #
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE Unsafe #-}
-- |
Copyright : ( C ) 2008 - 2016 , ( C ) 2015 - 2016
-- License : BSD-style (see the file LICENSE)
--
Maintainer : < >
-- Stability : provisional
-- Portability : portable
--
-- Functions to mechanically derive 'Bifunctor', 'Bifoldable',
or ' Bitraversable ' instances , or to splice their functions directly into
source code . You need to enable the @TemplateHaskell@ language extension
-- in order to use this module.
module Data.Bifunctor.TH
(
-- * @derive@- functions
-- $derive
-- * @make@- functions
-- $make
-- * 'Bifunctor'
deriveBifunctor
, deriveBifunctorOptions
, makeBimap
, makeBimapOptions
-- * 'Bifoldable'
, deriveBifoldable
, deriveBifoldableOptions
, makeBifold
, makeBifoldOptions
, makeBifoldMap
, makeBifoldMapOptions
, makeBifoldr
, makeBifoldrOptions
, makeBifoldl
, makeBifoldlOptions
* ' '
, deriveBitraversable
, deriveBitraversableOptions
, makeBitraverse
, makeBitraverseOptions
, makeBisequenceA
, makeBisequenceAOptions
, makeBimapM
, makeBimapMOptions
, makeBisequence
, makeBisequenceOptions
-- * 'Options'
, Options(..)
, defaultOptions
) where
import Control.Monad (guard, unless, when)
import Data.Bifunctor.TH.Internal
import qualified Data.List as List
import qualified Data.Map as Map ((!), fromList, keys, lookup, member, size)
import Data.Maybe
import Language.Haskell.TH.Datatype as Datatype
import Language.Haskell.TH.Datatype.TyVarBndr
import Language.Haskell.TH.Lib
import Language.Haskell.TH.Ppr
import Language.Haskell.TH.Syntax
-------------------------------------------------------------------------------
-- User-facing API
-------------------------------------------------------------------------------
-- | Options that further configure how the functions in "Data.Bifunctor.TH"
-- should behave.
newtype Options = Options
  { emptyCaseBehavior :: Bool
    -- ^ If 'True', derived instances for empty data types (i.e., ones with
    -- no data constructors) will use the @EmptyCase@ language extension.
    -- If 'False', derived instances will simply use 'seq' instead.
    -- (This has no effect on GHCs before 7.8, since @EmptyCase@ is only
    -- available in 7.8 or later.)
  } deriving (Eq, Ord, Read, Show)

-- | Conservative 'Options' that doesn't attempt to use @EmptyCase@ (to
-- prevent users from having to enable that extension at use sites.)
defaultOptions :: Options
defaultOptions = Options { emptyCaseBehavior = False }
$ derive
' deriveBifunctor ' , ' deriveBifoldable ' , and ' deriveBitraversable ' automatically
generate their respective class instances for a given data type , newtype , or data
family instance that has at least two type variable . Examples :
@
& # 123;-# ; LANGUAGE TemplateHaskell & # 35;-} ;
import Data . Bifunctor . TH
data Pair a b = Pair a b
$ ( ' deriveBifunctor ' ' ' Pair ) -- instance Bifunctor Pair where ...
data WrapLeftPair f g a b = WrapLeftPair ( f a ) ( g a b )
$ ( ' deriveBifoldable ' ' ' WrapLeftPair )
-- instance ( Foldable f , ) = > Bifoldable ( WrapLeftPair f g ) where ...
@
If you are using @template - haskell-2.7.0.0@ or later ( i.e. , GHC 7.4 or later ) ,
the @derive@ functions can be used data family instances ( which requires the
@-XTypeFamilies@ extension ) . To do so , pass the name of a data or newtype instance
constructor ( NOT a data family name ! ) to a @derive@ function . Note that the
generated code may require the @-XFlexibleInstances@ extension . Example :
@
& # 123;-# ; LANGUAGE FlexibleInstances , TemplateHaskell , TypeFamilies & # 35;-} ;
import Data . Bifunctor . TH
class AssocClass a b c where
data AssocData a b c
instance AssocClass Int b c where
data AssocData Int b c = AssocDataInt1 Int | AssocDataInt2 b c
$ ( ' deriveBitraversable ' ' AssocDataInt1 ) -- instance ( AssocData Int ) where ...
-- Alternatively , one could use $ ( deriveBitraversable ' AssocDataInt2 )
@
Note that there are some limitations :
* The ' Name ' argument to a @derive@ function must not be a type synonym .
* With a @derive@ function , the last two type variables must both be of kind @*@.
Other type variables of kind @ * - > * @ are assumed to require a ' Functor ' ,
' Foldable ' , or ' ' constraint ( depending on which @derive@ function is
used ) , and other type variables of kind @ * - > * - > * @ are assumed to require an
' Bifunctor ' , ' Bifoldable ' , or ' Bitraversable ' constraint . If your data type
does n't meet these assumptions , use a @make@ function .
* If using the @-XDatatypeContexts@ , , or @-XGADTs@
extensions , a constraint can not mention either of the last two type variables . For
example , @data Illegal2 a b where I2 : : a = > a - > b - > Illegal2 a b@ can not
have a derived ' Bifunctor ' instance .
* If either of the last two type variables is used within a constructor argument 's
type , it must only be used in the last two type arguments . For example ,
@data Legal a b = Legal ( Int , Int , a , b)@ can have a derived ' Bifunctor ' instance ,
but @data Illegal a b = Illegal ( a , b , a , b)@ can not .
* Data family instances must be able to eta - reduce the last two type variables . In other
words , if you have a instance of the form :
@
data family Family a1 ... an t1 t2
data instance Family e1 ... e2 v1 v2 = ...
@
Then the following conditions must hold :
1 . @v1@ and @v2@ must be distinct type variables .
2 . Neither @v1@ not @v2@ must be mentioned in any of @e1@ , ... , @e2@.
'deriveBifunctor', 'deriveBifoldable', and 'deriveBitraversable' automatically
generate their respective class instances for a given data type, newtype, or data
family instance that has at least two type variable. Examples:
@
{-# LANGUAGE TemplateHaskell #-}
import Data.Bifunctor.TH
data Pair a b = Pair a b
$('deriveBifunctor' ''Pair) -- instance Bifunctor Pair where ...
data WrapLeftPair f g a b = WrapLeftPair (f a) (g a b)
$('deriveBifoldable' ''WrapLeftPair)
-- instance (Foldable f, Bifoldable g) => Bifoldable (WrapLeftPair f g) where ...
@
If you are using @template-haskell-2.7.0.0@ or later (i.e., GHC 7.4 or later),
the @derive@ functions can be used data family instances (which requires the
@-XTypeFamilies@ extension). To do so, pass the name of a data or newtype instance
constructor (NOT a data family name!) to a @derive@ function. Note that the
generated code may require the @-XFlexibleInstances@ extension. Example:
@
{-# LANGUAGE FlexibleInstances, TemplateHaskell, TypeFamilies #-}
import Data.Bifunctor.TH
class AssocClass a b c where
data AssocData a b c
instance AssocClass Int b c where
data AssocData Int b c = AssocDataInt1 Int | AssocDataInt2 b c
$('deriveBitraversable' 'AssocDataInt1) -- instance Bitraversable (AssocData Int) where ...
-- Alternatively, one could use $(deriveBitraversable 'AssocDataInt2)
@
Note that there are some limitations:
* The 'Name' argument to a @derive@ function must not be a type synonym.
* With a @derive@ function, the last two type variables must both be of kind @*@.
Other type variables of kind @* -> *@ are assumed to require a 'Functor',
'Foldable', or 'Traversable' constraint (depending on which @derive@ function is
used), and other type variables of kind @* -> * -> *@ are assumed to require an
'Bifunctor', 'Bifoldable', or 'Bitraversable' constraint. If your data type
doesn't meet these assumptions, use a @make@ function.
* If using the @-XDatatypeContexts@, @-XExistentialQuantification@, or @-XGADTs@
extensions, a constraint cannot mention either of the last two type variables. For
example, @data Illegal2 a b where I2 :: Ord a => a -> b -> Illegal2 a b@ cannot
have a derived 'Bifunctor' instance.
* If either of the last two type variables is used within a constructor argument's
type, it must only be used in the last two type arguments. For example,
@data Legal a b = Legal (Int, Int, a, b)@ can have a derived 'Bifunctor' instance,
but @data Illegal a b = Illegal (a, b, a, b)@ cannot.
* Data family instances must be able to eta-reduce the last two type variables. In other
words, if you have a instance of the form:
@
data family Family a1 ... an t1 t2
data instance Family e1 ... e2 v1 v2 = ...
@
Then the following conditions must hold:
1. @v1@ and @v2@ must be distinct type variables.
2. Neither @v1@ not @v2@ must be mentioned in any of @e1@, ..., @e2@.
-}
$ make
There may be scenarios in which you want to , say , ' bimap ' over an arbitrary data type
or data family instance without having to make the type an instance of ' Bifunctor ' . For
these cases , this module provides several functions ( all prefixed with @make@- ) that
splice the appropriate lambda expression into your source code .
This is particularly useful for creating instances for sophisticated data types . For
example , ' deriveBifunctor ' can not infer the correct type context for
@newtype HigherKinded f a b c = HigherKinded ( f a b c)@ , since is of kind
@ * - > * - > * - > * @. However , it is still possible to create a ' Bifunctor ' instance for
@HigherKinded@ without too much trouble using ' ' :
@
& # 123;-# ; LANGUAGE FlexibleContexts , TemplateHaskell & # 35;-} ;
import Data . Bifunctor
import Data . Bifunctor . TH
newtype HigherKinded f a b c = HigherKinded ( f a b c )
instance Bifunctor ( f a ) = ( HigherKinded f a ) where
bimap = $ ( makeBimap '' HigherKinded )
@
There may be scenarios in which you want to, say, 'bimap' over an arbitrary data type
or data family instance without having to make the type an instance of 'Bifunctor'. For
these cases, this module provides several functions (all prefixed with @make@-) that
splice the appropriate lambda expression into your source code.
This is particularly useful for creating instances for sophisticated data types. For
example, 'deriveBifunctor' cannot infer the correct type context for
@newtype HigherKinded f a b c = HigherKinded (f a b c)@, since @f@ is of kind
@* -> * -> * -> *@. However, it is still possible to create a 'Bifunctor' instance for
@HigherKinded@ without too much trouble using 'makeBimap':
@
{-# LANGUAGE FlexibleContexts, TemplateHaskell #-}
import Data.Bifunctor
import Data.Bifunctor.TH
newtype HigherKinded f a b c = HigherKinded (f a b c)
instance Bifunctor (f a) => Bifunctor (HigherKinded f a) where
bimap = $(makeBimap ''HigherKinded)
@
-}
-- | Generates a 'Bifunctor' instance declaration for the given data type or data
-- family instance.
deriveBifunctor :: Name -> Q [Dec]
deriveBifunctor = deriveBifunctorOptions defaultOptions

-- | Like 'deriveBifunctor', but takes an 'Options' argument.
deriveBifunctorOptions :: Options -> Name -> Q [Dec]
deriveBifunctorOptions = deriveBiClass Bifunctor

-- | Generates a lambda expression which behaves like 'bimap' (without requiring a
-- 'Bifunctor' instance).
makeBimap :: Name -> Q Exp
makeBimap = makeBimapOptions defaultOptions

-- | Like 'makeBimap', but takes an 'Options' argument.
makeBimapOptions :: Options -> Name -> Q Exp
makeBimapOptions = makeBiFun Bimap
-- | Generates a 'Bifoldable' instance declaration for the given data type or data
-- family instance.
deriveBifoldable :: Name -> Q [Dec]
deriveBifoldable = deriveBifoldableOptions defaultOptions

-- | Like 'deriveBifoldable', but takes an 'Options' argument.
deriveBifoldableOptions :: Options -> Name -> Q [Dec]
deriveBifoldableOptions = deriveBiClass Bifoldable

-- | Generates a lambda expression which behaves like 'bifold' (without requiring a
-- 'Bifoldable' instance).
makeBifold :: Name -> Q Exp
makeBifold = makeBifoldOptions defaultOptions

-- | Like 'makeBifold', but takes an 'Options' argument.
-- Defined as @bifoldMap id id@: both mapping functions are 'id'.
makeBifoldOptions :: Options -> Name -> Q Exp
makeBifoldOptions opts name = appsE [ makeBifoldMapOptions opts name
                                    , varE idValName
                                    , varE idValName
                                    ]

-- | Generates a lambda expression which behaves like 'bifoldMap' (without requiring
-- a 'Bifoldable' instance).
makeBifoldMap :: Name -> Q Exp
makeBifoldMap = makeBifoldMapOptions defaultOptions

-- | Like 'makeBifoldMap', but takes an 'Options' argument.
makeBifoldMapOptions :: Options -> Name -> Q Exp
makeBifoldMapOptions = makeBiFun BifoldMap
-- | Generates a lambda expression which behaves like 'bifoldr' (without requiring a
-- 'Bifoldable' instance).
makeBifoldr :: Name -> Q Exp
makeBifoldr = makeBifoldrOptions defaultOptions

-- | Like 'makeBifoldr', but takes an 'Options' argument.
makeBifoldrOptions :: Options -> Name -> Q Exp
makeBifoldrOptions = makeBiFun Bifoldr

-- | Generates a lambda expression which behaves like 'bifoldl' (without requiring a
-- 'Bifoldable' instance).
makeBifoldl :: Name -> Q Exp
makeBifoldl = makeBifoldlOptions defaultOptions

-- | Like 'makeBifoldl', but takes an 'Options' argument.
-- Implemented via 'bifoldMap' with the @Dual . Endo@ trick, i.e.
-- @bifoldl f g z t =
--    appEndo (getDual (bifoldMap (Dual . Endo . flip f)
--                                (Dual . Endo . flip g) t)) z@
makeBifoldlOptions :: Options -> Name -> Q Exp
makeBifoldlOptions opts name = do
  f <- newName "f"
  g <- newName "g"
  z <- newName "z"
  t <- newName "t"
  lamE [varP f, varP g, varP z, varP t] $
    appsE [ varE appEndoValName
          , appsE [ varE getDualValName
                  , appsE [ makeBifoldMapOptions opts name
                          , foldFun f
                          , foldFun g
                          , varE t]
                  ]
          , varE z
          ]
  where
    -- Builds the expression @Dual . Endo . flip fun@ for a function name.
    foldFun :: Name -> Q Exp
    foldFun n = infixApp (conE dualDataName)
                         (varE composeValName)
                         (infixApp (conE endoDataName)
                                   (varE composeValName)
                                   (varE flipValName `appE` varE n)
                         )
-- | Generates a 'Bitraversable' instance declaration for the given data type or data
-- family instance.
deriveBitraversable :: Name -> Q [Dec]
deriveBitraversable = deriveBitraversableOptions defaultOptions

-- | Like 'deriveBitraversable', but takes an 'Options' argument.
deriveBitraversableOptions :: Options -> Name -> Q [Dec]
deriveBitraversableOptions = deriveBiClass Bitraversable

-- | Generates a lambda expression which behaves like 'bitraverse' (without
-- requiring a 'Bitraversable' instance).
makeBitraverse :: Name -> Q Exp
makeBitraverse = makeBitraverseOptions defaultOptions

-- | Like 'makeBitraverse', but takes an 'Options' argument.
makeBitraverseOptions :: Options -> Name -> Q Exp
makeBitraverseOptions = makeBiFun Bitraverse

-- | Generates a lambda expression which behaves like 'bisequenceA' (without
-- requiring a 'Bitraversable' instance).
makeBisequenceA :: Name -> Q Exp
makeBisequenceA = makeBisequenceAOptions defaultOptions

-- | Like 'makeBisequenceA', but takes an 'Options' argument.
-- Defined as @bitraverse id id@.
makeBisequenceAOptions :: Options -> Name -> Q Exp
makeBisequenceAOptions opts name = appsE [ makeBitraverseOptions opts name
                                         , varE idValName
                                         , varE idValName
                                         ]

-- | Generates a lambda expression which behaves like 'bimapM' (without
-- requiring a 'Bitraversable' instance).
makeBimapM :: Name -> Q Exp
makeBimapM = makeBimapMOptions defaultOptions

-- | Like 'makeBimapM', but takes an 'Options' argument.
-- Reuses the 'bitraverse' implementation by wrapping the monadic actions
-- in 'WrapMonad' (to get an 'Applicative') and unwrapping with 'unwrapMonad'.
makeBimapMOptions :: Options -> Name -> Q Exp
makeBimapMOptions opts name = do
  f <- newName "f"
  g <- newName "g"
  lamE [varP f, varP g] . infixApp (varE unwrapMonadValName) (varE composeValName) $
    appsE [ makeBitraverseOptions opts name
          , wrapMonadExp f
          , wrapMonadExp g
          ]
  where
    -- Builds the expression @WrapMonad . fun@ for a function name.
    wrapMonadExp :: Name -> Q Exp
    wrapMonadExp n = infixApp (conE wrapMonadDataName) (varE composeValName) (varE n)

-- | Generates a lambda expression which behaves like 'bisequence' (without
-- requiring a 'Bitraversable' instance).
makeBisequence :: Name -> Q Exp
makeBisequence = makeBisequenceOptions defaultOptions

-- | Like 'makeBisequence', but takes an 'Options' argument.
-- Defined as @bimapM id id@.
makeBisequenceOptions :: Options -> Name -> Q Exp
makeBisequenceOptions opts name = appsE [ makeBimapMOptions opts name
                                        , varE idValName
                                        , varE idValName
                                        ]
-------------------------------------------------------------------------------
-- Code generation
-------------------------------------------------------------------------------
-- | Derive a class instance declaration (depending on the BiClass argument's value).
deriveBiClass :: BiClass -> Options -> Name -> Q [Dec]
deriveBiClass biClass opts name = do
  info <- reifyDatatype name
  case info of
    DatatypeInfo { datatypeContext   = ctxt
                 , datatypeName      = parentName
                 , datatypeInstTypes = instTys
                 , datatypeVariant   = variant
                 , datatypeCons      = cons
                 } -> do
      -- Compute the instance context and head first; this also validates
      -- that the data type is eligible for the requested class.
      (instanceCxt, instanceType)
        <- buildTypeInstance biClass parentName ctxt instTys variant
      (:[]) `fmap` instanceD (return instanceCxt)
                             (return instanceType)
                             (biFunDecs biClass opts parentName instTys cons)
-- | Generates a declaration defining the primary function(s) corresponding to a
-- particular class (bimap for Bifunctor; bifoldr and bifoldMap for Bifoldable;
-- bitraverse for Bitraversable).
--
-- For why both bifoldr and bifoldMap are derived for Bifoldable, see Trac #7436.
biFunDecs :: BiClass -> Options -> Name -> [Type] -> [ConstructorInfo] -> [Q Dec]
biFunDecs biClass opts parentName instTys cons =
  map makeFunD $ biClassToFuns biClass
  where
    -- Builds one method declaration (e.g. @bimap = ...@) whose body is
    -- generated by 'makeBiFunForCons'.
    makeFunD :: BiFun -> Q Dec
    makeFunD biFun =
      funD (biFunName biFun)
           [ clause []
                    (normalB $ makeBiFunForCons biFun opts parentName instTys cons)
                    []
           ]
-- | Generates a lambda expression which behaves like the BiFun argument.
makeBiFun :: BiFun -> Options -> Name -> Q Exp
makeBiFun biFun opts name = do
  info <- reifyDatatype name
  case info of
    DatatypeInfo { datatypeContext   = ctxt
                 , datatypeName      = parentName
                 , datatypeInstTypes = instTys
                 , datatypeVariant   = variant
                 , datatypeCons      = cons
                 } ->
      -- We force buildTypeInstance here since it performs some checks for whether
      -- or not the provided datatype can actually have bimap/bifoldr/bitraverse/etc.
      -- implemented for it, and produces errors if it can't.
      buildTypeInstance (biFunToClass biFun) parentName ctxt instTys variant
        >> makeBiFunForCons biFun opts parentName instTys cons
-- | Generates a lambda expression for the given constructors.
-- All constructors must be from the same type.
makeBiFunForCons :: BiFun -> Options -> Name -> [Type] -> [ConstructorInfo] -> Q Exp
makeBiFunForCons biFun opts _parentName instTys cons = do
  map1  <- newName "f"
  map2  <- newName "g"
  z     <- newName "z" -- Only used for deriving bifoldr
  value <- newName "value"
  let argNames   = catMaybes [ Just map1
                             , Just map2
                             , guard (biFun == Bifoldr) >> Just z
                             , Just value
                             ]
      -- The last two type variables are the ones being mapped/folded/traversed.
      lastTyVars = map varTToName $ drop (length instTys - 2) instTys
      tvMap      = Map.fromList $ zip lastTyVars [map1, map2]
  lamE (map varP argNames)
      . appsE
      $ [ varE $ biFunConstName biFun
        , makeFun z value tvMap
        ] ++ map varE argNames
  where
    -- Dispatches on the shape of the datatype: if both of the last two
    -- parameters are phantom the implementation is a coercion; empty data
    -- types get either an EmptyCase or a seq-based body; otherwise a case
    -- expression with one match per constructor.
    makeFun :: Name -> Name -> TyVarMap -> Q Exp
    makeFun z value tvMap = do
      roles <- reifyRoles _parentName
      case () of
        _
          | Just (rs, PhantomR) <- unsnoc roles
          , Just (_, PhantomR) <- unsnoc rs
          -> biFunPhantom z value
          | null cons && emptyCaseBehavior opts
          -> biFunEmptyCase biFun z value
          | null cons
          -> biFunNoCons biFun z value
          | otherwise
          -> caseE (varE value)
                   (map (makeBiFunForCon biFun z tvMap) cons)

    -- Both of the last two parameters are phantom: just coerce the value.
    biFunPhantom :: Name -> Name -> Q Exp
    biFunPhantom z value =
        biFunTrivial coerce
                     (varE pureValName `appE` coerce)
                     biFun z
      where
        coerce :: Q Exp
        coerce = varE coerceValName `appE` varE value
-- | Generates a match for a single constructor.
makeBiFunForCon :: BiFun -> Name -> TyVarMap -> ConstructorInfo -> Q Match
makeBiFunForCon biFun z tvMap
  con@ConstructorInfo { constructorName    = conName
                      , constructorContext = ctxt } = do
  -- A constructor context mentioning the last type variables (or a type
  -- with fewer than two distinct last variables) is only allowed for
  -- classes that tolerate existential quantification.
  when ((any (`predMentionsName` Map.keys tvMap) ctxt
          || Map.size tvMap < 2)
          && not (allowExQuant (biFunToClass biFun))) $
    existentialContextError conName
  case biFun of
    Bimap      -> makeBimapMatch tvMap con
    Bifoldr    -> makeBifoldrMatch z tvMap con
    BifoldMap  -> makeBifoldMapMatch tvMap con
    Bitraverse -> makeBitraverseMatch tvMap con
-- | Generates a match whose right-hand side implements @bimap@.
makeBimapMatch :: TyVarMap -> ConstructorInfo -> Q Match
makeBimapMatch tvMap con@ConstructorInfo{constructorName = conName} = do
  parts <- foldDataConArgs tvMap ft_bimap con
  match_for_con conName parts
  where
    -- How to map over each possible shape of constructor argument type.
    ft_bimap :: FFoldType (Exp -> Q Exp)
    ft_bimap = FT { ft_triv = return
                  , ft_var = \v x -> return $ VarE (tvMap Map.! v) `AppE` x
                  , ft_fun = \g h x -> mkSimpleLam $ \b -> do
                      gg <- g b
                      h $ x `AppE` gg
                  , ft_tup = mkSimpleTupleCase match_for_con
                  , ft_ty_app = \argGs x -> do
                      let inspect :: (Type, Exp -> Q Exp) -> Q Exp
                          inspect (argTy, g)
                            -- If the argument type is a bare occurrence of one
                            -- of the data type's last type variables, then we
                            -- can generate more efficient code.
                            | Just argVar <- varTToName_maybe argTy
                            , Just f <- Map.lookup argVar tvMap
                            = return $ VarE f
                            | otherwise
                            = mkSimpleLam g
                      appsE $ varE (fmapArity (length argGs))
                              : map inspect argGs
                             ++ [return x]
                  , ft_forall = \_ g x -> g x
                  , ft_bad_app = \_ -> outOfPlaceTyVarError conName
                  , ft_co_var = \_ _ -> contravarianceError conName
                  }

    -- Con a1 a2 ... -> Con (f1 a1) (f2 a2) ...
    match_for_con :: Name -> [Exp -> Q Exp] -> Q Match
    match_for_con = mkSimpleConMatch $ \conName' xs ->
      appsE (conE conName':xs) -- Con x1 x2 ..
-- | Generates a match whose right-hand side implements @bifoldr@.
makeBifoldrMatch :: Name -> TyVarMap -> ConstructorInfo -> Q Match
makeBifoldrMatch z tvMap con@ConstructorInfo{constructorName = conName} = do
  parts  <- foldDataConArgs tvMap ft_bifoldr con
  parts' <- sequence parts
  match_for_con (VarE z) conName parts'
  where
    -- The Bool is True if the type mentions one of the last two type
    -- parameters, False otherwise. Later, match_for_con uses
    -- mkSimpleConMatch2 to filter out expressions that do not mention the
    -- last parameters by checking for False.
    ft_bifoldr :: FFoldType (Q (Bool, Exp))
    -- NOTE: the binding below (ft_bifoldr = FT { ... }) had been lost from
    -- this copy of the file, leaving a bare type signature; restored.
    ft_bifoldr = FT { -- See Note [ft_triv for Bifoldable and Bitraversable]
                      ft_triv = do lam <- mkSimpleLam2 $ \_ z' -> return z'
                                   return (False, lam)
                    , ft_var = \v -> return (True, VarE $ tvMap Map.! v)
                    , ft_tup = \t gs -> do
                        gg  <- sequence gs
                        lam <- mkSimpleLam2 $ \x z' ->
                          mkSimpleTupleCase (match_for_con z') t gg x
                        return (True, lam)
                    , ft_ty_app = \gs -> do
                        lam <- mkSimpleLam2 $ \x z' ->
                          appsE $ varE (foldrArity (length gs))
                                  : map (\(_, hs) -> fmap snd hs) gs
                                 ++ map return [z', x]
                        return (True, lam)
                    , ft_forall = \_ g -> g
                    , ft_co_var = \_ -> contravarianceError conName
                    , ft_fun = \_ _ -> noFunctionsError conName
                    , ft_bad_app = outOfPlaceTyVarError conName
                    }

    match_for_con :: Exp -> Name -> [(Bool, Exp)] -> Q Match
    match_for_con zExp = mkSimpleConMatch2 $ \_ xs -> return $ mkBifoldr xs
      where
        -- g1 v1 (g2 v2 (.. z))
        mkBifoldr :: [Exp] -> Exp
        mkBifoldr = foldr AppE zExp
-- | Generates a match whose right-hand side implements @bifoldMap@.
makeBifoldMapMatch :: TyVarMap -> ConstructorInfo -> Q Match
makeBifoldMapMatch tvMap con@ConstructorInfo{constructorName = conName} = do
  parts  <- foldDataConArgs tvMap ft_bifoldMap con
  parts' <- sequence parts
  match_for_con conName parts'
  where
    -- The Bool is True if the type mentions one of the last two type
    -- parameters, False otherwise. Later, match_for_con uses
    -- mkSimpleConMatch2 to filter out expressions that do not mention the
    -- last parameters by checking for False.
    ft_bifoldMap :: FFoldType (Q (Bool, Exp))
    -- NOTE: the binding below (ft_bifoldMap = FT { ... }) had been lost
    -- from this copy of the file, leaving a bare type signature; restored.
    ft_bifoldMap = FT { -- See Note [ft_triv for Bifoldable and Bitraversable]
                        ft_triv = do lam <- mkSimpleLam $ \_ -> return $ VarE memptyValName
                                     return (False, lam)
                      , ft_var = \v -> return (True, VarE $ tvMap Map.! v)
                      , ft_tup = \t gs -> do
                          gg  <- sequence gs
                          lam <- mkSimpleLam $ mkSimpleTupleCase match_for_con t gg
                          return (True, lam)
                      , ft_ty_app = \gs -> do
                          e <- appsE $ varE (foldMapArity (length gs))
                                       : map (\(_, hs) -> fmap snd hs) gs
                          return (True, e)
                      , ft_forall = \_ g -> g
                      , ft_co_var = \_ -> contravarianceError conName
                      , ft_fun = \_ _ -> noFunctionsError conName
                      , ft_bad_app = outOfPlaceTyVarError conName
                      }

    match_for_con :: Name -> [(Bool, Exp)] -> Q Match
    match_for_con = mkSimpleConMatch2 $ \_ xs -> return $ mkBifoldMap xs
      where
        -- mappend v1 (mappend v2 ..)
        mkBifoldMap :: [Exp] -> Exp
        mkBifoldMap [] = VarE memptyValName
        mkBifoldMap es = foldr1 (AppE . AppE (VarE mappendValName)) es
-- | Generates a match whose right-hand side implements @bitraverse@.
makeBitraverseMatch :: TyVarMap -> ConstructorInfo -> Q Match
makeBitraverseMatch tvMap con@ConstructorInfo{constructorName = conName} = do
  parts  <- foldDataConArgs tvMap ft_bitrav con
  parts' <- sequence parts
  match_for_con conName parts'
  where
    -- The Bool is True if the type mentions one of the last two type
    -- parameters, False otherwise. Later, match_for_con uses
    -- mkSimpleConMatch2 to filter out expressions that do not mention the
    -- last parameters by checking for False.
    ft_bitrav :: FFoldType (Q (Bool, Exp))
    -- NOTE: the binding below (ft_bitrav = FT { ... }) had been lost from
    -- this copy of the file, leaving a bare type signature; restored.
    ft_bitrav = FT { -- See Note [ft_triv for Bifoldable and Bitraversable]
                     ft_triv = return (False, VarE pureValName)
                   , ft_var = \v -> return (True, VarE $ tvMap Map.! v)
                   , ft_tup = \t gs -> do
                       gg  <- sequence gs
                       lam <- mkSimpleLam $ mkSimpleTupleCase match_for_con t gg
                       return (True, lam)
                   , ft_ty_app = \gs -> do
                       e <- appsE $ varE (traverseArity (length gs))
                                    : map (\(_, hs) -> fmap snd hs) gs
                       return (True, e)
                   , ft_forall = \_ g -> g
                   , ft_co_var = \_ -> contravarianceError conName
                   , ft_fun = \_ _ -> noFunctionsError conName
                   , ft_bad_app = outOfPlaceTyVarError conName
                   }

    -- Con a1 a2 ... -> liftA2 (\b1 b2 ... -> Con b1 b2 ...) (g1 a1)
    -- (g2 a2) <*> ...
    match_for_con :: Name -> [(Bool, Exp)] -> Q Match
    match_for_con = mkSimpleConMatch2 $ \conExp xs -> return $ mkApCon conExp xs
      where
        -- liftA2 (\b1 b2 ... -> Con b1 b2 ...) x1 x2 <*> ..
        mkApCon :: Exp -> [Exp] -> Exp
        mkApCon conExp []  = VarE pureValName `AppE` conExp
        mkApCon conExp [e] = VarE fmapValName `AppE` conExp `AppE` e
        mkApCon conExp (e1:e2:es) = List.foldl' appAp
          (VarE liftA2ValName `AppE` conExp `AppE` e1 `AppE` e2) es
          where appAp se1 se2 = InfixE (Just se1) (VarE apValName) (Just se2)
-------------------------------------------------------------------------------
Template Haskell reifying and AST manipulation
-------------------------------------------------------------------------------
-- For the given Types, generate an instance context and head. Coming up with
-- the instance type isn't as simple as dropping the last types, as you need to
-- be wary of kinds being instantiated with *.
-- | For the given types, generate an instance context and head. Coming up
-- with the instance type isn't as simple as dropping the last types, as you
-- need to be wary of kinds being instantiated with *.
--
-- See Note [Type inference in derived instances]
buildTypeInstance :: BiClass
                  -- ^ Bifunctor, Bifoldable, or Bitraversable
                  -> Name
                  -- ^ The type constructor or data family name
                  -> Cxt
                  -- ^ The datatype context
                  -> [Type]
                  -- ^ The types to instantiate the instance with
                  -> DatatypeVariant
                  -- ^ Are we dealing with a data family instance or not
                  -> Q (Cxt, Type)
buildTypeInstance biClass tyConName dataCxt instTysOrig variant = do
    -- Make sure to expand through type/kind synonyms! Otherwise, the
    -- eta-reduction check might get tripped up over type variables in a
    -- synonym that are actually dropped.
    -- (See GHC Trac #11416 for a scenario where this actually happened.)
    varTysExp <- mapM resolveTypeSynonyms instTysOrig

    let remainingLength :: Int
        remainingLength = length instTysOrig - 2

        -- The two types that will be eta-reduced away (post-expansion).
        droppedTysExp :: [Type]
        droppedTysExp = drop remainingLength varTysExp

        droppedStarKindStati :: [StarKindStatus]
        droppedStarKindStati = map canRealizeKindStar droppedTysExp

    -- Check there are enough types to drop and that all of them are either of
    -- kind * or kind k (for some kind variable k). If not, throw an error.
    when (remainingLength < 0 || elem NotKindStar droppedStarKindStati) $
      derivingKindError biClass tyConName

    let droppedKindVarNames :: [Name]
        droppedKindVarNames = catKindVarNames droppedStarKindStati

        -- Substitute kind * for any dropped kind variables
        varTysExpSubst :: [Type]
        varTysExpSubst = map (substNamesWithKindStar droppedKindVarNames) varTysExp

        remainingTysExpSubst, droppedTysExpSubst :: [Type]
        (remainingTysExpSubst, droppedTysExpSubst) =
          splitAt remainingLength varTysExpSubst

        -- All of the type variables mentioned in the dropped types
        -- (post-synonym expansion)
        droppedTyVarNames :: [Name]
        droppedTyVarNames = freeVariables droppedTysExpSubst

    -- If any of the dropped types were polykinded, ensure that they are of kind *
    -- after substituting * for the dropped kind variables. If not, throw an error.
    unless (all hasKindStar droppedTysExpSubst) $
      derivingKindError biClass tyConName

    let preds    :: [Maybe Pred]
        kvNames  :: [[Name]]
        kvNames' :: [Name]
        -- Derive instance constraints (and any kind variables which are specialized
        -- to * in those constraints)
        (preds, kvNames) = unzip $ map (deriveConstraint biClass) remainingTysExpSubst
        kvNames' = concat kvNames

        -- Substitute the kind variables specialized in the constraints with *
        remainingTysExpSubst' :: [Type]
        remainingTysExpSubst' =
          map (substNamesWithKindStar kvNames') remainingTysExpSubst

        -- We now substitute all of the specialized-to-* kind variable names with
        -- *, but in the original types, not the synonym-expanded types. The reason
        -- we do this is a superficial one: we want the derived instance to resemble
        -- the datatype written in source code as closely as possible. For example,
        -- for the following data family instance:
        --
        --   data family Fam a
        --   newtype instance Fam String = Fam String
        --
        -- We'd want to generate the instance:
        --
        --   instance C (Fam String)
        --
        -- Not:
        --
        --   instance C (Fam [Char])
        remainingTysOrigSubst :: [Type]
        remainingTysOrigSubst =
          map (substNamesWithKindStar (List.union droppedKindVarNames kvNames'))
            $ take remainingLength instTysOrig

    isDataFamily <-
      case variant of
        Datatype        -> return False
        Newtype         -> return False
        DataInstance    -> return True
        NewtypeInstance -> return True
#if MIN_VERSION_th_abstraction(0,5,0)
        Datatype.TypeData -> typeDataError tyConName
#endif

    let remainingTysOrigSubst' :: [Type]
        -- See Note [Kind signatures in derived instances] for an explanation
        -- of the isDataFamily check.
        remainingTysOrigSubst' =
          if isDataFamily
             then remainingTysOrigSubst
             else map unSigT remainingTysOrigSubst

        instanceCxt :: Cxt
        instanceCxt = catMaybes preds

        instanceType :: Type
        instanceType = AppT (ConT $ biClassName biClass)
                     $ applyTyCon tyConName remainingTysOrigSubst'

    -- If the datatype context mentions any of the dropped type variables,
    -- we can't derive an instance, so throw an error.
    when (any (`predMentionsName` droppedTyVarNames) dataCxt) $
      datatypeContextError tyConName instanceType
    -- Also ensure the dropped types can be safely eta-reduced. Otherwise,
    -- throw an error.
    unless (canEtaReduce remainingTysExpSubst' droppedTysExpSubst) $
      etaReductionError instanceType
    return (instanceCxt, instanceType)
-- | Attempt to derive a constraint on a Type. If successful, return
-- Just the constraint and any kind variable names constrained to *.
-- Otherwise, return Nothing and the empty list.
--
-- See Note [Type inference in derived instances] for the heuristics used to
-- come up with constraints.
deriveConstraint :: BiClass -> Type -> (Maybe Pred, [Name])
deriveConstraint biClass t
  | not (isTyVar t) = (Nothing, [])
  | otherwise       = tryArities [1, 2]
  where
    tName :: Name
    tName = varTToName t

    -- Try each arity in turn: the first one whose kind-variable chain check
    -- succeeds determines the constraint (Functor-like for 1, Bifunctor-like
    -- for 2) and the kind variables that were specialized to *.
    tryArities :: [Int] -> (Maybe Pred, [Name])
    tryArities [] = (Nothing, [])
    tryArities (arity:arities) =
      case hasKindVarChain arity t of
        Just ns -> (fmap (`applyClass` tName) (biClassConstraint biClass arity), ns)
        Nothing -> tryArities arities
{-
Note [Kind signatures in derived instances]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It is possible to put explicit kind signatures into the derived instances, e.g.,
instance C a => C (Data (f :: * -> *)) where ...
But it is preferable to avoid this if possible. If we come up with an incorrect
kind signature (which is entirely possible, since our type inferencer is pretty
unsophisticated - see Note [Type inference in derived instances]), then GHC will
flat-out reject the instance, which is quite unfortunate.
Plain old datatypes have the advantage that you can avoid using any kind signatures
at all in their instances. This is because a datatype declaration uses all type
variables, so the types that we use in a derived instance uniquely determine their
kinds. As long as we plug in the right types, the kind inferencer can do the rest
of the work. For this reason, we use unSigT to remove all kind signatures before
splicing in the instance context and head.
Data family instances are trickier, since a data family can have two instances that
are distinguished by kind alone, e.g.,
data family Fam (a :: k)
data instance Fam (a :: * -> *)
data instance Fam (a :: *)
If we dropped the kind signatures for C (Fam a), then GHC will have no way of
knowing which instance we are talking about. To avoid this scenario, we always
include explicit kind signatures in data family instances. There is a chance that
the inferred kind signatures will be incorrect, but if so, we can always fall back
on the make- functions.
Note [Type inference in derived instances]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Type inference is can be tricky to get right, and we want to avoid recreating the
entirety of GHC's type inferencer in Template Haskell. For this reason, we will
probably never come up with derived instance contexts that are as accurate as
GHC's. But that doesn't mean we can't do anything! There are a couple of simple
things we can do to make instance contexts that work for 80% of use cases:
1. If one of the last type parameters is polykinded, then its kind will be
specialized to * in the derived instance. We note what kind variable the type
parameter had and substitute it with * in the other types as well. For example,
imagine you had
data Data (a :: k) (b :: k) (c :: k)
Then you'd want to derived instance to be:
instance C (Data (a :: *))
Not:
instance C (Data (a :: k))
2. We naïvely come up with instance constraints using the following criteria:
(i) If there's a type parameter n of kind k1 -> k2 (where k1/k2 are * or kind
variables), then generate a Functor n constraint, and if k1/k2 are kind
variables, then substitute k1/k2 with * elsewhere in the types. We must
consider the case where they are kind variables because you might have a
scenario like this:
newtype Compose (f :: k3 -> *) (g :: k1 -> k2 -> k3) (a :: k1) (b :: k2)
= Compose (f (g a b))
Which would have a derived Bifunctor instance of:
instance (Functor f, Bifunctor g) => Bifunctor (Compose f g) where ...
(ii) If there's a type parameter n of kind k1 -> k2 -> k3 (where k1/k2/k3 are
* or kind variables), then generate a Bifunctor n constraint and perform
kind substitution as in the other case.
-}
{-
Note [Matching functions with GADT type variables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When deriving Bifoldable, there is a tricky corner case to consider:
data Both a b where
BothCon :: x -> x -> Both x x
Which fold functions should be applied to which arguments of BothCon? We have a
choice, since both the function of type (a -> m) and of type (b -> m) can be
applied to either argument. In such a scenario, the second fold function takes
precedence over the first fold function, so the derived Bifoldable instance would be:
instance Bifoldable Both where
bifoldMap _ g (BothCon x1 x2) = g x1 <> g x2
This is not an arbitrary choice, as this definition ensures that
bifoldMap id = Foldable.foldMap for a derived Bifoldable instance for Both.
-}
-------------------------------------------------------------------------------
-- Error messages
-------------------------------------------------------------------------------
-- | Either the given data type doesn't have enough type variables, or one of
-- the type variables to be eta-reduced cannot realize kind *.
derivingKindError :: BiClass -> Name -> Q a
derivingKindError biClass tyConName = fail $
     "Cannot derive well-kinded instance of form ‘"
  ++ className
  ++ " ("
  ++ nameBase tyConName
  ++ " ...)‘\n\tClass "
  ++ className
  ++ " expects an argument of kind * -> * -> *"
  where
    className :: String
    className = nameBase (biClassName biClass)
-- | One of the last two type variables appeared in a contravariant position
-- when deriving Bifoldable or Bitraversable.
contravarianceError :: Name -> Q a
contravarianceError conName = fail $
     "Constructor ‘"
  ++ nameBase conName
  ++ "‘ must not use the last type variable(s) in a function argument"
-- | A constructor has a function argument in a derived Bifoldable or
-- Bitraversable instance.
noFunctionsError :: Name -> Q a
noFunctionsError conName = fail $
     "Constructor ‘"
  ++ nameBase conName
  ++ "‘ must not contain function types"
-- | The data type has a DatatypeContext which mentions one of the eta-reduced
-- type variables.
datatypeContextError :: Name -> Type -> Q a
datatypeContextError dataName instanceType = fail $
     "Can't make a derived instance of ‘"
  ++ pprint instanceType
  ++ "‘:\n\tData type ‘"
  ++ nameBase dataName
  ++ "‘ must not have a class context involving the last type argument(s)"
-- | The data type has an existential constraint which mentions one of the
-- eta-reduced type variables.
existentialContextError :: Name -> Q a
existentialContextError conName = fail $
     "Constructor ‘"
  ++ nameBase conName
  ++ "‘ must be truly polymorphic in the last argument(s) of the data type"
-- | The data type mentions one of the n eta-reduced type variables in a place
-- other than the last nth positions of a data type in a constructor's field.
outOfPlaceTyVarError :: Name -> Q a
outOfPlaceTyVarError conName = fail $
     "Constructor ‘"
  ++ nameBase conName
  ++ "‘ must only use its last two type variable(s) within"
  ++ " the last two argument(s) of a data type"
-- | One of the last type variables cannot be eta-reduced (see the canEtaReduce
-- function for the criteria it would have to meet).
etaReductionError :: Type -> Q a
etaReductionError instanceType = fail
  . showString "Cannot eta-reduce to an instance of form \n\tinstance (...) => "
  . showString (pprint instanceType)
  $ ""
-- | The given name refers to a @type data@ declaration, for which no
-- instance can be derived.
typeDataError :: Name -> Q a
typeDataError dataName = fail $
     "Cannot derive instance for ‘"
  ++ nameBase dataName
  ++ "‘, which is a ‘type data‘ declaration"
-------------------------------------------------------------------------------
-- Class-specific constants
-------------------------------------------------------------------------------
-- | A representation of which class is being derived. Used below to select
-- class names, per-parameter constraints, and error messages.
data BiClass = Bifunctor | Bifoldable | Bitraversable
-- | A representation of which class method is being generated
-- (bimap, bifoldr, bifoldMap, or bitraverse).
data BiFun = Bimap | Bifoldr | BifoldMap | Bitraverse
  deriving Eq
-- | Maps each generated function to the name of its @Const@-suffixed helper
-- (e.g. Bimap to @bimapConstValName@).
biFunConstName :: BiFun -> Name
biFunConstName biFun = case biFun of
  Bimap      -> bimapConstValName
  Bifoldr    -> bifoldrConstValName
  BifoldMap  -> bifoldMapConstValName
  Bitraverse -> bitraverseConstValName
-- | The Template Haskell name of the type class being derived.
biClassName :: BiClass -> Name
biClassName biClass = case biClass of
  Bifunctor     -> bifunctorTypeName
  Bifoldable    -> bifoldableTypeName
  Bitraversable -> bitraversableTypeName
-- | The Template Haskell name of the class method being generated.
biFunName :: BiFun -> Name
biFunName biFun = case biFun of
  Bimap      -> bimapValName
  Bifoldr    -> bifoldrValName
  BifoldMap  -> bifoldMapValName
  Bitraverse -> bitraverseValName
-- | The method(s) that are generated for an instance of the given class.
biClassToFuns :: BiClass -> [BiFun]
biClassToFuns biClass = case biClass of
  Bifunctor     -> [Bimap]
  Bifoldable    -> [Bifoldr, BifoldMap]
  Bitraversable -> [Bitraverse]
-- | The class that each generated method belongs to.
biFunToClass :: BiFun -> BiClass
biFunToClass biFun = case biFun of
  Bimap      -> Bifunctor
  Bifoldr    -> Bifoldable
  BifoldMap  -> Bifoldable
  Bitraverse -> Bitraversable
-- | The constraint class to impose on a type parameter of the given arity:
-- the Functor-like superclass for arity 1, the class itself for arity 2,
-- and Nothing for any other arity.
biClassConstraint :: BiClass -> Int -> Maybe Name
biClassConstraint biClass n = case (biClass, n) of
  (Bifunctor,     1) -> Just functorTypeName
  (Bifoldable,    1) -> Just foldableTypeName
  (Bitraversable, 1) -> Just traversableTypeName
  (_,             2) -> Just (biClassName biClass)
  (_,             _) -> Nothing
-- | The map-like method for a type parameter of the given arity
-- (1 = fmap, 2 = bimap).
fmapArity :: Int -> Name
fmapArity n = case n of
  1 -> fmapValName
  2 -> bimapValName
  _ -> arityErr n
-- | The foldr-like method for a type parameter of the given arity
-- (1 = foldr, 2 = bifoldr).
foldrArity :: Int -> Name
foldrArity n = case n of
  1 -> foldrValName
  2 -> bifoldrValName
  _ -> arityErr n
-- | The foldMap-like method for a type parameter of the given arity
-- (1 = foldMap, 2 = bifoldMap).
foldMapArity :: Int -> Name
foldMapArity n = case n of
  1 -> foldMapValName
  2 -> bifoldMapValName
  _ -> arityErr n
-- | The traverse-like method for a type parameter of the given arity
-- (1 = traverse, 2 = bitraverse).
traverseArity :: Int -> Name
traverseArity n = case n of
  1 -> traverseValName
  2 -> bitraverseValName
  _ -> arityErr n
-- | Abort: the arity tables above only support arities 1 and 2.
arityErr :: Int -> a
arityErr n = error ("Unsupported arity: " ++ show n)
-- | Whether existentially quantified type variables are permitted in the
-- generated code for the given class (only for Bifoldable).
allowExQuant :: BiClass -> Bool
allowExQuant biClass = case biClass of
  Bifoldable -> True
  _          -> False
-- | A trivial right-hand side built from a @case@ expression with no
-- alternatives over the given value (used via 'biFunTrivial', with the
-- empty case wrapped in 'pure' for bitraverse).
biFunEmptyCase :: BiFun -> Name -> Name -> Q Exp
biFunEmptyCase biFun z value =
  let emptyCase :: Q Exp
      emptyCase = caseE (varE value) []
  in biFunTrivial emptyCase
                  (appE (varE pureValName) emptyCase)
                  biFun z
-- | A trivial right-hand side that forces the value with 'seq' and then
-- calls 'error' with a \"Void <function>\" message (used via 'biFunTrivial',
-- with the expression wrapped in 'pure' for bitraverse).
biFunNoCons :: BiFun -> Name -> Name -> Q Exp
biFunNoCons biFun z value =
  let errorMessage :: String
      errorMessage = "Void " ++ nameBase (biFunName biFun)

      seqAndError :: Q Exp
      seqAndError = varE seqValName `appE` varE value
                                    `appE` (varE errorValName `appE` stringE errorMessage)
  in biFunTrivial seqAndError
                  (varE pureValName `appE` seqAndError)
                  biFun z
-- | Selects the trivial implementation for each generated method:
-- @bimapE@ for bimap, the seed variable @z@ for bifoldr, 'mempty' for
-- bifoldMap, and @bitraverseE@ for bitraverse.
biFunTrivial :: Q Exp -> Q Exp -> BiFun -> Name -> Q Exp
biFunTrivial bimapE bitraverseE biFun z =
  case biFun of
    Bimap      -> bimapE
    Bifoldr    -> varE z
    BifoldMap  -> varE memptyValName
    Bitraverse -> bitraverseE
{-
Note [ft_triv for Bifoldable and Bitraversable]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When deriving Bifoldable and Bitraversable, we filter out any subexpressions whose
type does not mention one of the last two type parameters. From this, you might
think that we don't need to implement ft_triv for bifoldr, bifoldMap, or
bitraverse at all, but in fact we do need to. Imagine the following data type:
data T a b = MkT a (T Int b)
In a derived Bifoldable T instance, you would generate the following bifoldMap
definition:
bifoldMap f g (MkT a1 a2) = f a1 <> bifoldMap (\_ -> mempty) g arg2
You need to fill in bi_triv (\_ -> mempty) as the first argument to the recursive
call to bifoldMap, since that is how the algorithm handles polymorphic recursion.
-}
-------------------------------------------------------------------------------
-- Generic traversal for functor-like deriving
-------------------------------------------------------------------------------

-- Much of the code below is cargo-culted from the TcGenFunctor module in GHC.
-- Describes how to fold over a Type in a functor like way: one field per
-- syntactic shape the traversal can encounter.
data FFoldType a
  = FT { ft_triv    :: a
         -- ^ Does not contain variables
       , ft_var     :: Name -> a
         -- ^ A bare variable
       , ft_co_var  :: Name -> a
         -- ^ A bare variable, contravariantly
       , ft_fun     :: a -> a -> a
         -- ^ Function type
       , ft_tup     :: TupleSort -> [a] -> a
         -- ^ Tuple type. The [a] is the result of folding over the
         -- arguments of the tuple.
       , ft_ty_app  :: [(Type, a)] -> a
         -- ^ Type app, variables only in last argument. The [(Type, a)]
         -- represents the last argument types. That is, they form the
         -- argument parts of @fun_ty arg_ty_1 ... arg_ty_n@.
       , ft_bad_app :: a
         -- ^ Type app, variable other than in last arguments
       , ft_forall  :: [TyVarBndrSpec] -> a -> a
         -- ^ Forall type
       }
-- Note that in GHC, this function is pure. It must be monadic here since we:
--
-- (1) Expand type synonyms
-- (2) Detect type family applications
--
-- Which require reification in Template Haskell, but are pure in Core.
functorLikeTraverse :: forall a.
                       TyVarMap    -- ^ Variables to look for
                    -> FFoldType a -- ^ How to fold
                    -> Type        -- ^ Type to process
                    -> Q a
functorLikeTraverse tvMap FT { ft_triv = caseTrivial, ft_var = caseVar
                             , ft_co_var = caseCoVar, ft_fun = caseFun
                             , ft_tup = caseTuple, ft_ty_app = caseTyApp
                             , ft_bad_app = caseWrongArg, ft_forall = caseForAll }
                    ty
  = do ty' <- resolveTypeSynonyms ty
       (res, _) <- go False ty'
       return res
  where
    go :: Bool -- Covariant or contravariant context
       -> Type
       -> Q (a, Bool) -- (result of type a, does type contain var)
    -- Function types flip variance for the argument position.
    go co t@AppT{}
      | (ArrowT, [funArg, funRes]) <- unapplyTy t
      = do (funArgR, funArgC) <- go (not co) funArg
           (funResR, funResC) <- go co funRes
           if funArgC || funResC
              then return (caseFun funArgR funResR, True)
              else trivial
    -- General type application: decide between tuples, a "good" application
    -- with variables only in the last argument(s), or a "bad" one.
    go co t@AppT{} = do
      let (f, args) = unapplyTy t
      (_, fc) <- go co f
      (xrs, xcs) <- unzip <$> mapM (go co) args
      let numLastArgs, numFirstArgs :: Int
          numLastArgs  = min 2 $ length args
          numFirstArgs = length args - numLastArgs

          tuple :: TupleSort -> Q (a, Bool)
          tuple tupSort = return (caseTuple tupSort xrs, True)

          wrongArg :: Q (a, Bool)
          wrongArg = return (caseWrongArg, True)

      case () of
        _ |  not (or xcs)
          -> trivial -- Variable does not occur
          -- At this point we know that xrs, xcs is not empty,
          -- and at least one xr is True
          |  TupleT len <- f
          -> tuple $ Boxed len
          |  UnboxedTupleT len <- f
          -> tuple $ Unboxed len
          |  fc || or (take numFirstArgs xcs)
          -> wrongArg -- T (..var..) ty_1 ... ty_n
          |  otherwise -- T (..no var..) ty_1 ... ty_n
          -> do itf <- isInTypeFamilyApp tyVarNames f args
                if itf -- We can't decompose type families, so
                       -- error if we encounter one here.
                   then wrongArg
                   else return ( caseTyApp $ drop numFirstArgs $ zip args xrs
                               , True )
    -- A kind signature whose kind mentions the variables is a "bad" position.
    go co (SigT t k) = do
      (_, kc) <- go_kind co k
      if kc
         then return (caseWrongArg, True)
         else go co t
    go co (VarT v)
      | Map.member v tvMap
      = return (if co then caseCoVar v else caseVar v, True)
      | otherwise
      = trivial
    -- A forall is trivial if it shadows the variables we are looking for.
    go co (ForallT tvbs _ t) = do
      (tr, tc) <- go co t
      let tvbNames = map tvName tvbs
      if not tc || any (`elem` tvbNames) tyVarNames
         then trivial
         else return (caseForAll tvbs tr, True)
    go _ _ = trivial

    go_kind :: Bool
            -> Kind
            -> Q (a, Bool)
    go_kind = go

    trivial :: Q (a, Bool)
    trivial = return (caseTrivial, False)

    tyVarNames :: [Name]
    tyVarNames = Map.keys tvMap
-- Fold over the arguments of a data constructor in a Functor-like way:
-- expand type synonyms in each field type, then traverse each with
-- 'functorLikeTraverse'.
foldDataConArgs :: forall a. TyVarMap -> FFoldType a -> ConstructorInfo -> Q [a]
foldDataConArgs tvMap ft con =
  mapM (functorLikeTraverse tvMap ft)
    =<< mapM resolveTypeSynonyms (constructorFields con)
-- Make a 'LamE' using a fresh variable.
mkSimpleLam :: (Exp -> Q Exp) -> Q Exp
mkSimpleLam mkBody = do
  -- The fresh name carries a leading underscore, as certain Bifoldable
  -- instances can generate code like
  --
  -- @
  --   bifoldr (\\_n -> id) ...
  -- @
  --
  -- (see #89), which would otherwise trigger -Wunused-matches warnings.
  argName <- newName "_n"
  LamE [VarP argName] <$> mkBody (VarE argName)
-- Make a 'LamE' using two fresh variables.
mkSimpleLam2 :: (Exp -> Exp -> Q Exp) -> Q Exp
mkSimpleLam2 mkBody = do
  -- The first fresh name carries a leading underscore, as certain Bifoldable
  -- instances can generate code like
  --
  -- @
  --   bifoldr (\\_n1 n2 -> n2) ...
  -- @
  --
  -- (see #89), which would otherwise trigger -Wunused-matches warnings.
  argName1 <- newName "_n1"
  argName2 <- newName "n2"
  LamE [VarP argName1, VarP argName2] <$> mkBody (VarE argName1) (VarE argName2)
-- "Con a1 a2 a3 -> fold [x1 a1, x2 a2, x3 a3]"
--
-- @mkSimpleConMatch fold conName insides@ produces a match clause whose LHS
-- pattern-matches on the constructor @conName@ and its arguments. The RHS
-- folds (with @fold@) over @conName@ and its arguments, applying an
-- expression (from @insides@) to each of the respective arguments of
-- @conName@.
mkSimpleConMatch :: (Name -> [a] -> Q Exp)
                 -> Name
                 -> [Exp -> a]
                 -> Q Match
mkSimpleConMatch fold conName insides = do
  argNames <- newNameList "_arg" (length insides)
  let pat  = conPCompat conName (map VarP argNames)
      -- Pair each @insides@ entry with its corresponding bound argument.
      args = zipWith (\inside v -> inside (VarE v)) insides argNames
  rhs <- fold conName args
  return (Match pat (NormalB rhs) [])
-- "Con a1 a2 a3 -> fmap (\b2 -> Con a1 b2 a3) (traverse f a2)"
--
-- @mkSimpleConMatch2 fold conName insides@ behaves very similarly to
-- 'mkSimpleConMatch', with two key differences:
--
-- 1. @insides@ is a @[(Bool, Exp)]@ instead of a @[Exp]@. This is because it
--    filters out the expressions corresponding to arguments whose types do not
--    mention the last type variable in a derived 'Foldable' or 'Traversable'
--    instance (i.e., those elements of @insides@ containing @False@).
--
-- 2. @fold@ takes an expression as its first argument instead of a
--    constructor name. This is because it uses a specialized
--    constructor function expression that only takes as many parameters as
--    there are argument types that mention the last type variable.
mkSimpleConMatch2 :: (Exp -> [Exp] -> Q Exp)
                  -> Name
                  -> [(Bool, Exp)]
                  -> Q Match
mkSimpleConMatch2 fold conName insides = do
  varsNeeded <- newNameList "_arg" lengthInsides
  let pat = conPCompat conName (map VarP varsNeeded)
      -- Make sure to zip BEFORE invoking catMaybes. We want the variable
      -- indices in each expression to match up with the argument indices
      -- in conExpr (defined below).
      exps = catMaybes $ zipWith (\(m, i) v -> if m then Just (i `AppE` VarE v)
                                                    else Nothing)
                                 insides varsNeeded
      -- An element of argTysTyVarInfo is True if the constructor argument
      -- with the same index has a type which mentions the last type
      -- variable.
      argTysTyVarInfo = map fst insides
      (asWithTyVar, asWithoutTyVar) = partitionByList argTysTyVarInfo varsNeeded

      -- The specialized constructor expression: if no argument mentions the
      -- last type variable(s), apply the constructor directly; otherwise,
      -- build a lambda over fresh names for exactly those arguments.
      conExpQ
        | null asWithTyVar = appsE (conE conName:map varE asWithoutTyVar)
        | otherwise = do
            bs <- newNameList "b" lengthInsides
            let bs'  = filterByList  argTysTyVarInfo bs
                vars = filterByLists argTysTyVarInfo
                                     (map varE bs) (map varE varsNeeded)
            lamE (map varP bs') (appsE (conE conName:vars))

  conExp <- conExpQ
  rhs <- fold conExp exps
  return $ Match pat (NormalB rhs) []
  where
    lengthInsides = length insides
-- Indicates whether a tuple is boxed or unboxed, as well as its number of
-- arguments. For instance, (a, b) corresponds to @Boxed 2@, and (# a, b, c #)
-- corresponds to @Unboxed 3@.
data TupleSort
  = Boxed   Int -- ^ @(a, b)@ is @Boxed 2@
  | Unboxed Int -- ^ @(# a, b, c #)@ is @Unboxed 3@
-- "case x of (a1,a2,a3) -> fold [x1 a1, x2 a2, x3 a3]"
mkSimpleTupleCase :: (Name -> [a] -> Q Match)
                  -> TupleSort -> [a] -> Exp -> Q Exp
mkSimpleTupleCase matchForCon tupSort insides x =
  fmap (\m -> CaseE x [m]) (matchForCon (tupConName tupSort) insides)
  where
    -- The data constructor name for a tuple of the given sort and arity.
    tupConName :: TupleSort -> Name
    tupConName (Boxed   len) = tupleDataName len
    tupConName (Unboxed len) = unboxedTupleDataName len
-- Adapt to the type of ConP changing in template-haskell-2.18.0.0.
-- (ConP gained an extra field there; we always supply an empty list for it,
-- keeping only the constructor name and its subpatterns.)
conPCompat :: Name -> [Pat] -> Pat
conPCompat n pats = ConP n
#if MIN_VERSION_template_haskell(2,18,0)
                         []
#endif
                         pats
License : BSD-style (see the file LICENSE)
Stability : provisional
Portability : portable
Functions to mechanically derive 'Bifunctor', 'Bifoldable',
in order to use this module.
* @derive@- functions
$derive
* @make@- functions
$make
* 'Bifunctor'
* 'Bifoldable'
* 'Options'
-----------------------------------------------------------------------------
User-facing API
-----------------------------------------------------------------------------
should behave.
^ If 'True', derived instances for empty data types (i.e., ones with
no data constructors) will use the @EmptyCase@ language extension.
If 'False', derived instances will simply use 'seq' instead.
| Conservative 'Options' that doesn't attempt to use @EmptyCase@ (to
prevent users from having to enable that extension at use sites.)
instance Bifunctor Pair where ...
instance ( Foldable f , ) = > Bifoldable ( WrapLeftPair f g ) where ...
instance ( AssocData Int ) where ...
Alternatively , one could use $ ( deriveBitraversable ' AssocDataInt2 )
instance Bifunctor Pair where ...
instance (Foldable f, Bifoldable g) => Bifoldable (WrapLeftPair f g) where ...
instance Bitraversable (AssocData Int) where ...
Alternatively, one could use $(deriveBitraversable 'AssocDataInt2)
family instance.
| Like 'deriveBifunctor', but takes an 'Options' argument.
| Generates a lambda expression which behaves like 'bimap' (without requiring a
| Generates a 'Bifoldable' instance declaration for the given data type or data
family instance.
| Like 'deriveBifoldable', but takes an 'Options' argument.
- | Generates a lambda expression which behaves like 'bifold' (without requiring a
'Bifoldable' instance).
| Like 'makeBifold', but takes an 'Options' argument.
| Generates a lambda expression which behaves like 'bifoldMap' (without requiring
a 'Bifoldable' instance).
| Like 'makeBifoldMap', but takes an 'Options' argument.
| Generates a lambda expression which behaves like 'bifoldr' (without requiring a
'Bifoldable' instance).
| Like 'makeBifoldr', but takes an 'Options' argument.
| Generates a lambda expression which behaves like 'bifoldl' (without requiring a
'Bifoldable' instance).
| Like 'makeBifoldl', but takes an 'Options' argument.
family instance.
| Like 'deriveBitraversable', but takes an 'Options' argument.
| Generates a lambda expression which behaves like 'bitraverse' (without
| Generates a lambda expression which behaves like 'bisequenceA' (without
| Like 'makeBitraverseA', but takes an 'Options' argument.
| Generates a lambda expression which behaves like 'bimapM' (without
| Like 'makeBimapM', but takes an 'Options' argument.
| Generates a lambda expression which behaves like 'bisequence' (without
| Like 'makeBisequence', but takes an 'Options' argument.
-----------------------------------------------------------------------------
Code generation
-----------------------------------------------------------------------------
| Generates a declaration defining the primary function(s) corresponding to a
We force buildTypeInstance here since it performs some checks for whether
implemented for it, and produces errors if it can't.
| Generates a lambda expression for the given constructors.
All constructors must be from the same type.
Only used for deriving bifoldr
| Generates a match for a single constructor.
of the data type's last type variables, then we
can generate more efficient code.
Con a1 a2 ... -> Con (f1 a1) (f2 a2) ...
Con x1 x2 ..
| Generates a match whose right-hand side implements @bifoldr@.
out expressions that do not mention the last parameters by checking for
False.
out expressions that do not mention the last parameters by checking for
False.
mappend v1 (mappend v2 ..)
| Generates a match whose right-hand side implements @bitraverse@.
out expressions that do not mention the last parameters by checking for
False.
Con a1 a2 ... -> liftA2 (\b1 b2 ... -> Con b1 b2 ...) (g1 a1)
(g2 a2) <*> ...
liftA2 (\b1 b2 ... -> Con b1 b2 ...) x1 x2 <*> ..
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
For the given Types, generate an instance context and head. Coming up with
the instance type isn't as simple as dropping the last types, as you need to
be wary of kinds being instantiated with *.
See Note [Type inference in derived instances]
^ The type constructor or data family name
^ The datatype context
^ The types to instantiate the instance with
^ Are we dealing with a data family instance or not
Make sure to expand through type/kind synonyms! Otherwise, the
eta-reduction check might get tripped up over type variables in a
synonym that are actually dropped.
Check there are enough types to drop and that all of them are either of
kind * or kind k (for some kind variable k). If not, throw an error.
Substitute kind * for any dropped kind variables
All of the type variables mentioned in the dropped types
(post-synonym expansion)
If any of the dropped types were polykinded, ensure that they are of kind *
after substituting * for the dropped kind variables. If not, throw an error.
Derive instance constraints (and any kind variables which are specialized
to * in those constraints)
Substitute the kind variables specialized in the constraints with *
We now substitute all of the specialized-to-* kind variable names with
*, but in the original types, not the synonym-expanded types. The reason
we do this is a superficial one: we want the derived instance to resemble
the datatype written in source code as closely as possible. For example,
for the following data family instance:
data family Fam a
newtype instance Fam String = Fam String
We'd want to generate the instance:
instance C (Fam String)
Not:
instance C (Fam [Char])
See Note [Kind signatures in derived instances] for an explanation
of the isDataFamily check.
If the datatype context mentions any of the dropped type variables,
we can't derive an instance, so throw an error.
Also ensure the dropped types can be safely eta-reduced. Otherwise,
throw an error.
| Attempt to derive a constraint on a Type. If successful, return
Just the constraint and any kind variable names constrained to *.
Otherwise, return Nothing and the empty list.
See Note [Type inference in derived instances] for the heuristics used to
come up with constraints.
-----------------------------------------------------------------------------
Error messages
-----------------------------------------------------------------------------
the type variables to be eta-reduced cannot realize kind *.
instance.
type variables.
eta-reduced type variables.
than the last nth positions of a data type in a constructor's field.
| One of the last type variables cannot be eta-reduced (see the canEtaReduce
function for the criteria it would have to meet).
-----------------------------------------------------------------------------
Class-specific constants
-----------------------------------------------------------------------------
| A representation of which class is being derived.
| A representation of which function is being generated.
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
Describes how to fold over a Type in a functor like way
^ Does not contain variables
^ A bare variable
^ A bare variable, contravariantly
^ Function type
^ Tuple type. The [a] is the result of folding over the
arguments of the tuple.
^ Type app, variables only in last argument. The [(Type, a)]
represents the last argument types. That is, they form the
argument parts of @fun_ty arg_ty_1 ... arg_ty_n@.
^ Type app, variable other than in last arguments
^ Forall type
^ Variables to look for
^ How to fold
^ Type to process
Covariant or contravariant context
(result of type a, does type contain var)
Variable does not occur
T (..var..) ty_1 ... ty_n
T (..no var..) ty_1 ... ty_n
We can't decompose type families, so
error if we encounter one here.
Make a 'LamE' using a fresh variable.
Use an underscore in front of the variable name, as it's possible for
@
@
Without the underscore, that code would trigger -Wunused-matches warnings.
Use an underscore in front of the variable name, as it's possible for
@
bifoldr (\\_n1 n2 -> n2) ...
@
Without the underscore, that code would trigger -Wunused-matches warnings.
@mkSimpleConMatch fold conName insides@ produces a match clause in
of the respective arguments of @conName@.
filters out the expressions corresponding to arguments whose types do not
instance (i.e., those elements of @insides@ containing @False@).
constructor name. This is because it uses a specialized
constructor function expression that only takes as many parameters as
there are argument types that mention the last type variable.
indicies in each expression to match up with the argument indices
in conExpr (defined below).
with the same index has a type which mentions the last type
variable.
Indicates whether a tuple is boxed or unboxed, as well as its number of
"case x of (a1,a2,a3) -> fold [x1 a1, x2 a2, x3 a3]"
Adapt to the type of ConP changing in template-haskell-2.18.0.0. | # LANGUAGE CPP #
# LANGUAGE PatternGuards #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE Unsafe #
Copyright : ( C ) 2008 - 2016 , ( C ) 2015 - 2016
Maintainer : < >
or ' Bitraversable ' instances , or to splice their functions directly into
source code . You need to enable the @TemplateHaskell@ language extension
module Data.Bifunctor.TH
(
deriveBifunctor
, deriveBifunctorOptions
, makeBimap
, makeBimapOptions
, deriveBifoldable
, deriveBifoldableOptions
, makeBifold
, makeBifoldOptions
, makeBifoldMap
, makeBifoldMapOptions
, makeBifoldr
, makeBifoldrOptions
, makeBifoldl
, makeBifoldlOptions
* ' '
, deriveBitraversable
, deriveBitraversableOptions
, makeBitraverse
, makeBitraverseOptions
, makeBisequenceA
, makeBisequenceAOptions
, makeBimapM
, makeBimapMOptions
, makeBisequence
, makeBisequenceOptions
, Options(..)
, defaultOptions
) where
import Control.Monad (guard, unless, when)
import Data.Bifunctor.TH.Internal
import qualified Data.List as List
import qualified Data.Map as Map ((!), fromList, keys, lookup, member, size)
import Data.Maybe
import Language.Haskell.TH.Datatype as Datatype
import Language.Haskell.TH.Datatype.TyVarBndr
import Language.Haskell.TH.Lib
import Language.Haskell.TH.Ppr
import Language.Haskell.TH.Syntax
| Options that further configure how the functions in " Data . Bifunctor . TH "
-- | Options that further configure how the functions in "Data.Bifunctor.TH"
-- should behave.
newtype Options = Options
  { emptyCaseBehavior :: Bool
    -- ^ If 'True', derived instances for empty data types use the
    -- @EmptyCase@ language extension.
    -- (This has no effect on GHCs before 7.8, since @EmptyCase@ is only
    -- available in 7.8 or later.)
  } deriving (Eq, Ord, Read, Show)

-- | Conservative 'Options': the @EmptyCase@ behavior is disabled.
defaultOptions :: Options
defaultOptions = Options { emptyCaseBehavior = False }
$ derive
' deriveBifunctor ' , ' deriveBifoldable ' , and ' deriveBitraversable ' automatically
generate their respective class instances for a given data type , newtype , or data
family instance that has at least two type variable . Examples :
@
& # 123;-# ; LANGUAGE TemplateHaskell & # 35;-} ;
import Data . Bifunctor . TH
data Pair a b = Pair a b
data WrapLeftPair f g a b = WrapLeftPair ( f a ) ( g a b )
$ ( ' deriveBifoldable ' ' ' WrapLeftPair )
@
If you are using @template - haskell-2.7.0.0@ or later ( i.e. , GHC 7.4 or later ) ,
the @derive@ functions can be used data family instances ( which requires the
@-XTypeFamilies@ extension ) . To do so , pass the name of a data or newtype instance
constructor ( NOT a data family name ! ) to a @derive@ function . Note that the
generated code may require the @-XFlexibleInstances@ extension . Example :
@
& # 123;-# ; LANGUAGE FlexibleInstances , TemplateHaskell , TypeFamilies & # 35;-} ;
import Data . Bifunctor . TH
class AssocClass a b c where
data AssocData a b c
instance AssocClass Int b c where
data AssocData Int b c = AssocDataInt1 Int | AssocDataInt2 b c
@
Note that there are some limitations :
* The ' Name ' argument to a @derive@ function must not be a type synonym .
* With a @derive@ function , the last two type variables must both be of kind @*@.
Other type variables of kind @ * - > * @ are assumed to require a ' Functor ' ,
' Foldable ' , or ' ' constraint ( depending on which @derive@ function is
used ) , and other type variables of kind @ * - > * - > * @ are assumed to require an
' Bifunctor ' , ' Bifoldable ' , or ' Bitraversable ' constraint . If your data type
does n't meet these assumptions , use a @make@ function .
* If using the @-XDatatypeContexts@ , , or @-XGADTs@
extensions , a constraint can not mention either of the last two type variables . For
example , @data Illegal2 a b where I2 : : a = > a - > b - > Illegal2 a b@ can not
have a derived ' Bifunctor ' instance .
* If either of the last two type variables is used within a constructor argument 's
type , it must only be used in the last two type arguments . For example ,
@data Legal a b = Legal ( Int , Int , a , b)@ can have a derived ' Bifunctor ' instance ,
but @data Illegal a b = Illegal ( a , b , a , b)@ can not .
* Data family instances must be able to eta - reduce the last two type variables . In other
words , if you have a instance of the form :
@
data family Family a1 ... an t1 t2
data instance Family e1 ... e2 v1 v2 = ...
@
Then the following conditions must hold :
1 . @v1@ and @v2@ must be distinct type variables .
2. Neither @v1@ nor @v2@ may be mentioned in any of @e1@, ..., @e2@.
'deriveBifunctor', 'deriveBifoldable', and 'deriveBitraversable' automatically
generate their respective class instances for a given data type, newtype, or data
family instance that has at least two type variable. Examples:
@
{-# LANGUAGE TemplateHaskell #-}
import Data.Bifunctor.TH
data Pair a b = Pair a b
data WrapLeftPair f g a b = WrapLeftPair (f a) (g a b)
$('deriveBifoldable' ''WrapLeftPair)
@
If you are using @template-haskell-2.7.0.0@ or later (i.e., GHC 7.4 or later),
the @derive@ functions can be used data family instances (which requires the
@-XTypeFamilies@ extension). To do so, pass the name of a data or newtype instance
constructor (NOT a data family name!) to a @derive@ function. Note that the
generated code may require the @-XFlexibleInstances@ extension. Example:
@
{-# LANGUAGE FlexibleInstances, TemplateHaskell, TypeFamilies #-}
import Data.Bifunctor.TH
class AssocClass a b c where
data AssocData a b c
instance AssocClass Int b c where
data AssocData Int b c = AssocDataInt1 Int | AssocDataInt2 b c
@
Note that there are some limitations:
* The 'Name' argument to a @derive@ function must not be a type synonym.
* With a @derive@ function, the last two type variables must both be of kind @*@.
Other type variables of kind @* -> *@ are assumed to require a 'Functor',
'Foldable', or 'Traversable' constraint (depending on which @derive@ function is
used), and other type variables of kind @* -> * -> *@ are assumed to require an
'Bifunctor', 'Bifoldable', or 'Bitraversable' constraint. If your data type
doesn't meet these assumptions, use a @make@ function.
* If using the @-XDatatypeContexts@, @-XExistentialQuantification@, or @-XGADTs@
extensions, a constraint cannot mention either of the last two type variables. For
example, @data Illegal2 a b where I2 :: Ord a => a -> b -> Illegal2 a b@ cannot
have a derived 'Bifunctor' instance.
* If either of the last two type variables is used within a constructor argument's
type, it must only be used in the last two type arguments. For example,
@data Legal a b = Legal (Int, Int, a, b)@ can have a derived 'Bifunctor' instance,
but @data Illegal a b = Illegal (a, b, a, b)@ cannot.
* Data family instances must be able to eta-reduce the last two type variables. In other
words, if you have a instance of the form:
@
data family Family a1 ... an t1 t2
data instance Family e1 ... e2 v1 v2 = ...
@
Then the following conditions must hold:
1. @v1@ and @v2@ must be distinct type variables.
2. Neither @v1@ nor @v2@ may be mentioned in any of @e1@, ..., @e2@.
-}
$ make
There may be scenarios in which you want to , say , ' bimap ' over an arbitrary data type
or data family instance without having to make the type an instance of ' Bifunctor ' . For
these cases , this module provides several functions ( all prefixed with @make@- ) that
splice the appropriate lambda expression into your source code .
This is particularly useful for creating instances for sophisticated data types . For
example , ' deriveBifunctor ' can not infer the correct type context for
@newtype HigherKinded f a b c = HigherKinded ( f a b c)@ , since is of kind
@ * - > * - > * - > * @. However , it is still possible to create a ' Bifunctor ' instance for
@HigherKinded@ without too much trouble using ' ' :
@
& # 123;-# ; LANGUAGE FlexibleContexts , TemplateHaskell & # 35;-} ;
import Data . Bifunctor
import Data . Bifunctor . TH
newtype HigherKinded f a b c = HigherKinded ( f a b c )
instance Bifunctor ( f a ) = ( HigherKinded f a ) where
bimap = $ ( makeBimap '' HigherKinded )
@
There may be scenarios in which you want to, say, 'bimap' over an arbitrary data type
or data family instance without having to make the type an instance of 'Bifunctor'. For
these cases, this module provides several functions (all prefixed with @make@-) that
splice the appropriate lambda expression into your source code.
This is particularly useful for creating instances for sophisticated data types. For
example, 'deriveBifunctor' cannot infer the correct type context for
@newtype HigherKinded f a b c = HigherKinded (f a b c)@, since @f@ is of kind
@* -> * -> * -> *@. However, it is still possible to create a 'Bifunctor' instance for
@HigherKinded@ without too much trouble using 'makeBimap':
@
{-# LANGUAGE FlexibleContexts, TemplateHaskell #-}
import Data.Bifunctor
import Data.Bifunctor.TH
newtype HigherKinded f a b c = HigherKinded (f a b c)
instance Bifunctor (f a) => Bifunctor (HigherKinded f a) where
bimap = $(makeBimap ''HigherKinded)
@
-}
-- | Generates a 'Bifunctor' instance declaration for the given data type or
-- data family instance.
deriveBifunctor :: Name -> Q [Dec]
deriveBifunctor = deriveBifunctorOptions defaultOptions

-- | Like 'deriveBifunctor', but takes an 'Options' argument.
deriveBifunctorOptions :: Options -> Name -> Q [Dec]
deriveBifunctorOptions = deriveBiClass Bifunctor

-- | Generates a lambda expression which behaves like 'bimap' (without
-- requiring a 'Bifunctor' instance).
makeBimap :: Name -> Q Exp
makeBimap = makeBimapOptions defaultOptions

-- | Like 'makeBimap', but takes an 'Options' argument.
makeBimapOptions :: Options -> Name -> Q Exp
makeBimapOptions = makeBiFun Bimap
-- | Generates a 'Bifoldable' instance declaration for the given data type or
-- data family instance.
deriveBifoldable :: Name -> Q [Dec]
deriveBifoldable name = deriveBifoldableOptions defaultOptions name

-- | Like 'deriveBifoldable', but takes an 'Options' argument.
deriveBifoldableOptions :: Options -> Name -> Q [Dec]
deriveBifoldableOptions opts name = deriveBiClass Bifoldable opts name

-- | Generates a lambda expression which behaves like 'bifold' (without
-- requiring a 'Bifoldable' instance).
makeBifold :: Name -> Q Exp
makeBifold name = makeBifoldOptions defaultOptions name

-- | Like 'makeBifold', but takes an 'Options' argument.
--
-- @bifold@ is @bifoldMap id id@, so we simply apply the generated
-- @bifoldMap@ expression to two 'id's.
makeBifoldOptions :: Options -> Name -> Q Exp
makeBifoldOptions opts name =
  makeBifoldMapOptions opts name `appE` varE idValName `appE` varE idValName
-- | Generates a lambda expression which behaves like 'bifoldMap' (without
-- requiring a 'Bifoldable' instance).
makeBifoldMap :: Name -> Q Exp
makeBifoldMap name = makeBifoldMapOptions defaultOptions name

-- | Like 'makeBifoldMap', but takes an 'Options' argument.
makeBifoldMapOptions :: Options -> Name -> Q Exp
makeBifoldMapOptions opts name = makeBiFun BifoldMap opts name

-- | Generates a lambda expression which behaves like 'bifoldr' (without
-- requiring a 'Bifoldable' instance).
makeBifoldr :: Name -> Q Exp
makeBifoldr name = makeBifoldrOptions defaultOptions name

-- | Like 'makeBifoldr', but takes an 'Options' argument.
makeBifoldrOptions :: Options -> Name -> Q Exp
makeBifoldrOptions opts name = makeBiFun Bifoldr opts name
-- | Generates a lambda expression which behaves like 'bifoldl' (without
-- requiring a 'Bifoldable' instance).
makeBifoldl :: Name -> Q Exp
makeBifoldl = makeBifoldlOptions defaultOptions

-- | Like 'makeBifoldl', but takes an 'Options' argument.
--
-- The generated expression is
-- @\\f g z t -> appEndo (getDual (bifoldMap (Dual . Endo . flip f)
--                                           (Dual . Endo . flip g) t)) z@.
makeBifoldlOptions :: Options -> Name -> Q Exp
makeBifoldlOptions opts name = do
  fName <- newName "f"
  gName <- newName "g"
  zName <- newName "z"
  tName <- newName "t"
  let -- @l `comp` r@ builds the expression @l . r@.
      comp :: Q Exp -> Q Exp -> Q Exp
      comp l r = infixApp l (varE composeValName) r
      -- Builds @Dual . Endo . flip n@ for a mapping-function name @n@.
      dualEndo :: Name -> Q Exp
      dualEndo n =
        conE dualDataName `comp`
          (conE endoDataName `comp` (varE flipValName `appE` varE n))
  lamE (map varP [fName, gName, zName, tName]) $
    appsE [ varE appEndoValName
          , varE getDualValName `appE`
              appsE [ makeBifoldMapOptions opts name
                    , dualEndo fName
                    , dualEndo gName
                    , varE tName
                    ]
          , varE zName
          ]
-- | Generates a 'Bitraversable' instance declaration for the given data type
-- or data family instance.
deriveBitraversable :: Name -> Q [Dec]
deriveBitraversable = deriveBitraversableOptions defaultOptions

-- | Like 'deriveBitraversable', but takes an 'Options' argument.
deriveBitraversableOptions :: Options -> Name -> Q [Dec]
deriveBitraversableOptions = deriveBiClass Bitraversable
-- | Generates a lambda expression which behaves like 'bitraverse' (without
-- requiring a 'Bitraversable' instance).
makeBitraverse :: Name -> Q Exp
makeBitraverse = makeBitraverseOptions defaultOptions

-- | Like 'makeBitraverse', but takes an 'Options' argument.
makeBitraverseOptions :: Options -> Name -> Q Exp
makeBitraverseOptions = makeBiFun Bitraverse
-- | Generates a lambda expression which behaves like 'bisequenceA' (without
-- requiring a 'Bitraversable' instance).
makeBisequenceA :: Name -> Q Exp
makeBisequenceA = makeBisequenceAOptions defaultOptions

-- | Like 'makeBisequenceA', but takes an 'Options' argument.
--
-- @bisequenceA@ is @bitraverse id id@.
makeBisequenceAOptions :: Options -> Name -> Q Exp
makeBisequenceAOptions opts name = appsE [ makeBitraverseOptions opts name
                                         , varE idValName
                                         , varE idValName
                                         ]
-- | Generates a lambda expression which behaves like 'bimapM' (without
-- requiring a 'Bitraversable' instance).
makeBimapM :: Name -> Q Exp
makeBimapM = makeBimapMOptions defaultOptions

-- | Like 'makeBimapM', but takes an 'Options' argument.
--
-- Generated code:
-- @\\f g -> unwrapMonad . bitraverse (WrapMonad . f) (WrapMonad . g)@
makeBimapMOptions :: Options -> Name -> Q Exp
makeBimapMOptions opts name = do
  f <- newName "f"
  g <- newName "g"
  lamE [varP f, varP g] . infixApp (varE unwrapMonadValName) (varE composeValName) $
    appsE [ makeBitraverseOptions opts name
          , wrapMonadExp f
          , wrapMonadExp g
          ]
  where
    -- Builds @WrapMonad . n@.
    wrapMonadExp :: Name -> Q Exp
    wrapMonadExp n = infixApp (conE wrapMonadDataName) (varE composeValName) (varE n)
-- | Generates a lambda expression which behaves like 'bisequence' (without
-- requiring a 'Bitraversable' instance).
makeBisequence :: Name -> Q Exp
makeBisequence = makeBisequenceOptions defaultOptions

-- | Like 'makeBisequence', but takes an 'Options' argument.
--
-- @bisequence@ is @bimapM id id@.
makeBisequenceOptions :: Options -> Name -> Q Exp
makeBisequenceOptions opts name = appsE [ makeBimapMOptions opts name
                                        , varE idValName
                                        , varE idValName
                                        ]
-- | Derive a class instance declaration (depending on the BiClass argument's
-- value). Reifies the named data type, builds the instance context and head,
-- and emits a single instance declaration containing the class methods.
deriveBiClass :: BiClass -> Options -> Name -> Q [Dec]
deriveBiClass biClass opts name = do
  info <- reifyDatatype name
  case info of
    DatatypeInfo { datatypeContext   = ctxt
                 , datatypeName      = parentName
                 , datatypeInstTypes = instTys
                 , datatypeVariant   = variant
                 , datatypeCons      = cons
                 } -> do
      -- buildTypeInstance performs all of the eligibility checks and
      -- computes the instance context/head.
      (instanceCxt, instanceType)
          <- buildTypeInstance biClass parentName ctxt instTys variant
      (:[]) `fmap` instanceD (return instanceCxt)
                             (return instanceType)
                             (biFunDecs biClass opts parentName instTys cons)
-- | Generates declarations defining the primary function(s) corresponding to
-- a particular class (bimap for Bifunctor, bifoldr and bifoldMap for
-- Bifoldable, and bitraverse for Bitraversable).
--
-- For why both bifoldr and bifoldMap are derived for Bifoldable, see
-- GHC Trac #7436.
biFunDecs :: BiClass -> Options -> Name -> [Type] -> [ConstructorInfo] -> [Q Dec]
biFunDecs biClass opts parentName instTys cons =
  map makeFunD $ biClassToFuns biClass
  where
    -- One 'funD' per class method; each is a single clause whose right-hand
    -- side is generated from the data type's constructors.
    makeFunD :: BiFun -> Q Dec
    makeFunD biFun =
      funD (biFunName biFun)
           [ clause []
                    (normalB $ makeBiFunForCons biFun opts parentName instTys cons)
                    []
           ]
-- | Generates a lambda expression which behaves like the BiFun argument.
makeBiFun :: BiFun -> Options -> Name -> Q Exp
makeBiFun biFun opts name = do
  info <- reifyDatatype name
  case info of
    DatatypeInfo { datatypeContext   = ctxt
                 , datatypeName      = parentName
                 , datatypeInstTypes = instTys
                 , datatypeVariant   = variant
                 , datatypeCons      = cons
                 } ->
      -- We force buildTypeInstance here since it performs checks for whether
      -- or not the provided datatype can actually have
      -- bimap/bifoldr/bitraverse/etc. implemented for it, and produces
      -- errors if it cannot.
      buildTypeInstance (biFunToClass biFun) parentName ctxt instTys variant
        >> makeBiFunForCons biFun opts parentName instTys cons
-- | Generates the lambda-expression body shared by all of the @make@
-- functions: binds the mapping-function arguments, then dispatches on the
-- data type's constructors.
makeBiFunForCons :: BiFun -> Options -> Name -> [Type] -> [ConstructorInfo] -> Q Exp
makeBiFunForCons biFun opts _parentName instTys cons = do
  map1 <- newName "f"
  map2 <- newName "g"
  value <- newName "value"
  -- NOTE(review): @z@ is referenced below, but no @z <- newName "z"@ binding
  -- is visible in this chunk; a line appears to have been lost during
  -- extraction. Confirm against the upstream bifunctors source.
  let argNames = catMaybes [ Just map1
                           , Just map2
                           , guard (biFun == Bifoldr) >> Just z
                           , Just value
                           ]
      -- Map each of the last two type variables to its mapping function.
      lastTyVars = map varTToName $ drop (length instTys - 2) instTys
      tvMap = Map.fromList $ zip lastTyVars [map1, map2]
  lamE (map varP argNames)
    . appsE
    $ [ varE $ biFunConstName biFun
      , makeFun z value tvMap
      ] ++ map varE argNames
  where
    -- Chooses how to implement the function body, special-casing phantom
    -- roles and empty data types.
    makeFun :: Name -> Name -> TyVarMap -> Q Exp
    makeFun z value tvMap = do
      roles <- reifyRoles _parentName
      case () of
        _
          -- Both of the last two parameters have a phantom role: coerce.
          | Just (rs, PhantomR) <- unsnoc roles
          , Just (_, PhantomR) <- unsnoc rs
          -> biFunPhantom z value
          -- Empty data type with the EmptyCase behavior requested.
          | null cons && emptyCaseBehavior opts
          -> biFunEmptyCase biFun z value
          -- Empty data type without EmptyCase.
          | null cons
          -> biFunNoCons biFun z value
          | otherwise
          -> caseE (varE value)
                   (map (makeBiFunForCon biFun z tvMap) cons)
    -- Implementation used when the mapped parameters are both phantom:
    -- the value can simply be coerced.
    biFunPhantom :: Name -> Name -> Q Exp
    biFunPhantom z value =
      biFunTrivial coerce
                   (varE pureValName `appE` coerce)
                   biFun z
      where
        coerce :: Q Exp
        coerce = varE coerceValName `appE` varE value
-- | Dispatches to the per-method match generator for a single constructor,
-- after rejecting existential contexts that mention the mapped variables
-- (when the class being derived does not allow them).
makeBiFunForCon :: BiFun -> Name -> TyVarMap -> ConstructorInfo -> Q Match
makeBiFunForCon biFun z tvMap
                con@ConstructorInfo { constructorName    = conName
                                    , constructorContext = ctxt } = do
  let badContext = any (`predMentionsName` Map.keys tvMap) ctxt
                || Map.size tvMap < 2
  when (badContext && not (allowExQuant (biFunToClass biFun))) $
    existentialContextError conName
  case biFun of
    Bimap      -> makeBimapMatch tvMap con
    Bifoldr    -> makeBifoldrMatch z tvMap con
    BifoldMap  -> makeBifoldMapMatch tvMap con
    Bitraverse -> makeBitraverseMatch tvMap con
-- | Generates a match whose right-hand side implements @bimap@.
makeBimapMatch :: TyVarMap -> ConstructorInfo -> Q Match
makeBimapMatch tvMap con@ConstructorInfo{constructorName = conName} = do
  parts <- foldDataConArgs tvMap ft_bimap con
  match_for_con conName parts
  where
    -- How to fold over each form of constructor-argument type.
    ft_bimap :: FFoldType (Exp -> Q Exp)
    ft_bimap = FT { ft_triv = return
                  , ft_var = \v x -> return $ VarE (tvMap Map.! v) `AppE` x
                  , ft_fun = \g h x -> mkSimpleLam $ \b -> do
                      gg <- g b
                      h $ x `AppE` gg
                  , ft_tup = mkSimpleTupleCase match_for_con
                  , ft_ty_app = \argGs x -> do
                      let inspect :: (Type, Exp -> Q Exp) -> Q Exp
                          -- If the argument type is a bare occurrence of one
                          -- of the mapped type variables, use its mapping
                          -- function directly rather than wrapping in a
                          -- lambda.
                          -- NOTE(review): the original comment here was
                          -- truncated during extraction ("This was inspired
                          -- by ..."); confirm the reference upstream.
                          inspect (argTy, g)
                            | Just argVar <- varTToName_maybe argTy
                            , Just f <- Map.lookup argVar tvMap
                            = return $ VarE f
                            | otherwise
                            = mkSimpleLam g
                      appsE $ varE (fmapArity (length argGs))
                              : map inspect argGs
                              ++ [return x]
                  , ft_forall = \_ g x -> g x
                  , ft_bad_app = \_ -> outOfPlaceTyVarError conName
                  , ft_co_var = \_ _ -> contravarianceError conName
                  }
    -- Builds @Con (f a1) (g a2) ...@ for a matched constructor.
    match_for_con :: Name -> [Exp -> Q Exp] -> Q Match
    match_for_con = mkSimpleConMatch $ \conName' xs ->
      -- NOTE(review): the right-hand side of this lambda is missing from
      -- this chunk (extraction dropped at least one line); confirm against
      -- the upstream bifunctors source.
-- | Generates a match whose right-hand side implements @bifoldr@.
makeBifoldrMatch :: Name -> TyVarMap -> ConstructorInfo -> Q Match
makeBifoldrMatch z tvMap con@ConstructorInfo{constructorName = conName} = do
  parts <- foldDataConArgs tvMap ft_bifoldr con
  parts' <- sequence parts
  match_for_con (VarE z) conName parts'
  where
    -- The Bool is True if the type mentions one of the last two type
    -- parameters, False otherwise. Later, match_for_con uses
    -- mkSimpleConMatch2 to filter out the irrelevant expressions.
    ft_bifoldr :: FFoldType (Q (Bool, Exp))
    -- NOTE(review): the record opener (likely @ft_bifoldr = FT {@) is
    -- missing from this chunk; a line appears to have been dropped during
    -- extraction. Confirm against the upstream bifunctors source.
    -- See Note [ft_triv for Bifoldable and Bitraversable]
                   ft_triv = do lam <- mkSimpleLam2 $ \_ z' -> return z'
                                return (False, lam)
                 , ft_var = \v -> return (True, VarE $ tvMap Map.! v)
                 , ft_tup = \t gs -> do
                     gg <- sequence gs
                     lam <- mkSimpleLam2 $ \x z' ->
                       mkSimpleTupleCase (match_for_con z') t gg x
                     return (True, lam)
                 , ft_ty_app = \gs -> do
                     lam <- mkSimpleLam2 $ \x z' ->
                       appsE $ varE (foldrArity (length gs))
                               : map (\(_, hs) -> fmap snd hs) gs
                               ++ map return [z', x]
                     return (True, lam)
                 , ft_forall = \_ g -> g
                 , ft_co_var = \_ -> contravarianceError conName
                 , ft_fun = \_ _ -> noFunctionsError conName
                 , ft_bad_app = outOfPlaceTyVarError conName
                 }
    -- Builds @g1 v1 (g2 v2 (... z))@ by folding application over the
    -- per-field folding functions.
    match_for_con :: Exp -> Name -> [(Bool, Exp)] -> Q Match
    match_for_con zExp = mkSimpleConMatch2 $ \_ xs -> return $ mkBifoldr xs
      where
        mkBifoldr :: [Exp] -> Exp
        mkBifoldr = foldr AppE zExp
-- | Generates a match whose right-hand side implements @bifoldMap@.
makeBifoldMapMatch :: TyVarMap -> ConstructorInfo -> Q Match
makeBifoldMapMatch tvMap con@ConstructorInfo{constructorName = conName} = do
  parts <- foldDataConArgs tvMap ft_bifoldMap con
  parts' <- sequence parts
  match_for_con conName parts'
  where
    -- The Bool is True if the type mentions one of the last two type
    -- parameters, False otherwise. Later, match_for_con uses
    -- mkSimpleConMatch2 to filter out the irrelevant expressions.
    ft_bifoldMap :: FFoldType (Q (Bool, Exp))
    -- NOTE(review): the record opener (likely @ft_bifoldMap = FT {@) is
    -- missing from this chunk; a line appears to have been dropped during
    -- extraction. Confirm against the upstream bifunctors source.
    -- See Note [ft_triv for Bifoldable and Bitraversable]
                   ft_triv = do lam <- mkSimpleLam $ \_ -> return $ VarE memptyValName
                                return (False, lam)
                 , ft_var = \v -> return (True, VarE $ tvMap Map.! v)
                 , ft_tup = \t gs -> do
                     gg <- sequence gs
                     lam <- mkSimpleLam $ mkSimpleTupleCase match_for_con t gg
                     return (True, lam)
                 , ft_ty_app = \gs -> do
                     e <- appsE $ varE (foldMapArity (length gs))
                                  : map (\(_, hs) -> fmap snd hs) gs
                     return (True, e)
                 , ft_forall = \_ g -> g
                 , ft_co_var = \_ -> contravarianceError conName
                 , ft_fun = \_ _ -> noFunctionsError conName
                 , ft_bad_app = outOfPlaceTyVarError conName
                 }
    -- mappend together the results of all relevant fields; mempty if none.
    match_for_con :: Name -> [(Bool, Exp)] -> Q Match
    match_for_con = mkSimpleConMatch2 $ \_ xs -> return $ mkBifoldMap xs
      where
        mkBifoldMap :: [Exp] -> Exp
        mkBifoldMap [] = VarE memptyValName
        mkBifoldMap es = foldr1 (AppE . AppE (VarE mappendValName)) es
-- | Generates a match whose right-hand side implements @bitraverse@.
makeBitraverseMatch :: TyVarMap -> ConstructorInfo -> Q Match
makeBitraverseMatch tvMap con@ConstructorInfo{constructorName = conName} = do
  parts <- foldDataConArgs tvMap ft_bitrav con
  parts' <- sequence parts
  match_for_con conName parts'
  where
    -- The Bool is True if the type mentions one of the last two type
    -- parameters, False otherwise. Later, match_for_con uses
    -- mkSimpleConMatch2 to filter out the irrelevant expressions.
    ft_bitrav :: FFoldType (Q (Bool, Exp))
    -- NOTE(review): the record opener (likely @ft_bitrav = FT {@) is
    -- missing from this chunk; a line appears to have been dropped during
    -- extraction. Confirm against the upstream bifunctors source.
    -- See Note [ft_triv for Bifoldable and Bitraversable]
                   ft_triv = return (False, VarE pureValName)
                 , ft_var = \v -> return (True, VarE $ tvMap Map.! v)
                 , ft_tup = \t gs -> do
                     gg <- sequence gs
                     lam <- mkSimpleLam $ mkSimpleTupleCase match_for_con t gg
                     return (True, lam)
                 , ft_ty_app = \gs -> do
                     e <- appsE $ varE (traverseArity (length gs))
                                  : map (\(_, hs) -> fmap snd hs) gs
                     return (True, e)
                 , ft_forall = \_ g -> g
                 , ft_co_var = \_ -> contravarianceError conName
                 , ft_fun = \_ _ -> noFunctionsError conName
                 , ft_bad_app = outOfPlaceTyVarError conName
                 }
    -- Applies the constructor in Applicative style:
    -- @pure Con@, @fmap Con e@, or @liftA2 Con e1 e2 <*> e3 <*> ...@.
    match_for_con :: Name -> [(Bool, Exp)] -> Q Match
    match_for_con = mkSimpleConMatch2 $ \conExp xs -> return $ mkApCon conExp xs
      where
        mkApCon :: Exp -> [Exp] -> Exp
        mkApCon conExp [] = VarE pureValName `AppE` conExp
        mkApCon conExp [e] = VarE fmapValName `AppE` conExp `AppE` e
        mkApCon conExp (e1:e2:es) = List.foldl' appAp
          (VarE liftA2ValName `AppE` conExp `AppE` e1 `AppE` e2) es
          where appAp se1 se2 = InfixE (Just se1) (VarE apValName) (Just se2)
-------------------------------------------------------------------------------
-- Template Haskell reifying and AST manipulation
-------------------------------------------------------------------------------

-- | Derives the instance context and instance head for the instance being
-- generated.
buildTypeInstance :: BiClass
                  -- ^ Bifunctor, Bifoldable, or Bitraversable
                  -> Name
                  -- ^ The type constructor (or data family) name
                  -> Cxt
                  -- ^ The datatype context
                  -> [Type]
                  -- ^ The types used to instantiate the type constructor
                  -> DatatypeVariant
                  -- ^ Whether this is a (newtype) data family instance
                  -> Q (Cxt, Type)
buildTypeInstance biClass tyConName dataCxt instTysOrig variant = do
  -- Make sure to expand through type/kind synonyms! Otherwise, the
  -- eta-reduction check might get tripped up over type variables in a
  -- synonym that are actually dropped.
  -- (See GHC Trac #11416 for a scenario where this actually happened.)
  varTysExp <- mapM resolveTypeSynonyms instTysOrig
  let remainingLength :: Int
      remainingLength = length instTysOrig - 2
      droppedTysExp :: [Type]
      droppedTysExp = drop remainingLength varTysExp
      droppedStarKindStati :: [StarKindStatus]
      droppedStarKindStati = map canRealizeKindStar droppedTysExp
  -- Check there are enough types to drop and that all of them are either of
  -- kind * or kind k (for some kind variable k). If not, throw an error.
  when (remainingLength < 0 || elem NotKindStar droppedStarKindStati) $
    derivingKindError biClass tyConName
  let -- Substitute kind * for any dropped kind variables.
      droppedKindVarNames :: [Name]
      droppedKindVarNames = catKindVarNames droppedStarKindStati
      varTysExpSubst :: [Type]
      varTysExpSubst = map (substNamesWithKindStar droppedKindVarNames) varTysExp
      remainingTysExpSubst, droppedTysExpSubst :: [Type]
      (remainingTysExpSubst, droppedTysExpSubst) =
        splitAt remainingLength varTysExpSubst
      -- All of the type variables mentioned in the dropped types
      -- (post-synonym expansion).
      droppedTyVarNames :: [Name]
      droppedTyVarNames = freeVariables droppedTysExpSubst
  -- If any of the dropped types were polykinded, ensure that they are of
  -- kind * after substituting * for the dropped kind variables. If not,
  -- throw an error.
  unless (all hasKindStar droppedTysExpSubst) $
    derivingKindError biClass tyConName
  let -- Derive the instance constraints (and any kind variables which are
      -- specialized to * in those constraints).
      preds :: [Maybe Pred]
      kvNames :: [[Name]]
      kvNames' :: [Name]
      (preds, kvNames) = unzip $ map (deriveConstraint biClass) remainingTysExpSubst
      kvNames' = concat kvNames
      -- Substitute the kind variables specialized in the constraints with *.
      remainingTysExpSubst' :: [Type]
      remainingTysExpSubst' =
        map (substNamesWithKindStar kvNames') remainingTysExpSubst
      -- We now substitute all of the specialized-to-* kind variable names
      -- with *, but in the ORIGINAL types, not the synonym-expanded types.
      -- This is purely cosmetic: it makes the derived instance resemble the
      -- datatype as written in source code as closely as possible.
      remainingTysOrigSubst :: [Type]
      remainingTysOrigSubst =
        map (substNamesWithKindStar (List.union droppedKindVarNames kvNames'))
        $ take remainingLength instTysOrig
  isDataFamily <-
    case variant of
      Datatype -> return False
      Newtype -> return False
      DataInstance -> return True
      NewtypeInstance -> return True
#if MIN_VERSION_th_abstraction(0,5,0)
      Datatype.TypeData -> typeDataError tyConName
#endif
  let -- See Note [Kind signatures in derived instances] for an explanation
      -- of the isDataFamily check.
      remainingTysOrigSubst' :: [Type]
      remainingTysOrigSubst' =
        if isDataFamily
          then remainingTysOrigSubst
          else map unSigT remainingTysOrigSubst
      instanceCxt :: Cxt
      instanceCxt = catMaybes preds
      instanceType :: Type
      instanceType = AppT (ConT $ biClassName biClass)
                   $ applyTyCon tyConName remainingTysOrigSubst'
  -- If the datatype context mentions any of the dropped type variables,
  -- we can't derive an instance, so throw an error.
  when (any (`predMentionsName` droppedTyVarNames) dataCxt) $
    datatypeContextError tyConName instanceType
  -- Also ensure the dropped types can be safely eta-reduced. Otherwise,
  -- throw an error.
  unless (canEtaReduce remainingTysExpSubst' droppedTysExpSubst) $
    etaReductionError instanceType
  return (instanceCxt, instanceType)
deriveConstraint :: BiClass -> Type -> (Maybe Pred, [Name])
deriveConstraint biClass t
| not (isTyVar t) = (Nothing, [])
| otherwise = case hasKindVarChain 1 t of
Just ns -> ((`applyClass` tName) `fmap` biClassConstraint biClass 1, ns)
_ -> case hasKindVarChain 2 t of
Just ns -> ((`applyClass` tName) `fmap` biClassConstraint biClass 2, ns)
_ -> (Nothing, [])
where
tName :: Name
tName = varTToName t
Note [ Kind signatures in derived instances ]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It is possible to put explicit kind signatures into the derived instances , e.g. ,
instance C a = > C ( Data ( f : : * - > * ) ) where ...
But it is preferable to avoid this if possible . If we come up with an incorrect
kind signature ( which is entirely possible , since our type inferencer is pretty
unsophisticated - see Note [ Type inference in derived instances ] ) , then GHC will
flat - out reject the instance , which is quite unfortunate .
Plain old datatypes have the advantage that you can avoid using any kind signatures
at all in their instances . This is because a datatype declaration uses all type
variables , so the types that we use in a derived instance uniquely determine their
kinds . As long as we plug in the right types , the kind inferencer can do the rest
of the work . For this reason , we use unSigT to remove all kind signatures before
splicing in the instance context and head .
Data family instances are trickier , since a data family can have two instances that
are distinguished by kind alone , e.g. ,
data family Fam ( a : : k )
data instance Fam ( a : : * - > * )
data instance Fam ( a : : * )
If we dropped the kind signatures for C ( Fam a ) , then GHC will have no way of
knowing which instance we are talking about . To avoid this scenario , we always
include explicit kind signatures in data family instances . There is a chance that
the inferred kind signatures will be incorrect , but if so , we can always fall back
on the make- functions .
Note [ Type inference in derived instances ]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Type inference is can be tricky to get right , and we want to avoid recreating the
entirety of GHC 's type inferencer in Template Haskell . For this reason , we will
probably never come up with derived instance contexts that are as accurate as
GHC 's . But that does n't mean we ca n't do anything ! There are a couple of simple
things we can do to make instance contexts that work for 80 % of use cases :
1 . If one of the last type parameters is polykinded , then its kind will be
specialized to * in the derived instance . We note what kind variable the type
parameter had and substitute it with * in the other types as well . For example ,
imagine you had
data Data ( a : : k ) ( b : : k ) ( c : : k )
Then you 'd want to derived instance to be :
instance C ( Data ( a : : * ) )
Not :
instance C ( Data ( a : : k ) )
2 . We naïvely come up with instance constraints using the following criteria :
( i ) If there 's a type parameter n of kind k1 - > k2 ( where k1 / k2 are * or kind
variables ) , then generate a Functor n constraint , and if k1 / k2 are kind
variables , then substitute k1 / k2 with * elsewhere in the types . We must
consider the case where they are kind variables because you might have a
scenario like this :
newtype Compose ( f : : k3 - > * ) ( g : : k1 - > k2 - > k3 ) ( a : : k1 ) ( b : : k2 )
= Compose ( f ( g a b ) )
Which would have a derived Bifunctor instance of :
instance ( Functor f , ) = ( Compose f g ) where ...
( ii ) If there 's a type parameter n of kind k1 - > k2 - > k3 ( where k1 / k2 / k3 are
* or kind variables ) , then generate a Bifunctor n constraint and perform
kind substitution as in the other case .
Note [Kind signatures in derived instances]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It is possible to put explicit kind signatures into the derived instances, e.g.,
instance C a => C (Data (f :: * -> *)) where ...
But it is preferable to avoid this if possible. If we come up with an incorrect
kind signature (which is entirely possible, since our type inferencer is pretty
unsophisticated - see Note [Type inference in derived instances]), then GHC will
flat-out reject the instance, which is quite unfortunate.
Plain old datatypes have the advantage that you can avoid using any kind signatures
at all in their instances. This is because a datatype declaration uses all type
variables, so the types that we use in a derived instance uniquely determine their
kinds. As long as we plug in the right types, the kind inferencer can do the rest
of the work. For this reason, we use unSigT to remove all kind signatures before
splicing in the instance context and head.
Data family instances are trickier, since a data family can have two instances that
are distinguished by kind alone, e.g.,
data family Fam (a :: k)
data instance Fam (a :: * -> *)
data instance Fam (a :: *)
If we dropped the kind signatures for C (Fam a), then GHC will have no way of
knowing which instance we are talking about. To avoid this scenario, we always
include explicit kind signatures in data family instances. There is a chance that
the inferred kind signatures will be incorrect, but if so, we can always fall back
on the make- functions.
Note [Type inference in derived instances]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Type inference is can be tricky to get right, and we want to avoid recreating the
entirety of GHC's type inferencer in Template Haskell. For this reason, we will
probably never come up with derived instance contexts that are as accurate as
GHC's. But that doesn't mean we can't do anything! There are a couple of simple
things we can do to make instance contexts that work for 80% of use cases:
1. If one of the last type parameters is polykinded, then its kind will be
specialized to * in the derived instance. We note what kind variable the type
parameter had and substitute it with * in the other types as well. For example,
imagine you had
data Data (a :: k) (b :: k) (c :: k)
Then you'd want to derived instance to be:
instance C (Data (a :: *))
Not:
instance C (Data (a :: k))
2. We naïvely come up with instance constraints using the following criteria:
(i) If there's a type parameter n of kind k1 -> k2 (where k1/k2 are * or kind
variables), then generate a Functor n constraint, and if k1/k2 are kind
variables, then substitute k1/k2 with * elsewhere in the types. We must
consider the case where they are kind variables because you might have a
scenario like this:
newtype Compose (f :: k3 -> *) (g :: k1 -> k2 -> k3) (a :: k1) (b :: k2)
= Compose (f (g a b))
Which would have a derived Bifunctor instance of:
instance (Functor f, Bifunctor g) => Bifunctor (Compose f g) where ...
(ii) If there's a type parameter n of kind k1 -> k2 -> k3 (where k1/k2/k3 are
* or kind variables), then generate a Bifunctor n constraint and perform
kind substitution as in the other case.
-}
Note [ Matching functions with GADT type variables ]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When deriving Bifoldable , there is a tricky corner case to consider :
data Both a b where
BothCon : : x - > x - > Both x x
Which fold functions should be applied to which arguments of BothCon ? We have a
choice , since both the function of type ( a - > m ) and of type ( b - > m ) can be
applied to either argument . In such a scenario , the second fold function takes
precedence over the first fold function , so the derived Bifoldable instance would be :
instance Both where
bifoldMap _ g ( BothCon x1 x2 ) = g x1 < > g x2
This is not an arbitrary choice , as this definition ensures that
bifoldMap i d = Foldable.foldMap for a derived Bifoldable instance for Both .
Note [Matching functions with GADT type variables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When deriving Bifoldable, there is a tricky corner case to consider:
data Both a b where
BothCon :: x -> x -> Both x x
Which fold functions should be applied to which arguments of BothCon? We have a
choice, since both the function of type (a -> m) and of type (b -> m) can be
applied to either argument. In such a scenario, the second fold function takes
precedence over the first fold function, so the derived Bifoldable instance would be:
instance Bifoldable Both where
bifoldMap _ g (BothCon x1 x2) = g x1 <> g x2
This is not an arbitrary choice, as this definition ensures that
bifoldMap id = Foldable.foldMap for a derived Bifoldable instance for Both.
-}
| Either the given data type does n't have enough type variables , or one of
derivingKindError :: BiClass -> Name -> Q a
derivingKindError biClass tyConName = fail
. showString "Cannot derive well-kinded instance of form ‘"
. showString className
. showChar ' '
. showParen True
( showString (nameBase tyConName)
. showString " ..."
)
. showString "‘\n\tClass "
. showString className
. showString " expects an argument of kind * -> * -> *"
$ ""
where
className :: String
className = nameBase $ biClassName biClass
| One of the last two type variables appeard in a contravariant position
when deriving Bifoldable or Bitraversable .
contravarianceError :: Name -> Q a
contravarianceError conName = fail
. showString "Constructor ‘"
. showString (nameBase conName)
. showString "‘ must not use the last type variable(s) in a function argument"
$ ""
| A constructor has a function argument in a derived Bifoldable or Bitraversable
noFunctionsError :: Name -> Q a
noFunctionsError conName = fail
. showString "Constructor ‘"
. showString (nameBase conName)
. showString "‘ must not contain function types"
$ ""
| The data type has a DatatypeContext which mentions one of the eta - reduced
datatypeContextError :: Name -> Type -> Q a
datatypeContextError dataName instanceType = fail
. showString "Can't make a derived instance of ‘"
. showString (pprint instanceType)
. showString "‘:\n\tData type ‘"
. showString (nameBase dataName)
. showString "‘ must not have a class context involving the last type argument(s)"
$ ""
| The data type has an existential constraint which mentions one of the
existentialContextError :: Name -> Q a
existentialContextError conName = fail
. showString "Constructor ‘"
. showString (nameBase conName)
. showString "‘ must be truly polymorphic in the last argument(s) of the data type"
$ ""
| The data type mentions one of the n eta - reduced type variables in a place other
outOfPlaceTyVarError :: Name -> Q a
outOfPlaceTyVarError conName = fail
. showString "Constructor ‘"
. showString (nameBase conName)
. showString "‘ must only use its last two type variable(s) within"
. showString " the last two argument(s) of a data type"
$ ""
etaReductionError :: Type -> Q a
etaReductionError instanceType = fail $
"Cannot eta-reduce to an instance of form \n\tinstance (...) => "
++ pprint instanceType
typeDataError :: Name -> Q a
typeDataError dataName = fail
. showString "Cannot derive instance for ‘"
. showString (nameBase dataName)
. showString "‘, which is a ‘type data‘ declaration"
$ ""
data BiClass = Bifunctor | Bifoldable | Bitraversable
data BiFun = Bimap | Bifoldr | BifoldMap | Bitraverse
deriving Eq
biFunConstName :: BiFun -> Name
biFunConstName Bimap = bimapConstValName
biFunConstName Bifoldr = bifoldrConstValName
biFunConstName BifoldMap = bifoldMapConstValName
biFunConstName Bitraverse = bitraverseConstValName
biClassName :: BiClass -> Name
biClassName Bifunctor = bifunctorTypeName
biClassName Bifoldable = bifoldableTypeName
biClassName Bitraversable = bitraversableTypeName
biFunName :: BiFun -> Name
biFunName Bimap = bimapValName
biFunName Bifoldr = bifoldrValName
biFunName BifoldMap = bifoldMapValName
biFunName Bitraverse = bitraverseValName
biClassToFuns :: BiClass -> [BiFun]
biClassToFuns Bifunctor = [Bimap]
biClassToFuns Bifoldable = [Bifoldr, BifoldMap]
biClassToFuns Bitraversable = [Bitraverse]
biFunToClass :: BiFun -> BiClass
biFunToClass Bimap = Bifunctor
biFunToClass Bifoldr = Bifoldable
biFunToClass BifoldMap = Bifoldable
biFunToClass Bitraverse = Bitraversable
biClassConstraint :: BiClass -> Int -> Maybe Name
biClassConstraint Bifunctor 1 = Just functorTypeName
biClassConstraint Bifoldable 1 = Just foldableTypeName
biClassConstraint Bitraversable 1 = Just traversableTypeName
biClassConstraint biClass 2 = Just $ biClassName biClass
biClassConstraint _ _ = Nothing
fmapArity :: Int -> Name
fmapArity 1 = fmapValName
fmapArity 2 = bimapValName
fmapArity n = arityErr n
foldrArity :: Int -> Name
foldrArity 1 = foldrValName
foldrArity 2 = bifoldrValName
foldrArity n = arityErr n
foldMapArity :: Int -> Name
foldMapArity 1 = foldMapValName
foldMapArity 2 = bifoldMapValName
foldMapArity n = arityErr n
traverseArity :: Int -> Name
traverseArity 1 = traverseValName
traverseArity 2 = bitraverseValName
traverseArity n = arityErr n
arityErr :: Int -> a
arityErr n = error $ "Unsupported arity: " ++ show n
allowExQuant :: BiClass -> Bool
allowExQuant Bifoldable = True
allowExQuant _ = False
biFunEmptyCase :: BiFun -> Name -> Name -> Q Exp
biFunEmptyCase biFun z value =
biFunTrivial emptyCase
(varE pureValName `appE` emptyCase)
biFun z
where
emptyCase :: Q Exp
emptyCase = caseE (varE value) []
biFunNoCons :: BiFun -> Name -> Name -> Q Exp
biFunNoCons biFun z value =
biFunTrivial seqAndError
(varE pureValName `appE` seqAndError)
biFun z
where
seqAndError :: Q Exp
seqAndError = appE (varE seqValName) (varE value) `appE`
appE (varE errorValName)
(stringE $ "Void " ++ nameBase (biFunName biFun))
biFunTrivial :: Q Exp -> Q Exp -> BiFun -> Name -> Q Exp
biFunTrivial bimapE bitraverseE biFun z = go biFun
where
go :: BiFun -> Q Exp
go Bimap = bimapE
go Bifoldr = varE z
go BifoldMap = varE memptyValName
go Bitraverse = bitraverseE
Note [ ft_triv for Bifoldable and Bitraversable ]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When deriving Bifoldable and Bitraversable , we filter out any subexpressions whose
type does not mention one of the last two type parameters . From this , you might
think that we do n't need to implement ft_triv for bifoldr , , or
bitraverse at all , but in fact we do need to . Imagine the following data type :
data T a b = MkT a ( T Int b )
In a derived Bifoldable T instance , you would generate the following bifoldMap
definition :
bifoldMap f g ( MkT a1 a2 ) = f a1 < > ( \ _ - > mempty ) g arg2
You need to fill in bi_triv ( \ _ - > ) as the first argument to the recursive
call to , since that is how the algorithm handles polymorphic recursion .
Note [ft_triv for Bifoldable and Bitraversable]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When deriving Bifoldable and Bitraversable, we filter out any subexpressions whose
type does not mention one of the last two type parameters. From this, you might
think that we don't need to implement ft_triv for bifoldr, bifoldMap, or
bitraverse at all, but in fact we do need to. Imagine the following data type:
data T a b = MkT a (T Int b)
In a derived Bifoldable T instance, you would generate the following bifoldMap
definition:
bifoldMap f g (MkT a1 a2) = f a1 <> bifoldMap (\_ -> mempty) g arg2
You need to fill in bi_triv (\_ -> mempty) as the first argument to the recursive
call to bifoldMap, since that is how the algorithm handles polymorphic recursion.
-}
Generic traversal for functor - like deriving
Much of the code below is cargo - culted from the TcGenFunctor module in GHC .
= FT { ft_triv :: a
, ft_var :: Name -> a
, ft_co_var :: Name -> a
, ft_fun :: a -> a -> a
, ft_tup :: TupleSort -> [a] -> a
, ft_ty_app :: [(Type, a)] -> a
, ft_bad_app :: a
, ft_forall :: [TyVarBndrSpec] -> a -> a
}
Note that in GHC , this function is pure . It must be monadic here since we :
( 1 ) Expand type synonyms
( 2 ) Detect type family applications
Which require reification in Template Haskell , but are pure in Core .
functorLikeTraverse :: forall a.
-> Q a
functorLikeTraverse tvMap FT { ft_triv = caseTrivial, ft_var = caseVar
, ft_co_var = caseCoVar, ft_fun = caseFun
, ft_tup = caseTuple, ft_ty_app = caseTyApp
, ft_bad_app = caseWrongArg, ft_forall = caseForAll }
ty
= do ty' <- resolveTypeSynonyms ty
(res, _) <- go False ty'
return res
where
-> Type
go co t@AppT{}
| (ArrowT, [funArg, funRes]) <- unapplyTy t
= do (funArgR, funArgC) <- go (not co) funArg
(funResR, funResC) <- go co funRes
if funArgC || funResC
then return (caseFun funArgR funResR, True)
else trivial
go co t@AppT{} = do
let (f, args) = unapplyTy t
(_, fc) <- go co f
(xrs, xcs) <- unzip <$> mapM (go co) args
let numLastArgs, numFirstArgs :: Int
numLastArgs = min 2 $ length args
numFirstArgs = length args - numLastArgs
tuple :: TupleSort -> Q (a, Bool)
tuple tupSort = return (caseTuple tupSort xrs, True)
wrongArg :: Q (a, Bool)
wrongArg = return (caseWrongArg, True)
case () of
_ | not (or xcs)
At this point we know that xrs , xcs is not empty ,
and at least one xr is True
| TupleT len <- f
-> tuple $ Boxed len
| UnboxedTupleT len <- f
-> tuple $ Unboxed len
| fc || or (take numFirstArgs xcs)
-> do itf <- isInTypeFamilyApp tyVarNames f args
then wrongArg
else return ( caseTyApp $ drop numFirstArgs $ zip args xrs
, True )
go co (SigT t k) = do
(_, kc) <- go_kind co k
if kc
then return (caseWrongArg, True)
else go co t
go co (VarT v)
| Map.member v tvMap
= return (if co then caseCoVar v else caseVar v, True)
| otherwise
= trivial
go co (ForallT tvbs _ t) = do
(tr, tc) <- go co t
let tvbNames = map tvName tvbs
if not tc || any (`elem` tvbNames) tyVarNames
then trivial
else return (caseForAll tvbs tr, True)
go _ _ = trivial
go_kind :: Bool
-> Kind
-> Q (a, Bool)
go_kind = go
trivial :: Q (a, Bool)
trivial = return (caseTrivial, False)
tyVarNames :: [Name]
tyVarNames = Map.keys tvMap
Fold over the arguments of a data constructor in a Functor - like way .
foldDataConArgs :: forall a. TyVarMap -> FFoldType a -> ConstructorInfo -> Q [a]
foldDataConArgs tvMap ft con = do
fieldTys <- mapM resolveTypeSynonyms $ constructorFields con
mapM foldArg fieldTys
where
foldArg :: Type -> Q a
foldArg = functorLikeTraverse tvMap ft
mkSimpleLam :: (Exp -> Q Exp) -> Q Exp
mkSimpleLam lam = do
certain Bifoldable instances to generate code like this ( see # 89 ):
( \\_n - > ) ...
n <- newName "_n"
body <- lam (VarE n)
return $ LamE [VarP n] body
Make a ' LamE ' using two fresh variables .
mkSimpleLam2 :: (Exp -> Exp -> Q Exp) -> Q Exp
mkSimpleLam2 lam = do
certain Bifoldable instances to generate code like this ( see # 89 ):
n1 <- newName "_n1"
n2 <- newName "n2"
body <- lam (VarE n1) (VarE n2)
return $ LamE [VarP n1, VarP n2] body
" Con a1 a2 a3 - > fold [ x1 a1 , x2 a2 , x3 a3 ] "
which the LHS pattern - matches on , followed by a match on the
constructor @conName@ and its arguments . The RHS folds ( with @fold@ ) over
@conName@ and its arguments , applying an expression ( from @insides@ ) to each
mkSimpleConMatch :: (Name -> [a] -> Q Exp)
-> Name
-> [Exp -> a]
-> Q Match
mkSimpleConMatch fold conName insides = do
varsNeeded <- newNameList "_arg" $ length insides
let pat = conPCompat conName (map VarP varsNeeded)
rhs <- fold conName (zipWith (\i v -> i $ VarE v) insides varsNeeded)
return $ Match pat (NormalB rhs) []
" Con a1 a2 a3 - > fmap ( \b2 - > Con a1 b2 a3 ) ( traverse f a2 ) "
@mkSimpleConMatch2 fold conName insides@ behaves very similarly to
' mkSimpleConMatch ' , with two key differences :
1 . @insides@ is a @[(Bool , Exp)]@ instead of a @[Exp]@. This is because it
mention the last type variable in a derived ' Foldable ' or ' '
2 . @fold@ takes an expression as its first argument instead of a
mkSimpleConMatch2 :: (Exp -> [Exp] -> Q Exp)
-> Name
-> [(Bool, Exp)]
-> Q Match
mkSimpleConMatch2 fold conName insides = do
varsNeeded <- newNameList "_arg" lengthInsides
let pat = conPCompat conName (map VarP varsNeeded)
Make sure to zip BEFORE invoking . We want the variable
exps = catMaybes $ zipWith (\(m, i) v -> if m then Just (i `AppE` VarE v)
else Nothing)
insides varsNeeded
An element of argTysTyVarInfo is True if the constructor argument
argTysTyVarInfo = map fst insides
(asWithTyVar, asWithoutTyVar) = partitionByList argTysTyVarInfo varsNeeded
conExpQ
| null asWithTyVar = appsE (conE conName:map varE asWithoutTyVar)
| otherwise = do
bs <- newNameList "b" lengthInsides
let bs' = filterByList argTysTyVarInfo bs
vars = filterByLists argTysTyVarInfo
(map varE bs) (map varE varsNeeded)
lamE (map varP bs') (appsE (conE conName:vars))
conExp <- conExpQ
rhs <- fold conExp exps
return $ Match pat (NormalB rhs) []
where
lengthInsides = length insides
arguments . For instance , ( a , b ) corresponds to @Boxed 2@ , and ( # a , b , c # )
corresponds to @Unboxed 3@.
data TupleSort
= Boxed Int
| Unboxed Int
mkSimpleTupleCase :: (Name -> [a] -> Q Match)
-> TupleSort -> [a] -> Exp -> Q Exp
mkSimpleTupleCase matchForCon tupSort insides x = do
let tupDataName = case tupSort of
Boxed len -> tupleDataName len
Unboxed len -> unboxedTupleDataName len
m <- matchForCon tupDataName insides
return $ CaseE x [m]
conPCompat :: Name -> [Pat] -> Pat
conPCompat n pats = ConP n
#if MIN_VERSION_template_haskell(2,18,0)
[]
#endif
pats
|
b7ba749f7815da3b8ba38bff3e5b66fb88ad23c7bc5f0e087661d69a2fcbb75e | finnishtransportagency/harja | sampo_sanoma.clj | (ns harja.palvelin.integraatiot.sampo.sanomat.sampo-sanoma
(:require [clojure.xml :refer [parse]]
[clojure.zip :refer [xml-zip]]
[clojure.data.zip.xml :as z]
[taoensso.timbre :as log]
[harja.tyokalut.xml :as xml])
(:import (java.text SimpleDateFormat ParseException)
(java.sql Date)))
(def +xsd-polku+ "xsd/sampo/inbound/")
(defn parsi-paivamaara [teksti]
(try (new Date (.getTime (.parse (SimpleDateFormat. "yyyy-MM-dd'T'HH:mm:ss.SSS") teksti)))
(catch ParseException e
(log/error e "Virhe parsiessa päivämäärää: " teksti)
nil)))
(defn hae-viesti-id [data]
(or (z/xml1-> data (z/attr :messageId))
(z/xml1-> data (z/attr :message_Id))
(z/xml1-> data (z/attr :messageid))
(z/xml1-> data (z/attr :message_id))))
(defn lue-hanke [program]
{:viesti-id (hae-viesti-id program)
:sampo-id (z/xml1-> program (z/attr :id))
:nimi (z/xml1-> program (z/attr :name))
:alkupvm (parsi-paivamaara (z/xml1-> program (z/attr :schedule_start)))
:loppupvm (parsi-paivamaara (z/xml1-> program (z/attr :schedule_finish)))
:alueurakkanro (z/xml1-> program (z/attr :vv_alueurakkanro))
:yhteyshenkilo-sampo-id (z/xml1-> program (z/attr :resourceId))})
(defn lue-urakka [project]
{:viesti-id (hae-viesti-id project)
:sampo-id (z/xml1-> project (z/attr :id))
:nimi (z/xml1-> project (z/attr :name))
:alkupvm (parsi-paivamaara (z/xml1-> project (z/attr :schedule_start)))
:loppupvm (parsi-paivamaara (z/xml1-> project (z/attr :schedule_finish)))
:hanke-sampo-id (z/xml1-> project (z/attr :programId))
:yhteyshenkilo-sampo-id (z/xml1-> project (z/attr :resourceId))
:ely-hash (z/xml1-> project (z/attr :financialDepartmentHash))
:alueurakkanro (z/xml1-> project (z/attr :vv_alueurakkanro))
:urakoitsijan-nimi (z/xml1-> project (z/attr :company_name))
:urakoitsijan-ytunnus (z/xml1-> project (z/attr :vv_corporate_id))})
(defn lue-sopimus [order]
{:viesti-id (hae-viesti-id order)
:sampo-id (z/xml1-> order (z/attr :id))
:nimi (z/xml1-> order (z/attr :name))
:alkupvm (parsi-paivamaara (z/xml1-> order (z/attr :schedule_start)))
:loppupvm (parsi-paivamaara (z/xml1-> order (z/attr :schedule_finish)))
:urakka-sampo-id (z/xml1-> order (z/attr :projectId))
:urakoitsija-sampo-id (z/xml1-> order (z/attr :contractPartyId))})
(defn lue-toimenpideinstanssi [operation]
{:viesti-id (hae-viesti-id operation)
:sampo-id (z/xml1-> operation (z/attr :id))
:nimi (z/xml1-> operation (z/attr :name))
:alkupvm (parsi-paivamaara (z/xml1-> operation (z/attr :schedule_start)))
:loppupvm (parsi-paivamaara (z/xml1-> operation (z/attr :schedule_finish)))
:vastuuhenkilo-id (z/xml1-> operation (z/attr :managerId))
:talousosasto-id (z/xml1-> operation (z/attr :financialDepartmentHash))
:talousosasto-polku (z/xml1-> operation (z/attr :financialDepartmentOBS))
:tuote-id (z/xml1-> operation (z/attr :productHash))
:tuote-polku (z/xml1-> operation (z/attr :productOBS))
:urakka-sampo-id (z/xml1-> operation (z/attr :projectId))
:sampo-toimenpidekoodi (z/xml1-> operation (z/attr :vv_operation))})
(defn lue-organisaatio [company]
{:viesti-id (hae-viesti-id company)
:sampo-id (z/xml1-> company (z/attr :id))
:nimi (z/xml1-> company (z/attr :name))
:y-tunnus (z/xml1-> company (z/attr :vv_corporate_id))
:katuosoite (z/xml1-> (z/xml1-> company) :contactInformation (z/attr :address))
:postinumero (z/xml1-> (z/xml1-> company) :contactInformation (z/attr :postal_Code))
:kaupunki (z/xml1-> (z/xml1-> company) :contactInformation (z/attr :city))})
(defn lue-yhteyshenkilo [resource]
{:viesti-id (hae-viesti-id resource)
:sampo-id (z/xml1-> resource (z/attr :id))
:etunimi (z/xml1-> resource (z/attr :first_name))
:sukunimi (z/xml1-> resource (z/attr :last_name))
:sahkoposti (z/xml1-> (z/xml1-> resource) :contactInformation (z/attr :email))})
(defn lue-viesti [viesti]
(when (not (xml/validi-xml? +xsd-polku+ "Sampo2Harja.xsd" viesti))
(throw (new RuntimeException "XML-sanoma ei ole XSD-skeeman Sampo2Harja.xsd mukaan validi.")))
(let [data (xml/lue viesti)]
{:hankkeet (z/xml-> data :Program lue-hanke)
:urakat (z/xml-> data :Project lue-urakka)
:sopimukset (z/xml-> data :Order lue-sopimus)
:toimenpideinstanssit (z/xml-> data :Operation lue-toimenpideinstanssi)
:organisaatiot (z/xml-> data :Company lue-organisaatio)
:yhteyshenkilot (z/xml-> data :Resource lue-yhteyshenkilo)}))
(defn lue-api-viesti [data]
{:hankkeet (z/xml-> data :Program lue-hanke)
:urakat (z/xml-> data :Project lue-urakka)
:sopimukset (z/xml-> data :Order lue-sopimus)
:toimenpideinstanssit (z/xml-> data :Operation lue-toimenpideinstanssi)
:organisaatiot (z/xml-> data :Company lue-organisaatio)
:yhteyshenkilot (z/xml-> data :Resource lue-yhteyshenkilo)})
| null | https://raw.githubusercontent.com/finnishtransportagency/harja/37a496f4df7594d04f19601c30acf623186283af/src/clj/harja/palvelin/integraatiot/sampo/sanomat/sampo_sanoma.clj | clojure | (ns harja.palvelin.integraatiot.sampo.sanomat.sampo-sanoma
(:require [clojure.xml :refer [parse]]
[clojure.zip :refer [xml-zip]]
[clojure.data.zip.xml :as z]
[taoensso.timbre :as log]
[harja.tyokalut.xml :as xml])
(:import (java.text SimpleDateFormat ParseException)
(java.sql Date)))
(def +xsd-polku+ "xsd/sampo/inbound/")
(defn parsi-paivamaara [teksti]
(try (new Date (.getTime (.parse (SimpleDateFormat. "yyyy-MM-dd'T'HH:mm:ss.SSS") teksti)))
(catch ParseException e
(log/error e "Virhe parsiessa päivämäärää: " teksti)
nil)))
(defn hae-viesti-id [data]
(or (z/xml1-> data (z/attr :messageId))
(z/xml1-> data (z/attr :message_Id))
(z/xml1-> data (z/attr :messageid))
(z/xml1-> data (z/attr :message_id))))
(defn lue-hanke [program]
{:viesti-id (hae-viesti-id program)
:sampo-id (z/xml1-> program (z/attr :id))
:nimi (z/xml1-> program (z/attr :name))
:alkupvm (parsi-paivamaara (z/xml1-> program (z/attr :schedule_start)))
:loppupvm (parsi-paivamaara (z/xml1-> program (z/attr :schedule_finish)))
:alueurakkanro (z/xml1-> program (z/attr :vv_alueurakkanro))
:yhteyshenkilo-sampo-id (z/xml1-> program (z/attr :resourceId))})
(defn lue-urakka [project]
{:viesti-id (hae-viesti-id project)
:sampo-id (z/xml1-> project (z/attr :id))
:nimi (z/xml1-> project (z/attr :name))
:alkupvm (parsi-paivamaara (z/xml1-> project (z/attr :schedule_start)))
:loppupvm (parsi-paivamaara (z/xml1-> project (z/attr :schedule_finish)))
:hanke-sampo-id (z/xml1-> project (z/attr :programId))
:yhteyshenkilo-sampo-id (z/xml1-> project (z/attr :resourceId))
:ely-hash (z/xml1-> project (z/attr :financialDepartmentHash))
:alueurakkanro (z/xml1-> project (z/attr :vv_alueurakkanro))
:urakoitsijan-nimi (z/xml1-> project (z/attr :company_name))
:urakoitsijan-ytunnus (z/xml1-> project (z/attr :vv_corporate_id))})
(defn lue-sopimus [order]
{:viesti-id (hae-viesti-id order)
:sampo-id (z/xml1-> order (z/attr :id))
:nimi (z/xml1-> order (z/attr :name))
:alkupvm (parsi-paivamaara (z/xml1-> order (z/attr :schedule_start)))
:loppupvm (parsi-paivamaara (z/xml1-> order (z/attr :schedule_finish)))
:urakka-sampo-id (z/xml1-> order (z/attr :projectId))
:urakoitsija-sampo-id (z/xml1-> order (z/attr :contractPartyId))})
(defn lue-toimenpideinstanssi [operation]
{:viesti-id (hae-viesti-id operation)
:sampo-id (z/xml1-> operation (z/attr :id))
:nimi (z/xml1-> operation (z/attr :name))
:alkupvm (parsi-paivamaara (z/xml1-> operation (z/attr :schedule_start)))
:loppupvm (parsi-paivamaara (z/xml1-> operation (z/attr :schedule_finish)))
:vastuuhenkilo-id (z/xml1-> operation (z/attr :managerId))
:talousosasto-id (z/xml1-> operation (z/attr :financialDepartmentHash))
:talousosasto-polku (z/xml1-> operation (z/attr :financialDepartmentOBS))
:tuote-id (z/xml1-> operation (z/attr :productHash))
:tuote-polku (z/xml1-> operation (z/attr :productOBS))
:urakka-sampo-id (z/xml1-> operation (z/attr :projectId))
:sampo-toimenpidekoodi (z/xml1-> operation (z/attr :vv_operation))})
(defn lue-organisaatio [company]
{:viesti-id (hae-viesti-id company)
:sampo-id (z/xml1-> company (z/attr :id))
:nimi (z/xml1-> company (z/attr :name))
:y-tunnus (z/xml1-> company (z/attr :vv_corporate_id))
:katuosoite (z/xml1-> (z/xml1-> company) :contactInformation (z/attr :address))
:postinumero (z/xml1-> (z/xml1-> company) :contactInformation (z/attr :postal_Code))
:kaupunki (z/xml1-> (z/xml1-> company) :contactInformation (z/attr :city))})
(defn lue-yhteyshenkilo [resource]
{:viesti-id (hae-viesti-id resource)
:sampo-id (z/xml1-> resource (z/attr :id))
:etunimi (z/xml1-> resource (z/attr :first_name))
:sukunimi (z/xml1-> resource (z/attr :last_name))
:sahkoposti (z/xml1-> (z/xml1-> resource) :contactInformation (z/attr :email))})
(defn lue-viesti [viesti]
(when (not (xml/validi-xml? +xsd-polku+ "Sampo2Harja.xsd" viesti))
(throw (new RuntimeException "XML-sanoma ei ole XSD-skeeman Sampo2Harja.xsd mukaan validi.")))
(let [data (xml/lue viesti)]
{:hankkeet (z/xml-> data :Program lue-hanke)
:urakat (z/xml-> data :Project lue-urakka)
:sopimukset (z/xml-> data :Order lue-sopimus)
:toimenpideinstanssit (z/xml-> data :Operation lue-toimenpideinstanssi)
:organisaatiot (z/xml-> data :Company lue-organisaatio)
:yhteyshenkilot (z/xml-> data :Resource lue-yhteyshenkilo)}))
(defn lue-api-viesti [data]
{:hankkeet (z/xml-> data :Program lue-hanke)
:urakat (z/xml-> data :Project lue-urakka)
:sopimukset (z/xml-> data :Order lue-sopimus)
:toimenpideinstanssit (z/xml-> data :Operation lue-toimenpideinstanssi)
:organisaatiot (z/xml-> data :Company lue-organisaatio)
:yhteyshenkilot (z/xml-> data :Resource lue-yhteyshenkilo)})
|
|
1052dd03028a21f5e9612dccddf5c7ec72158ee8f49ac185c5a58e3c02a3dd1a | dropbox/datagraph | Main.hs | # LANGUAGE OverloadedStrings , LambdaCase , GADTs , StandaloneDeriving , FlexibleInstances #
# LANGUAGE MultiParamTypeClasses , TypeFamilies , InstanceSigs , GeneralizedNewtypeDeriving #
{-# LANGUAGE ScopedTypeVariables, RecordWildCards, PartialTypeSignatures #-}
module Main (main) where
--import Debug.Trace
import Network.Wai
import Network.HTTP.Types (status200)
import Network.Wai.Handler.Warp (run)
import Data.Text.Encoding (decodeUtf8)
import Data.Attoparsec.Text (parseOnly, endOfInput)
import qualified Data.Text as Text
import Data.ByteString.Lazy (toStrict)
import qualified Data.GraphQL.AST as AST
import Data.GraphQL.Parser (document)
import Data.HashMap.Strict (HashMap)
import qualified Data.HashMap.Strict as HashMap
import Haxl.Prelude
import Haxl.Core
import Data.Traversable (for)
import qualified Data.Aeson.Encode.Pretty as JSON
import GraphQL
import GraphQLHelpers
import DropboxDataSource
import StarWarsModel
import StarWarsDataSource
import NumberDataSource
decodeInputValue :: AST.Value -> InputValue
decodeInputValue = \case
AST.ValueVariable _ -> error "TODO: variable lookup in environment"
AST.ValueInt i -> IScalar $ SInt i
AST.ValueFloat f -> IScalar $ SFloat f
AST.ValueBoolean f -> IScalar $ SBoolean f
AST.ValueString (AST.StringValue s) -> IScalar $ SString s
AST.ValueEnum s -> IScalar $ SEnum s
AST.ValueList (AST.ListValue ls) -> IList $ fmap decodeInputValue ls
AST.ValueObject (AST.ObjectValue fields) -> IObject $
HashMap.fromList [(name, decodeInputValue value) | AST.ObjectField name value <- fields]
decodeArgument :: AST.Argument -> (Text, InputValue)
decodeArgument (AST.Argument name value) = (name, decodeInputValue value)
processSelectionSet :: ObjectResolver -> AST.SelectionSet -> GraphQLHandler (HashMap Text FullyResolvedValue)
processSelectionSet objectResolver selectionSet = do
fmap HashMap.fromList $ for selectionSet $ \case
AST.SelectionField (AST.Field alias name arguments _directives innerSelectionSet) -> do
-- traceShowM $ name
valueResolver <- case HashMap.lookup name objectResolver of
Just vr -> return vr
Nothing -> fail $ "Requested unknown field: " ++ Text.unpack name
let args = HashMap.fromList $ fmap decodeArgument arguments
outputValue <- valueResolver args >>= \case
RNull -> return FNull
RScalar s -> return $ FScalar s
RList ls -> do
if null innerSelectionSet then do
fail "TODO: lists without selection sets are unsupported"
else do
elements <- for ls $ \elementResolver -> do
element <- elementResolver HashMap.empty >>= \case
RObject elementObjectResolver -> do
processSelectionSet elementObjectResolver innerSelectionSet
_ -> do
fail "Selecting fields from lists requires that all element values be lists"
return (FObject element :: FullyResolvedValue)
return (FList elements :: FullyResolvedValue)
RObject o -> do
if null innerSelectionSet then do
fail "Must select fields out of object"
else do
FObject <$> processSelectionSet o innerSelectionSet
return (if Text.null alias then name else alias, outputValue)
_ -> fail "unsupported selection"
meResolver :: ValueResolver
meResolver = idResolver $ UserID "ME"
friendResolver :: ValueResolver
friendResolver args = do
userID <- requireArgument args "id"
fetchByID (userID :: UserID)
heroResolver :: ValueResolver
heroResolver args = do
episodeID <- lookupArgument args "episode" >>= \case
Just x -> return x
Nothing -> return NewHope
episode <- dataFetch $ FetchEpisode episodeID
character <- dataFetch $ FetchCharacter $ eHero episode
return $ RObject $ resolveObject character
episodeResolver :: ValueResolver
episodeResolver args = do
episodeID <- requireArgument args "id"
episode <- dataFetch $ FetchEpisode episodeID
return $ RObject $ resolveObject episode
addToNumberResolver :: ValueResolver
addToNumberResolver args = do
newNumber <- requireArgument args "newNumber"
() <- uncachedRequest $ AddToNumber newNumber
CAREFUL - the ( ) < - above causes ghc to emit a > > = rather than > > which
-- is important because >>= guarantees sequencing in haxl but >> runs
-- both sides in parallel. Running in parallel here is a bad deal because
-- the fetch needs to happen after the write.
newNumberObject <- dataFetch FetchCurrentNumber
return $ RObject $ resolveObject newNumberObject
data Server = Server
{ rootQuery :: ObjectResolver
, rootMutation :: ObjectResolver
}
data QueryBatch
= QueryBatch [AST.Node]
| SingleMutation AST.Node
data AccumulationState = AccumulationState [AST.Node] [QueryBatch]
flushQueries :: AccumulationState -> [QueryBatch]
flushQueries (AccumulationState [] batches) = batches
flushQueries (AccumulationState queries batches) = QueryBatch (reverse queries) : batches
addDefinition :: AccumulationState -> AST.OperationDefinition -> AccumulationState
addDefinition (AccumulationState queries batches) (AST.Query node) =
AccumulationState (node : queries) batches
addDefinition acc (AST.Mutation node) =
AccumulationState [] (SingleMutation node : flushQueries acc)
groupQueries :: [AST.OperationDefinition] -> [QueryBatch]
groupQueries = reverse . flushQueries . foldl' addDefinition (AccumulationState [] [])
handleRequest :: Server -> StateStore -> (Response -> IO b) -> AST.Document -> IO b
handleRequest server stateStore respond doc = do
let (AST.Document defns) = doc
let operations = [op | AST.DefinitionOperation op <- defns]
let groups = groupQueries operations
outputs <- for groups $ \case
QueryBatch queries -> do
queryEnv <- initEnv stateStore ()
runHaxl queryEnv $ do
for queries $ \(AST.Node name [] [] selectionSet) -> do
output <- processSelectionSet (rootQuery server) selectionSet
return (name, output)
SingleMutation mutation -> do
let (AST.Node name [] [] selectionSet) = mutation
-- top-level mutations must be executed in order, and clear the cache
-- in between
maps <- for selectionSet $ \selection -> do
mutationEnv <- initEnv stateStore ()
runHaxl mutationEnv $ do
processSelectionSet (rootMutation server) [selection]
return [(name, mconcat $ maps)]
let response = HashMap.fromList [("data" :: Text, HashMap.fromList $ mconcat outputs )]
respond $ responseLBS
status200
[("Content-Type", "application/json")]
(JSON.encodePretty response)
app :: StateStore -> Application
app stateStore request respond = do
-- TODO: check the request URL
-- TODO: check the request method (require POST)
_body <- fmap (decodeUtf8 . toStrict) $ strictRequestBody request
let body' = Text.unlines
[ ""
, "query our_names { me { name }, friend(id: \"10\") { name } }"
, "query HeroNameQuery { newhope_hero: hero(episode: NEWHOPE) { name } empire_hero: hero(episode: EMPIRE) { name } jedi_hero: hero(episode: JEDI) { name } }"
, "query EpisodeQuery { episode(id: NEWHOPE) { name releaseYear } }"
, "query newhope_hero_friends { episode(id: NEWHOPE) { hero { name, friends { name }, appearsIn { releaseYear } } } }"
, "mutation numbers { first: addToNumber(newNumber: 1) { theNumber } second: addToNumber(newNumber: 2) { theNumber } third: addToNumber(newNumber: 3) { theNumber } }"
]
queryDoc <- case parseOnly (document <* endOfInput) body' of
Left err -> do
fail $ "Error parsing query: " ++ err
Right d -> do
return d
let rootQuery = HashMap.fromList
[ ("me", meResolver)
, ("friend", friendResolver)
, ("hero", heroResolver)
, ("episode", episodeResolver)
]
let rootMutation = HashMap.fromList
[ ("addToNumber", addToNumberResolver)
]
let server = Server rootQuery rootMutation
handleRequest server stateStore respond queryDoc
main :: IO ()
main = do
putStrLn $ ":8080/"
conn <- openStarWarsRedisConnection
nds <- initializeNumberDataSource 0
let stateStore = stateSet nds $ stateSet conn $ stateSet UserRequestState $ stateEmpty
run 8080 $ app stateStore
| null | https://raw.githubusercontent.com/dropbox/datagraph/429957ebdbf1ef3553dabb2a8f7b16056ef4ac41/src/Main.hs | haskell | # LANGUAGE ScopedTypeVariables, RecordWildCards, PartialTypeSignatures #
import Debug.Trace
traceShowM $ name
is important because >>= guarantees sequencing in haxl but >> runs
both sides in parallel. Running in parallel here is a bad deal because
the fetch needs to happen after the write.
top-level mutations must be executed in order, and clear the cache
in between
TODO: check the request URL
TODO: check the request method (require POST) | # LANGUAGE OverloadedStrings , LambdaCase , GADTs , StandaloneDeriving , FlexibleInstances #
# LANGUAGE MultiParamTypeClasses , TypeFamilies , InstanceSigs , GeneralizedNewtypeDeriving #
module Main (main) where
import Network.Wai
import Network.HTTP.Types (status200)
import Network.Wai.Handler.Warp (run)
import Data.Text.Encoding (decodeUtf8)
import Data.Attoparsec.Text (parseOnly, endOfInput)
import qualified Data.Text as Text
import Data.ByteString.Lazy (toStrict)
import qualified Data.GraphQL.AST as AST
import Data.GraphQL.Parser (document)
import Data.HashMap.Strict (HashMap)
import qualified Data.HashMap.Strict as HashMap
import Haxl.Prelude
import Haxl.Core
import Data.Traversable (for)
import qualified Data.Aeson.Encode.Pretty as JSON
import GraphQL
import GraphQLHelpers
import DropboxDataSource
import StarWarsModel
import StarWarsDataSource
import NumberDataSource
decodeInputValue :: AST.Value -> InputValue
decodeInputValue = \case
AST.ValueVariable _ -> error "TODO: variable lookup in environment"
AST.ValueInt i -> IScalar $ SInt i
AST.ValueFloat f -> IScalar $ SFloat f
AST.ValueBoolean f -> IScalar $ SBoolean f
AST.ValueString (AST.StringValue s) -> IScalar $ SString s
AST.ValueEnum s -> IScalar $ SEnum s
AST.ValueList (AST.ListValue ls) -> IList $ fmap decodeInputValue ls
AST.ValueObject (AST.ObjectValue fields) -> IObject $
HashMap.fromList [(name, decodeInputValue value) | AST.ObjectField name value <- fields]
decodeArgument :: AST.Argument -> (Text, InputValue)
decodeArgument (AST.Argument name value) = (name, decodeInputValue value)
processSelectionSet :: ObjectResolver -> AST.SelectionSet -> GraphQLHandler (HashMap Text FullyResolvedValue)
processSelectionSet objectResolver selectionSet = do
fmap HashMap.fromList $ for selectionSet $ \case
AST.SelectionField (AST.Field alias name arguments _directives innerSelectionSet) -> do
valueResolver <- case HashMap.lookup name objectResolver of
Just vr -> return vr
Nothing -> fail $ "Requested unknown field: " ++ Text.unpack name
let args = HashMap.fromList $ fmap decodeArgument arguments
outputValue <- valueResolver args >>= \case
RNull -> return FNull
RScalar s -> return $ FScalar s
RList ls -> do
if null innerSelectionSet then do
fail "TODO: lists without selection sets are unsupported"
else do
elements <- for ls $ \elementResolver -> do
element <- elementResolver HashMap.empty >>= \case
RObject elementObjectResolver -> do
processSelectionSet elementObjectResolver innerSelectionSet
_ -> do
fail "Selecting fields from lists requires that all element values be lists"
return (FObject element :: FullyResolvedValue)
return (FList elements :: FullyResolvedValue)
RObject o -> do
if null innerSelectionSet then do
fail "Must select fields out of object"
else do
FObject <$> processSelectionSet o innerSelectionSet
return (if Text.null alias then name else alias, outputValue)
_ -> fail "unsupported selection"
meResolver :: ValueResolver
meResolver = idResolver $ UserID "ME"
friendResolver :: ValueResolver
friendResolver args = do
userID <- requireArgument args "id"
fetchByID (userID :: UserID)
heroResolver :: ValueResolver
heroResolver args = do
episodeID <- lookupArgument args "episode" >>= \case
Just x -> return x
Nothing -> return NewHope
episode <- dataFetch $ FetchEpisode episodeID
character <- dataFetch $ FetchCharacter $ eHero episode
return $ RObject $ resolveObject character
episodeResolver :: ValueResolver
episodeResolver args = do
episodeID <- requireArgument args "id"
episode <- dataFetch $ FetchEpisode episodeID
return $ RObject $ resolveObject episode
addToNumberResolver :: ValueResolver
addToNumberResolver args = do
newNumber <- requireArgument args "newNumber"
() <- uncachedRequest $ AddToNumber newNumber
CAREFUL - the ( ) < - above causes ghc to emit a > > = rather than > > which
newNumberObject <- dataFetch FetchCurrentNumber
return $ RObject $ resolveObject newNumberObject
data Server = Server
{ rootQuery :: ObjectResolver
, rootMutation :: ObjectResolver
}
data QueryBatch
= QueryBatch [AST.Node]
| SingleMutation AST.Node
data AccumulationState = AccumulationState [AST.Node] [QueryBatch]
flushQueries :: AccumulationState -> [QueryBatch]
flushQueries (AccumulationState [] batches) = batches
flushQueries (AccumulationState queries batches) = QueryBatch (reverse queries) : batches
addDefinition :: AccumulationState -> AST.OperationDefinition -> AccumulationState
addDefinition (AccumulationState queries batches) (AST.Query node) =
AccumulationState (node : queries) batches
addDefinition acc (AST.Mutation node) =
AccumulationState [] (SingleMutation node : flushQueries acc)
groupQueries :: [AST.OperationDefinition] -> [QueryBatch]
groupQueries = reverse . flushQueries . foldl' addDefinition (AccumulationState [] [])
handleRequest :: Server -> StateStore -> (Response -> IO b) -> AST.Document -> IO b
handleRequest server stateStore respond doc = do
let (AST.Document defns) = doc
let operations = [op | AST.DefinitionOperation op <- defns]
let groups = groupQueries operations
outputs <- for groups $ \case
QueryBatch queries -> do
queryEnv <- initEnv stateStore ()
runHaxl queryEnv $ do
for queries $ \(AST.Node name [] [] selectionSet) -> do
output <- processSelectionSet (rootQuery server) selectionSet
return (name, output)
SingleMutation mutation -> do
let (AST.Node name [] [] selectionSet) = mutation
maps <- for selectionSet $ \selection -> do
mutationEnv <- initEnv stateStore ()
runHaxl mutationEnv $ do
processSelectionSet (rootMutation server) [selection]
return [(name, mconcat $ maps)]
let response = HashMap.fromList [("data" :: Text, HashMap.fromList $ mconcat outputs )]
respond $ responseLBS
status200
[("Content-Type", "application/json")]
(JSON.encodePretty response)
app :: StateStore -> Application
app stateStore request respond = do
_body <- fmap (decodeUtf8 . toStrict) $ strictRequestBody request
let body' = Text.unlines
[ ""
, "query our_names { me { name }, friend(id: \"10\") { name } }"
, "query HeroNameQuery { newhope_hero: hero(episode: NEWHOPE) { name } empire_hero: hero(episode: EMPIRE) { name } jedi_hero: hero(episode: JEDI) { name } }"
, "query EpisodeQuery { episode(id: NEWHOPE) { name releaseYear } }"
, "query newhope_hero_friends { episode(id: NEWHOPE) { hero { name, friends { name }, appearsIn { releaseYear } } } }"
, "mutation numbers { first: addToNumber(newNumber: 1) { theNumber } second: addToNumber(newNumber: 2) { theNumber } third: addToNumber(newNumber: 3) { theNumber } }"
]
queryDoc <- case parseOnly (document <* endOfInput) body' of
Left err -> do
fail $ "Error parsing query: " ++ err
Right d -> do
return d
let rootQuery = HashMap.fromList
[ ("me", meResolver)
, ("friend", friendResolver)
, ("hero", heroResolver)
, ("episode", episodeResolver)
]
let rootMutation = HashMap.fromList
[ ("addToNumber", addToNumberResolver)
]
let server = Server rootQuery rootMutation
handleRequest server stateStore respond queryDoc
main :: IO ()
main = do
putStrLn $ ":8080/"
conn <- openStarWarsRedisConnection
nds <- initializeNumberDataSource 0
let stateStore = stateSet nds $ stateSet conn $ stateSet UserRequestState $ stateEmpty
run 8080 $ app stateStore
|
941293367b9a0be33ade10e66dc1f07b5f9c00b23a12ab4d02eb51d3784b4846 | racket/racket7 | log.rkt | #lang racket/base
(require "context.rkt")
(provide log-expand
log-expand*
log-expand...
...log-expand
log-expand-start)
(define-syntax log-expand...
(syntax-rules (lambda)
[(_ ctx (lambda (obs) body ...))
(let ([obs (expand-context-observer ctx)])
(when obs
body ...))]))
(define-syntax-rule (...log-expand obs [key arg ...] ...)
(begin
(call-expand-observe obs key arg ...)
...))
(define-syntax log-expand*
(syntax-rules ()
[(_ ctx #:when guard [key arg ...] ...)
(log-expand... ctx
(lambda (obs)
(when guard
(...log-expand obs [key arg ...] ...))))]
[(_ ctx #:unless guard [key arg ...] ...)
(log-expand* ctx #:when (not guard) [key arg ...] ...)]
[(_ ctx [key arg ...] ...)
(log-expand* ctx #:when #t [key arg ...] ...)]))
(define-syntax-rule (log-expand ctx key arg ...)
(log-expand* ctx #:when #t [key arg ...]))
(define (call-expand-observe obs key . args)
(obs (hash-ref key->number key) (cond
[(null? args) #f]
[else (apply list* args)])))
(define (log-expand-start)
(define obs (current-expand-observe))
(when obs
(call-expand-observe obs 'start-expand)))
;; For historical reasons, an expander observer currently expects
;; numbers
(define key->number
#hash((visit . 0)
(resolve . 1)
(return . 2)
(next . 3)
(enter-list . 4)
(exit-list . 5)
(enter-prim . 6)
(exit-prim . 7)
(enter-macro . 8)
(exit-macro . 9)
(enter-block . 10)
(splice . 11)
(block->list . 12)
(next-group . 13)
(block->letrec . 14)
(let-renames . 16)
(lambda-renames . 17)
(case-lambda-renames . 18)
(letrec-syntaxes-renames . 19)
(phase-up . 20)
(macro-pre-x . 21)
(macro-post-x . 22)
(module-body . 23)
(block-renames . 24)
(prim-stop . 100)
(prim-module . 101)
(prim-module-begin . 102)
(prim-define-syntaxes . 103)
(prim-define-values . 104)
(prim-if . 105)
(prim-with-continuation-mark . 106)
(prim-begin . 107)
(prim-begin0 . 108)
(prim-#%app . 109)
(prim-lambda . 110)
(prim-case-lambda . 111)
(prim-let-values . 112)
(prim-letrec-values . 113)
(prim-letrec-syntaxes+values . 114)
(prim-#%datum . 115)
(prim-#%top . 116)
(prim-quote . 117)
(prim-quote-syntax . 118)
(prim-require . 119)
(prim-provide . 122)
(prim-set! . 123)
(prim-#%expression . 138)
(prim-#%variable-reference . 149)
(prim-#%stratified . 155)
(prim-begin-for-syntax . 156)
(prim-submodule . 158)
(prim-submodule* . 159)
(variable . 125)
(enter-check . 126)
(exit-check . 127)
(lift-loop . 128)
(letlift-loop . 136)
(module-lift-loop . 137)
(module-lift-end-loop . 135)
(local-lift . 129)
(lift-statement . 134)
(lift-require . 150)
(lift-provide . 151)
(enter-local . 130)
(exit-local . 131)
(local-pre . 132)
(local-post . 133)
(enter-local-expr . 139)
(exit-local-expr . 140)
(start-expand . 141)
(tag . 142)
(local-bind . 143)
(exit-local-bind . 160)
(enter-bind . 144)
(exit-bind . 145)
(opaque-expr . 146)
(rename-list . 147)
(rename-one . 148)
(track-origin . 152)
(local-value . 153)
(local-value-result . 154)
(prepare-env . 157)))
| null | https://raw.githubusercontent.com/racket/racket7/5dbb62c6bbec198b4a790f1dc08fef0c45c2e32b/racket/src/expander/expand/log.rkt | racket | For historical reasons, an expander observer currently expects
numbers | #lang racket/base
(require "context.rkt")
(provide log-expand
log-expand*
log-expand...
...log-expand
log-expand-start)
(define-syntax log-expand...
(syntax-rules (lambda)
[(_ ctx (lambda (obs) body ...))
(let ([obs (expand-context-observer ctx)])
(when obs
body ...))]))
(define-syntax-rule (...log-expand obs [key arg ...] ...)
(begin
(call-expand-observe obs key arg ...)
...))
(define-syntax log-expand*
(syntax-rules ()
[(_ ctx #:when guard [key arg ...] ...)
(log-expand... ctx
(lambda (obs)
(when guard
(...log-expand obs [key arg ...] ...))))]
[(_ ctx #:unless guard [key arg ...] ...)
(log-expand* ctx #:when (not guard) [key arg ...] ...)]
[(_ ctx [key arg ...] ...)
(log-expand* ctx #:when #t [key arg ...] ...)]))
(define-syntax-rule (log-expand ctx key arg ...)
(log-expand* ctx #:when #t [key arg ...]))
(define (call-expand-observe obs key . args)
(obs (hash-ref key->number key) (cond
[(null? args) #f]
[else (apply list* args)])))
(define (log-expand-start)
(define obs (current-expand-observe))
(when obs
(call-expand-observe obs 'start-expand)))
(define key->number
#hash((visit . 0)
(resolve . 1)
(return . 2)
(next . 3)
(enter-list . 4)
(exit-list . 5)
(enter-prim . 6)
(exit-prim . 7)
(enter-macro . 8)
(exit-macro . 9)
(enter-block . 10)
(splice . 11)
(block->list . 12)
(next-group . 13)
(block->letrec . 14)
(let-renames . 16)
(lambda-renames . 17)
(case-lambda-renames . 18)
(letrec-syntaxes-renames . 19)
(phase-up . 20)
(macro-pre-x . 21)
(macro-post-x . 22)
(module-body . 23)
(block-renames . 24)
(prim-stop . 100)
(prim-module . 101)
(prim-module-begin . 102)
(prim-define-syntaxes . 103)
(prim-define-values . 104)
(prim-if . 105)
(prim-with-continuation-mark . 106)
(prim-begin . 107)
(prim-begin0 . 108)
(prim-#%app . 109)
(prim-lambda . 110)
(prim-case-lambda . 111)
(prim-let-values . 112)
(prim-letrec-values . 113)
(prim-letrec-syntaxes+values . 114)
(prim-#%datum . 115)
(prim-#%top . 116)
(prim-quote . 117)
(prim-quote-syntax . 118)
(prim-require . 119)
(prim-provide . 122)
(prim-set! . 123)
(prim-#%expression . 138)
(prim-#%variable-reference . 149)
(prim-#%stratified . 155)
(prim-begin-for-syntax . 156)
(prim-submodule . 158)
(prim-submodule* . 159)
(variable . 125)
(enter-check . 126)
(exit-check . 127)
(lift-loop . 128)
(letlift-loop . 136)
(module-lift-loop . 137)
(module-lift-end-loop . 135)
(local-lift . 129)
(lift-statement . 134)
(lift-require . 150)
(lift-provide . 151)
(enter-local . 130)
(exit-local . 131)
(local-pre . 132)
(local-post . 133)
(enter-local-expr . 139)
(exit-local-expr . 140)
(start-expand . 141)
(tag . 142)
(local-bind . 143)
(exit-local-bind . 160)
(enter-bind . 144)
(exit-bind . 145)
(opaque-expr . 146)
(rename-list . 147)
(rename-one . 148)
(track-origin . 152)
(local-value . 153)
(local-value-result . 154)
(prepare-env . 157)))
|
89c7633c497b2caa0ab2d035db3d733874ae962f6679a10ff651ba941377fd6b | MedeaMelana/Magic | Events.hs | {-# LANGUAGE GADTs #-}
{-# LANGUAGE OverloadedStrings #-}
module Magic.Engine.Events (
-- * Executing effects
executeMagic, executeEffects, executeEffect, raise, applyReplacementEffects,
compileEffect, tick
) where
import Magic.Some
import Magic.Core
import qualified Magic.IdList as IdList
import Magic.Events (willMoveToGraveyard)
import Magic.Labels ((=+))
import Magic.Types
import Magic.Engine.Types
import Control.Applicative ((<$>), (<$))
import Control.Category ((.))
import Control.Monad (forM_,)
import Control.Monad.Except (throwError)
import Control.Monad.Reader (ask, runReaderT)
import Control.Monad.Operational (singleton, Program, ProgramT, viewT, ProgramViewT(..))
import Data.Label (get, set)
import Data.Label.Monadic (gets, puts, (=:), (=.), asks)
import Data.List ((\\))
import Data.Monoid ((<>))
import qualified Data.MultiSet as MultiSet
import Data.Traversable (for)
import Prelude hiding (interact, (.))
executeMagic :: EventSource -> Magic a -> Engine a
executeMagic source m = do
world <- view ask
runExecuteEffectsProgram source (runReaderT (runViewT (runMagic m)) world)
runExecuteEffectsProgram :: EventSource -> ProgramT ExecuteEffects (Program Interact) a -> Engine a
runExecuteEffectsProgram source program = interact (viewT program) >>= eval
where
eval (Return x) = return x
eval (ExecuteEffects effs :>>= k) = executeEffects source effs >>= runExecuteEffectsProgram source . k
eval (Tick :>>= k) =
tick >>= runExecuteEffectsProgram source . k
-- Execute multiple effects as a single event, applying replacement effects and
-- triggering abilities.
executeEffects :: EventSource -> [OneShotEffect] -> Engine [Event]
executeEffects _ [] = return []
executeEffects source effects = do
effects' <- concat <$> for effects (applyReplacementEffects source)
-- If enough players lose to end the game, end the game right now
let losingPlayers = [ p | Will (LoseGame p) <- effects' ]
remainingPlayers <- (\\ losingPlayers) . IdList.ids <$> gets players
case remainingPlayers of
[] -> throwError GameDraw
[p] -> throwError (GameWin p)
_ -> return () -- continue as normal
events <- concat <$> for effects' compileEffect
turnHistory =. (++ events)
raise source events
return events
raise :: EventSource -> [Event] -> Engine ()
raise source events = do
world <- view ask
interact $ singleton (LogEvents source events world)
ros <- view allObjects
forM_ ros $ \(ro, o) -> do
let tas = get triggeredAbilities o
let p = get controller o
prestackItems <- view $ do
programs <- tas events ro p
viewedObject <- asks (objectBase ro)
return (map (\program -> ((ro, viewedObject), program)) programs)
prestack . player p =. (++ prestackItems)
executeEffect :: EventSource -> OneShotEffect -> Engine [Event]
executeEffect source = executeEffects source . (: [])
[ 616 ] Interaction of Replacement and/or Prevention Effects
-- TODO Handle multiple effects (in a single event) at once, to be able to adhere
-- to APNAP order; see
applyReplacementEffects :: EventSource -> OneShotEffect -> Engine [OneShotEffect]
applyReplacementEffects _ eff = return [eff]
applyReplacementEffects source = do
objects < - map snd < $ > view allObjects
-- go (concatMap (get replacementEffects) objects) eff
--where
-- go :: [ReplacementEffect] -> OneShotEffect -> Engine [OneShotEffect]
-- go availableEffects effectToReplace = do
effectToReplace
let ( notApplicable , applicable ) =
partitionEithers $ map ( \f - > maybe ( Left f ) ( \m - > Right ( f , m ) ) ( f effectToReplace ) ) availableEffects
-- if null applicable
-- then return [effectToReplace]
-- else do
( ( _ , mReplacements ) , ) < -
askQuestion p ( AskPickReplacementEffect applicable )
-- replacements <- executeMagic undefined mReplacements
-- -- TODO Resolve replacements in affected player APNAP order.
fmap concat $ for replacements ( go ( map + + notApplicable ) )
[ 616.1 ] The affected player chooses which replacement effect to apply first .
affectedPlayer :: OneShotEffect -> Engine PlayerRef
affectedPlayer e =
case e of
WillMoveObject _ _ o -> return (get (controller . objectPart) o)
Will (GainLife p _) -> return p
Will (LoseLife p _) -> return p
Will (DamageObject _ r _ _ _) -> controllerOf r
Will (DamagePlayer _ p _ _ _) -> return p
Will (ShuffleLibrary p) -> return p
Will (DrawCard p) -> return p
Will (DestroyPermanent r _) -> controllerOf r
Will (TapPermanent r) -> controllerOf r
Will (UntapPermanent r) -> controllerOf r
Will (AddCounter o _) -> controllerOfSome o
Will (RemoveCounter o _) -> controllerOfSome o
Will (AddToManaPool p _) -> return p
Will (SpendFromManaPool p _) -> return p
Will (AttachPermanent o _ _) -> controllerOf o -- debatable
Will (RemoveFromCombat r) -> controllerOf r
Will (PlayLand p _) -> return p
Will (LoseGame p) -> return p
Will (WinGame p) -> return p
Will (InstallLayeredEffect r _) -> controllerOfSome r
Will (CeaseToExist o) -> controllerOfSome o
Will (Sacrifice r) -> controllerOf r
Will (RevealCards p _) -> return p
where
controllerOf :: ObjectRef ty -> Engine PlayerRef
controllerOf r = view $ asks (controller . objectPart . object r)
controllerOfSome :: SomeObjectRef -> Engine PlayerRef
controllerOfSome r = view $ asks (controller . objectBase r)
COMPILATION OF EFFECTS
-- | Compile and execute an effect.
compileEffect :: OneShotEffect -> Engine [Event]
compileEffect e =
case e of
WillMoveObject mOldRef rToZone obj ->
let createObject = do
t <- tick
let insertOp =
case rToZone of
Stack -> IdList.consM
_ -> IdList.snocM
newId <- insertOp (compileZoneRef rToZone) (set (timestamp . objectPart) t obj)
return [DidMoveObject mOldRef (Some rToZone, newId)]
in case mOldRef of
-- TODO 303.4f-g Auras entering the battlefield without being cast
Nothing -> createObject
Just (Some rFromZone, i) -> do
mObj <- IdList.removeM (compileZoneRef rFromZone) i
case mObj of
Nothing -> return []
Just _ -> createObject
Will simpleEffect ->
let simply = ([Did simpleEffect] <$)
combine eff = (++ [Did simpleEffect]) <$> compileEffect eff
onlyIf b ac = if b then ac else return []
in case simpleEffect of
GainLife p n -> onlyIf (n >= 0) $
simply $ life . player p =. (+ n)
LoseLife p n -> onlyIf (n >= 0) $
simply $ life . player p =. (subtract n)
RevealCards _ _ ->
simply $ return ()
TapPermanent r -> do
ts <- gets (tapStatus . object r)
onlyIf (ts == Untapped) $
simply $ tapStatus . object r =: Tapped
UntapPermanent r -> do
ts <- gets (tapStatus . object r)
onlyIf (ts == Tapped) $
simply $ tapStatus . object r =: Untapped
AddCounter r ty ->
simply $ counters . objectBase r =+ [ty]
DrawCard p -> do
lib <- gets (library . player p)
case IdList.toList lib of
[] -> do
failedCardDraw . player p =: True
return []
(ro, o) : _ ->
combine $ WillMoveObject (Just (Some (Library p), ro)) (Hand p) o
DestroyPermanent r _ -> do
o <- gets (objectPart . object r)
combine $ willMoveToGraveyard r o
ShuffleLibrary p -> simply $ do
let libraryLabel = library . player p
lib <- gets libraryLabel
lib' <- IdList.shuffle lib
puts libraryLabel lib'
PlayLand _ ro -> do
o <- gets (objectBase ro)
TODO apply replacement effects on the move effect
TODO store more sensible data in the PlayLand event
combine $ WillMoveObject (Just ro) Battlefield (Permanent o Untapped 0 False Nothing Nothing)
AddToManaPool p pool ->
simply $ manaPool . player p =. MultiSet.union pool
SpendFromManaPool p pool ->
simply $ manaPool . player p =. MultiSet.difference pool
DamageObject _source r amount _isCombatDamage _isPreventable ->
TODO check for protection , infect , wither , lifelink
onlyIf (amount > 0) $
simply $ damage . object r =. (+ amount)
DamagePlayer _source p amount _isCombatDamage _isPreventable ->
TODO check for protection , infect , wither , lifelink
onlyIf (amount > 0) $ combine (Will (LoseLife p amount))
LoseGame p -> do
-- TODO Remove all objects that belong to the player
ps <- gets players
case IdList.remove p ps of
Nothing -> return []
Just (_, ps') -> simply $ players =: ps'
WinGame p ->
throwError (GameWin p)
InstallLayeredEffect r eff ->
simply $ temporaryEffects . objectBase r =. (++ [eff])
CeaseToExist (Some z, i) -> do
m <- IdList.removeM (compileZoneRef z) i
case m of
Nothing -> return []
Just _ -> simply $ return ()
Sacrifice r@(Battlefield, i) -> do
o <- view (asks (objectPart . object r))
combine $ WillMoveObject (Just (Some Battlefield, i)) (Graveyard (get owner o)) (CardObject o)
_ -> error "compileEffect: effect not implemented"
tick :: Engine Timestamp
tick = do
t <- gets time
time =. succ
return t
| null | https://raw.githubusercontent.com/MedeaMelana/Magic/7bd87e4e1d54a7c5e5f81661196cafb87682c62a/Magic/src/Magic/Engine/Events.hs | haskell | # LANGUAGE GADTs #
# LANGUAGE OverloadedStrings #
* Executing effects
Execute multiple effects as a single event, applying replacement effects and
triggering abilities.
If enough players lose to end the game, end the game right now
continue as normal
TODO Handle multiple effects (in a single event) at once, to be able to adhere
to APNAP order; see
go (concatMap (get replacementEffects) objects) eff
where
go :: [ReplacementEffect] -> OneShotEffect -> Engine [OneShotEffect]
go availableEffects effectToReplace = do
if null applicable
then return [effectToReplace]
else do
replacements <- executeMagic undefined mReplacements
-- TODO Resolve replacements in affected player APNAP order.
debatable
| Compile and execute an effect.
TODO 303.4f-g Auras entering the battlefield without being cast
TODO Remove all objects that belong to the player |
module Magic.Engine.Events (
executeMagic, executeEffects, executeEffect, raise, applyReplacementEffects,
compileEffect, tick
) where
import Magic.Some
import Magic.Core
import qualified Magic.IdList as IdList
import Magic.Events (willMoveToGraveyard)
import Magic.Labels ((=+))
import Magic.Types
import Magic.Engine.Types
import Control.Applicative ((<$>), (<$))
import Control.Category ((.))
import Control.Monad (forM_,)
import Control.Monad.Except (throwError)
import Control.Monad.Reader (ask, runReaderT)
import Control.Monad.Operational (singleton, Program, ProgramT, viewT, ProgramViewT(..))
import Data.Label (get, set)
import Data.Label.Monadic (gets, puts, (=:), (=.), asks)
import Data.List ((\\))
import Data.Monoid ((<>))
import qualified Data.MultiSet as MultiSet
import Data.Traversable (for)
import Prelude hiding (interact, (.))
executeMagic :: EventSource -> Magic a -> Engine a
executeMagic source m = do
world <- view ask
runExecuteEffectsProgram source (runReaderT (runViewT (runMagic m)) world)
runExecuteEffectsProgram :: EventSource -> ProgramT ExecuteEffects (Program Interact) a -> Engine a
runExecuteEffectsProgram source program = interact (viewT program) >>= eval
where
eval (Return x) = return x
eval (ExecuteEffects effs :>>= k) = executeEffects source effs >>= runExecuteEffectsProgram source . k
eval (Tick :>>= k) =
tick >>= runExecuteEffectsProgram source . k
executeEffects :: EventSource -> [OneShotEffect] -> Engine [Event]
executeEffects _ [] = return []
executeEffects source effects = do
effects' <- concat <$> for effects (applyReplacementEffects source)
let losingPlayers = [ p | Will (LoseGame p) <- effects' ]
remainingPlayers <- (\\ losingPlayers) . IdList.ids <$> gets players
case remainingPlayers of
[] -> throwError GameDraw
[p] -> throwError (GameWin p)
events <- concat <$> for effects' compileEffect
turnHistory =. (++ events)
raise source events
return events
-- | Log the events, then let the triggered abilities of every object in
-- the game react to them; any resulting programs are queued on the
-- prestack of the respective ability's controller.
raise :: EventSource -> [Event] -> Engine ()
raise source events = do
  world <- view ask
  interact $ singleton (LogEvents source events world)

  ros <- view allObjects
  forM_ ros $ \(ro, o) -> do
    let tas = get triggeredAbilities o
    let p = get controller o
    prestackItems <- view $ do
      programs <- tas events ro p
      viewedObject <- asks (objectBase ro)
      return (map (\program -> ((ro, viewedObject), program)) programs)
    prestack . player p =. (++ prestackItems)
-- | Execute a single one-shot effect; convenience wrapper that defers to
-- 'executeEffects' on a singleton list.
executeEffect :: EventSource -> OneShotEffect -> Engine [Event]
executeEffect source eff = executeEffects source [eff]
-- [616] Interaction of Replacement and/or Prevention Effects
-- | Apply any applicable replacement effects to a one-shot effect,
-- possibly yielding several effects. Currently a stub: no replacement
-- effects are applied and the effect is returned unchanged.
applyReplacementEffects :: EventSource -> OneShotEffect -> Engine [OneShotEffect]
applyReplacementEffects _ eff = return [eff]
{- TODO: restore the full [616] algorithm. The disabled draft (comment
   markers were lost in extraction) read roughly:

  applyReplacementEffects source = do
    objects <- map snd <$> view allObjects
    ... collect the replacement effects applicable to effectToReplace:
    let (notApplicable, applicable) =
          partitionEithers $ map (\f -> maybe (Left f) (\m -> Right (f, m)) (f effectToReplace)) availableEffects
    ((_, mReplacements), _) <-
      askQuestion p (AskPickReplacementEffect applicable)
    fmap concat $ for replacements (go (map fst applicable ++ notApplicable))
-}
-- [616.1] The affected player chooses which replacement effect to apply first.
-- | The player affected by an effect ([616.1]); this is the player who
-- chooses the order in which replacement effects apply to it.
affectedPlayer :: OneShotEffect -> Engine PlayerRef
affectedPlayer e =
  case e of
    WillMoveObject _ _ o -> return (get (controller . objectPart) o)
    Will (GainLife p _) -> return p
    Will (LoseLife p _) -> return p
    Will (DamageObject _ r _ _ _) -> controllerOf r
    Will (DamagePlayer _ p _ _ _) -> return p
    Will (ShuffleLibrary p) -> return p
    Will (DrawCard p) -> return p
    Will (DestroyPermanent r _) -> controllerOf r
    Will (TapPermanent r) -> controllerOf r
    Will (UntapPermanent r) -> controllerOf r
    Will (AddCounter o _) -> controllerOfSome o
    Will (RemoveCounter o _) -> controllerOfSome o
    Will (AddToManaPool p _) -> return p
    Will (SpendFromManaPool p _) -> return p
    Will (RemoveFromCombat r) -> controllerOf r
    Will (PlayLand p _) -> return p
    Will (LoseGame p) -> return p
    Will (WinGame p) -> return p
    Will (InstallLayeredEffect r _) -> controllerOfSome r
    Will (CeaseToExist o) -> controllerOfSome o
    Will (Sacrifice r) -> controllerOf r
    Will (RevealCards p _) -> return p
  where
    -- Controller of a zone-typed object reference (e.g. a permanent).
    controllerOf :: ObjectRef ty -> Engine PlayerRef
    controllerOf r = view $ asks (controller . objectPart . object r)

    -- Controller of an object referenced in an arbitrary zone.
    controllerOfSome :: SomeObjectRef -> Engine PlayerRef
    controllerOfSome r = view $ asks (controller . objectBase r)
-- COMPILATION OF EFFECTS
-- | Compile and execute a single one-shot effect, returning the events
-- that actually happened (the empty list if the effect fizzled).
-- Layout and the four stray TODO comment markers were restored; the
-- extraction had left them as bare prose inside the case expression.
compileEffect :: OneShotEffect -> Engine [Event]
compileEffect e =
  case e of
    WillMoveObject mOldRef rToZone obj ->
      -- Stamp the object with a fresh timestamp and insert it into the
      -- target zone: on top of the stack, at the end of any other zone.
      let createObject = do
            t <- tick
            let insertOp =
                  case rToZone of
                    Stack -> IdList.consM
                    _ -> IdList.snocM
            newId <- insertOp (compileZoneRef rToZone) (set (timestamp . objectPart) t obj)
            return [DidMoveObject mOldRef (Some rToZone, newId)]
      in case mOldRef of
        Nothing -> createObject
        Just (Some rFromZone, i) -> do
          mObj <- IdList.removeM (compileZoneRef rFromZone) i
          case mObj of
            Nothing -> return []  -- the object already left its zone; fizzle
            Just _ -> createObject

    Will simpleEffect ->
      let simply = ([Did simpleEffect] <$)
          combine eff = (++ [Did simpleEffect]) <$> compileEffect eff
          onlyIf b ac = if b then ac else return []
      in case simpleEffect of
        GainLife p n -> onlyIf (n >= 0) $
          simply $ life . player p =. (+ n)
        LoseLife p n -> onlyIf (n >= 0) $
          simply $ life . player p =. (subtract n)
        RevealCards _ _ ->
          simply $ return ()
        TapPermanent r -> do
          ts <- gets (tapStatus . object r)
          onlyIf (ts == Untapped) $
            simply $ tapStatus . object r =: Tapped
        UntapPermanent r -> do
          ts <- gets (tapStatus . object r)
          onlyIf (ts == Tapped) $
            simply $ tapStatus . object r =: Untapped
        AddCounter r ty ->
          simply $ counters . objectBase r =+ [ty]
        DrawCard p -> do
          lib <- gets (library . player p)
          case IdList.toList lib of
            [] -> do
              -- Drawing from an empty library is recorded here and lost
              -- as a state-based action later, not immediately.
              failedCardDraw . player p =: True
              return []
            (ro, o) : _ ->
              combine $ WillMoveObject (Just (Some (Library p), ro)) (Hand p) o
        DestroyPermanent r _ -> do
          o <- gets (objectPart . object r)
          combine $ willMoveToGraveyard r o
        ShuffleLibrary p -> simply $ do
          let libraryLabel = library . player p
          lib <- gets libraryLabel
          lib' <- IdList.shuffle lib
          puts libraryLabel lib'
        PlayLand _ ro -> do
          o <- gets (objectBase ro)
          -- TODO apply replacement effects on the move effect
          -- TODO store more sensible data in the PlayLand event
          combine $ WillMoveObject (Just ro) Battlefield (Permanent o Untapped 0 False Nothing Nothing)
        AddToManaPool p pool ->
          simply $ manaPool . player p =. MultiSet.union pool
        SpendFromManaPool p pool ->
          simply $ manaPool . player p =. MultiSet.difference pool
        DamageObject _source r amount _isCombatDamage _isPreventable ->
          -- TODO check for protection, infect, wither, lifelink
          onlyIf (amount > 0) $
            simply $ damage . object r =. (+ amount)
        DamagePlayer _source p amount _isCombatDamage _isPreventable ->
          -- TODO check for protection, infect, wither, lifelink
          onlyIf (amount > 0) $ combine (Will (LoseLife p amount))
        LoseGame p -> do
          ps <- gets players
          case IdList.remove p ps of
            Nothing -> return []
            Just (_, ps') -> simply $ players =: ps'
        WinGame p ->
          throwError (GameWin p)
        InstallLayeredEffect r eff ->
          simply $ temporaryEffects . objectBase r =. (++ [eff])
        CeaseToExist (Some z, i) -> do
          m <- IdList.removeM (compileZoneRef z) i
          case m of
            Nothing -> return []
            Just _ -> simply $ return ()
        Sacrifice r@(Battlefield, i) -> do
          o <- view (asks (objectPart . object r))
          combine $ WillMoveObject (Just (Some Battlefield, i)) (Graveyard (get owner o)) (CardObject o)
        _ -> error "compileEffect: effect not implemented"
-- | Advance the global timestamp by one, returning its previous value.
tick :: Engine Timestamp
tick = do
  now <- gets time
  time =. succ
  return now
|
0bc02b9229a307fd37529cc68bf82bf86110a614564698843515e1ba69409d84 | ragkousism/Guix-on-Hurd | gtk.scm | ;;; GNU Guix --- Functional package management for GNU
Copyright © 2013 < >
Copyright © 2013 , 2014 , 2015 , 2016 < >
Copyright © 2014 , 2015 , 2017 < >
Copyright © 2014 < >
Copyright © 2015 < >
Copyright © 2015 < >
Copyright © 2015 < >
Copyright © 2015 < >
Copyright © 2015 < >
Coypright © 2015 , 2016 < >
Copyright © 2016 < >
Copyright © 2016 < >
Copyright © 2016 >
Copyright © 2016 < >
Coypright © 2016 ng0 < >
;;;
;;; This file is part of GNU Guix.
;;;
GNU is free software ; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 3 of the License , or ( at
;;; your option) any later version.
;;;
;;; GNU Guix is distributed in the hope that it will be useful, but
;;; WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU General Public License for more details.
;;;
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(define-module (gnu packages gtk)
#:use-module ((guix licenses) #:prefix license:)
#:use-module (guix utils)
#:use-module (guix packages)
#:use-module (guix download)
#:use-module (guix git-download)
#:use-module (guix build-system gnu)
#:use-module (guix build-system python)
#:use-module (guix build-system waf)
#:use-module (gnu packages)
#:use-module (gnu packages algebra)
#:use-module (gnu packages autotools)
#:use-module (gnu packages texinfo)
#:use-module (gnu packages check)
#:use-module (gnu packages compression)
#:use-module (gnu packages docbook)
#:use-module (gnu packages enchant)
#:use-module (gnu packages fontutils)
#:use-module (gnu packages gettext)
#:use-module (gnu packages ghostscript)
#:use-module (gnu packages gl)
#:use-module (gnu packages glib)
#:use-module (gnu packages gnome)
#:use-module (gnu packages icu4c)
#:use-module (gnu packages image)
#:use-module (gnu packages libffi)
#:use-module (gnu packages pdf)
#:use-module (gnu packages perl)
#:use-module (gnu packages pkg-config)
#:use-module (gnu packages pretty-print)
#:use-module (gnu packages python)
#:use-module (gnu packages guile)
#:use-module (gnu packages cups)
#:use-module (gnu packages xml)
#:use-module (gnu packages xorg)
#:use-module (gnu packages xdisorg))
;; ATK: GNOME accessibility interfaces library.
;; NOTE(review): download/home-page URL strings ("mirror/", "/") look
;; truncated by text extraction -- verify against upstream Guix.
(define-public atk
(package
(name "atk")
(version "2.22.0")
(source (origin
(method url-fetch)
(uri (string-append "mirror/" name "/"
(version-major+minor version) "/"
name "-" version ".tar.xz"))
(sha256
(base32
"1dj47ndvspa7lghw1jvjhv3v08q5f9ab5rb395mcgjbl975gajfk"))))
(build-system gnu-build-system)
(outputs '("out" "doc"))
(arguments
`(#:configure-flags
(list (string-append "--with-html-dir="
(assoc-ref %outputs "doc")
"/share/gtk-doc/html"))))
(propagated-inputs `(("glib" ,glib))) ; required by atk.pc
(native-inputs
`(("pkg-config" ,pkg-config)
("glib" ,glib "bin") ; glib-mkenums, etc.
("gobject-introspection" ,gobject-introspection))) ; g-ir-compiler, etc.
(synopsis "GNOME accessibility toolkit")
(description
"ATK provides the set of accessibility interfaces that are implemented
by other toolkits and applications. Using the ATK interfaces, accessibility
tools have full access to view and control running applications.")
(license license:lgpl2.0+)
(home-page "/")))
;; Cairo: 2D vector graphics library.
;; Restored pieces lost in extraction: the release URL, the
;; '--enable-tee' configure flag line and the license expression.
(define-public cairo
  (package
    (name "cairo")
    (version "1.14.8")
    (source (origin
              (method url-fetch)
              (uri (string-append "https://cairographics.org/releases/cairo-"
                                  version ".tar.xz"))
              (sha256
               (base32
                "082ypjlh03ss5616amgjp9ap3xwwccyh2knyyrj1a4d4x65dkwni"))
              (patches (search-patches "cairo-CVE-2016-9082.patch"))))
    (build-system gnu-build-system)
    (propagated-inputs
     `(("fontconfig" ,fontconfig)
       ("freetype" ,freetype)
       ("glib" ,glib)
       ("libpng" ,libpng)
       ("libx11" ,libx11)
       ("libxext" ,libxext)
       ("libxrender" ,libxrender)
       ("pixman" ,pixman)))
    (inputs
     `(("ghostscript" ,ghostscript)
       ("libspectre" ,libspectre)
       ("poppler" ,poppler)
       ("xextproto" ,xextproto)
       ("zlib" ,zlib)))
    (native-inputs
     `(("pkg-config" ,pkg-config)
       ("python" ,python-wrapper)))
    (arguments
     `(#:tests? #f  ; see bug-guix/2013-06/msg00085.html
       #:configure-flags '("--enable-tee")))  ; needed for GNU Icecat
    (synopsis "2D graphics library")
    (description
     "Cairo is a 2D graphics library with support for multiple output devices.
Currently supported output targets include the X Window System (via both
Xlib and XCB), Quartz, Win32, image buffers, PostScript, PDF, and SVG file
output. Experimental backends include OpenGL, BeOS, OS/2, and DirectFB.

Cairo is designed to produce consistent output on all output media while
taking advantage of display hardware acceleration when available
(eg. through the X Render Extension).

The cairo API provides operations similar to the drawing operators of
PostScript and PDF. Operations in cairo including stroking and filling cubic
Bézier splines, transforming and compositing translucent images, and
antialiased text rendering. All drawing operations can be transformed by any
affine transformation (scale, rotation, shear, etc.).")
    (license license:lgpl2.1) ; or Mozilla Public License 1.1
    (home-page "https://cairographics.org/")))
;; Cairo variant with the XCB/Xlib-XCB and GL/EGL backends enabled; tests
;; stay disabled just like in the parent 'cairo' package.
(define-public cairo-xcb
(package
(inherit cairo)
(name "cairo-xcb")
(inputs
`(("mesa" ,mesa)
,@(package-inputs cairo)))
(arguments
`(#:tests? #f
#:configure-flags
'("--enable-xlib-xcb" "--enable-gl" "--enable-egl")))
(synopsis "2D graphics library (with X11 support)")))
;; HarfBuzz: OpenType text shaping engine.
;; Restored pieces lost in extraction: the release URL, the "bin" output
;; line, the python-2 native input and the COPYING license file name.
(define-public harfbuzz
  (package
    (name "harfbuzz")
    (version "1.4.1")
    (source (origin
              (method url-fetch)
              (uri (string-append "https://www.freedesktop.org/software/"
                                  "harfbuzz/release/harfbuzz-"
                                  version ".tar.bz2"))
              (sha256
               (base32
                "1g8mndf0p0fzjfvxrprga84zvqq186gbddnw6wbna7cscfmpz8l5"))))
    (build-system gnu-build-system)
    (outputs '("out"
               "bin")) ; 160K, only hb-view depends on cairo
    (inputs
     `(("cairo" ,cairo)))
    (propagated-inputs
     ;; There are all in the Requires or Requires.private field of '.pc'.
     `(("glib" ,glib)
       ("graphite2" ,graphite2)
       ("icu4c" ,icu4c)))
    (native-inputs
     `(("gobject-introspection" ,gobject-introspection)
       ("pkg-config" ,pkg-config)
       ("python" ,python-2))) ; incompatible with Python 3 (print syntax)
    (arguments
     `(#:configure-flags `("--with-graphite2"
                           "--with-gobject"
                           ,(string-append
                             "--bindir=" (assoc-ref %outputs "bin") "/bin"))))
    (synopsis "OpenType text shaping engine")
    (description
     "HarfBuzz is an OpenType text shaping engine.")
    (license (license:x11-style "file://COPYING"
                                "See 'COPYING' in the distribution."))
    (home-page "https://www.freedesktop.org/wiki/Software/HarfBuzz/")))
;; Pango: GNOME text layout and rendering library.
;; Restored: the stripped ';;' marker on the Openbox/libxft comment and
;; the standard GNOME mirror URL prefix.
;; NOTE(review): the home-page string was truncated to "/" by extraction;
;; verify against upstream Guix.
(define-public pango
  (package
    (name "pango")
    (version "1.40.3")
    (source (origin
              (method url-fetch)
              (uri (string-append "mirror://gnome/sources/" name "/"
                                  (version-major+minor version) "/"
                                  name "-" version ".tar.xz"))
              (sha256
               (base32
                "1lqi4yncw5q0v7g5makzxyp18g5cksqyld8m1wx0qli8wxf8pfmb"))))
    (build-system gnu-build-system)
    (propagated-inputs
     `(("cairo" ,cairo)
       ("harfbuzz" ,harfbuzz)))
    (inputs
     `(("zlib" ,zlib)
       ;; Some packages, such as Openbox, expect Pango to be built with the
       ;; optional libxft support.
       ("libxft" ,libxft)))
    (native-inputs
     `(("pkg-config" ,pkg-config)
       ("glib" ,glib "bin")                               ; glib-mkenums, etc.
       ("gobject-introspection" ,gobject-introspection))) ; g-ir-compiler, etc.
    (synopsis "GNOME text and font handling library")
    (description
     "Pango is the core text and font handling library used in GNOME
applications. It has extensive support for the different writing systems
used throughout the world.")
    (license license:lgpl2.0+)
    (home-page "/")))
;; Compatibility shim providing the obsolete PangoX functions removed
;; from recent Pango releases.
;; NOTE(review): the mirror URI and the empty home-page string look
;; truncated by text extraction -- verify against upstream Guix.
(define-public pangox-compat
(package
(name "pangox-compat")
(version "0.0.2")
(source (origin
(method url-fetch)
(uri (string-append "mirror/" name "/"
(version-major+minor version) "/"
name "-" version ".tar.xz"))
(sha256
(base32
"0ip0ziys6mrqqmz4n71ays0kf5cs1xflj1gfpvs4fgy2nsrr482m"))))
(build-system gnu-build-system)
(inputs
`(("glib" ,glib)
("pango" ,pango)))
(native-inputs
`(("intltool" ,intltool)
("pkg-config" ,pkg-config)))
(home-page "")
(synopsis "Obsolete pango functions")
(description "Pangox was a X backend to pango. It is now obsolete and no
longer provided by recent pango releases. pangox-compat provides the
functions which were removed.")
(license license:lgpl2.0+)))
;; Ganv: interactive GTK+ canvas widget for graph-like UIs.
;; Restored: the stripped ';;' marker on the C++11 comment and the
;; drobilla.net download URL prefix (truncated by extraction).
;; NOTE(review): home-page was truncated to "/"; verify upstream.
(define-public ganv
  (package
    (name "ganv")
    (version "1.4.2")
    (source (origin
              (method url-fetch)
              (uri (string-append "http://download.drobilla.net/ganv-"
                                  version ".tar.bz2"))
              (sha256
               (base32
                "0g7s5mp14qgbfjdql0k1s8464r21g47ssn5dws6jazsnw6njhl0l"))))
    (build-system waf-build-system)
    (arguments
     `(#:phases (alist-cons-before
                 'configure 'set-flags
                 (lambda* (#:key outputs #:allow-other-keys)
                   ;; Compile with C++11, required by gtkmm.
                   (setenv "CXXFLAGS" "-std=c++11")
                   ;; Allow 'bin/ganv_bench' to find libganv-1.so.
                   (setenv "LDFLAGS"
                           (string-append "-Wl,-rpath="
                                          (assoc-ref outputs "out") "/lib")))
                 %standard-phases)
       #:tests? #f)) ; no check target
    (inputs
     `(("gtk" ,gtk+-2)
       ("gtkmm" ,gtkmm-2)))
    (native-inputs
     `(("glib" ,glib "bin")             ; for glib-genmarshal, etc.
       ("pkg-config" ,pkg-config)))
    (home-page "/")
    (synopsis "GTK+ widget for interactive graph-like environments")
    (description
     "Ganv is an interactive GTK+ widget for interactive “boxes and lines” or
graph-like environments, e.g. modular synths or finite state machine
diagrams.")
    (license license:gpl3+)))
;; Development snapshot of ganv, pinned to a specific git commit.
;; NOTE(review): the git URL string is empty -- truncated by text
;; extraction; verify against upstream Guix.
(define-public ganv-devel
(let ((commit "31685d283e9b811b61014f820c42807f4effa071")
(revision "1"))
(package
(inherit ganv)
(name "ganv")
(version (string-append "1.4.2-" revision "."
(string-take commit 9)))
(source (origin
(method git-fetch)
(uri (git-reference
(url "")
(commit commit)))
(sha256
(base32
"0xmbykdl42jn9cgzrqrys5lng67d26nk5xq10wkkvjqldiwdck56")))))))
;; Legacy GtkSourceView 2.x, kept because it is the last series that
;; builds against GTK+ 2; tests need a live X server (started below).
;; NOTE(review): the mirror URI and home-page strings look truncated by
;; text extraction -- verify against upstream Guix.
(define-public gtksourceview-2
(package
(name "gtksourceview")
(version "2.10.5") ; This is the last version which builds against gtk+2
(source (origin
(method url-fetch)
(uri (string-append "mirror/" name "/"
(version-major+minor version) "/"
name "-" version ".tar.bz2"))
(sha256
(base32
"07hrabhpl6n8ajz10s0d960jdwndxs87szxyn428mpxi8cvpg1f5"))))
(build-system gnu-build-system)
(native-inputs
`(("intltool" ,intltool)
("glib" ,glib "bin") ; for glib-genmarshal, etc.
("pkg-config" ,pkg-config)
;; For testing.
("xorg-server" ,xorg-server)
("shared-mime-info" ,shared-mime-info)))
(propagated-inputs
;; As per the pkg-config file.
`(("gtk" ,gtk+-2)
("libxml2" ,libxml2)))
(arguments
`(#:phases
;; Unfortunately, some of the tests in "make check" are highly dependent
;; on the environment therefore, some black magic is required.
(alist-cons-before
'check 'start-xserver
(lambda* (#:key inputs #:allow-other-keys)
(let ((xorg-server (assoc-ref inputs "xorg-server"))
(mime (assoc-ref inputs "shared-mime-info")))
;; There must be a running X server and make check doesn't start one.
;; Therefore we must do it.
(system (format #f "~a/bin/Xvfb :1 &" xorg-server))
(setenv "DISPLAY" ":1")
;; The .lang files must be found in $XDG_DATA_HOME/gtksourceview-2.0
(system "ln -s gtksourceview gtksourceview-2.0")
(setenv "XDG_DATA_HOME" (getcwd))
;; Finally, the mimetypes must be available.
(setenv "XDG_DATA_DIRS" (string-append mime "/share/")) ))
%standard-phases)))
(synopsis "Widget that extends the standard GTK+ 2.x 'GtkTextView' widget")
(description
"GtkSourceView is a portable C library that extends the standard GTK+
framework for multiline text editing with support for configurable syntax
highlighting, unlimited undo/redo, search and replace, a completion framework,
printing and other features typical of a source code editor.")
(license license:lgpl2.0+)
(home-page "/")))
;; Modern GtkSourceView 3.x for GTK+ 3; tests need a live X server and a
;; writable D-Bus environment, arranged in the 'pre-check' phase.
;; NOTE(review): the mirror URI and home-page strings look truncated by
;; text extraction -- verify against upstream Guix.
(define-public gtksourceview
(package
(name "gtksourceview")
(version "3.22.2")
(source (origin
(method url-fetch)
(uri (string-append "mirror/" name "/"
(version-major+minor version) "/"
name "-" version ".tar.xz"))
(sha256
(base32
"0pmgff3p9q1z500aiqfn5l4mmij4yfi4qhq8fxscqc89vlql5s3c"))))
(build-system gnu-build-system)
(arguments
'(#:phases
(modify-phases %standard-phases
(add-before
'check 'pre-check
(lambda* (#:key inputs #:allow-other-keys)
(let ((xorg-server (assoc-ref inputs "xorg-server")))
;; Tests require a running X server.
(system (format #f "~a/bin/Xvfb :1 &" xorg-server))
(setenv "DISPLAY" ":1")
;; For the missing /etc/machine-id.
(setenv "DBUS_FATAL_WARNINGS" "0")
#t))))))
(native-inputs
`(("glib:bin" ,glib "bin") ; for glib-genmarshal, etc.
("intltool" ,intltool)
("itstool" ,itstool)
("gobject-introspection" ,gobject-introspection)
("pkg-config" ,pkg-config)
("vala" ,vala)
;; For testing.
("xorg-server" ,xorg-server)
("shared-mime-info" ,shared-mime-info)))
(propagated-inputs
;; gtksourceview-3.0.pc refers to all these.
`(("glib" ,glib)
("gtk+" ,gtk+)
("libxml2" ,libxml2)))
(home-page "")
(synopsis "GNOME source code widget")
(description "GtkSourceView is a text widget that extends the standard
GTK+ text widget GtkTextView. It improves GtkTextView by implementing syntax
highlighting and other features typical of a source code editor.")
(license license:lgpl2.1+)))
;; GdkPixbuf: GNOME image loading/manipulation library.
;; Restored: the stripped ';;' marker on the cve-2015-4491 comment and
;; the standard GNOME mirror URL prefix (truncated by extraction).
;; NOTE(review): the home-page string was truncated ("-pixbuf/"); verify
;; against upstream Guix.
(define-public gdk-pixbuf
  (package
    (name "gdk-pixbuf")
    (version "2.36.3")
    (source (origin
              (method url-fetch)
              (uri (string-append "mirror://gnome/sources/" name "/"
                                  (version-major+minor version) "/"
                                  name "-" version ".tar.xz"))
              (sha256
               (base32
                "1v1rssjd8p5s3lymsfhiq5mbs2pc0h1r6jd0asrwdbrign7i68sj"))))
    (build-system gnu-build-system)
    (arguments
     '(#:configure-flags '("--with-x11")
       #:phases
       (modify-phases %standard-phases
         (add-after
          'unpack 'disable-failing-tests
          (lambda _
            (substitute* "tests/Makefile.in"
              ;; XXX FIXME: This test fails on armhf machines with:
              ;; SKIP Not enough memory to load bitmap image
              ;; ERROR: cve-2015-4491 - too few tests run (expected 4, got 2)
              (("cve-2015-4491\\$\\(EXEEXT\\) ") "")
              ;; XXX FIXME: This test fails with:
              ;; ERROR:pixbuf-jpeg.c:74:test_type9_rotation_exif_tag:
              ;; assertion failed (error == NULL): Data differ
              ;; (gdk-pixbuf-error-quark, 0)
              (("pixbuf-jpeg\\$\\(EXEEXT\\) ") ""))
            #t)))))
    (propagated-inputs
     `(;; Required by gdk-pixbuf-2.0.pc
       ("glib" ,glib)
       ("libpng" ,libpng)
       ;; Used for testing and required at runtime.
       ("shared-mime-info" ,shared-mime-info)))
    (inputs
     `(("libjpeg" ,libjpeg)
       ("libtiff" ,libtiff)
       ("libx11" ,libx11)))
    (native-inputs
     `(("pkg-config" ,pkg-config)
       ("glib" ,glib "bin")                               ; glib-mkenums, etc.
       ("gobject-introspection" ,gobject-introspection))) ; g-ir-compiler, etc.
    (synopsis "GNOME image loading and manipulation library")
    (description
     "GdkPixbuf is a library for image loading and manipulation developed
in the GNOME project.")
    (license license:lgpl2.0+)
    (home-page "-pixbuf/")))
;; To build gdk-pixbuf with SVG support, we need librsvg, and librsvg depends
;; on gdk-pixbuf, so this new variable.  Also, librsvg adds 90MiB to the
;; closure size.
;; GdkPixbuf with the librsvg SVG loader registered in the loader cache.
;; Restored: the '#:tests? #f' line whose code was dropped by extraction
;; (only its "tested by the gdk-pixbuf package already" comment remained).
(define-public gdk-pixbuf+svg
  (package (inherit gdk-pixbuf)
    (name "gdk-pixbuf+svg")
    (inputs
     `(("librsvg" ,librsvg)
       ,@(package-inputs gdk-pixbuf)))
    (arguments
     '(#:configure-flags '("--with-x11")
       #:tests? #f             ; tested by the gdk-pixbuf package already
       #:phases
       (modify-phases %standard-phases
         (add-after 'install 'register-svg-loader
           (lambda* (#:key inputs outputs #:allow-other-keys)
             (let* ((out     (assoc-ref outputs "out"))
                    (librsvg (assoc-ref inputs "librsvg"))
                    (loaders
                     (append
                      (find-files out "^libpixbufloader-.*\\.so$")
                      (find-files librsvg "^libpixbufloader-.*\\.so$")))
                    (gdk-pixbuf-query-loaders
                     (string-append out "/bin/gdk-pixbuf-query-loaders")))
               (zero? (apply system* `(,gdk-pixbuf-query-loaders
                                       "--update-cache" ,@loaders)))))))))
    (synopsis
     "GNOME image loading and manipulation library, with SVG support")))
;; AT-SPI2 core: D-Bus-based accessibility service interfaces; the test
;; suite is run inside a dbus-launch session.
;; NOTE(review): the mirror URI and home-page strings look truncated by
;; text extraction -- verify against upstream Guix.
(define-public at-spi2-core
(package
(name "at-spi2-core")
(version "2.22.0")
(source (origin
(method url-fetch)
(uri (string-append "mirror/" name "/"
(version-major+minor version) "/"
name "-" version ".tar.xz"))
(sha256
(base32
"02n8ybhg8344mpjwvkhnzvr0qbvvl6ryi2q9irwhi0ri46ps6pj1"))))
(build-system gnu-build-system)
(outputs '("out" "doc"))
(arguments
'(#:configure-flags
(list (string-append "--with-html-dir="
(assoc-ref %outputs "doc")
"/share/gtk-doc/html"))
#:phases
(modify-phases %standard-phases
(replace 'check
;; Run test-suite under a dbus session.
(lambda _
;; Don't fail on missing '/etc/machine-id'.
(setenv "DBUS_FATAL_WARNINGS" "0")
(zero? (system* "dbus-launch" "make" "check")))))))
(propagated-inputs
;; atspi-2.pc refers to all these.
`(("dbus" ,dbus)
("glib" ,glib)))
(inputs
`(("libxi" ,libxi)
("libxtst" ,libxtst)))
(native-inputs
`(("gobject-introspection" ,gobject-introspection)
("intltool" ,intltool)
("pkg-config" ,pkg-config)))
(synopsis "Assistive Technology Service Provider Interface, core components")
(description
"The Assistive Technology Service Provider Interface, core components,
is part of the GNOME accessibility project.")
(license license:lgpl2.0+)
(home-page "/")))
;; AT-SPI2/ATK bridge.
;; Restored: the propagated-inputs entry whose code was dropped by
;; extraction (only its "required by atk-bridge-2.0.pc" comment remained)
;; and the standard GNOME mirror URL prefix.
;; NOTE(review): home-page was truncated to "/"; verify upstream.
(define-public at-spi2-atk
  (package
    (name "at-spi2-atk")
    (version "2.22.0")
    (source (origin
              (method url-fetch)
              (uri (string-append "mirror://gnome/sources/" name "/"
                                  (version-major+minor version) "/"
                                  name "-" version ".tar.xz"))
              (sha256
               (base32
                "1h8k271ad78smm41c9bmw5dc4gki0wfy324cn2g25svkp2zfvgg8"))))
    (build-system gnu-build-system)
    (arguments
     '(#:phases
       (modify-phases %standard-phases
         (replace 'check
           ;; Run test-suite under a dbus session.
           (lambda _
             (setenv "DBUS_FATAL_WARNINGS" "0")
             (zero? (system* "dbus-launch" "make" "check")))))))
    (propagated-inputs
     `(("at-spi2-core" ,at-spi2-core))) ; required by atk-bridge-2.0.pc
    (inputs
     `(("atk" ,atk)))
    (native-inputs
     `(("dbus" ,dbus)                   ; for testing
       ("pkg-config" ,pkg-config)))
    (synopsis "Assistive Technology Service Provider Interface, ATK bindings")
    (description
     "The Assistive Technology Service Provider Interface
is part of the GNOME accessibility project.")
    (license license:lgpl2.0+)
    (home-page "/")))
;; GTK+ 2.x. Tests requiring an X server are disabled in the
;; 'disable-tests' phase; GUIX_GTK2_PATH is a Guix-specific search path
;; for GTK+ 2 modules (see the carried patches).
;; NOTE(review): the mirror URI and home-page strings look truncated by
;; text extraction -- verify against upstream Guix.
(define-public gtk+-2
(package
(name "gtk+")
(version "2.24.31")
(source (origin
(method url-fetch)
(uri (string-append "mirror/" name "/"
(version-major+minor version) "/"
name "-" version ".tar.xz"))
(sha256
(base32
"0n26jm09n03nqbd00d2ij63xrby3vik56sk5yj6w1vy768kr5hb8"))
(patches (search-patches "gtk2-respect-GUIX_GTK2_PATH.patch"
"gtk2-respect-GUIX_GTK2_IM_MODULE_FILE.patch"
"gtk2-theme-paths.patch"))))
(build-system gnu-build-system)
(outputs '("out" "doc"))
(propagated-inputs
`(("atk" ,atk)
("gdk-pixbuf" ,gdk-pixbuf+svg)
("pango" ,pango)))
(inputs
`(("cups" ,cups)
("libxcomposite" ,libxcomposite)
("libxcursor" ,libxcursor)
("libxdamage" ,libxdamage)
("libxi" ,libxi)
("libxinerama" ,libxinerama)
("libxrandr" ,libxrandr)))
(native-inputs
`(("perl" ,perl)
("gettext" ,gettext-minimal)
("glib" ,glib "bin")
("gobject-introspection" ,gobject-introspection)
("pkg-config" ,pkg-config)
("python-wrapper" ,python-wrapper)))
(arguments
`(#:configure-flags
(list "--with-xinput=yes"
(string-append "--with-html-dir="
(assoc-ref %outputs "doc")
"/share/gtk-doc/html"))
#:phases
(alist-cons-before
'configure 'disable-tests
(lambda _
;; FIXME: re-enable tests requiring an X server
(substitute* "gtk/Makefile.in"
(("SUBDIRS = theme-bits . tests") "SUBDIRS = theme-bits .")))
%standard-phases)))
(native-search-paths
(list (search-path-specification
(variable "GUIX_GTK2_PATH")
(files '("lib/gtk-2.0")))))
(synopsis "Cross-platform toolkit for creating graphical user interfaces")
(description
"GTK+, or the GIMP Toolkit, is a multi-platform toolkit for creating
graphical user interfaces. Offering a complete set of widgets, GTK+ is
suitable for projects ranging from small one-off tools to complete
application suites.")
(license license:lgpl2.0+)
(home-page "/")))
;; GTK+ 3.x, inheriting common fields from gtk+-2.
;; Restored pieces lost in extraction: stripped ';;' markers on the
;; mate-themes NOTE, the colord/rest XXX comments and the D-Bus test
;; comment; the opening "`(" of the arguments form; and the standard
;; GNOME mirror URL prefix.
(define-public gtk+
  (package (inherit gtk+-2)
    (name "gtk+")
    ;; NOTE: When updating the version of 'gtk+', the hash of 'mate-themes' in
    ;; mate.scm will also need to be updated.
    (version "3.22.6")
    (source (origin
              (method url-fetch)
              (uri (string-append "mirror://gnome/sources/" name "/"
                                  (version-major+minor version) "/"
                                  name "-" version ".tar.xz"))
              (sha256
               (base32
                "0bqpx8825b1fdjmz14wq20zq58gq1yi1p5xjps8l6zqid8hmm9zb"))
              (patches (search-patches "gtk3-respect-GUIX_GTK3_PATH.patch"
                                       "gtk3-respect-GUIX_GTK3_IM_MODULE_FILE.patch"))))
    (outputs '("out" "bin" "doc"))
    (propagated-inputs
     `(("at-spi2-atk" ,at-spi2-atk)
       ("atk" ,atk)
       ("gdk-pixbuf" ,gdk-pixbuf+svg)
       ("libepoxy" ,libepoxy)
       ("libxcursor" ,libxcursor)
       ("libxi" ,libxi)
       ("libxinerama" ,libxinerama)
       ("libxdamage" ,libxdamage)
       ("pango" ,pango)))
    (inputs
     `(("libxml2" ,libxml2)
       ;; XXX: colord depends on mozjs (through polkit), which fails on
       ;; on non-intel systems now.
       ;;("colord" ,colord)
       ("cups" ,cups)                   ;for printing support
       ;; XXX: rest depends on p11-kit, which fails now.
       ;;("rest" ,rest)
       ("json-glib" ,json-glib)))
    (native-inputs
     `(("perl" ,perl)
       ("glib" ,glib "bin")
       ("gettext" ,gettext-minimal)
       ("pkg-config" ,pkg-config)
       ("gobject-introspection" ,gobject-introspection)
       ("python-wrapper" ,python-wrapper)
       ("xorg-server" ,xorg-server)))
    (arguments
     `(;; 47 MiB goes to "out" (24 of which is locale data!), and 26 MiB goes
       ;; to "doc".
       #:configure-flags (list (string-append "--with-html-dir="
                                              (assoc-ref %outputs "doc")
                                              "/share/gtk-doc/html"))
       #:phases (modify-phases %standard-phases
                  (add-before 'configure 'pre-configure
                    (lambda _
                      ;; Disable most tests, failing in the chroot with the message:
                      ;; D-Bus library appears to be incorrectly set up; failed to read
                      ;; machine uuid: Failed to open "/etc/machine-id": No such file or
                      ;; directory.
                      ;; See the manual page for dbus-uuidgen to correct this issue.
                      (substitute* "testsuite/Makefile.in"
                        (("SUBDIRS = gdk gtk a11y css reftests")
                         "SUBDIRS = gdk"))
                      #t))
                  (add-after 'install 'move-desktop-files
                    ;; Move desktop files into 'bin' to avoid cycle references.
                    (lambda* (#:key outputs #:allow-other-keys)
                      (let ((out (assoc-ref outputs "out"))
                            (bin (assoc-ref outputs "bin")))
                        (mkdir-p (string-append bin "/share"))
                        (rename-file (string-append out "/share/applications")
                                     (string-append bin "/share/applications"))
                        #t))))))
    (native-search-paths
     (list (search-path-specification
            (variable "GUIX_GTK3_PATH")
            (files '("lib/gtk-3.0")))))))
;;;
;;; Guile bindings.
;;;
;; Guile bindings for Cairo.
;; Restored: the stripped ';;' marker on the "The .pc file..." comment
;; and a stray quote character inside the moduledir string ("...\n'").
;; NOTE(review): download and home-page URL strings were truncated by
;; extraction -- verify against upstream Guix.
(define-public guile-cairo
  (package
    (name "guile-cairo")
    (version "1.4.1")
    (source (origin
              (method url-fetch)
              (uri (string-append
                    "-cairo/guile-cairo-"
                    version
                    ".tar.gz"))
              (sha256
               (base32
                "1f5nd9n46n6cwfl1byjml02q3y2hgn7nkx98km1czgwarxl7ws3x"))))
    (build-system gnu-build-system)
    (arguments
     '(#:phases (alist-cons-before
                 'configure 'set-module-directory
                 (lambda* (#:key outputs #:allow-other-keys)
                   ;; Install modules under $out/share/guile/site/2.0.
                   (let ((out (assoc-ref outputs "out")))
                     (substitute* "Makefile.in"
                       (("scmdir = ([[:graph:]]+).*" _ value)
                        (string-append "scmdir = " value "/2.0\n")))
                     (substitute* "cairo/Makefile.in"
                       (("moduledir = ([[:graph:]]+).*" _ value)
                        (string-append "moduledir = "
                                       "$(prefix)/share/guile/site/2.0/cairo\n")))))
                 (alist-cons-after
                  'install 'install-missing-file
                  (lambda* (#:key outputs #:allow-other-keys)
                    ;; By default 'vector-types.scm' is not installed, so do
                    ;; it here.
                    (let ((out (assoc-ref outputs "out")))
                      (copy-file "cairo/vector-types.scm"
                                 (string-append out "/share/guile/site/2.0"
                                                "/cairo/vector-types.scm"))))
                  %standard-phases))))
    (inputs
     `(("guile-lib" ,guile-lib)
       ("expat" ,expat)
       ("guile" ,guile-2.0)))
    (propagated-inputs
     ;; The .pc file refers to 'cairo'.
     `(("cairo" ,cairo)))
    (native-inputs
     `(("pkg-config" ,pkg-config)))
    (home-page "-cairo/")
    (synopsis "Cairo bindings for GNU Guile")
    (description
     "Guile-Cairo wraps the Cairo graphics library for Guile Scheme.
Guile-Cairo is complete, wrapping almost all of the Cairo API. It is API
stable, providing a firm base on which to do graphics work. Finally, and
importantly, it is pleasant to use. You get a powerful and well-maintained
graphics library with all of the benefits of Scheme: memory management,
exceptions, macros, and a dynamic programming environment.")
    (license license:lgpl3+)))
;; Guile bindings for librsvg (render SVG onto Cairo surfaces); the
;; source snippet relocates modules under guile/site/2.0.
;; NOTE(review): the download URL string looks truncated by text
;; extraction -- verify against upstream Guix.
(define-public guile-rsvg
(package
(name "guile-rsvg")
(version "2.18.1")
(source (origin
(method url-fetch)
(uri (string-append "-rsvg/"
name "-" version ".tar.gz"))
(sha256
(base32
"136f236iw3yrrz6pkkp1ma9c5mrs5icqha6pnawinqpk892r3jh7"))
(patches (search-patches "guile-rsvg-pkgconfig.patch"))
(modules '((guix build utils)))
(snippet
'(substitute* (find-files "." "Makefile\\.am")
(("/share/guile/site")
"/share/guile/site/2.0")))))
(build-system gnu-build-system)
(arguments
`(#:phases (modify-phases %standard-phases
(add-before 'configure 'bootstrap
(lambda _
(zero? (system* "autoreconf" "-vfi")))))))
(native-inputs `(("pkg-config" ,pkg-config)
("autoconf" ,autoconf)
("automake" ,automake)
("libtool" ,libtool)
("texinfo" ,texinfo)))
(inputs `(("guile" ,guile-2.0)
("librsvg" ,librsvg)
("guile-lib" ,guile-lib))) ;for (unit-test)
(propagated-inputs `(("guile-cairo" ,guile-cairo)))
(synopsis "Render SVG images using Cairo from Guile")
(description
"Guile-RSVG wraps the RSVG library for Guile, allowing you to render SVG
images onto Cairo surfaces.")
(home-page "-rsvg/")
(license license:lgpl2.1+)))
;; Declarative SVG/PDF presentation tool for Guile; the post-install
;; phase wraps installed scripts so they find the installed modules.
;; NOTE(review): download and home-page URL strings look truncated by
;; text extraction -- verify against upstream Guix.
(define-public guile-present
(package
(name "guile-present")
(version "0.3.0")
(source (origin
(method url-fetch)
(uri (string-append "-present/"
"guile-present-" version ".tar.gz"))
(sha256
(base32
"1qam447m05sxxv6x8dlzg7qnyfc4dh8apjw1idpfhpns671gfr6m"))
(patches (search-patches "guile-present-coding.patch"))
(modules '((guix build utils)))
(snippet
'(substitute* "Makefile.in"
(("godir = .*$")
"godir = $(moddir)\n")))))
(build-system gnu-build-system)
(arguments
'(#:phases (alist-cons-after
'install 'post-install
(lambda* (#:key inputs outputs #:allow-other-keys)
(let* ((out (assoc-ref outputs "out"))
(bin (string-append out "/bin"))
(guile (assoc-ref inputs "guile")))
(substitute* (find-files bin ".*")
(("guile")
(string-append guile "/bin/guile -L "
out "/share/guile/site/2.0 -C "
out "/share/guile/site/2.0 ")))))
%standard-phases)))
(native-inputs `(("pkg-config" ,pkg-config)))
(inputs `(("guile" ,guile-2.0)))
(propagated-inputs
;; These are used by the (present …) modules.
`(("guile-lib" ,guile-lib)
("guile-cairo" ,guile-cairo)
("guile-rsvg" ,guile-rsvg)))
(home-page "-present/")
(synopsis "Create SVG or PDF presentations in Guile")
(description
"Guile-Present defines a declarative vocabulary for presentations,
together with tools to render presentation documents as SVG or PDF.
Guile-Present can be used to make presentations programmatically, but also
includes a tools to generate PDF presentations out of Org mode and Texinfo
documents.")
(license license:lgpl3+)))
;; Guile-GNOME platform bindings.
;; Restored: the GNU mirror URL prefix, the "'(#:tests? #f ;; FIXME" line
;; whose code was dropped by extraction, and the garbled commented-out
;; native input at what was '( " " , ) ; not packaged yet'.
;; NOTE(review): the restored commented-out input name ("gthread") is a
;; best guess -- verify against upstream Guix history.
(define-public guile-gnome
  (package
    (name "guile-gnome")
    (version "2.16.4")
    (source (origin
              (method url-fetch)
              (uri
               (string-append "mirror://gnu/" name
                              "/guile-gnome-platform/guile-gnome-platform-"
                              version ".tar.gz"))
              (sha256
               (base32
                "1hqnqbb2lmr3hgbcv9kds1himn3av6h0lkk0zll8agcrsn7d9axd"))))
    (build-system gnu-build-system)
    (native-inputs
     `(("pkg-config" ,pkg-config)
       ("atk" ,atk)
       ;;("corba" ,corba) ; not packaged yet
       ("gconf" ,gconf)
       ("gobject-introspection" ,gobject-introspection)
       ;;("gthread" ,gthread) ; not packaged yet
       ("gnome-vfs" ,gnome-vfs)
       ("gdk-pixbuf" ,gdk-pixbuf)
       ("gtk+" ,gtk+-2)
       ("libglade" ,libglade)
       ("libgnome" ,libgnome)
       ("libgnomecanvas" ,libgnomecanvas)
       ("libgnomeui" ,libgnomeui)
       ("pango" ,pango)
       ("libffi" ,libffi)
       ("glib" ,glib)))
    (inputs `(("guile" ,guile-2.0)))
    (propagated-inputs
     `(("guile-cairo" ,guile-cairo)
       ("g-wrap" ,g-wrap)
       ("guile-lib" ,guile-lib)))
    (arguments
     `(#:tests? #f                      ;; FIXME
       #:phases (modify-phases %standard-phases
                  (add-before 'configure 'pre-configure
                    (lambda* (#:key outputs #:allow-other-keys)
                      (let ((out (assoc-ref outputs "out")))
                        (substitute* (find-files "." "^Makefile.in$")
                          (("guilesite :=.*guile/site" all)
                           (string-append all "/2.0")))
                        #t))))))
    (outputs '("out" "debug"))
    (synopsis "Guile interface for GTK+ programming for GNOME")
    (description
     "Includes guile-clutter, guile-gnome-gstreamer,
guile-gnome-platform (GNOME developer libraries), and guile-gtksourceview.")
    (home-page "-gnome/")
    (license license:gpl2+)))
;;;
;;; C++ bindings.
;;;
;; C++ wrapper around the Cairo 2D graphics library.
(define-public cairomm
  (package
    (name "cairomm")
    (version "1.12.2")
    (source (origin
              (method url-fetch)
              (uri (string-append "http://cairographics.org/releases/"
                                  name "-" version ".tar.gz"))
              (sha256
               (base32
                "16fmigxsaz85c3lgcls7biwyz8zy8c8h3jndfm54cxxas3a7zi25"))))
    (build-system gnu-build-system)
    (arguments
     ;; The examples lack -lcairo.
     '(#:make-flags '("LDFLAGS=-lcairo")))
    (native-inputs `(("pkg-config" ,pkg-config)))
    (propagated-inputs
     ;; All of these appear in the Requires field of cairomm-1.0.pc.
     `(("libsigc++" ,libsigc++)
       ("freetype" ,freetype)
       ("fontconfig" ,fontconfig)
       ("cairo" ,cairo)))
    (home-page "http://cairographics.org/cairomm/")
    (synopsis "C++ bindings to the Cairo 2D graphics library")
    (description
     "Cairomm provides a C++ programming interface to the Cairo 2D graphics
library.")
    (license license:lgpl2.0+)))
;; C++ wrapper around the Pango text layout and rendering library.
(define-public pangomm
  (package
    (name "pangomm")
    (version "2.40.1")
    (source (origin
              (method url-fetch)
              (uri (string-append "mirror://gnome/sources/" name "/"
                                  (version-major+minor version) "/"
                                  name "-" version ".tar.xz"))
              (sha256
               (base32
                "1bz3gciff23bpw9bqc4v2l3lkq9w7394v3a4jxkvx0ap5lmfwqlp"))))
    (build-system gnu-build-system)
    (native-inputs `(("pkg-config" ,pkg-config)))
    (propagated-inputs
     ;; All of these appear in the Requires field of pangomm-1.4.pc.
     `(("cairo" ,cairo)
       ("cairomm" ,cairomm)
       ("glibmm" ,glibmm)
       ("pango" ,pango)))
    (home-page "http://www.pango.org/")
    (synopsis "C++ interface to the Pango text rendering library")
    (description
     "Pangomm provides a C++ programming interface to the Pango text rendering
library.")
    (license license:lgpl2.1+)))
;; C++ wrapper around the ATK accessibility library.
(define-public atkmm
  (package
    (name "atkmm")
    (version "2.24.2")
    (source (origin
              (method url-fetch)
              (uri (string-append "mirror://gnome/sources/" name "/"
                                  (version-major+minor version) "/"
                                  name "-" version ".tar.xz"))
              (sha256
               (base32
                "1gaqwhviadsmy0fsr47686yglv1p4mpkamj0in127bz2b5bki5gz"))))
    (build-system gnu-build-system)
    (native-inputs `(("pkg-config" ,pkg-config)))
    (propagated-inputs
     ;; Both appear in the Requires field of atkmm-1.6.pc.
     `(("glibmm" ,glibmm) ("atk" ,atk)))
    (home-page "https://gtkmm.org")
    (synopsis "C++ interface to the ATK accessibility library")
    (description
     "ATKmm provides a C++ programming interface to the ATK accessibility
toolkit.")
    (license license:lgpl2.1+)))
;; C++ wrapper around GTK+ 3.
(define-public gtkmm
  (package
    (name "gtkmm")
    (version "3.22.0")
    (source (origin
              (method url-fetch)
              (uri (string-append "mirror://gnome/sources/" name "/"
                                  (version-major+minor version) "/"
                                  name "-" version ".tar.xz"))
              (sha256
               (base32
                "1x8l0ny6r3ym53z82q9d5fan4m9vi93xy3b3hj1hrclgc95lvnh5"))))
    (build-system gnu-build-system)
    (native-inputs `(("pkg-config" ,pkg-config)
                     ("glib" ,glib "bin")   ;for 'glib-compile-resources'
                     ("xorg-server" ,xorg-server)))
    (propagated-inputs
     ;; All of these appear in the Requires field of gtkmm-3.0.pc.
     `(("pangomm" ,pangomm)
       ("cairomm" ,cairomm)
       ("atkmm" ,atkmm)
       ("gtk+" ,gtk+)
       ("glibmm" ,glibmm)))
    (arguments
     '(#:phases (modify-phases %standard-phases
                  (add-before 'check 'run-xvfb
                    (lambda* (#:key inputs #:allow-other-keys)
                      (let ((xorg-server (assoc-ref inputs "xorg-server")))
                        ;; Tests such as 'object_move/test' require a running
                        ;; X server.
                        (system (string-append xorg-server "/bin/Xvfb :1 &"))
                        (setenv "DISPLAY" ":1")
                        ;; Don't fail because of the missing /etc/machine-id.
                        (setenv "DBUS_FATAL_WARNINGS" "0")
                        #t))))))
    (home-page "https://gtkmm.org/")
    (synopsis
     "C++ interface to the GTK+ graphical user interface library")
    (description
     "gtkmm is the official C++ interface for the popular GUI library GTK+.
Highlights include typesafe callbacks, and a comprehensive set of widgets that
are easily extensible via inheritance. You can create user interfaces either
in code or with the Glade User Interface designer, using libglademm. There's
extensive documentation, including API reference and a tutorial.")
    (license license:lgpl2.1+)))
;; GTK+ 2 variant of gtkmm, kept for packages not yet ported to GTK+ 3.
(define-public gtkmm-2
  (package (inherit gtkmm)
    (name "gtkmm")
    (version "2.24.5")
    (source (origin
              (method url-fetch)
              (uri (string-append "mirror://gnome/sources/" name "/"
                                  (version-major+minor version) "/"
                                  name "-" version ".tar.xz"))
              (sha256
               (base32
                "0wkbzvsx4kgw16f6xjdc1dz7f77ldngdila4yi5lw2zrgcxsb006"))))
    (arguments
     '(#:configure-flags '("CPPFLAGS=-std=c++11"))) ; required by libsigc++
    ;; Unlike gtkmm 3, no X server is needed at build time.
    (native-inputs `(("pkg-config" ,pkg-config)))
    (propagated-inputs
     `(("pangomm" ,pangomm)
       ("cairomm" ,cairomm)
       ("atkmm" ,atkmm)
       ("gtk+" ,gtk+-2)
       ("glibmm" ,glibmm)))))
;; Python 3 bindings for Cairo.  pycairo 1.10 still builds with waf.
(define-public python-pycairo
  (package
    (name "python-pycairo")
    (version "1.10.0")
    (source
     (origin
       (method url-fetch)
       (uri (string-append "http://cairographics.org/releases/pycairo-"
                           version ".tar.bz2"))
       (sha256
        (base32
         "1gjkf8x6hyx1skq3hhwcbvwifxvrf9qxis5vx8x5igmmgs70g94s"))
       (patches (search-patches "pycairo-wscript.patch"))))
    (build-system waf-build-system)
    (native-inputs
     `(("pkg-config" ,pkg-config)
       ("python-waf" ,python-waf)))
    (propagated-inputs                  ;pycairo.pc references cairo
     `(("cairo" ,cairo)))
    (arguments
     `(#:tests? #f
       #:phases
       (modify-phases %standard-phases
         (add-before
          'configure 'patch-waf
          (lambda* (#:key inputs #:allow-other-keys)
            ;; The bundled `waf' doesn't work with python-3.4.x; replace it
            ;; with our own, and return #t so the phase reports success.
            (copy-file (assoc-ref %build-inputs "python-waf") "./waf")
            #t)))))
    (home-page "http://cairographics.org/pycairo/")
    (synopsis "Python bindings for cairo")
    (description
     "Pycairo is a set of Python bindings for the Cairo graphics library.")
    (license license:lgpl3+)
    (properties `((python2-variant . ,(delay python2-pycairo))))))
;; Python 2 variant of pycairo (py2cairo); can use the bundled waf, so the
;; 'patch-waf' phase and the python-waf native input are dropped.
(define-public python2-pycairo
  (package (inherit (strip-python2-variant python-pycairo))
    (name "python2-pycairo")
    (version "1.10.0")
    (source
     (origin
       (method url-fetch)
       (uri (string-append "http://cairographics.org/releases/py2cairo-"
                           version ".tar.bz2"))
       (sha256
        (base32
         "0cblk919wh6w0pgb45zf48xwxykfif16qk264yga7h9fdkq3j16k"))))
    (arguments
     `(#:python ,python-2
       ,@(substitute-keyword-arguments (package-arguments python-pycairo)
           ((#:phases phases)
            `(alist-delete 'patch-waf ,phases))
           ((#:native-inputs native-inputs)
            `(alist-delete "python-waf" ,native-inputs)))))
    ;; Dual-licensed under LGPL 2.1 or Mozilla Public License 1.1
    (license (list license:lgpl2.1 license:mpl1.1))))
;; Python 2 bindings for GTK+ 2.x.  PyGTK was never ported to Python 3.
(define-public python2-pygtk
  (package
    (name "python2-pygtk")
    (version "2.24.0")
    (source
     (origin
       (method url-fetch)
       (uri (string-append "mirror://gnome/sources"
                           "/pygtk/" (version-major+minor version)
                           "/pygtk-" version ".tar.bz2"))
       (sha256
        (base32
         "04k942gn8vl95kwf0qskkv6npclfm31d78ljkrkgyqxxcni1w76d"))))
    (build-system gnu-build-system)
    (outputs '("out"
               "doc"))                  ;13 MiB of gtk-doc HTML
    (native-inputs
     `(("pkg-config" ,pkg-config)))
    (inputs
     `(("python" ,python-2)
       ("libglade" ,libglade)
       ("glib" ,glib)))
    (propagated-inputs
     `(("python-pycairo" ,python2-pycairo)     ;loaded at runtime
       ("python-pygobject" ,python2-pygobject-2) ;referenced in pc file
       ("gtk+" ,gtk+-2)))
    (arguments
     `(#:tests? #f
       #:phases (modify-phases %standard-phases
                  (add-before 'configure 'set-gtk-doc-directory
                    (lambda* (#:key outputs #:allow-other-keys)
                      ;; Install documentation to "doc".
                      (let ((doc (assoc-ref outputs "doc")))
                        (substitute* "docs/Makefile.in"
                          (("TARGET_DIR = \\$\\(datadir\\)")
                           (string-append "TARGET_DIR = " doc))))
                      #t))
                  (add-after 'configure 'fix-codegen
                    (lambda* (#:key inputs #:allow-other-keys)
                      ;; Point the codegen wrapper at pygobject's prefix,
                      ;; where the code generator actually lives.
                      (substitute* "pygtk-codegen-2.0"
                        (("^prefix=.*$")
                         (string-append
                          "prefix="
                          (assoc-ref inputs "python-pygobject") "\n")))
                      #t))
                  (add-after 'install 'install-pth
                    (lambda* (#:key inputs outputs #:allow-other-keys)
                      ;; pygtk's modules are stored in a subdirectory of
                      ;; python's site-packages directory.  Add a .pth file so
                      ;; that python will add that subdirectory to its module
                      ;; search path.
                      (let* ((out (assoc-ref outputs "out"))
                             (site (string-append out "/lib/python"
                                                  ,(version-major+minor
                                                    (package-version python-2))
                                                  "/site-packages")))
                        (call-with-output-file (string-append site "/pygtk.pth")
                          (lambda (port)
                            (format port "gtk-2.0~%"))))
                      #t)))))
    (home-page "http://www.pygtk.org/")
    (synopsis "Python bindings for GTK+")
    (description
     "PyGTK allows you to write full featured GTK programs in Python. It is
targetted at GTK 2.x, and can be used in conjunction with gnome-python to
write GNOME applications.")
    (license license:lgpl2.1+)))
;; Girara: minimalistic GTK+ 3 user-interface library (used by zathura).
(define-public girara
  (package
    (name "girara")
    (version "0.2.7")
    (source (origin
              (method url-fetch)
              (uri
               (string-append "https://pwmt.org/projects/girara/download/girara-"
                              version ".tar.gz"))
              (sha256
               (base32
                "1r9jbhf9n40zj4ddqv1q5spijpjm683nxg4hr5lnir4a551s7rlq"))))
    (native-inputs `(("pkg-config" ,pkg-config)
                     ("gettext" ,gettext-minimal)))
    (inputs `(("gtk+" ,gtk+)
              ("check" ,check)))        ;unit-test framework
    (arguments
     `(#:make-flags
       `(,(string-append "PREFIX=" (assoc-ref %outputs "out"))
         "COLOR=0" "CC=gcc")
       #:test-target "test"
       #:tests? #f                 ;tests fail with "cannot open display:"
       #:phases
       ;; There is no configure script; the Makefile honors PREFIX directly.
       (modify-phases %standard-phases
         (delete 'configure))))
    (build-system gnu-build-system)
    (home-page "https://pwmt.org/projects/girara/")
    (synopsis "Library for minimalistic gtk+3 user interfaces")
    (description "Girara is a library that implements a user interface that
focuses on simplicity and minimalism. Currently based on GTK+, a
cross-platform widget toolkit, it provides an interface that focuses on three
main components: a so-called view widget that represents the actual
application, an input bar that is used to execute commands of the
application and the status bar which provides the user with current
information.")
    (license license:zlib)))
;; gtk-doc: generates API reference documentation from comments in C code.
(define-public gtk-doc
  (package
    (name "gtk-doc")
    (version "1.25")
    (source (origin
              (method url-fetch)
              (uri (string-append "mirror://gnome/sources/" name "/"
                                  (version-major+minor version) "/"
                                  name "-" version ".tar.xz"))
              (sha256
               (base32
                "0hpxcij9xx9ny3gs9p0iz4r8zslw8wqymbyababiyl7603a6x90y"))))
    (build-system gnu-build-system)
    (arguments
     `(#:parallel-tests? #f
       #:phases
       (modify-phases %standard-phases
         (add-before
          'configure 'fix-docbook
          (lambda* (#:key inputs #:allow-other-keys)
            (substitute* "configure"
              ;; The configure check is overzealous about making sure that
              ;; things are in place -- it uses the xmlcatalog tool to make
              ;; sure that docbook-xsl is available, but this tool can only
              ;; look in one catalog file, unlike the $XML_CATALOG_FILES
              ;; variable that libxml2 defines.  Fool the test by using the
              ;; docbook-xsl catalog explicitly and get on with life.
              (("\"\\$XML_CATALOG_FILE\" \
\"http://docbook.sourceforge.net/release/xsl/\"")
               (string-append (car (find-files (assoc-ref inputs "docbook-xsl")
                                               "^catalog.xml$"))
                              " \"http://docbook.sourceforge.net/release/xsl/\"")))
            #t)))
       #:configure-flags
       (list (string-append "--with-xml-catalog="
                            (assoc-ref %build-inputs "docbook-xml")
                            "/xml/dtd/docbook/catalog.xml"))))
    (native-inputs
     `(("pkg-config" ,pkg-config)
       ("itstool" ,itstool)
       ("libxml" ,libxml2)
       ("gettext" ,gettext-minimal)
       ("bc" ,bc)))
    (inputs
     `(("perl" ,perl)
       ("python" ,python)
       ("xsltproc" ,libxslt)
       ("dblatex" ,dblatex)
       ("docbook-xml" ,docbook-xml-4.3)
       ("docbook-xsl" ,docbook-xsl)
       ("source-highlight" ,source-highlight)
       ("glib" ,glib)))
    (home-page "http://www.gtk.org/gtk-doc/")
    (synopsis "Documentation generator from C source code")
    (description
     "GTK-Doc generates API documentation from comments added to C code. It is
typically used to document the public API of GTK+ and GNOME libraries, but it
can also be used to document application code.")
    (license license:gpl2+)))
;; Classic theming engines for GTK+ 2.x.
(define-public gtk-engines
  (package
    (name "gtk-engines")
    (version "2.20.2")
    (source (origin
              (method url-fetch)
              (uri (string-append "mirror://gnome/sources/" name "/"
                                  (version-major+minor version) "/"
                                  name "-" version ".tar.bz2"))
              (sha256
               (base32
                "1db65pb0j0mijmswrvpgkdabilqd23x22d95hp5kwxvcramq1dhm"))))
    (build-system gnu-build-system)
    (arguments
     `(#:configure-flags
       `("--enable-animation")))
    (native-inputs
     `(("pkg-config" ,pkg-config)
       ("intltool" ,intltool)))
    (inputs
     ;; Don't propagate GTK+ to reduce "profile pollution".
     `(("gtk+" ,gtk+-2))) ; required by gtk-engines-2.pc
    ;; NOTE(review): home-page appears truncated by an import step -- restore
    ;; the upstream URL when known.
    (home-page "")
    (synopsis "Theming engines for GTK+ 2.x")
    (description
     "This package contains the standard GTK+ 2.x theming engines including
Clearlooks, Crux, High Contrast, Industrial, LighthouseBlue, Metal, Mist,
Redmond95 and ThinIce.")
    (license (list license:gpl2+ license:lgpl2.1+))))
;; Murrine: cairo-based theming engine for GTK+ 2.x.
(define-public murrine
  (package
    (name "murrine")
    (version "0.98.2")
    (source (origin
              (method url-fetch)
              (uri (string-append "mirror://gnome/sources/" name "/"
                                  (version-major+minor version) "/"
                                  name "-" version ".tar.xz"))
              (sha256
               (base32
                "129cs5bqw23i76h3nmc29c9mqkm9460iwc8vkl7hs4xr07h8mip9"))))
    (build-system gnu-build-system)
    (arguments
     `(#:configure-flags
       `("--enable-animation"
         "--enable-animationrtl")))
    (native-inputs
     `(("pkg-config" ,pkg-config)
       ("intltool" ,intltool)))
    (propagated-inputs
     `(("gtk+" ,gtk+-2)))
    ;; NOTE(review): home-page appears truncated by an import step -- restore
    ;; the upstream URL when known.
    (home-page "")
    (synopsis "Cairo-based theming engine for GTK+ 2.x")
    (description
     "Murrine is a cairo-based GTK+ theming engine. It is named after the
glass artworks done by Venicians glass blowers.")
    (license license:gpl2+)))
;; GtkSpell 3: inline spell checking for GtkTextView.
(define-public gtkspell3
  (package
    (name "gtkspell3")
    (version "3.0.8")
    (source (origin
              (method url-fetch)
              (uri (string-append "mirror://sourceforge/gtkspell/"
                                  version "/" name "-" version ".tar.gz"))
              (sha256
               (base32
                "1zrz5pz4ryvcssk898liynmy2wyxgj95ak7mp2jv7x62yzihq6h1"))))
    (build-system gnu-build-system)
    (native-inputs
     `(("intltool" ,intltool)
       ("pkg-config" ,pkg-config)))
    (inputs
     `(("enchant" ,enchant)             ;the spell-checking backend
       ("gobject-introspection" ,gobject-introspection)
       ("gtk+" ,gtk+)
       ("pango" ,pango)))
    (home-page "http://gtkspell.sourceforge.net")
    (synopsis "Spell-checking addon for GTK's TextView widget")
    (description
     "GtkSpell provides word-processor-style highlighting and replacement of
misspelled words in a GtkTextView widget.")
    (license license:gpl2+)))
;; ClipIt: lightweight GTK+ 2 clipboard manager, forked from Parcellite.
(define-public clipit
  (package
    (name "clipit")
    (version "1.4.2")
    (source (origin
              (method url-fetch)
              ;; NOTE(review): the download URL appears truncated by an import
              ;; step (upstream is CristianHenzel/ClipIt on GitHub) -- confirm
              ;; before building.
              (uri (string-append
                    "-"
                    version ".tar.gz"))
              (sha256
               (base32
                "0jrwn8qfgb15rwspdp1p8hb1nc0ngmpvgr87d4k3lhlvqg2cfqva"))))
    (build-system gnu-build-system)
    (native-inputs
     `(("intltool" ,intltool)
       ("pkg-config" ,pkg-config)))
    (inputs
     `(("gtk+" ,gtk+-2)))
    ;; NOTE(review): home-page also appears truncated -- confirm upstream URL.
    (home-page "")
    (synopsis "Lightweight GTK+ clipboard manager")
    (description
     "ClipIt is a clipboard manager with features such as a history, search
thereof, global hotkeys and clipboard item actions. It was forked from
Parcellite and adds bugfixes and features.")
    (license license:gpl2+)))
| null | https://raw.githubusercontent.com/ragkousism/Guix-on-Hurd/e951bb2c0c4961dc6ac2bda8f331b9c4cee0da95/gnu/packages/gtk.scm | scheme | GNU Guix --- Functional package management for GNU
This file is part of GNU Guix.
you can redistribute it and/or modify it
either version 3 of the License , or ( at
your option) any later version.
GNU Guix is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
required by atk.pc
glib-mkenums, etc.
g-ir-compiler, etc.
see -guix/2013-06/msg00085.html
There are all in the Requires or Requires.private field of '.pc'.
optional libxft support.
glib-mkenums, etc.
g-ir-compiler, etc.
Allow 'bin/ganv_bench' to find libganv-1.so.
no check target
for glib-genmarshal, etc.
This is the last version which builds against gtk+2
for glib-genmarshal, etc.
For testing.
As per the pkg-config file.
Unfortunately, some of the tests in "make check" are highly dependent
on the environment therefore, some black magic is required.
There must be a running X server and make check doesn't start one.
Therefore we must do it.
The .lang files must be found in $XDG_DATA_HOME/gtksourceview-2.0
Finally, the mimetypes must be available.
Tests require a running X server.
For the missing /etc/machine-id.
for glib-genmarshal, etc.
For testing.
gtksourceview-3.0.pc refers to all these.
XXX FIXME: This test fails on armhf machines with:
SKIP Not enough memory to load bitmap image
XXX FIXME: This test fails with:
ERROR:pixbuf-jpeg.c:74:test_type9_rotation_exif_tag:
assertion failed (error == NULL): Data differ
(gdk-pixbuf-error-quark, 0)
Required by gdk-pixbuf-2.0.pc
Used for testing and required at runtime.
glib-mkenums, etc.
g-ir-compiler, etc.
closure size.
Run test-suite under a dbus session.
Don't fail on missing '/etc/machine-id'.
atspi-2.pc refers to all these.
Run test-suite under a dbus session.
for testing
FIXME: re-enable tests requiring an X server
mate.scm will also need to be updated.
on non-intel systems now.
("colord" ,colord)
for printing support
("rest" ,rest)
to "doc".
Disable most tests, failing in the chroot with the message:
failed to read
machine uuid: Failed to open "/etc/machine-id": No such file or
directory.
See the manual page for dbus-uuidgen to correct this issue.
Move desktop files into 'bin' to avoid cycle references.
Guile bindings.
Install modules under $out/share/guile/site/2.0.
By default 'vector-types.scm' is not installed, so do
it here.
for (unit-test)
These are used by the (present …) modules.
("corba" ,corba) ; not packaged yet
not packaged yet
C++ bindings.
The examples lack -lcairo.
for 'glib-compile-resources'
Tests such as 'object_move/test' require a running
X server.
Don't fail because of the missing /etc/machine-id.
required by libsigc++
The bundled `waf' doesn't work with python-3.4.x.
loaded at runtime
referenced in pc file
Install documentation to "doc".
pygtk's modules are stored in a subdirectory of
that python will add that subdirectory to its module
search path.
The configure check is overzealous about making sure that
sure that docbook-xsl is available, but this tool can only
docbook-xsl catalog explicitly and get on with life.
Don't propagate GTK+ to reduce "profile pollution".
required by gtk-engines-2.pc | Copyright © 2013 < >
Copyright © 2013 , 2014 , 2015 , 2016 < >
Copyright © 2014 , 2015 , 2017 < >
Copyright © 2014 < >
Copyright © 2015 < >
Copyright © 2015 < >
Copyright © 2015 < >
Copyright © 2015 < >
Copyright © 2015 < >
Coypright © 2015 , 2016 < >
Copyright © 2016 < >
Copyright © 2016 < >
Copyright © 2016 >
Copyright © 2016 < >
Coypright © 2016 ng0 < >
under the terms of the GNU General Public License as published by
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(define-module (gnu packages gtk)
#:use-module ((guix licenses) #:prefix license:)
#:use-module (guix utils)
#:use-module (guix packages)
#:use-module (guix download)
#:use-module (guix git-download)
#:use-module (guix build-system gnu)
#:use-module (guix build-system python)
#:use-module (guix build-system waf)
#:use-module (gnu packages)
#:use-module (gnu packages algebra)
#:use-module (gnu packages autotools)
#:use-module (gnu packages texinfo)
#:use-module (gnu packages check)
#:use-module (gnu packages compression)
#:use-module (gnu packages docbook)
#:use-module (gnu packages enchant)
#:use-module (gnu packages fontutils)
#:use-module (gnu packages gettext)
#:use-module (gnu packages ghostscript)
#:use-module (gnu packages gl)
#:use-module (gnu packages glib)
#:use-module (gnu packages gnome)
#:use-module (gnu packages icu4c)
#:use-module (gnu packages image)
#:use-module (gnu packages libffi)
#:use-module (gnu packages pdf)
#:use-module (gnu packages perl)
#:use-module (gnu packages pkg-config)
#:use-module (gnu packages pretty-print)
#:use-module (gnu packages python)
#:use-module (gnu packages guile)
#:use-module (gnu packages cups)
#:use-module (gnu packages xml)
#:use-module (gnu packages xorg)
#:use-module (gnu packages xdisorg))
(define-public atk
(package
(name "atk")
(version "2.22.0")
(source (origin
(method url-fetch)
(uri (string-append "mirror/" name "/"
(version-major+minor version) "/"
name "-" version ".tar.xz"))
(sha256
(base32
"1dj47ndvspa7lghw1jvjhv3v08q5f9ab5rb395mcgjbl975gajfk"))))
(build-system gnu-build-system)
(outputs '("out" "doc"))
(arguments
`(#:configure-flags
(list (string-append "--with-html-dir="
(assoc-ref %outputs "doc")
"/share/gtk-doc/html"))))
(native-inputs
`(("pkg-config" ,pkg-config)
(synopsis "GNOME accessibility toolkit")
(description
"ATK provides the set of accessibility interfaces that are implemented
by other toolkits and applications. Using the ATK interfaces, accessibility
tools have full access to view and control running applications.")
(license license:lgpl2.0+)
(home-page "/")))
(define-public cairo
(package
(name "cairo")
(version "1.14.8")
(source (origin
(method url-fetch)
(uri (string-append "-"
version ".tar.xz"))
(sha256
(base32
"082ypjlh03ss5616amgjp9ap3xwwccyh2knyyrj1a4d4x65dkwni"))
(patches (search-patches "cairo-CVE-2016-9082.patch"))))
(build-system gnu-build-system)
(propagated-inputs
`(("fontconfig" ,fontconfig)
("freetype" ,freetype)
("glib" ,glib)
("libpng" ,libpng)
("libx11" ,libx11)
("libxext" ,libxext)
("libxrender" ,libxrender)
("pixman" ,pixman)))
(inputs
`(("ghostscript" ,ghostscript)
("libspectre" ,libspectre)
("poppler" ,poppler)
("xextproto" ,xextproto)
("zlib" ,zlib)))
(native-inputs
`(("pkg-config" ,pkg-config)
("python" ,python-wrapper)))
(arguments
needed for GNU Icecat
(synopsis "2D graphics library")
(description
"Cairo is a 2D graphics library with support for multiple output devices.
Currently supported output targets include the X Window System (via both
Xlib and XCB), Quartz, Win32, image buffers, PostScript, PDF, and SVG file
output. Experimental backends include OpenGL, BeOS, OS/2, and DirectFB.
Cairo is designed to produce consistent output on all output media while
taking advantage of display hardware acceleration when available
eg. through the X Render Extension).
The cairo API provides operations similar to the drawing operators of
PostScript and PDF. Operations in cairo including stroking and filling cubic
Bézier splines, transforming and compositing translucent images, and
antialiased text rendering. All drawing operations can be transformed by any
affine transformation (scale, rotation, shear, etc.).")
or Mozilla Public License 1.1
(home-page "/")))
(define-public cairo-xcb
(package
(inherit cairo)
(name "cairo-xcb")
(inputs
`(("mesa" ,mesa)
,@(package-inputs cairo)))
(arguments
`(#:tests? #f
#:configure-flags
'("--enable-xlib-xcb" "--enable-gl" "--enable-egl")))
(synopsis "2D graphics library (with X11 support)")))
(define-public harfbuzz
(package
(name "harfbuzz")
(version "1.4.1")
(source (origin
(method url-fetch)
(uri (string-append "/"
"harfbuzz/release/harfbuzz-"
version ".tar.bz2"))
(sha256
(base32
"1g8mndf0p0fzjfvxrprga84zvqq186gbddnw6wbna7cscfmpz8l5"))))
(build-system gnu-build-system)
(outputs '("out"
160 K , only hb - view depend on cairo
(inputs
`(("cairo" ,cairo)))
(propagated-inputs
`(("glib" ,glib)
("graphite2" ,graphite2)
("icu4c" ,icu4c)))
(native-inputs
`(("gobject-introspection" ,gobject-introspection)
("pkg-config" ,pkg-config)
incompatible with Python 3 ( print syntax )
(arguments
`(#:configure-flags `("--with-graphite2"
"--with-gobject"
,(string-append
"--bindir=" (assoc-ref %outputs "bin") "/bin"))))
(synopsis "OpenType text shaping engine")
(description
"HarfBuzz is an OpenType text shaping engine.")
(license (license:x11-style "file"
"See 'COPYING' in the distribution."))
(home-page "/")))
(define-public pango
(package
(name "pango")
(version "1.40.3")
(source (origin
(method url-fetch)
(uri (string-append "mirror/"
(version-major+minor version) "/"
name "-" version ".tar.xz"))
(sha256
(base32
"1lqi4yncw5q0v7g5makzxyp18g5cksqyld8m1wx0qli8wxf8pfmb"))))
(build-system gnu-build-system)
(propagated-inputs
`(("cairo" ,cairo)
("harfbuzz" ,harfbuzz)))
(inputs
`(("zlib" ,zlib)
Some packages , such as Openbox , expect Pango to be built with the
("libxft" ,libxft)))
(native-inputs
`(("pkg-config" ,pkg-config)
(synopsis "GNOME text and font handling library")
(description
"Pango is the core text and font handling library used in GNOME
applications. It has extensive support for the different writing systems
used throughout the world.")
(license license:lgpl2.0+)
(home-page "/")))
(define-public pangox-compat
(package
(name "pangox-compat")
(version "0.0.2")
(source (origin
(method url-fetch)
(uri (string-append "mirror/" name "/"
(version-major+minor version) "/"
name "-" version ".tar.xz"))
(sha256
(base32
"0ip0ziys6mrqqmz4n71ays0kf5cs1xflj1gfpvs4fgy2nsrr482m"))))
(build-system gnu-build-system)
(inputs
`(("glib" ,glib)
("pango" ,pango)))
(native-inputs
`(("intltool" ,intltool)
("pkg-config" ,pkg-config)))
(home-page "")
(synopsis "Obsolete pango functions")
(description "Pangox was a X backend to pango. It is now obsolete and no
longer provided by recent pango releases. pangox-compat provides the
functions which were removed.")
(license license:lgpl2.0+)))
(define-public ganv
(package
(name "ganv")
(version "1.4.2")
(source (origin
(method url-fetch)
(uri (string-append "-"
version ".tar.bz2"))
(sha256
(base32
"0g7s5mp14qgbfjdql0k1s8464r21g47ssn5dws6jazsnw6njhl0l"))))
(build-system waf-build-system)
(arguments
`(#:phases (alist-cons-before
'configure 'set-flags
(lambda* (#:key outputs #:allow-other-keys)
Compile with C++11 , required by .
(setenv "CXXFLAGS" "-std=c++11")
(setenv "LDFLAGS"
(string-append "-Wl,-rpath="
(assoc-ref outputs "out") "/lib")))
%standard-phases)
(inputs
`(("gtk" ,gtk+-2)
("gtkmm" ,gtkmm-2)))
(native-inputs
("pkg-config" ,pkg-config)))
(home-page "/")
(synopsis "GTK+ widget for interactive graph-like environments")
(description
"Ganv is an interactive GTK+ widget for interactive “boxes and lines” or
graph-like environments, e.g. modular synths or finite state machine
diagrams.")
(license license:gpl3+)))
(define-public ganv-devel
(let ((commit "31685d283e9b811b61014f820c42807f4effa071")
(revision "1"))
(package
(inherit ganv)
(name "ganv")
(version (string-append "1.4.2-" revision "."
(string-take commit 9)))
(source (origin
(method git-fetch)
(uri (git-reference
(url "")
(commit commit)))
(sha256
(base32
"0xmbykdl42jn9cgzrqrys5lng67d26nk5xq10wkkvjqldiwdck56")))))))
(define-public gtksourceview-2
(package
(name "gtksourceview")
(source (origin
(method url-fetch)
(uri (string-append "mirror/" name "/"
(version-major+minor version) "/"
name "-" version ".tar.bz2"))
(sha256
(base32
"07hrabhpl6n8ajz10s0d960jdwndxs87szxyn428mpxi8cvpg1f5"))))
(build-system gnu-build-system)
(native-inputs
`(("intltool" ,intltool)
("pkg-config" ,pkg-config)
("xorg-server" ,xorg-server)
("shared-mime-info" ,shared-mime-info)))
(propagated-inputs
`(("gtk" ,gtk+-2)
("libxml2" ,libxml2)))
(arguments
`(#:phases
(alist-cons-before
'check 'start-xserver
(lambda* (#:key inputs #:allow-other-keys)
(let ((xorg-server (assoc-ref inputs "xorg-server"))
(mime (assoc-ref inputs "shared-mime-info")))
(system (format #f "~a/bin/Xvfb :1 &" xorg-server))
(setenv "DISPLAY" ":1")
(system "ln -s gtksourceview gtksourceview-2.0")
(setenv "XDG_DATA_HOME" (getcwd))
(setenv "XDG_DATA_DIRS" (string-append mime "/share/")) ))
%standard-phases)))
(synopsis "Widget that extends the standard GTK+ 2.x 'GtkTextView' widget")
(description
"GtkSourceView is a portable C library that extends the standard GTK+
framework for multiline text editing with support for configurable syntax
highlighting, unlimited undo/redo, search and replace, a completion framework,
printing and other features typical of a source code editor.")
(license license:lgpl2.0+)
(home-page "/")))
(define-public gtksourceview
(package
(name "gtksourceview")
(version "3.22.2")
(source (origin
(method url-fetch)
(uri (string-append "mirror/" name "/"
(version-major+minor version) "/"
name "-" version ".tar.xz"))
(sha256
(base32
"0pmgff3p9q1z500aiqfn5l4mmij4yfi4qhq8fxscqc89vlql5s3c"))))
(build-system gnu-build-system)
(arguments
'(#:phases
(modify-phases %standard-phases
(add-before
'check 'pre-check
(lambda* (#:key inputs #:allow-other-keys)
(let ((xorg-server (assoc-ref inputs "xorg-server")))
(system (format #f "~a/bin/Xvfb :1 &" xorg-server))
(setenv "DISPLAY" ":1")
(setenv "DBUS_FATAL_WARNINGS" "0")
#t))))))
(native-inputs
("intltool" ,intltool)
("itstool" ,itstool)
("gobject-introspection" ,gobject-introspection)
("pkg-config" ,pkg-config)
("vala" ,vala)
("xorg-server" ,xorg-server)
("shared-mime-info" ,shared-mime-info)))
(propagated-inputs
`(("glib" ,glib)
("gtk+" ,gtk+)
("libxml2" ,libxml2)))
(home-page "")
(synopsis "GNOME source code widget")
(description "GtkSourceView is a text widget that extends the standard
GTK+ text widget GtkTextView. It improves GtkTextView by implementing syntax
highlighting and other features typical of a source code editor.")
(license license:lgpl2.1+)))
(define-public gdk-pixbuf
(package
(name "gdk-pixbuf")
(version "2.36.3")
(source (origin
(method url-fetch)
(uri (string-append "mirror/" name "/"
(version-major+minor version) "/"
name "-" version ".tar.xz"))
(sha256
(base32
"1v1rssjd8p5s3lymsfhiq5mbs2pc0h1r6jd0asrwdbrign7i68sj"))))
(build-system gnu-build-system)
(arguments
'(#:configure-flags '("--with-x11")
#:phases
(modify-phases %standard-phases
(add-after
'unpack 'disable-failing-tests
(lambda _
(substitute* "tests/Makefile.in"
ERROR : cve-2015 - 4491 - too few tests run ( expected 4 , got 2 )
(("cve-2015-4491\\$\\(EXEEXT\\) ") "")
(("pixbuf-jpeg\\$\\(EXEEXT\\) ") ""))
#t)))))
(propagated-inputs
("glib" ,glib)
("libpng" ,libpng)
("shared-mime-info" ,shared-mime-info)))
(inputs
`(("libjpeg" ,libjpeg)
("libtiff" ,libtiff)
("libx11" ,libx11)))
(native-inputs
`(("pkg-config" ,pkg-config)
(synopsis "GNOME image loading and manipulation library")
(description
"GdkPixbuf is a library for image loading and manipulation developed
in the GNOME project.")
(license license:lgpl2.0+)
(home-page "-pixbuf/")))
To build gdk - pixbuf with SVG support , we need librsvg , and librsvg depends
on gdk - pixbuf , so this new varibale . Also , adds 90MiB to the
(define-public gdk-pixbuf+svg
(package (inherit gdk-pixbuf)
(name "gdk-pixbuf+svg")
(inputs
`(("librsvg" ,librsvg)
,@(package-inputs gdk-pixbuf)))
(arguments
'(#:configure-flags '("--with-x11")
tested by the - pixbuf package already
#:phases
(modify-phases %standard-phases
(add-after 'install 'register-svg-loader
(lambda* (#:key inputs outputs #:allow-other-keys)
(let* ((out (assoc-ref outputs "out"))
(librsvg (assoc-ref inputs "librsvg"))
(loaders
(append
(find-files out "^libpixbufloader-.*\\.so$")
(find-files librsvg "^libpixbufloader-.*\\.so$")))
(gdk-pixbuf-query-loaders
(string-append out "/bin/gdk-pixbuf-query-loaders")))
(zero? (apply system* `(,gdk-pixbuf-query-loaders
"--update-cache" ,@loaders)))))))))
(synopsis
"GNOME image loading and manipulation library, with SVG support")))
(define-public at-spi2-core
(package
(name "at-spi2-core")
(version "2.22.0")
(source (origin
(method url-fetch)
(uri (string-append "mirror/" name "/"
(version-major+minor version) "/"
name "-" version ".tar.xz"))
(sha256
(base32
"02n8ybhg8344mpjwvkhnzvr0qbvvl6ryi2q9irwhi0ri46ps6pj1"))))
(build-system gnu-build-system)
(outputs '("out" "doc"))
(arguments
'(#:configure-flags
(list (string-append "--with-html-dir="
(assoc-ref %outputs "doc")
"/share/gtk-doc/html"))
#:phases
(modify-phases %standard-phases
(replace 'check
(lambda _
(setenv "DBUS_FATAL_WARNINGS" "0")
(zero? (system* "dbus-launch" "make" "check")))))))
(propagated-inputs
`(("dbus" ,dbus)
("glib" ,glib)))
(inputs
`(("libxi" ,libxi)
("libxtst" ,libxtst)))
(native-inputs
`(("gobject-introspection" ,gobject-introspection)
("intltool" ,intltool)
("pkg-config" ,pkg-config)))
(synopsis "Assistive Technology Service Provider Interface, core components")
(description
"The Assistive Technology Service Provider Interface, core components,
is part of the GNOME accessibility project.")
(license license:lgpl2.0+)
(home-page "/")))
(define-public at-spi2-atk
(package
(name "at-spi2-atk")
(version "2.22.0")
(source (origin
(method url-fetch)
(uri (string-append "mirror/" name "/"
(version-major+minor version) "/"
name "-" version ".tar.xz"))
(sha256
(base32
"1h8k271ad78smm41c9bmw5dc4gki0wfy324cn2g25svkp2zfvgg8"))))
(build-system gnu-build-system)
(arguments
'(#:phases
(modify-phases %standard-phases
(replace 'check
(lambda _
(setenv "DBUS_FATAL_WARNINGS" "0")
(zero? (system* "dbus-launch" "make" "check")))))))
(propagated-inputs
required by atk-bridge-2.0.pc
(inputs
`(("atk" ,atk)))
(native-inputs
("pkg-config" ,pkg-config)))
(synopsis "Assistive Technology Service Provider Interface, ATK bindings")
(description
"The Assistive Technology Service Provider Interface
is part of the GNOME accessibility project.")
(license license:lgpl2.0+)
(home-page "/")))
(define-public gtk+-2
(package
(name "gtk+")
(version "2.24.31")
(source (origin
(method url-fetch)
(uri (string-append "mirror/" name "/"
(version-major+minor version) "/"
name "-" version ".tar.xz"))
(sha256
(base32
"0n26jm09n03nqbd00d2ij63xrby3vik56sk5yj6w1vy768kr5hb8"))
(patches (search-patches "gtk2-respect-GUIX_GTK2_PATH.patch"
"gtk2-respect-GUIX_GTK2_IM_MODULE_FILE.patch"
"gtk2-theme-paths.patch"))))
(build-system gnu-build-system)
(outputs '("out" "doc"))
(propagated-inputs
`(("atk" ,atk)
("gdk-pixbuf" ,gdk-pixbuf+svg)
("pango" ,pango)))
(inputs
`(("cups" ,cups)
("libxcomposite" ,libxcomposite)
("libxcursor" ,libxcursor)
("libxdamage" ,libxdamage)
("libxi" ,libxi)
("libxinerama" ,libxinerama)
("libxrandr" ,libxrandr)))
(native-inputs
`(("perl" ,perl)
("gettext" ,gettext-minimal)
("glib" ,glib "bin")
("gobject-introspection" ,gobject-introspection)
("pkg-config" ,pkg-config)
("python-wrapper" ,python-wrapper)))
(arguments
`(#:configure-flags
(list "--with-xinput=yes"
(string-append "--with-html-dir="
(assoc-ref %outputs "doc")
"/share/gtk-doc/html"))
#:phases
(alist-cons-before
'configure 'disable-tests
(lambda _
(substitute* "gtk/Makefile.in"
(("SUBDIRS = theme-bits . tests") "SUBDIRS = theme-bits .")))
%standard-phases)))
(native-search-paths
(list (search-path-specification
(variable "GUIX_GTK2_PATH")
(files '("lib/gtk-2.0")))))
(synopsis "Cross-platform toolkit for creating graphical user interfaces")
(description
"GTK+, or the GIMP Toolkit, is a multi-platform toolkit for creating
graphical user interfaces. Offering a complete set of widgets, GTK+ is
suitable for projects ranging from small one-off tools to complete
application suites.")
(license license:lgpl2.0+)
(home-page "/")))
(define-public gtk+
(package (inherit gtk+-2)
(name "gtk+")
NOTE : When updating the version of ' gtk+ ' , the hash of ' mate - themes ' in
(version "3.22.6")
(source (origin
(method url-fetch)
(uri (string-append "mirror/" name "/"
(version-major+minor version) "/"
name "-" version ".tar.xz"))
(sha256
(base32
"0bqpx8825b1fdjmz14wq20zq58gq1yi1p5xjps8l6zqid8hmm9zb"))
(patches (search-patches "gtk3-respect-GUIX_GTK3_PATH.patch"
"gtk3-respect-GUIX_GTK3_IM_MODULE_FILE.patch"))))
(outputs '("out" "bin" "doc"))
(propagated-inputs
`(("at-spi2-atk" ,at-spi2-atk)
("atk" ,atk)
("gdk-pixbuf" ,gdk-pixbuf+svg)
("libepoxy" ,libepoxy)
("libxcursor" ,libxcursor)
("libxi" ,libxi)
("libxinerama" ,libxinerama)
("libxdamage" ,libxdamage)
("pango" ,pango)))
(inputs
`(("libxml2" ,libxml2)
XXX : colord depends on mozjs ( through polkit ) , which fails on
XXX : rest depends on p11 - kit , which fails on now .
("json-glib" ,json-glib)))
(native-inputs
`(("perl" ,perl)
("glib" ,glib "bin")
("gettext" ,gettext-minimal)
("pkg-config" ,pkg-config)
("gobject-introspection" ,gobject-introspection)
("python-wrapper" ,python-wrapper)
("xorg-server" ,xorg-server)))
(arguments
47 MiB goes to " out " ( 24 of which is locale data ! ) , and 26 MiB goes
#:configure-flags (list (string-append "--with-html-dir="
(assoc-ref %outputs "doc")
"/share/gtk-doc/html"))
#:phases (modify-phases %standard-phases
(add-before 'configure 'pre-configure
(lambda _
(substitute* "testsuite/Makefile.in"
(("SUBDIRS = gdk gtk a11y css reftests")
"SUBDIRS = gdk"))
#t))
(add-after 'install 'move-desktop-files
(lambda* (#:key outputs #:allow-other-keys)
(let ((out (assoc-ref outputs "out"))
(bin (assoc-ref outputs "bin")))
(mkdir-p (string-append bin "/share"))
(rename-file (string-append out "/share/applications")
(string-append bin "/share/applications"))
#t))))))
(native-search-paths
(list (search-path-specification
(variable "GUIX_GTK3_PATH")
(files '("lib/gtk-3.0")))))))
(define-public guile-cairo
(package
(name "guile-cairo")
(version "1.4.1")
(source (origin
(method url-fetch)
(uri (string-append
"-cairo/guile-cairo-"
version
".tar.gz"))
(sha256
(base32
"1f5nd9n46n6cwfl1byjml02q3y2hgn7nkx98km1czgwarxl7ws3x"))))
(build-system gnu-build-system)
(arguments
'(#:phases (alist-cons-before
'configure 'set-module-directory
(lambda* (#:key outputs #:allow-other-keys)
(let ((out (assoc-ref outputs "out")))
(substitute* "Makefile.in"
(("scmdir = ([[:graph:]]+).*" _ value)
(string-append "scmdir = " value "/2.0\n")))
(substitute* "cairo/Makefile.in"
(("moduledir = ([[:graph:]]+).*" _ value)
(string-append "moduledir = "
"$(prefix)/share/guile/site/2.0/cairo\n'")))))
(alist-cons-after
'install 'install-missing-file
(lambda* (#:key outputs #:allow-other-keys)
(let ((out (assoc-ref outputs "out")))
(copy-file "cairo/vector-types.scm"
(string-append out "/share/guile/site/2.0"
"/cairo/vector-types.scm"))))
%standard-phases))))
(inputs
`(("guile-lib" ,guile-lib)
("expat" ,expat)
("guile" ,guile-2.0)))
(propagated-inputs
The .pc file refers to ' cairo ' .
`(("cairo" ,cairo)))
(native-inputs
`(("pkg-config" ,pkg-config)))
(home-page "-cairo/")
(synopsis "Cairo bindings for GNU Guile")
(description
"Guile-Cairo wraps the Cairo graphics library for Guile Scheme.
Guile-Cairo is complete, wrapping almost all of the Cairo API. It is API
stable, providing a firm base on which to do graphics work. Finally, and
importantly, it is pleasant to use. You get a powerful and well-maintained
graphics library with all of the benefits of Scheme: memory management,
exceptions, macros, and a dynamic programming environment.")
(license license:lgpl3+)))
(define-public guile-rsvg
(package
(name "guile-rsvg")
(version "2.18.1")
(source (origin
(method url-fetch)
(uri (string-append "-rsvg/"
name "-" version ".tar.gz"))
(sha256
(base32
"136f236iw3yrrz6pkkp1ma9c5mrs5icqha6pnawinqpk892r3jh7"))
(patches (search-patches "guile-rsvg-pkgconfig.patch"))
(modules '((guix build utils)))
(snippet
'(substitute* (find-files "." "Makefile\\.am")
(("/share/guile/site")
"/share/guile/site/2.0")))))
(build-system gnu-build-system)
(arguments
`(#:phases (modify-phases %standard-phases
(add-before 'configure 'bootstrap
(lambda _
(zero? (system* "autoreconf" "-vfi")))))))
(native-inputs `(("pkg-config" ,pkg-config)
("autoconf" ,autoconf)
("automake" ,automake)
("libtool" ,libtool)
("texinfo" ,texinfo)))
(inputs `(("guile" ,guile-2.0)
("librsvg" ,librsvg)
(propagated-inputs `(("guile-cairo" ,guile-cairo)))
(synopsis "Render SVG images using Cairo from Guile")
(description
"Guile-RSVG wraps the RSVG library for Guile, allowing you to render SVG
images onto Cairo surfaces.")
(home-page "-rsvg/")
(license license:lgpl2.1+)))
(define-public guile-present
(package
(name "guile-present")
(version "0.3.0")
(source (origin
(method url-fetch)
(uri (string-append "-present/"
"guile-present-" version ".tar.gz"))
(sha256
(base32
"1qam447m05sxxv6x8dlzg7qnyfc4dh8apjw1idpfhpns671gfr6m"))
(patches (search-patches "guile-present-coding.patch"))
(modules '((guix build utils)))
(snippet
'(substitute* "Makefile.in"
(("godir = .*$")
"godir = $(moddir)\n")))))
(build-system gnu-build-system)
(arguments
'(#:phases (alist-cons-after
'install 'post-install
(lambda* (#:key inputs outputs #:allow-other-keys)
(let* ((out (assoc-ref outputs "out"))
(bin (string-append out "/bin"))
(guile (assoc-ref inputs "guile")))
(substitute* (find-files bin ".*")
(("guile")
(string-append guile "/bin/guile -L "
out "/share/guile/site/2.0 -C "
out "/share/guile/site/2.0 ")))))
%standard-phases)))
(native-inputs `(("pkg-config" ,pkg-config)))
(inputs `(("guile" ,guile-2.0)))
(propagated-inputs
`(("guile-lib" ,guile-lib)
("guile-cairo" ,guile-cairo)
("guile-rsvg" ,guile-rsvg)))
(home-page "-present/")
(synopsis "Create SVG or PDF presentations in Guile")
(description
"Guile-Present defines a declarative vocabulary for presentations,
together with tools to render presentation documents as SVG or PDF.
Guile-Present can be used to make presentations programmatically, but also
includes a tools to generate PDF presentations out of Org mode and Texinfo
documents.")
(license license:lgpl3+)))
(define-public guile-gnome
(package
(name "guile-gnome")
(version "2.16.4")
(source (origin
(method url-fetch)
(uri
(string-append "mirror/" name
"/guile-gnome-platform/guile-gnome-platform-"
version ".tar.gz"))
(sha256
(base32
"1hqnqbb2lmr3hgbcv9kds1himn3av6h0lkk0zll8agcrsn7d9axd"))))
(build-system gnu-build-system)
(native-inputs
`(("pkg-config" ,pkg-config)
("atk" ,atk)
("gconf" ,gconf)
("gobject-introspection" ,gobject-introspection)
("gnome-vfs" ,gnome-vfs)
("gdk-pixbuf" ,gdk-pixbuf)
("gtk+" ,gtk+-2)
("libglade" ,libglade)
("libgnome" ,libgnome)
("libgnomecanvas" ,libgnomecanvas)
("libgnomeui" ,libgnomeui)
("pango" ,pango)
("libffi" ,libffi)
("glib" ,glib)))
(inputs `(("guile" ,guile-2.0)))
(propagated-inputs
`(("guile-cairo" ,guile-cairo)
("g-wrap" ,g-wrap)
("guile-lib" ,guile-lib)))
(arguments
FIXME
#:phases (modify-phases %standard-phases
(add-before 'configure 'pre-configure
(lambda* (#:key outputs #:allow-other-keys)
(let ((out (assoc-ref outputs "out")))
(substitute* (find-files "." "^Makefile.in$")
(("guilesite :=.*guile/site" all)
(string-append all "/2.0")))
#t))))))
(outputs '("out" "debug"))
(synopsis "Guile interface for GTK+ programming for GNOME")
(description
"Includes guile-clutter, guile-gnome-gstreamer,
guile-gnome-platform (GNOME developer libraries), and guile-gtksourceview.")
(home-page "-gnome/")
(license license:gpl2+)))
(define-public cairomm
(package
(name "cairomm")
(version "1.12.2")
(source (origin
(method url-fetch)
(uri (string-append "/"
name "-" version ".tar.gz"))
(sha256
(base32
"16fmigxsaz85c3lgcls7biwyz8zy8c8h3jndfm54cxxas3a7zi25"))))
(build-system gnu-build-system)
(arguments
'(#:make-flags '("LDFLAGS=-lcairo")))
(native-inputs `(("pkg-config" ,pkg-config)))
(propagated-inputs
`(("libsigc++" ,libsigc++)
("freetype" ,freetype)
("fontconfig" ,fontconfig)
("cairo" ,cairo)))
(home-page "/")
(synopsis "C++ bindings to the Cairo 2D graphics library")
(description
"Cairomm provides a C++ programming interface to the Cairo 2D graphics
library.")
(license license:lgpl2.0+)))
(define-public pangomm
(package
(name "pangomm")
(version "2.40.1")
(source (origin
(method url-fetch)
(uri (string-append "mirror/" name "/"
(version-major+minor version) "/"
name "-" version ".tar.xz"))
(sha256
(base32
"1bz3gciff23bpw9bqc4v2l3lkq9w7394v3a4jxkvx0ap5lmfwqlp"))))
(build-system gnu-build-system)
(native-inputs `(("pkg-config" ,pkg-config)))
(propagated-inputs
`(("cairo" ,cairo)
("cairomm" ,cairomm)
("glibmm" ,glibmm)
("pango" ,pango)))
(home-page "/")
(synopsis "C++ interface to the Pango text rendering library")
(description
"Pangomm provides a C++ programming interface to the Pango text rendering
library.")
(license license:lgpl2.1+)))
(define-public atkmm
(package
(name "atkmm")
(version "2.24.2")
(source (origin
(method url-fetch)
(uri (string-append "mirror/" name "/"
(version-major+minor version) "/"
name "-" version ".tar.xz"))
(sha256
(base32
"1gaqwhviadsmy0fsr47686yglv1p4mpkamj0in127bz2b5bki5gz"))))
(build-system gnu-build-system)
(native-inputs `(("pkg-config" ,pkg-config)))
(propagated-inputs
`(("glibmm" ,glibmm) ("atk" ,atk)))
(home-page "")
(synopsis "C++ interface to the ATK accessibility library")
(description
"ATKmm provides a C++ programming interface to the ATK accessibility
toolkit.")
(license license:lgpl2.1+)))
(define-public gtkmm
(package
(name "gtkmm")
(version "3.22.0")
(source (origin
(method url-fetch)
(uri (string-append "mirror/" name "/"
(version-major+minor version) "/"
name "-" version ".tar.xz"))
(sha256
(base32
"1x8l0ny6r3ym53z82q9d5fan4m9vi93xy3b3hj1hrclgc95lvnh5"))))
(build-system gnu-build-system)
(native-inputs `(("pkg-config" ,pkg-config)
("xorg-server" ,xorg-server)))
(propagated-inputs
`(("pangomm" ,pangomm)
("cairomm" ,cairomm)
("atkmm" ,atkmm)
("gtk+" ,gtk+)
("glibmm" ,glibmm)))
(arguments
'(#:phases (modify-phases %standard-phases
(add-before 'check 'run-xvfb
(lambda* (#:key inputs #:allow-other-keys)
(let ((xorg-server (assoc-ref inputs "xorg-server")))
(system (string-append xorg-server "/bin/Xvfb :1 &"))
(setenv "DISPLAY" ":1")
(setenv "DBUS_FATAL_WARNINGS" "0")
#t))))))
(home-page "/")
(synopsis
"C++ interface to the GTK+ graphical user interface library")
(description
"gtkmm is the official C++ interface for the popular GUI library GTK+.
Highlights include typesafe callbacks, and a comprehensive set of widgets that
are easily extensible via inheritance. You can create user interfaces either
in code or with the Glade User Interface designer, using libglademm. There's
extensive documentation, including API reference and a tutorial.")
(license license:lgpl2.1+)))
(define-public gtkmm-2
(package (inherit gtkmm)
(name "gtkmm")
(version "2.24.5")
(source (origin
(method url-fetch)
(uri (string-append "mirror/" name "/"
(version-major+minor version) "/"
name "-" version ".tar.xz"))
(sha256
(base32
"0wkbzvsx4kgw16f6xjdc1dz7f77ldngdila4yi5lw2zrgcxsb006"))))
(arguments
(native-inputs `(("pkg-config" ,pkg-config)))
(propagated-inputs
`(("pangomm" ,pangomm)
("cairomm" ,cairomm)
("atkmm" ,atkmm)
("gtk+" ,gtk+-2)
("glibmm" ,glibmm)))))
(define-public python-pycairo
(package
(name "python-pycairo")
(version "1.10.0")
(source
(origin
(method url-fetch)
(uri (string-append "-"
version ".tar.bz2"))
(sha256
(base32
"1gjkf8x6hyx1skq3hhwcbvwifxvrf9qxis5vx8x5igmmgs70g94s"))
(patches (search-patches "pycairo-wscript.patch"))))
(build-system waf-build-system)
(native-inputs
`(("pkg-config" ,pkg-config)
("python-waf" ,python-waf)))
pycairo.pc references cairo
`(("cairo" ,cairo)))
(arguments
`(#:tests? #f
#:phases
(modify-phases %standard-phases
(add-before
'configure 'patch-waf
(lambda* (#:key inputs #:allow-other-keys)
(copy-file (assoc-ref %build-inputs "python-waf") "./waf"))))))
(home-page "/")
(synopsis "Python bindings for cairo")
(description
"Pycairo is a set of Python bindings for the Cairo graphics library.")
(license license:lgpl3+)
(properties `((python2-variant . ,(delay python2-pycairo))))))
(define-public python2-pycairo
(package (inherit (strip-python2-variant python-pycairo))
(name "python2-pycairo")
(version "1.10.0")
(source
(origin
(method url-fetch)
(uri (string-append "-"
version ".tar.bz2"))
(sha256
(base32
"0cblk919wh6w0pgb45zf48xwxykfif16qk264yga7h9fdkq3j16k"))))
(arguments
`(#:python ,python-2
,@(substitute-keyword-arguments (package-arguments python-pycairo)
((#:phases phases)
`(alist-delete 'patch-waf ,phases))
((#:native-inputs native-inputs)
`(alist-delete "python-waf" ,native-inputs)))))
Dual - licensed under LGPL 2.1 or Mozilla Public License 1.1
(license (list license:lgpl2.1 license:mpl1.1))))
(define-public python2-pygtk
(package
(name "python2-pygtk")
(version "2.24.0")
(source
(origin
(method url-fetch)
(uri (string-append "mirror"
"/pygtk/" (version-major+minor version)
"/pygtk-" version ".tar.bz2"))
(sha256
(base32
"04k942gn8vl95kwf0qskkv6npclfm31d78ljkrkgyqxxcni1w76d"))))
(build-system gnu-build-system)
(outputs '("out"
13 MiB of gtk - doc HTML
(native-inputs
`(("pkg-config" ,pkg-config)))
(inputs
`(("python" ,python-2)
("libglade" ,libglade)
("glib" ,glib)))
(propagated-inputs
("gtk+" ,gtk+-2)))
(arguments
`(#:tests? #f
#:phases (modify-phases %standard-phases
(add-before 'configure 'set-gtk-doc-directory
(lambda* (#:key outputs #:allow-other-keys)
(let ((doc (assoc-ref outputs "doc")))
(substitute* "docs/Makefile.in"
(("TARGET_DIR = \\$\\(datadir\\)")
(string-append "TARGET_DIR = " doc))))))
(add-after 'configure 'fix-codegen
(lambda* (#:key inputs #:allow-other-keys)
(substitute* "pygtk-codegen-2.0"
(("^prefix=.*$")
(string-append
"prefix="
(assoc-ref inputs "python-pygobject") "\n")))))
(add-after 'install 'install-pth
(lambda* (#:key inputs outputs #:allow-other-keys)
python 's site - packages directory . Add a .pth file so
(let* ((out (assoc-ref outputs "out"))
(site (string-append out "/lib/python"
,(version-major+minor
(package-version python-2))
"/site-packages")))
(call-with-output-file (string-append site "/pygtk.pth")
(lambda (port)
(format port "gtk-2.0~%")))))))))
(home-page "/")
(synopsis "Python bindings for GTK+")
(description
"PyGTK allows you to write full featured GTK programs in Python. It is
targetted at GTK 2.x, and can be used in conjunction with gnome-python to
write GNOME applications.")
(license license:lgpl2.1+)))
(define-public girara
(package
(name "girara")
(version "0.2.7")
(source (origin
(method url-fetch)
(uri
(string-append "-"
version ".tar.gz"))
(sha256
(base32
"1r9jbhf9n40zj4ddqv1q5spijpjm683nxg4hr5lnir4a551s7rlq"))))
(native-inputs `(("pkg-config" ,pkg-config)
("gettext" ,gettext-minimal)))
(inputs `(("gtk+" ,gtk+)
("check" ,check)))
(arguments
`(#:make-flags
`(,(string-append "PREFIX=" (assoc-ref %outputs "out"))
"COLOR=0" "CC=gcc")
#:test-target "test"
Tests fail with " can not open display : "
#:phases
(alist-delete 'configure %standard-phases)))
(build-system gnu-build-system)
(home-page "/")
(synopsis "Library for minimalistic gtk+3 user interfaces")
(description "Girara is a library that implements a user interface that
focuses on simplicity and minimalism. Currently based on GTK+, a
cross-platform widget toolkit, it provides an interface that focuses on three
main components: a so-called view widget that represents the actual
application, an input bar that is used to execute commands of the
application and the status bar which provides the user with current
information.")
(license license:zlib)))
(define-public gtk-doc
(package
(name "gtk-doc")
(version "1.25")
(source (origin
(method url-fetch)
(uri (string-append "mirror/" name "/"
(version-major+minor version) "/"
name "-" version ".tar.xz"))
(sha256
(base32
"0hpxcij9xx9ny3gs9p0iz4r8zslw8wqymbyababiyl7603a6x90y"))))
(build-system gnu-build-system)
(arguments
`(#:parallel-tests? #f
#:phases
(modify-phases %standard-phases
(add-before
'configure 'fix-docbook
(lambda* (#:key inputs #:allow-other-keys)
(substitute* "configure"
things are in place -- it uses the xmlcatalog tool to make
look in one catalog file , unlike the $ XML_CATALOG_FILES
variable that defines . Fool the test by using the
(("\"\\$XML_CATALOG_FILE\" \
\"/")
(string-append (car (find-files (assoc-ref inputs "docbook-xsl")
"^catalog.xml$"))
" \"/")))
#t)))
#:configure-flags
(list (string-append "--with-xml-catalog="
(assoc-ref %build-inputs "docbook-xml")
"/xml/dtd/docbook/catalog.xml"))))
(native-inputs
`(("pkg-config" ,pkg-config)
("itstool" ,itstool)
("libxml" ,libxml2)
("gettext" ,gettext-minimal)
("bc" ,bc)))
(inputs
`(("perl" ,perl)
("python" ,python)
("xsltproc" ,libxslt)
("dblatex" ,dblatex)
("docbook-xml" ,docbook-xml-4.3)
("docbook-xsl" ,docbook-xsl)
("source-highlight" ,source-highlight)
("glib" ,glib)))
(home-page "-doc/")
(synopsis "Documentation generator from C source code")
(description
"GTK-Doc generates API documentation from comments added to C code. It is
typically used to document the public API of GTK+ and GNOME libraries, but it
can also be used to document application code.")
(license license:gpl2+)))
(define-public gtk-engines
(package
(name "gtk-engines")
(version "2.20.2")
(source (origin
(method url-fetch)
(uri (string-append "mirror/" name "/"
(version-major+minor version) "/"
name "-" version ".tar.bz2"))
(sha256
(base32
"1db65pb0j0mijmswrvpgkdabilqd23x22d95hp5kwxvcramq1dhm"))))
(build-system gnu-build-system)
(arguments
`(#:configure-flags
`("--enable-animation")))
(native-inputs
`(("pkg-config" ,pkg-config)
("intltool" ,intltool)))
(inputs
(home-page "")
(synopsis "Theming engines for GTK+ 2.x")
(description
"This package contains the standard GTK+ 2.x theming engines including
Clearlooks, Crux, High Contrast, Industrial, LighthouseBlue, Metal, Mist,
Redmond95 and ThinIce.")
(license (list license:gpl2+ license:lgpl2.1+))))
(define-public murrine
(package
(name "murrine")
(version "0.98.2")
(source (origin
(method url-fetch)
(uri (string-append "mirror/" name "/"
(version-major+minor version) "/"
name "-" version ".tar.xz"))
(sha256
(base32
"129cs5bqw23i76h3nmc29c9mqkm9460iwc8vkl7hs4xr07h8mip9"))))
(build-system gnu-build-system)
(arguments
`(#:configure-flags
`("--enable-animation"
"--enable-animationrtl")))
(native-inputs
`(("pkg-config" ,pkg-config)
("intltool" ,intltool)))
(propagated-inputs
`(("gtk+" ,gtk+-2)))
(home-page "")
(synopsis "Cairo-based theming engine for GTK+ 2.x")
(description
"Murrine is a cairo-based GTK+ theming engine. It is named after the
glass artworks done by Venicians glass blowers.")
(license license:gpl2+)))
(define-public gtkspell3
(package
(name "gtkspell3")
(version "3.0.8")
(source (origin
(method url-fetch)
(uri (string-append "mirror/"
version "/" name "-" version ".tar.gz"))
(sha256
(base32
"1zrz5pz4ryvcssk898liynmy2wyxgj95ak7mp2jv7x62yzihq6h1"))))
(build-system gnu-build-system)
(native-inputs
`(("intltool" ,intltool)
("pkg-config" ,pkg-config)))
(inputs
`(("enchant" ,enchant)
("gobject-introspection" ,gobject-introspection)
("gtk+" ,gtk+)
("pango" ,pango)))
(home-page "")
(synopsis "Spell-checking addon for GTK's TextView widget")
(description
"GtkSpell provides word-processor-style highlighting and replacement of
misspelled words in a GtkTextView widget.")
(license license:gpl2+)))
(define-public clipit
(package
(name "clipit")
(version "1.4.2")
(source (origin
(method url-fetch)
(uri (string-append
"-"
version ".tar.gz"))
(sha256
(base32
"0jrwn8qfgb15rwspdp1p8hb1nc0ngmpvgr87d4k3lhlvqg2cfqva"))))
(build-system gnu-build-system)
(native-inputs
`(("intltool" ,intltool)
("pkg-config" ,pkg-config)))
(inputs
`(("gtk+" ,gtk+-2)))
(home-page "")
(synopsis "Lightweight GTK+ clipboard manager")
(description
"ClipIt is a clipboard manager with features such as a history, search
thereof, global hotkeys and clipboard item actions. It was forked from
Parcellite and adds bugfixes and features.")
(license license:gpl2+)))
|
e4f645810565c8c5bb72d912f81f5c200e2103b12e3e82bd2ec3e83e4cef4145 | mokus0/polynomial | Newton.hs | module Bench.Newton where
import Criterion
newtonTests :: [Benchmark]
newtonTests = []
| null | https://raw.githubusercontent.com/mokus0/polynomial/42fde2c9a72f449eeda4d534acfe64b8e92b5c27/bench/src/Bench/Newton.hs | haskell | module Bench.Newton where
import Criterion
newtonTests :: [Benchmark]
newtonTests = []
|
|
1a79e5f6ef8e3edab333228d6d7ec831711e6db8a0abdaf47ad930f13d37ce1a | mbutterick/quad | math.rkt | #lang quad/unicode/unicode-class-prep
math?
# downloaded from
# -15/MathClassEx-15.txt
# and used under license
# File: MathClassEx.txt
# Revision: 15
# Date: 2017-06-01, 12:35:00 GMT
#
# © 2017 Unicode®, Inc.
# Unicode and the Unicode Logo are registered trademarks of Unicode, Inc. in the U.S. and other countries.
# For terms of use, see
# For documentation, see /
#
# ------------------------------------------------
# This file is a classification of characters based on their usage in
# mathematical notation and providing a mapping to standard entity
# sets commonly used for SGML and MathML documents.
#
# While the contents of this file represent the best information
# available to the authors and the Unicode Technical Committee as
# of the date referenced above, it is likely that the information
# in this file will change from time to time. Most importantly,
# the mapping of characters to the ISO standard SGML entity sets
# is under review by the relevant ISO committee and may therefore
# change.
#
# This file is *NOT* formally part of the Unicode Character Database
# at this time.
#
# The data consists of 7 fields. The number and type of fields may change
# in future versions of this file.
#
# The character encoding of this plain-text file is UTF-8.
#
# 1: code point or range
#
# 2: class, one of:
#
# N - Normal - includes all digits and symbols requiring only one form
# A - Alphabetic
# B - Binary
# C - Closing - usually paired with opening delimiter
# D - Diacritic
# F - Fence - unpaired delimiter (often used as opening or closing)
# G - Glyph_Part - piece of large operator
# L - Large - n-ary or large operator, often takes limits
# O - Opening - usually paired with closing delimiter
# P - Punctuation
# R - Relation - includes arrows
# S - Space
# U - Unary - operators that are only unary
# V - Vary - operators that can be unary or binary depending on context
# X - Special - characters not covered by other classes
#
# The C, O, and F operators are stretchy. In addition, some binary operators such
# as U+002F are stretchy as noted in the descriptive comments. The classes are
# also useful in determining extra spacing around the operators as discussed
# in UTR #25.
#
# 3: Unicode character (UTF-8)
#
# 4: ISO entity name
#
# 5: ISO entity set
#
# 6: descriptive comments (of various types)
# The descriptive comments provide more information about a character,
# or its specific appearance. Some descriptions contain common macro
# names (with slash) but in the majority of cases, the description is
# simply the description of the entity in the published entity set, if
# different from the formal Unicode character name. Minor differences
# in word order, punctuation and verb forms have been ignored, but not
# systematic differences in terminology, such as filled vs. black.
# In principle this allows location of entities by their description.
#
# 7: Unicode character name or names
# Character names are provided for ease of reference only.
#
# Fields are delimited by ';'. Spaces adjacent to the delimiter or the '#' are
# not significant. Future versions of this file may use different amounts of
# whitespace.
#
# Some character positions in the Mathematical Alphanumeric Symbols block are
# reserved and have been mapped to the Letterlike Symbols block in Unicode.
# This is indicated in 24 special purpose comments.
#
# The character repertoire of this revision is the repertoire of Unicode
# Version 9.0. For more information see Revision 15 or later of UTR #25.
# ------------------------------------------------
#code point;class;char;entity name;entity set;note/description;CHARACTER NAME
0020;S; ;;;;SPACE
0021;N;!;excl;ISONUM;Factorial spacing;EXCLAMATION MARK
0021;N;!;fact;;;EXCLAMATION MARK
0023;N;#;num;ISONUM;;NUMBER SIGN
0024;N;$;dollar;ISONUM;;DOLLAR SIGN
0025;N;%;percnt;ISONUM;;PERCENT SIGN
0026;N;&;amp;ISONUM;;AMPERSAND
C;);rpar;ISONUM;;RIGHT PARENTHESIS
N;*;ast;ISONUM;[high , not /ast];ASTERISK
V;+;plus;;;PLUS SIGN
002C;P;,;comma;ISONUM;;COMMA
002D;N;-;;;(deprecated for math) ;HYPHEN-MINUS
002E;P;.;period;ISONUM;period;FULL STOP
B;/;sol;ISONUM;No extra spacing , stretchy;SOLIDUS
0030..0039;N;0..9;;;;DIGIT ZERO..DIGIT NINE
P;:;colon;ISONUM;;COLON
003B;P;;;semi;ISONUM;;SEMICOLON
SIGN
R;=;equals;ISONUM;;EQUALS SIGN
003E;R;>;gt;ISONUM;;GREATER-THAN SIGN
0040;N;@;commat;ISONUM;;COMMERCIAL AT
A;A .. Z;;;;LATIN CAPITAL LETTER A .. LATIN CAPITAL LETTER Z
BRACKET
B;\;bsol;ISONUM;No extra spacing , stretchy;REVERSE SOLIDUS
005D;C;];rsqb;ISONUM;;RIGHT SQUARE BRACKET
N;^;;;TeX superscript operator;CIRCUMFLEX ACCENT
005F;N;_;;;TeX subscript operator;LOW LINE
for 0300;GRAVE ACCENT
0061..007A;A;a..z;;;;LATIN SMALL LETTER A..LATIN SMALL LETTER Z
O;{;lcub;ISONUM;;LEFT CURLY BRACKET
F;|;verbar;ISONUM;;VERTICAL LINE
C;};rcub;ISONUM;;RIGHT BRACKET
007E;N;~;;;;TILDE
S ; ; nbsp;;;NO - BREAK SPACE
P;¡;iexcl;ISONUM;;INVERTED EXCLAMATION MARK
00A2;N;¢;cent;ISONUM;;CENT SIGN
00A3;N;£;pound;ISONUM;;POUND SIGN
00A4;N;¤;curren;ISONUM;;CURRENCY SIGN
N;¥;yen;ISONUM;;YEN SIGN
00A6;N;¦;brvbar;ISONUM; (vertical) ;BROKEN BAR
N;§;sect;ISONUM;;SECTION SIGN
D;¨;Dot;;/die , for 0308;DIAERESIS
U;¬;not;ISONUM ; /neg /lnot ; NOT SIGN
D;¯;macr;;Alias for 0304 ;
00B0;N;°;deg;ISONUM;;DEGREE SIGN
00B1;V;±;plusmn;ISONUM;;PLUS-MINUS SIGN
00B2;N;²;sup2;ISONUM;;SUPERSCRIPT TWO
N;³;sup3;ISONUM;;SUPERSCRIPT THREE
N;´;acute;;Alias for 0301;ACUTE ACCENT
00B5;N;µ;micro;ISONUM;;MICRO SIGN
N;¶;para;ISONUM ; ( paragraph sign ) ; PILCROW SIGN
B;·;middot;ISONUM ; /centerdot ; MIDDLE DOT
00B9;N;¹;sup1;ISONUM;;SUPERSCRIPT ONE
00BC;N;¼;frac14;ISONUM;;VULGAR FRACTION ONE QUARTER
00BD;N;½;frac12;ISONUM;;VULGAR FRACTION ONE HALF
00BE;N;¾;frac34;ISONUM;;VULGAR FRACTION THREE QUARTERS
00BF;P;¿;iquest;ISONUM;;INVERTED QUESTION MARK
00D7;B;×;times;ISONUM;;MULTIPLICATION SIGN
00F7;B;÷;divide;ISONUM;;DIVISION SIGN
0131;A;ı;imath;;;LATIN SMALL LETTER DOTLESS I
0237;A;ȷ;jmath;;;LATIN SMALL LETTER DOTLESS J
for 0302;MODIFIER LETTER CIRCUMFLEX ACCENT
D;ˇ;caron;;Alias for
for 0306;BREVE
02D9;D;˙;dot;;Alias for 0307;DOT ABOVE
02DA;D;˚;ring;;Alias for 030A;RING ABOVE
D;˜;tilde;;Alias for 0303;SMALL TILDE
prefers 0060;COMBINING GRAVE ACCENT
D;́;;ISODIA;MathML prefers 00B4;COMBINING ACUTE ACCENT
0302;D;̂;;ISODIA;MathML prefers 02C6;COMBINING CIRCUMFLEX ACCENT
D;̃;;ISODIA;MathML prefers 02DC;COMBINING TILDE
D;̄;;ISODIA;MathML prefers 00AF;COMBINING MACRON
D;̅;;;;COMBINING OVERLINE
0306;D;̆;;ISODIA;MathML prefers 02D8;COMBINING BREVE
D;̇;;ISODIA;MathML prefers 02D9;COMBINING DOT ABOVE
D;̈;;ISODIA;MathML prefers 00A8;COMBINING DIAERESIS
prefers 02DA;COMBINING RING ABOVE
D;̌;;ISODIA;MathML prefers 02C7;COMBINING CARON
D;̑;;;;COMBINING INVERTED BREVE
0323;D;̣;;;;COMBINING DOT BELOW
BELOW
032D;D;̭;;;;COMBINING CIRCUMFLEX ACCENT BELOW
032E;D;̮;;;;COMBINING BREVE BELOW
BELOW
0330;D;̰;;;;COMBINING TILDE BELOW
0331;D;̱;;;;COMBINING MACRON BELOW
0332;D;̲;;;;COMBINING LOW LINE
0333;D;̳;2lowbar;;;COMBINING DOUBLE LOW LINE
; ; ; negation slash ; COMBINING LONG SOLIDUS OVERLAY
033A;D;̺;;;;COMBINING INVERTED BRIDGE BELOW
D;̿;;;;COMBINING DOUBLE OVERLINE
0346;D;͆;;;;COMBINING BRIDGE ABOVE
0391;A;Α;Agr;ISOGRK1;;GREEK CAPITAL LETTER ALPHA
0392;A;Β;Bgr;ISOGRK1;;GREEK CAPITAL LETTER BETA
0393;A;Γ;Gamma;ISOGRK3;;GREEK CAPITAL LETTER GAMMA
0394;A;Δ;Delta;ISOGRK3;;GREEK CAPITAL LETTER DELTA
0395;A;Ε;Egr;ISOGRK1;;GREEK CAPITAL LETTER EPSILON
0396;A;Ζ;Zgr;ISOGRK1;;GREEK CAPITAL LETTER ZETA
A;Η;EEgr;ISOGRK1;;GREEK CAPITAL LETTER ETA
A;Θ;Theta;ISOGRK3;;GREEK CAPITAL LETTER THETA
0399;A;Ι;Igr;ISOGRK1;;GREEK CAPITAL LETTER IOTA
039A;A;Κ;Kgr;ISOGRK1;;GREEK CAPITAL LETTER KAPPA
039B;A;Λ;Lambda;ISOGRK3;;GREEK CAPITAL LETTER LAMDA
039C;A;Μ;Mgr;ISOGRK1;;GREEK CAPITAL LETTER MU
039D;A;Ν;Ngr;ISOGRK1;;GREEK CAPITAL LETTER NU
039E;A;Ξ;Xi;ISOGRK3;;GREEK CAPITAL LETTER XI
039F;A;Ο;Ogr;ISOGRK1;;GREEK CAPITAL LETTER OMICRON
03A0;A;Π;Pi;ISOGRK3;;GREEK CAPITAL LETTER PI
03A1;A;Ρ;Rgr;ISOGRK1;;GREEK CAPITAL LETTER RHO
A;Σ;Sigma;ISOGRK3;;GREEK CAPITAL LETTER SIGMA
03A4;A;Τ;Tgr;ISOGRK1;;GREEK CAPITAL LETTER TAU
A;Φ;Phi;ISOGRK3;;GREEK CAPITAL LETTER PHI
A;Χ;KHgr;ISOGRK1;;GREEK CAPITAL LETTER CHI
03A8;A;Ψ;Psi;ISOGRK3;;GREEK CAPITAL LETTER PSI
03A9;A;Ω;Omega;ISOGRK3;;GREEK CAPITAL LETTER OMEGA
03B1;A;α;alpha;ISOGRK3;;GREEK SMALL LETTER ALPHA
03B2;A;β;beta;ISOGRK3;;GREEK SMALL LETTER BETA
03B3;A;γ;gamma;ISOGRK3;;GREEK SMALL LETTER GAMMA
03B4;A;δ;delta;ISOGRK3;;GREEK SMALL LETTER DELTA
03B5;A;ε;epsiv;ISOGRK3; rounded;GREEK SMALL LETTER EPSILON
03B6;A;ζ;zeta;ISOGRK3;;GREEK SMALL LETTER ZETA
A;η;eta;ISOGRK3;;GREEK SMALL LETTER ETA
03B8;A;θ;theta;ISOGRK3; straight theta ;GREEK SMALL LETTER THETA
A;ι;iota;ISOGRK3;;GREEK SMALL LETTER IOTA
03BA;A;κ;kappa;ISOGRK3;;GREEK SMALL LETTER KAPPA
03BB;A;λ;lambda;ISOGRK3;;GREEK SMALL LETTER LAMDA
A;μ;mu;ISOGRK3;;GREEK SMALL LETTER MU
03BD;A;ν;nu;ISOGRK3;;GREEK SMALL LETTER NU
03BE;A;ξ;xi;ISOGRK3;;GREEK SMALL LETTER XI
A;ο;ogr;ISOGRK1;;GREEK SMALL LETTER OMICRON
03C0;A;π;pi;ISOGRK3;;GREEK SMALL LETTER PI
03C1;A;ρ;rho;ISOGRK3;;GREEK SMALL LETTER RHO
03C3;A;σ;sigma;ISOGRK3;;GREEK SMALL LETTER SIGMA
03C4;A;τ;tau;ISOGRK3;;GREEK SMALL LETTER TAU
03C5;A;υ;upsi;ISOGRK3;;GREEK SMALL LETTER UPSILON
A;φ;phiv;ISOGRK3 ; \varphi ; GREEK SMALL LETTER PHI
A;χ;chi;ISOGRK3;;GREEK SMALL LETTER CHI
03C8;A;ψ;psi;ISOGRK3;;GREEK SMALL LETTER PSI
03C9;A;ω;omega;ISOGRK3;;GREEK SMALL LETTER OMEGA
03D0;A;ϐ;;;;GREEK BETA SYMBOL
A;ϑ;thetav;ISOGRK3 ; \vartheta - curly or open small theta ; GREEK THETA SYMBOL
03D2;A;ϒ;Upsi;ISOGRK3;;GREEK UPSILON WITH HOOK SYMBOL
03D5;A;ϕ;phi;ISOGRK3;;GREEK PHI SYMBOL
A;ϖ;piv;ISOGRK3 ; rounded ( pomega ) ; GREEK PI SYMBOL
N;Ϙ;;;;GREEK LETTER ARCHAIC KOPPA
03D9;N;ϙ;;;;GREEK SMALL LETTER ARCHAIC KOPPA
03DA;A;Ϛ;;; capital;GREEK LETTER STIGMA
03DB;A;ϛ;stigma;;;GREEK SMALL LETTER STIGMA
A;Ϝ;Gammad;ISOGRK3 ; capital;GREEK LETTER DIGAMMA
03DD;A;ϝ;gammad;ISOGRK3; old;GREEK SMALL LETTER DIGAMMA
A;Ϡ ; ; ; capital;GREEK LETTER
A;ϡ;sampi;;;GREEK SMALL LETTER
03F0;A;ϰ;kappav;ISOGRK3; rounded;GREEK KAPPA SYMBOL
A;ϱ;rhov;ISOGRK3 ; rounded;GREEK RHO SYMBOL
03F4;A;ϴ;Thetav;;;GREEK CAPITAL THETA SYMBOL
A;ϵ;epsi;ISOGRK3;straight;GREEK LUNATE EPSILON SYMBOL
N;϶;bepsi;ISOAMSR;;GREEK REVERSED SYMBOL
A;Ш;Shcy;ISOCYR1;;CYRILLIC CAPITAL LETTER SHA
CUBE ROOT
ROOT
0608;A;؈;;;;ARABIC RAY
S ; ; enquad;;;EN QUAD
S ; ; emquad;;;EM QUAD
S ; ; ensp;ISOPUB ; ( half an em ) ; EN SPACE
2003;S; ;emsp;ISOPUB;;EM SPACE
S ; ; ; ; ; THREE - PER - EM SPACE
S ; ; ; ; mid space;FOUR - PER - EM SPACE
2006;S; ;;;;SIX-PER-EM SPACE
2007;S; ;;;;FIGURE SPACE
S ; ; ; ; ; THIN SPACE
200A;S; ;;;;HAIR SPACE
S;;zwsp;;;ZERO WIDTH SPACE
2010;P;‐;hyphen;ISOPUB; (true graphic) ;HYPHEN
P;‒;dash;ISOPUB;;FIGURE DASH
2013;P;–;ndash;ISOPUB;;EN DASH
P;—;mdash;ISOPUB;;EM DASH
VERTICAL LINE
2020;R;†;dagger;ISOAMSB;(N in ISOPUB);DAGGER
2021;R;‡;Dagger;ISOAMSB;(N in ISOPUB);DOUBLE DAGGER
; /bullet round bullet , filled ; BULLET
N; … ;hellip;ISOPUB;;HORIZONTAL ELLIPSIS
2032;N;′;prime;ISOTECH; minute ;PRIME
N;″;Prime;ISOTECH ; second ; DOUBLE PRIME
N;‴;tprime;ISOTECH;;TRIPLE PRIME
N;‵;bprime;ISOAMSO;;REVERSED PRIME
N;‶;bPrime;;;REVERSED DOUBLE PRIME
N;‷;btprime;;;REVERSED TRIPLE PRIME
N;※;;;Japanese
203C;N;‼;;;Factorial spacing;DOUBLE EXCLAMATION MARK
B;⁀;;;z notation sequence concatenation ; CHARACTER TIE
2044;B;⁄;;;stretchy;FRACTION SLASH
204E;B;⁎;lowast;ISOTECH;;LOW ASTERISK
2050;R;⁐;closur;;;CLOSE UP
VERTICALLY
N;⁒;;;;COMMERCIAL MINUS SIGN
2057;N;⁗;qprime;ISOTECH;;QUADRUPLE PRIME
S ; ; ; ; ; MEDIUM MATHEMATICAL SPACE
2061;B;;;;;FUNCTION APPLICATION
2062;B;;;;;INVISIBLE TIMES
2063;P;;;;;INVISIBLE SEPARATOR
2064;X;;;;;INVISIBLE PLUS
207A..207E;N;⁺..⁾;;; subscript operators;SUPERSCRIPT PLUS SIGN..SUPERSCRIPT RIGHT PARENTHESIS
.. ₎ ; ; ; superscript operators;SUBSCRIPT PLUS SIGN .. SUBSCRIPT RIGHT PARENTHESIS
20D0;D;⃐;;;;COMBINING LEFT HARPOON ABOVE
20D1;D;⃑;;;;COMBINING RIGHT HARPOON ABOVE
20D2;D;⃒;;;;COMBINING LONG VERTICAL LINE OVERLAY
20D3;X;⃓;;;;COMBINING SHORT VERTICAL LINE OVERLAY
D;⃔;;;;COMBINING ANTICLOCKWISE ARROW ABOVE
ARROW ABOVE
20D6;D;⃖;;;;COMBINING LEFT ARROW ABOVE
20D7;D;⃗;;;;COMBINING RIGHT ARROW ABOVE
20D8;D;⃘;;;;COMBINING RING OVERLAY
20D9;D;⃙;;;;COMBINING CLOCKWISE RING OVERLAY
20DA;D;⃚;;;;COMBINING ANTICLOCKWISE RING OVERLAY
20DB;D;⃛;tdot;ISOTECH;;COMBINING THREE DOTS ABOVE
20DC;D;⃜;DotDot;ISOTECH;;COMBINING FOUR DOTS ABOVE
D;⃝;;;;COMBINING ENCLOSING CIRCLE
20DE;D;⃞;;;;COMBINING ENCLOSING SQUARE
D;⃟;;;;COMBINING ENCLOSING DIAMOND
20E1;D;⃡;;;;COMBINING LEFT RIGHT ARROW ABOVE
20E4;D;⃤;;;;COMBINING ENCLOSING UPWARD POINTING TRIANGLE
20E5;D;⃥;;;;COMBINING REVERSE SOLIDUS OVERLAY
20E6;D;⃦;;;;COMBINING DOUBLE VERTICAL STROKE OVERLAY
D;⃧ ; ; ; actuary;COMBINING ANNUITY SYMBOL
D;⃨;;;;COMBINING TRIPLE UNDERDOT
20E9;D;⃩;;;;COMBINING WIDE BRIDGE ABOVE
D;⃪;;;;COMBINING LEFTWARDS ARROW OVERLAY
D;⃫;;;;COMBINING LONG DOUBLE SOLIDUS OVERLAY
D;⃬;;;;COMBINING RIGHTWARDS HARPOON WITH BARB DOWNWARDS
D;⃭;;;;COMBINING LEFTWARDS HARPOON WITH BARB DOWNWARDS
20EE;D;⃮;;;;COMBINING LEFT ARROW BELOW
20EF;D;⃯;;;;COMBINING RIGHT ARROW BELOW
2102;A;ℂ;Copf;ISOMOPF; /Bbb C, open face C;DOUBLE-STRUCK CAPITAL C
2107;N;ℇ;;;;EULER CONSTANT
A;ℊ;gscr;ISOMSCR ; ; SCRIPT SMALL G
210B;A;ℋ;Hscr;ISOMSCR; /scr H ;SCRIPT CAPITAL H
210C;A;ℌ;Hfr;ISOMFRK; /frak H;BLACK-LETTER CAPITAL H
A;ℍ;Hopf;ISOMOPF ; /Bbb H ; DOUBLE - STRUCK CAPITAL H
210E;N;ℎ;;;;PLANCK CONSTANT
210F;N;ℏ;plankv;ISOAMSO; /hslash - variant;PLANCK CONSTANT OVER TWO PI
A;ℐ;Iscr;ISOMSCR ; /scr I ; SCRIPT CAPITAL I
2111;A;ℑ;image;ISOAMSO; imaginary part ;BLACK-LETTER CAPITAL I
2112;A;ℒ;lagran;ISOTECH; Lagrangian ;SCRIPT CAPITAL L
2113;A;ℓ;ell;ISOAMSO; cursive small l;SCRIPT SMALL L
A;ℕ;Nopf;ISOMOPF ; /Bbb N , open face N;DOUBLE - STRUCK CAPITAL N
A;℘;weierp;ISOAMSO ; ( Unicode name is a misnomer ) ; SCRIPT CAPITAL P
2119;A;ℙ;Popf;ISOMOPF; /Bbb P, open face P;DOUBLE-STRUCK CAPITAL P
211A;A;ℚ;Qopf;ISOMOPF; /Bbb Q, open face Q;DOUBLE-STRUCK CAPITAL Q
211B;A;ℛ;Rscr;ISOMSCR; /scr R ;SCRIPT CAPITAL R
211C;A;ℜ;real;ISOAMSO;;BLACK-LETTER CAPITAL R
A;ℝ;Ropf;ISOMOPF ; /Bbb R , open face R;DOUBLE - STRUCK CAPITAL R
2124;A;ℤ;Zopf;ISOMOPF; /Bbb Z, open face Z;DOUBLE-STRUCK CAPITAL Z
N;Ω;ohm;ISONUM ; ( deprecated in math , use greek letter ) ; OHM SIGN
N; ℧ ;mho;ISOAMSO ; conductance;INVERTED OHM SIGN
2128;A;ℨ;Zfr;ISOMFRK; /frak Z;BLACK-LETTER CAPITAL Z
N; ℩ ;iiota;ISOAMSO ; inverted iota;TURNED GREEK SMALL LETTER IOTA
A;Å;angst;ISOTECH ; capital A , ring ( deprecated in math ) ; ANGSTROM SIGN
A;ℬ;bernou;ISOTECH ; function ; SCRIPT CAPITAL B
212D;A;ℭ;Cfr;ISOMFRK;;BLACK-LETTER CAPITAL C
A;ℯ;escr;ISOMSCR ; /scr e ; SCRIPT SMALL E
2130;A;ℰ;Escr;ISOMSCR; /scr E ;SCRIPT CAPITAL E
2131;A;ℱ;Fscr;ISOMSCR; /scr F ;SCRIPT CAPITAL F
2133;A;ℳ;phmmat;ISOTECH; physics M-matrix ;SCRIPT CAPITAL M
2134;A;ℴ;order;ISOTECH; order of ;SCRIPT SMALL O
A;ℵ;aleph;ISOTECH ; aleph , Hebrew;ALEF SYMBOL
A;ℶ;beth;ISOAMSO ; , Hebrew ; BET SYMBOL
A;ℷ;gimel;ISOAMSO ; gimel , Hebrew;GIMEL SYMBOL
A;ℸ;daleth;ISOAMSO ; , Hebrew ; DALET SYMBOL
SMALL PI
213D;A;ℽ;opfgamma;;;DOUBLE-STRUCK SMALL GAMMA
213E;N;ℾ;opfGam;;;DOUBLE-STRUCK CAPITAL GAMMA
213F;A;ℿ;opfPi;;;DOUBLE-STRUCK CAPITAL PI
2140;L;⅀;opfsum;;;DOUBLE-STRUCK N-ARY SUMMATION
CAPITAL G
CAPITAL L
N;⅃;;;;REVERSED SANS - SERIF CAPITAL L
CAPITAL Y
N;ⅅ;;;;DOUBLE - STRUCK ITALIC CAPITAL D
N;ⅆ;;;;DOUBLE - STRUCK ITALIC SMALL D
N;ⅇ;;;;DOUBLE - STRUCK ITALIC SMALL E
2148;N;ⅈ;;;;DOUBLE-STRUCK ITALIC SMALL I
2149;N;ⅉ;;;;DOUBLE-STRUCK ITALIC SMALL J
N;⅋;turnamp;;;TURNED AMPERSAND
; /leftarrow /gets ; LEFTWARDS ARROW
2191;R;↑;uarr;ISONUM;;UPWARDS ARROW
R;→;rarr;ISONUM ; /rightarrow /to;RIGHTWARDS ARROW
2193;R;↓;darr;ISONUM;;DOWNWARDS ARROW
2194;R;↔;harr;ISOAMSA; left and right arrow ;LEFT RIGHT ARROW
2195;R;↕;varr;ISOAMSA; up and down arrow;UP DOWN ARROW
R; ↖ ;nwarr;ISOAMSA ; NW pointing arrow;NORTH WEST ARROW
R; ; NE pointing arrow;NORTH EAST ARROW
R; ↘ ;searr;ISOAMSA ; SE pointing arrow;SOUTH EAST ARROW
R; ↙ ;swarr;ISOAMSA ; SW pointing arrow;SOUTH WEST ARROW
R;↚;nlarr;ISOAMSA ; not left arrow ; LEFTWARDS ARROW WITH STROKE
219B;R;↛;nrarr;ISOAMSA; not right arrow;RIGHTWARDS ARROW WITH STROKE
R; ↜ ;larrw ; ; left arrow - wavy;LEFTWARDS WAVE ARROW
R; ↝ ;rarrw;ISOAMSA ; right arrow - wavy ; RIGHTWARDS WAVE ARROW
R; ↞ ;Larr;ISOAMSA;;LEFTWARDS TWO HEADED ARROW
R; ↟ ;Uarr;ISOAMSA;;UPWARDS TWO HEADED ARROW
R;↠;Rarr;ISOAMSA;;RIGHTWARDS TWO HEADED ARROW
R; ↡ ;Darr;ISOAMSA;;DOWNWARDS TWO HEADED ARROW
R; ↢ ;larrtl;ISOAMSA ; left arrow - tailed;LEFTWARDS ARROW WITH TAIL
R;↣;rarrtl;ISOAMSA ; right arrow - tailed ; RIGHTWARDS ARROW WITH TAIL
R; ↤ ;mapstoleft ; ; maps to , leftward;LEFTWARDS ARROW FROM BAR
R; ↥ ;mapstoup ; ; maps to , upward;UPWARDS ARROW FROM BAR
R;↦;map;ISOAMSA ; maps to , rightward ; RIGHTWARDS ARROW FROM BAR
R; ↧ ;mapstodown ; ; maps to , downward;DOWNWARDS ARROW FROM BAR
R; ↨ ;varrb ; ; up and down arrow , bar under ; UP DOWN ARROW WITH BASE
21A9;R;↩;larrhk;ISOAMSA;;LEFTWARDS ARROW WITH HOOK
R; ↪ ;rarrhk;ISOAMSA;;RIGHTWARDS ARROW WITH HOOK
21AB;R;↫;larrlp;ISOAMSA;;LEFTWARDS ARROW WITH LOOP
R; ↬ ;rarrlp;ISOAMSA;;RIGHTWARDS ARROW WITH LOOP
R; ↭ ;harrw;ISOAMSA ; left and right arrow - wavy;LEFT RIGHT WAVE ARROW
21AE;R;↮;nharr;ISOAMSA; not left and right arrow ;LEFT RIGHT ARROW WITH STROKE
R; ↯ ;zigdarr;;;DOWNWARDS ARROW
R; ↰ ; /Lsh ; UPWARDS ARROW WITH TIP LEFTWARDS
21B1;R;↱;rsh;ISOAMSA; /Rsh ;UPWARDS ARROW WITH TIP RIGHTWARDS
R; ↲ ;ldsh;ISOAMSA ; left down angled arrow ; DOWNWARDS ARROW WITH TIP LEFTWARDS
21B3;R;↳;rdsh;ISOAMSA; right down angled arrow;DOWNWARDS ARROW WITH TIP RIGHTWARDS
R; ; left curved arrow;ANTICLOCKWISE TOP SEMICIRCLE ARROW
R; ↷ ;curarr;ISOAMSA ; right curved arrow ; CLOCKWISE TOP SEMICIRCLE ARROW
21BA;R;↺;;;;ANTICLOCKWISE OPEN CIRCLE ARROW
R; ↻ ;;;;CLOCKWISE OPEN CIRCLE ARROW
21BC;R;↼;lharu;ISOAMSA; left harpoon-up;LEFTWARDS HARPOON WITH BARB UPWARDS
R; ↽ ;lhard;ISOAMSA ; left harpoon - down;LEFTWARDS HARPOON WITH BARB DOWNWARDS
R; ↾ ;uharr;ISOAMSA ; /upharpoonright /restriction up harpoon - right ; UPWARDS HARPOON WITH BARB RIGHTWARDS
21BF;R;↿;uharl;ISOAMSA; up harpoon-left;UPWARDS HARPOON WITH BARB LEFTWARDS
R; ⇀ ;rharu;ISOAMSA ; right harpoon - up ; RIGHTWARDS HARPOON WITH BARB UPWARDS
R; ⇁ ;rhard;ISOAMSA ; right harpoon - down ; RIGHTWARDS HARPOON WITH BARB DOWNWARDS
R; ⇂ ;dharr;ISOAMSA ; down harpoon - right ; DOWNWARDS HARPOON WITH BARB RIGHTWARDS
21C3;R;⇃;dharl;ISOAMSA; down harpoon-left;DOWNWARDS HARPOON WITH BARB LEFTWARDS
21C4;R;⇄;rlarr;ISOAMSA; right arrow over left arrow;RIGHTWARDS ARROW OVER LEFTWARDS ARROW
R; ⇅ ;udarr;ISOAMSA ; up arrow , down arrow ; UPWARDS ARROW LEFTWARDS OF DOWNWARDS ARROW
R; ⇆ ;lrarr;ISOAMSA ; left arrow over right arrow;LEFTWARDS ARROW OVER RIGHTWARDS ARROW
R; ⇇ ;llarr;ISOAMSA ; two left arrows;LEFTWARDS PAIRED ARROWS
R; ⇈ ;uuarr;ISOAMSA ; two up arrows;UPWARDS PAIRED ARROWS
R; ⇉ ;rrarr;ISOAMSA ; two right arrows ; RIGHTWARDS PAIRED ARROWS
R; ⇊ ;ddarr;ISOAMSA ; two down arrows;DOWNWARDS PAIRED ARROWS
21CB;R;⇋;lrhar;ISOAMSA; left harpoon over right;LEFTWARDS HARPOON OVER RIGHTWARDS HARPOON
21CC;R;⇌;rlhar;ISOAMSA; right harpoon over left;RIGHTWARDS HARPOON OVER LEFTWARDS HARPOON
R; ⇍ ;nlArr;ISOAMSA ; not implied by ; LEFTWARDS DOUBLE ARROW WITH STROKE
21CE;R;⇎;nhArr;ISOAMSA; not left and right double arrows ;LEFT RIGHT DOUBLE ARROW WITH STROKE
R;⇏;nrArr;ISOAMSA ; not implies;RIGHTWARDS DOUBLE ARROW WITH STROKE
R; ⇐ ;lArr;ISOTECH ; is implied by;LEFTWARDS DOUBLE ARROW
21D1;R;⇑;uArr;ISOAMSA; up double arrow;UPWARDS DOUBLE ARROW
R;⇒;rArr;ISOTECH ; implies;RIGHTWARDS DOUBLE ARROW
R; ⇓ ;dArr;ISOAMSA ; down double arrow;DOWNWARDS DOUBLE ARROW
21D4;R;⇔;hArr;ISOAMSA; left and right double arrow;LEFT RIGHT DOUBLE ARROW
21D5;R;⇕;vArr;ISOAMSA; up and down double arrow ;UP DOWN DOUBLE ARROW
R; ⇖ ;nwArr;ISOAMSA ; NW pointing double arrow ; NORTH WEST DOUBLE ARROW
R; ⇗ ;neArr;ISOAMSA ; NE pointing double arrow ; NORTH EAST DOUBLE ARROW
R; ⇘ ;seArr;ISOAMSA ; SE pointing double arrow ; SOUTH EAST DOUBLE ARROW
R; ⇙ ;swArr;ISOAMSA ; SW pointing double arrow ; SOUTH WEST DOUBLE ARROW
R; ;lAarr;ISOAMSA ; left triple arrow;LEFTWARDS TRIPLE ARROW
R; ⇛ ;rAarr;ISOAMSA ; right triple arrow ; RIGHTWARDS TRIPLE ARROW
R; ⇜ ;ziglarr ; ; left zig - zag arrow ; LEFTWARDS SQUIGGLE ARROW
21DD;R;⇝;zigrarr;ISOAMSA; right zig-zag arrow;RIGHTWARDS SQUIGGLE ARROW
21DE;R;⇞;;;;UPWARDS ARROW WITH DOUBLE STROKE
21DF;R;⇟;;;;DOWNWARDS ARROW WITH DOUBLE STROKE
21E0;R;⇠;;;;LEFTWARDS DASHED ARROW
21E1;R;⇡;;;;UPWARDS DASHED ARROW
R; ⇢ ;;;;RIGHTWARDS DASHED ARROW
21E3;R;⇣;;;;DOWNWARDS DASHED ARROW
R; ⇤ ;larrb;;;LEFTWARDS ARROW TO BAR
21E5;R;⇥;rarrb;;;RIGHTWARDS ARROW TO BAR
21E6;R;⇦;;;;LEFTWARDS WHITE ARROW
21E7;R;⇧;;;;UPWARDS WHITE ARROW
R; ⇨ ;;;;RIGHTWARDS WHITE ARROW
R; ⇩ ;;;;DOWNWARDS WHITE ARROW
R; .. ARROW FROM BAR .. SOUTH EAST ARROW TO CORNER
R; ⇳ ;;;;UP DOWN WHITE ARROW
R;⇴;;;;RIGHT ARROW WITH SMALL
R;⇵;duarr;ISOAMSA;;DOWNWARDS ARROW LEFTWARDS OF UPWARDS ARROW
21F6;R;⇶;rarr3;;;THREE RIGHTWARDS ARROWS
21F7;R;⇷;nvlarr;;;LEFTWARDS ARROW WITH VERTICAL STROKE
21F8;R;⇸;nvrarr;;;RIGHTWARDS ARROW WITH VERTICAL STROKE
21F9;R;⇹;nvharr;;;LEFT RIGHT ARROW WITH VERTICAL STROKE
21FA;R;⇺;;;;LEFTWARDS ARROW WITH DOUBLE VERTICAL STROKE
21FB;R;⇻;;;;RIGHTWARDS ARROW WITH DOUBLE VERTICAL STROKE
21FC;R;⇼;;;;LEFT RIGHT ARROW WITH DOUBLE VERTICAL STROKE
21FD;R;⇽;loarr;ISOAMSA;;LEFTWARDS OPEN-HEADED ARROW
21FE;R;⇾;roarr;ISOAMSA;;RIGHTWARDS OPEN-HEADED ARROW
21FF;R;⇿;hoarr;ISOAMSA;;LEFT RIGHT OPEN-HEADED ARROW
2200;U;∀;forall;ISOTECH;;FOR ALL
U;∁;comp;ISOAMSO;;COMPLEMENT
DIFFERENTIAL
U;∃;exist;ISOTECH ; at least one exists;THERE EXISTS
2204;U;∄;nexist;ISOAMSO; negated exists ;THERE DOES NOT EXIST
N;∅;emptyv;ISOAMSO ; circle , slash;EMPTY SET
U;∆ ; ; ; Laplacian ( Delta , nabla^2 ) ; INCREMENT
U;∇;nabla;ISOTECH ; nabla , del , operator;NABLA
2208;R;∈;isin;ISOTECH; set membership, variant;ELEMENT OF
2209;R;∉;notin;ISOTECH; negated set membership ;NOT AN ELEMENT OF
220A;R;∊;isinv;ISOTECH; set membership ;SMALL ELEMENT OF
220B;R;∋;ni;ISOTECH; contains, variant;CONTAINS AS MEMBER
R;∌;notni;ISOTECH ; negated contains , variant;DOES NOT CONTAIN AS MEMBER
; /ni /owns contains ; SMALL CONTAINS AS MEMBER
N;∎;qed;;;END OF PROOF
L;∏;prod;ISOAMSO ; product operator ; N - ARY PRODUCT
L;∐;coprod;ISOAMSB ; coproduct operator ; N - ARY COPRODUCT
L;∑;sum;ISOAMSB ; summation operator ; N - ARY SUMMATION
2212;V;−;minus;ISOTECH;;MINUS SIGN
2213;V;∓;mnplus;;;MINUS-OR-PLUS SIGN
B;∔;plusdo;ISOAMSB ; plus sign , dot above ; DOT PLUS
B;∖;ssetmn;ISOAMSB ; small set minus ( cf . reverse solidus ) ; SET MINUS
B;∗;midast;ISOAMSB ; centered asterisk;ASTERISK OPERATOR
2218;B;∘;compfn;ISOTECH; composite function (small circle);RING OPERATOR
2219;B;∙;;;;BULLET OPERATOR
221A;L;√;radic;ISOTECH; radical;SQUARE ROOT
221B;L;∛;;;;CUBE ROOT
221C;L;∜;;;;FOURTH ROOT
R;∝;prop;ISOTECH;;PROPORTIONAL TO
221E;N;∞;infin;ISOTECH;;INFINITY
N;∟;angrt;ISOTECH ; ( 90 degree);RIGHT ANGLE
N;∠;ang;ISOAMSO;;ANGLE
2221;N;∡;angmsd;ISOAMSO;;MEASURED ANGLE
N;∢;angsph;ISOTECH;;SPHERICAL ANGLE
R;∣;mid;ISOAMSR;/mid ; DIVIDES
2224;R;∤;nmid;ISOAMSN;negated mid;DOES NOT DIVIDE
2225;R;∥;par;ISOTECH;;PARALLEL TO
2226;R;∦;npar;ISOAMSN;;NOT PARALLEL TO
2227;B;∧;and;ISOTECH;/wedge /land ;LOGICAL AND
B;∨;or;ISOTECH;/vee /lor;LOGICAL OR
B;∩;cap;ISOTECH;;INTERSECTION
B;∪;cup;ISOTECH;logical sum;UNION
222B;L;∫;int;ISOTECH;;INTEGRAL
222C;L;∬;Int;ISOTECH;;DOUBLE INTEGRAL
222E;L;∮;conint;ISOTECH;;CONTOUR INTEGRAL
222F;L;∯;Conint;ISOTECH;double contour integral operator ;SURFACE INTEGRAL
L;∰;Cconint;ISOTECH;triple contour integral operator ; VOLUME INTEGRAL
2231;L;∱;cwint;ISOTECH;;CLOCKWISE INTEGRAL
2232;L;∲;cwconint;ISOTECH;;CLOCKWISE CONTOUR INTEGRAL
2233;L;∳;awconint;ISOTECH;;ANTICLOCKWISE CONTOUR INTEGRAL
R;∴;there4;ISOTECH;;THEREFORE
2235;R;∵;becaus;ISOTECH;;BECAUSE
2236;R;∶;ratio;ISOAMSR;;RATIO
R;∷;Colon;ISOAMSR;two colons ; PROPORTION
B;∸;minusd;ISOAMSB;minus sign , dot above;DOT MINUS
2239;R;∹;excess;; excess (-:);EXCESS
R;∺;mDDot;ISOAMSR;minus with four dots , geometric properties ; GEOMETRIC PROPORTION
R;∻;homtht;ISOAMSR;;HOMOTHETIC
223C;R;∼;sim;ISOTECH; similar;TILDE OPERATOR
223D;R;∽;bsim;ISOAMSR; reverse similar;REVERSED TILDE
B;∾;ac;ISOAMSB ; most positive;INVERTED LAZY S
N;∿;;;;SINE WAVE
2240;B;≀;wreath;ISOAMSB;;WREATH PRODUCT
R;≁;nsim;ISOAMSO ; not
; equals , similar;MINUS TILDE
R;≃;sime;ISOTECH ; similar , equals;ASYMPTOTICALLY EQUAL TO
2244;R;≄;nsime;ISOAMSN; not similar, equals;NOT ASYMPTOTICALLY EQUAL TO
2245;R;≅;cong;ISOTECH; congruent with ;APPROXIMATELY EQUAL TO
R;≆;simne;ISOAMSN;similar , not equals [ vert only for 9573 entity ] ; APPROXIMATELY BUT NOT ACTUALLY EQUAL TO
R;≇;ncong;ISOAMSN ; not congruent with ; NOR ACTUALLY EQUAL TO
2248;R;≈;ap;ISOTECH; approximate;ALMOST EQUAL TO
R;≉;nap;ISOAMSN ; not EQUAL TO
224A;R;≊;ape;ISOAMSR; approximate, equals;ALMOST EQUAL OR EQUAL TO
224B;R;≋;apid;ISOAMSR; approximately identical to ;TRIPLE TILDE
224C;R;≌;bcong;ISOAMSR;;ALL EQUAL TO
224D;R;≍;asymp;ISOAMSR; asymptotically equal to;EQUIVALENT TO
R;≎;bump;ISOAMSR ; bumpy equals ; GEOMETRICALLY EQUIVALENT TO
224F;R;≏;bumpe;ISOAMSR; bumpy equals, equals ;DIFFERENCE BETWEEN
; equals , single dot above ; APPROACHES THE LIMIT
R;≑;eDot;ISOAMSR ; /doteqdot /Doteq equals , even dots ; GEOMETRICALLY EQUAL TO
R;≒;efDot;ISOAMSR ; equals , falling dots ; APPROXIMATELY EQUAL TO OR THE IMAGE OF
R;≓;erDot;ISOAMSR ; equals , rising dots;IMAGE OF OR APPROXIMATELY EQUAL TO
R;≔;colone;ISOAMSR;;COLON EQUALS
; circle on equals sign;RING IN EQUAL TO
2257;R;≗;cire;ISOAMSR; circle, equals ;RING EQUAL TO
2258;R;≘;arceq;; arc, equals;CORRESPONDS TO
R;≙;wedgeq;ISOTECH ; corresponds to ( wedge , equals ) ; ESTIMATES
225A;R;≚;veeeq;ISOTECH; logical or, equals ;EQUIANGULAR TO
R;≛;;;;STAR EQUALS
R;≜;trie;ISOAMSR ; triangle , equals ; DELTA EQUAL TO
225D;R;≝;eqdef;;;EQUAL TO BY DEFINITION
225E;R;≞;measeq;; (m over equals);MEASURED BY
; equal with question mark;QUESTIONED EQUAL TO
R;≠;ne;ISOTECH ; /ne /neq ; NOT EQUAL TO
2261;R;≡;equiv;ISOTECH;;IDENTICAL TO
IDENTICAL TO
R;≣;Equiv ; ; ( 4 lines);STRICTLY EQUIVALENT TO
R;≤;le;ISOTECH ; /leq /le ; LESS - THAN OR EQUAL TO
R;≥;ge;ISOTECH ; /geq /ge ; GREATER - THAN OR EQUAL TO
; less , double equals;LESS - THAN OVER EQUAL TO
2267;R;≧;gE;ISOAMSR; greater, double equals ;GREATER-THAN OVER EQUAL TO
R;≨;lnE;ISOAMSN ; less , not double equals;LESS - THAN BUT NOT EQUAL TO
2269;R;≩;gnE;ISOAMSN; greater, not double equals ;GREATER-THAN BUT NOT EQUAL TO
R;≪;Lt ; ; much less than , type 2 ; MUCH LESS - THAN
R;≫;Gt ; ; much greater than , type 2;MUCH GREATER - THAN
226C;R;≬;twixt;ISOAMSR;;BETWEEN
R;≭;nasymp ; ; not asymptotically equal EQUIVALENT TO
LESS - THAN
2270;R;≰;nle;ISOAMSN;;NEITHER LESS-THAN NOR EQUAL TO
NOR EQUAL TO
2272;R;≲;lsim;ISOAMSR; less, similar;LESS-THAN OR EQUIVALENT TO
2273;R;≳;gsim;ISOAMSR; greater, similar ;GREATER-THAN OR EQUIVALENT TO
2274;R;≴;nlsim;ISOAMSN; not less, similar;NEITHER LESS-THAN NOR EQUIVALENT TO
R;≵;ngsim;ISOAMSN ; not greater , similar ; NOR EQUIVALENT TO
R;≶;lg;ISOAMSR ; less , greater;LESS - THAN OR GREATER - THAN
2277;R;≷;gl;ISOAMSR; greater, less;GREATER-THAN OR LESS-THAN
NOR GREATER - THAN
NOR LESS - THAN
227A;R;≺;pr;ISOAMSR;;PRECEDES
227B;R;≻;sc;ISOAMSR;;SUCCEEDS
R;≼;prcue;ISOAMSR ; precedes , curly equals ; PRECEDES OR EQUAL TO
227D;R;≽;sccue;ISOAMSR; succeeds, curly equals ;SUCCEEDS OR EQUAL TO
R;≾;prsim;ISOAMSR ; precedes , similar;PRECEDES OR EQUIVALENT TO
227F;R;≿;scsim;ISOAMSR; succeeds, similar;SUCCEEDS OR EQUIVALENT TO
R;⊀;npr;ISOAMSN ; not precedes ; DOES NOT PRECEDE
2281;R;⊁;nsc;ISOAMSN; not succeeds ;DOES NOT SUCCEED
R;⊂;sub;ISOTECH ; subset or is implied by;SUBSET OF
R;⊃;sup;ISOTECH ; superset or implies;SUPERSET OF
R;⊄;nsub;ISOAMSN;;NOT A SUBSET OF
A SUPERSET OF
R;⊆;sube;ISOTECH ; subset , equals ; SUBSET OF OR EQUAL TO
R;⊇;supe;ISOTECH ; superset , equals ; OF OR EQUAL TO
R;⊈;nsube;ISOAMSN ; not subset , equals ; NEITHER A SUBSET OF NOR EQUAL TO
R;⊉;nsupe;ISOAMSN ; not superset , equals ; NEITHER A SUPERSET OF NOR EQUAL TO
R;⊊;subne;ISOAMSN ; subset , not equals ; SUBSET OF WITH NOT EQUAL TO
R;⊋;supne;ISOAMSN ; superset , not equals ; OF WITH NOT EQUAL TO
228C;B;⊌;;;;MULTISET
B;⊍;cupdot;ISOAMSB ; union , with dot;MULTISET
B;⊎;uplus;ISOAMSB ; plus sign in union ; MULTISET UNION
228F;R;⊏;sqsub;ISOAMSR; square subset;SQUARE IMAGE OF
R;⊐;sqsup;ISOAMSR ; square superset;SQUARE ORIGINAL OF
2291;R;⊑;sqsube;ISOAMSR; square subset, equals;SQUARE IMAGE OF OR EQUAL TO
R;⊒;sqsupe;ISOAMSR ; square superset , equals;SQUARE ORIGINAL OF OR EQUAL TO
B;⊓;sqcap;ISOAMSB ; square intersection;SQUARE CAP
B;⊔;sqcup;ISOAMSB ; square union ; SQUARE CUP
B;⊕;oplus;ISOAMSB ; plus sign in circle;CIRCLED PLUS
B;⊖;ominus;ISOAMSB ; minus sign in circle ; CIRCLED MINUS
B;⊗;otimes;ISOAMSB ; multiply sign in circle;CIRCLED TIMES
B;⊘;osol;ISOAMSB ; solidus in circle;CIRCLED DIVISION SLASH
B;⊙;odot;ISOAMSB ; middle dot in circle ; CIRCLED DOT OPERATOR
B;⊚;ocir;ISOAMSB ; small circle in circle ; CIRCLED RING OPERATOR
; asterisk in circle ; CIRCLED ASTERISK OPERATOR
229C;B;⊜;oeq;; equal in circle;CIRCLED EQUALS
229D;B;⊝;odash;ISOAMSB; hyphen in circle ;CIRCLED DASH
229E;B;⊞;plusb;ISOAMSB; plus sign in box \boxplus;SQUARED PLUS
B;⊟;minusb;ISOAMSB ; minus sign in box \boxminus;SQUARED MINUS
B;⊠;timesb;ISOAMSB ; multiply sign in box \boxtimes;SQUARED TIMES
B;⊡;sdotb;ISOAMSB ; small dot in box \dotsquare \boxdot ; SQUARED DOT OPERATOR
22A2;R;⊢;vdash;ISOAMSR; vertical, dash ;RIGHT TACK
; dash , vertical ; LEFT TACK
N;⊤;top;ISOTECH ; top;DOWN
22A5;R;⊥;bottom;ISOTECH; bottom ;UP TACK
R;⊦ ; ; ; ( vertical , short dash ) ;
22A7;R;⊧;models;ISOAMSR; (vertical, short double dash);MODELS
22A8;R;⊨;vDash;ISOAMSR; vertical, double dash;TRUE
22A9;R;⊩;Vdash;ISOAMSR; double vertical, dash;FORCES
R;⊪;Vvdash;ISOAMSR ; triple vertical , dash;TRIPLE VERTICAL BAR RIGHT TURNSTILE
22AB;R;⊫;VDash;ISOAMSR; double vert, double dash ;DOUBLE VERTICAL BAR DOUBLE RIGHT TURNSTILE
22AC;R;⊬;nvdash;ISOAMSN; not vertical, dash ;DOES NOT PROVE
R;⊭;nvDash;ISOAMSN ; not vertical , double dash;NOT TRUE
22AE;R;⊮;nVdash;ISOAMSN; not double vertical, dash;DOES NOT FORCE
R;⊯;nVDash;ISOAMSN ; not double vert , double dash ; NEGATED DOUBLE VERTICAL BAR DOUBLE RIGHT TURNSTILE
R;⊰;prurel;ISOAMSR ; element precedes under relation;PRECEDES UNDER RELATION
22B1;R;⊱;scurel;;;SUCCEEDS UNDER RELATION
; left triangle , open , variant ; OF
22B3;R;⊳;vrtri;ISOAMSR; right triangle, open, variant;CONTAINS AS NORMAL SUBGROUP
22B4;R;⊴;ltrie;ISOAMSR; left triangle, equals;NORMAL SUBGROUP OF OR EQUAL TO
R;⊵;rtrie;ISOAMSR ; right triangle , equals ; CONTAINS AS NORMAL SUBGROUP OR EQUAL TO
22B6;R;⊶;origof;ISOAMSA;;ORIGINAL OF
OF
22B8;R;⊸;mumap;ISOAMSA; /multimap;MULTIMAP
22B9;B;⊹;hercon;ISOAMSB;;HERMITIAN CONJUGATE MATRIX
B;⊺;intcal;ISOAMSB ; intercal ; INTERCALATE
; ; logical or , bar below ( large vee ) , exclusive disjunction ; XOR
22BC;B;⊼;barwed;ISOAMSB; bar, wedge (large wedge) ;NAND
B;⊽;;ISOAMSB ; bar , vee ( large vee ) ; NOR
22BE;N;⊾;angrtvb;ISOAMSO; right angle-measured [with arc];RIGHT ANGLE WITH ARC
N;⊿;;;;RIGHT TRIANGLE
22C0;L;⋀;xwedge;ISOAMSB; logical or operator;N-ARY LOGICAL AND
L;⋁;xvee;ISOAMSB ; logical and operator ; N - ARY LOGICAL OR
L;⋂;xcap;ISOAMSB ; intersection operator;N - ARY INTERSECTION
L;⋃;xcup;ISOAMSB ; union operator ; N - ARY UNION
B;⋄;diam;ISOAMSB ; white diamond;DIAMOND OPERATOR
B;⋅;sdot;ISOAMSB ; small middle dot ; DOT OPERATOR
22C6;B;⋆;sstarf;ISOAMSB; small star, filled, low;STAR OPERATOR
B;⋇;divonx;ISOAMSB ; division on times;DIVISION TIMES
22C8;R;⋈;bowtie;ISOAMSR;;BOWTIE
B;⋉;ltimes;ISOAMSB ; times sign , left closed;LEFT NORMAL FACTOR SEMIDIRECT PRODUCT
B;⋊;rtimes;ISOAMSB ; times sign , right closed ; RIGHT NORMAL FACTOR SEMIDIRECT PRODUCT
B;⋋;lthree;ISOAMSB;;LEFT SEMIDIRECT PRODUCT
B;⋌;rthree;ISOAMSB;;RIGHT SEMIDIRECT PRODUCT
R;⋍;bsime;ISOAMSR ; reverse similar , equals;REVERSED TILDE EQUALS
22CE;B;⋎;cuvee;ISOAMSB;;CURLY LOGICAL OR
22CF;B;⋏;cuwed;ISOAMSB;;CURLY LOGICAL AND
22D0;R;⋐;Sub;ISOAMSR;;DOUBLE SUBSET
R;⋑;Sup;ISOAMSR;;DOUBLE
B;⋒;Cap;ISOAMSB ; /Cap /doublecap;DOUBLE INTERSECTION
B;⋓;Cup;ISOAMSB ; /Cup /doublecup;DOUBLE UNION
22D4;R;⋔;fork;ISOAMSR;;PITCHFORK
R;⋕;epar;ISOTECH ; parallel , equal;EQUAL AND PARALLEL TO
R;⋖;ltdot;ISOAMSR;;LESS - THAN WITH DOT
R;⋗;gtdot;ISOAMSR;;GREATER - THAN WITH DOT
22D8;R;⋘;Ll;ISOAMSR; /Ll /lll /llless triple less-than;VERY MUCH LESS-THAN
22D9;R;⋙;Gg;ISOAMSR; /ggg /Gg /gggtr triple greater-than;VERY MUCH GREATER-THAN
R;⋚;leg;ISOAMSR ; less , equals , greater;LESS - THAN EQUAL TO OR GREATER - THAN
R;⋛;gel;ISOAMSR ; greater , equals , less;GREATER - THAN EQUAL TO OR LESS - THAN
; equal - or - less;EQUAL TO OR LESS - THAN
R;⋝;eg;ISOAMSR ; equal - or - greater ; EQUAL TO OR GREATER - THAN
R;⋞;cuepr;ISOAMSR ; curly equals , precedes ; EQUAL TO OR PRECEDES
R;⋟;cuesc;ISOAMSR ; curly equals , succeeds ; EQUAL TO OR SUCCEEDS
22E0;R;⋠;nprcue;ISOAMSN; not precedes, curly equals ;DOES NOT PRECEDE OR EQUAL
22E1;R;⋡;nsccue;ISOAMSN; not succeeds, curly equals ;DOES NOT SUCCEED OR EQUAL
22E2;R;⋢;nsqsube;ISOAMSN; not, square subset, equals ;NOT SQUARE IMAGE OF OR EQUAL TO
22E3;R;⋣;nsqsupe;ISOAMSN; not, square superset, equals ;NOT SQUARE ORIGINAL OF OR EQUAL TO
22E4;R;⋤;sqsubne;; square subset, not equals;SQUARE IMAGE OF OR NOT EQUAL TO
R;⋥;sqsupne ; ; square superset , not equals;SQUARE ORIGINAL OF OR NOT EQUAL TO
R;⋦;lnsim;ISOAMSN ; less , not similar;LESS - THAN BUT NOT EQUIVALENT TO
22E7;R;⋧;gnsim;ISOAMSN; greater, not similar ;GREATER-THAN BUT NOT EQUIVALENT TO
R;⋨;prnsim;ISOAMSN ; precedes , not similar;PRECEDES BUT NOT EQUIVALENT TO
R;⋩;scnsim;ISOAMSN ; succeeds , not similar;SUCCEEDS BUT NOT EQUIVALENT TO
R;⋪;nltri;ISOAMSN ; not left triangle;NOT OF
R;⋫;nrtri;ISOAMSN ; not right triangle ; DOES NOT CONTAIN AS NORMAL SUBGROUP
R;⋬;nltrie;ISOAMSN ; not left triangle , equals;NOT NORMAL SUBGROUP OF OR EQUAL TO
22ED;R;⋭;nrtrie;ISOAMSN; not right triangle, equals;DOES NOT CONTAIN AS NORMAL SUBGROUP OR EQUAL
R;⋮;vellip;ISOPUB ; vertical ellipsis;VERTICAL ELLIPSIS
R;⋯;ctdot;ISOTECH ; three dots , centered ; MIDLINE HORIZONTAL ELLIPSIS
R;⋰;utdot;ISOTECH ; three dots , ascending;UP RIGHT DIAGONAL ELLIPSIS
R;⋱;dtdot;ISOTECH ; three dots , descending ; DOWN RIGHT DIAGONAL ELLIPSIS
22F2;R;⋲;disin;ISOTECH;;ELEMENT OF WITH LONG HORIZONTAL STROKE
22F3;R;⋳;isinsv;ISOTECH;;ELEMENT OF WITH VERTICAL BAR AT END OF HORIZONTAL STROKE
22F4;R;⋴;isins;ISOTECH;;SMALL ELEMENT OF WITH VERTICAL BAR AT END OF HORIZONTAL STROKE
22F5;R;⋵;isindot;ISOTECH;;ELEMENT OF WITH DOT ABOVE
22F6;R;⋶;notinvc;ISOTECH;;ELEMENT OF WITH OVERBAR
22F7;R;⋷;notinvb;ISOTECH;;SMALL ELEMENT OF WITH OVERBAR
R;⋸;isinvb;;;ELEMENT OF WITH UNDERBAR
OF WITH TWO HORIZONTAL STROKES
22FA;R;⋺;nisd;ISOTECH;;CONTAINS WITH LONG HORIZONTAL STROKE
22FB;R;⋻;xnis;ISOTECH;;CONTAINS WITH VERTICAL BAR AT END OF HORIZONTAL STROKE
22FC;R;⋼;nis;ISOTECH;;SMALL CONTAINS WITH VERTICAL BAR AT END OF HORIZONTAL STROKE
22FD;R;⋽;notnivc;ISOTECH;;CONTAINS WITH OVERBAR
22FE;R;⋾;notnivb;ISOTECH;;SMALL CONTAINS WITH OVERBAR
22FF;R;⋿;;;;Z NOTATION BAG MEMBERSHIP
N; ⌀ ;diameter ; ; 2205 diameter sign ; DIAMETER SIGN
2302;N;⌂;;;;HOUSE
B; ⌅ ;;ISOAMSB;;PROJECTIVE
B; ⌆ ;;ISOAMSB;;PERSPECTIVE
2308;O;⌈;lceil;ISOAMSC;;LEFT CEILING
2309;C;⌉;rceil;ISOAMSC;;RIGHT CEILING
FLOOR
230B;C;⌋;rfloor;ISOAMSC;;RIGHT FLOOR
2310;N;⌐;bnot;ISOTECH;;REVERSED NOT SIGN
N; ⌑ ;;;;SQUARE LOZENGE
2319;N;⌙;;;;TURNED NOT SIGN
O; ⌜ LEFT CORNER
231D;C;⌝;urcorn;ISOAMSC;;TOP RIGHT CORNER
231E;O;⌞;dlcorn;ISOAMSC;;BOTTOM LEFT CORNER
231F;C;⌟;drcorn;ISOAMSC;;BOTTOM RIGHT CORNER
G;⌠ .. ⌡ ; ; ; ( integral parts ) ; TOP HALF INTEGRAL .. BOTTOM HALF INTEGRAL
2322;R;⌢;frown;ISOAMSR; down curve ;FROWN
R; ⌣ ;smile;ISOAMSR ; up curve ; SMILE
O ; ; ; ; left angle bracket ( deprecated for math use , use 27E8 ) ; LEFT - POINTING ANGLE BRACKET
#232A;C;;;; right angle bracket (deprecated for math use, use 27E9);RIGHT-POINTING ANGLE BRACKET
2336;N;⌶;topbot;ISOTECH; top and bottom ;APL FUNCTIONAL SYMBOL I-BEAM
B; ⌽ ;ovbar;ISOAMSB ; circle with vertical bar ; APL FUNCTIONAL SYMBOL CIRCLE STILE
233F;R;⌿;solbar;ISOAMSN; solidus, bar through ;APL FUNCTIONAL SYMBOL SLASH BAR
R;⍼;;;;RIGHT ANGLE WITH DOWNWARDS ZIGZAG ARROW
2394;N;⎔;hbenzen;ISOCHEM; horizontal benzene ring [hexagon flat open] ;SOFTWARE-FUNCTION SYMBOL
G;⎛ .. ⎯ ; ; ; ( bracket parts);LEFT PARENTHESIS UPPER HOOK .. HORIZONTAL LINE EXTENSION
23B0;R;⎰;lmoust;ISOAMSC;;UPPER LEFT OR LOWER RIGHT CURLY BRACKET SECTION
R;⎱;rmoust;ISOAMSC;;UPPER RIGHT OR LOWER LEFT CURLY BRACKET SECTION
23B2..23B3;G;⎲..⎳;;; (summation parts);SUMMATION TOP..SUMMATION BOTTOM
23B4;N;⎴;tbrk;ISOAMSO;;TOP SQUARE BRACKET
23B5;N;⎵;bbrk;ISOAMSO;;BOTTOM SQUARE BRACKET
N; ⎶ BRACKET OVER TOP SQUARE BRACKET
G ; ⎷ ; ; ; ( square root part ) ; RADICAL SYMBOL BOTTOM
G ; ; ; ; ( vertical line extension);VERTICAL LINE EXTENSION
N;⏜;ovrpar ; ; over parenthesis ; TOP PARENTHESIS
N;⏝;udrpar ; ; under parenthesis;BOTTOM PARENTHESIS
N;⏞;ovrcub ; ; over brace ; TOP CURLY BRACKET
23DF;N;⏟;udrcub;; under brace;BOTTOM CURLY BRACKET
N;⏠;;;;TOP TORTOISE SHELL BRACKET
23E1;N;⏡;;;;BOTTOM TORTOISE SHELL BRACKET
23E2;N;⏢;;ISOTECH;;WHITE TRAPEZIUM
N; ⏣ ;;ISOCHEM;;BENZENE RING WITH
N; ⏤ ;;ISOTECH;;STRAIGHTNESS
N;
N; ⏦ ;;ISOTECH;;AC CURRENT
23E7;N;⏧;;ISOTECH;;ELECTRICAL INTERSECTION
24C8;N;Ⓢ;oS;ISOAMSO; capital S in circle;CIRCLED LATIN CAPITAL LETTER S
N; ■ ;squarf;ISOPUB ; square , filled ; BLACK SQUARE
N; □ ;square;ISOPUB ; square , open ; WHITE SQUARE
25AA;N;▪;squf;ISOPUB;? /blacksquare - sq bullet, filled ;BLACK SMALL SQUARE
25AB;N;▫;;;;WHITE SMALL SQUARE
25AD;N;▭;rect;; horizontal rectangle, open;WHITE RECTANGLE
N; ; histogram marker ; BLACK VERTICAL RECTANGLE
25AF;N;▯;;ISOPUB;;WHITE VERTICAL RECTANGLE
N; ▰ ;;;;BLACK PARALLELOGRAM
N ; ; ; parallelogram , open;WHITE PARALLELOGRAM
B; ▲ ;;;;BLACK UP - POINTING TRIANGLE
B; △ ;xutri;ISOAMSB ; big up triangle , open;WHITE UP - POINTING TRIANGLE
B; ▴ ;utrif;ISOPUB ; up triangle , filled;BLACK UP - POINTING SMALL TRIANGLE
B; ▵ ;utri;ISOPUB ; /triangle - up triangle , open;WHITE UP - POINTING SMALL TRIANGLE
25B6;B;▶;vrtrif;; (large) right triangle, filled ;BLACK RIGHT-POINTING TRIANGLE
B;▷;vrtri ; ; ( large ) right triangle , open , Z notation range restriction ; WHITE RIGHT - POINTING TRIANGLE
25B8;B;▸;rtrif;ISOPUB; right triangle, filled ;BLACK RIGHT-POINTING SMALL TRIANGLE
B; ▹ ; right triangle , open ; WHITE RIGHT - POINTING SMALL TRIANGLE
25BC;B;▼;;; big down triangle, filled;BLACK DOWN-POINTING TRIANGLE
25BD;B;▽;xdtri;ISOAMSB; big down triangle, open;WHITE DOWN-POINTING TRIANGLE
B; ;dtrif;ISOPUB ; down triangle , filled;BLACK DOWN - POINTING SMALL TRIANGLE
B; ▿ ;dtri;ISOPUB ; down triangle , open;WHITE DOWN - POINTING SMALL TRIANGLE
B; ◀ ;vltrif ; ; ( large ) left triangle , filled;BLACK LEFT - POINTING TRIANGLE
25C1;B;◁;vltri;; (large) left triangle, open, Z notation domain restriction ;WHITE LEFT-POINTING TRIANGLE
B; ◂ ;ltrif;ISOPUB ; left triangle , filled;BLACK LEFT - POINTING SMALL TRIANGLE
B; ◃ ;ltri;ISOPUB ; left triangle , open;WHITE LEFT - POINTING SMALL TRIANGLE
B; ◄ ;;;;BLACK LEFT - POINTING POINTER
25C5;B;◅;;;;WHITE LEFT-POINTING POINTER
25C6;N;◆;diamondf;ISOPUB;;BLACK DIAMOND
N; ;;;;WHITE DIAMOND
25C8;N;◈;;;;WHITE DIAMOND CONTAINING BLACK SMALL DIAMOND
N; ◉
25CA;B;◊;loz;ISOPUB; lozenge or total mark;LOZENGE
25CB;B;○;;ISOAMSB; large circle ;WHITE CIRCLE
N; ◎ ;;;;BULLSEYE
25CF;N;●;circlef;ISOPUB; circle, filled ;BLACK CIRCLE
25D0..25D3;N;◐..◓;;;;CIRCLE WITH LEFT HALF BLACK..CIRCLE WITH UPPER HALF BLACK
N; ◖ ;;;;LEFT HALF BLACK CIRCLE
25D7;N;◗;;;;RIGHT HALF BLACK CIRCLE
25E2;N;◢;lrtrif;; lower right triangle, filled ;BLACK LOWER RIGHT TRIANGLE
N; ◣ ;lltrif ; ; lower left triangle , filled;BLACK LOWER LEFT TRIANGLE
25E4;N;◤;ultrif;; upper left triangle, filled;BLACK UPPER LEFT TRIANGLE
N; ◥ ;urtrif ; ; upper right triangle , filled ; BLACK UPPER RIGHT TRIANGLE
B; ◦ ;;;;WHITE BULLET
25E7..25EA;N;◧..◪;;;;SQUARE WITH LEFT HALF BLACK..SQUARE WITH LOWER RIGHT DIAGONAL HALF BLACK
B; ◫ ;midb ; ; vertical bar in box;WHITE SQUARE WITH VERTICAL BISECTING LINE
B; ◬ ;tridot;ISOAMSB ; triangle with centered dot ; WHITE UP - POINTING TRIANGLE WITH DOT
N; ◯ ;xcirc;;;LARGE CIRCLE
B;◸;ultri;ISOAMSO;;UPPER LEFT TRIANGLE
25F9;B;◹;urtri;ISOAMSO;;UPPER RIGHT TRIANGLE
25FA;B;◺;lltri;ISOAMSO;;LOWER LEFT TRIANGLE
25FB;B;◻;xsqu;;;WHITE MEDIUM SQUARE
B;◼;xsquf;;;BLACK MEDIUM SQUARE
25FD;B;◽;vssqu;;;WHITE MEDIUM SMALL SQUARE
B;◾;vssquf;;;BLACK MEDIUM SMALL SQUARE
25FF;B;◿;lrtri;ISOAMSO;;LOWER RIGHT TRIANGLE
2605;B;★;starf;ISOPUB; star, filled ;BLACK STAR
B; ☆ ;star;ISOPUB ; star , open ; WHITE STAR
N; ;;;;SUN
260C;N;☌;;;;CONJUNCTION
N; ☽ ;;;;FIRST QUARTER MOON
N; ☾ ;;;;LAST
263F;N;☿;;;;MERCURY
2640;N;♀;female;ISOPUB; Venus;FEMALE SIGN
N; ♁ ;;;;EARTH
N; ♂ ;male;ISOPUB ; Mars ; MALE SIGN
2643;N;♃;;;;JUPITER
2644;N;♄;;;;SATURN
2646;N;♆;;;;NEPTUNE
2647;N;♇;;;;PLUTO
2648;N;♈;;;;ARIES
N; ♉ ;;;;TAURUS
N; ♠ ;spades;ISOPUB ; spades suit symbol ; BLACK SPADE SUIT
N; ♡ ;hearts;ISOPUB ; heart suit symbol;WHITE HEART SUIT
N; ♢ ; diamond suit symbol;WHITE DIAMOND SUIT
N; ♣ ;clubs;ISOPUB ; club suit symbol ; BLACK CLUB SUIT
N; ♤ ;spadeso ; ; spade , white ( card suit ) ; WHITE SPADE SUIT
N; ♥ ;heartsf ; ; filled heart ( card suit ) ; BLACK HEART SUIT
2666;N;♦;diamsf;; filled diamond (card suit) ;BLACK DIAMOND SUIT
N; ♧ ; ; club , white ( card suit);WHITE CLUB SUIT
2669;N;♩;sung;ISONUM; music note (sung text sign);QUARTER NOTE
266D;N;♭;flat;ISOPUB;;MUSIC FLAT SIGN
N; ♮ ;natur;ISOPUB;;MUSIC NATURAL SIGN
266F;N;♯;sharp;ISOPUB;;MUSIC SHARP SIGN
N; FACE-1
2681;N;⚁;;;;DIE FACE-2
2682;N;⚂;;;;DIE FACE-3
2683;N;⚃;;;;DIE FACE-4
N;
2685;N;⚅;;;;DIE FACE-6
N; ⚆ ;;;;WHITE CIRCLE WITH DOT RIGHT
N; ⚇ ;;;;WHITE WITH TWO DOTS
N; ⚈ ;;;;BLACK WITH WHITE DOT RIGHT
N; ⚉ ;;;;BLACK WITH TWO WHITE DOTS
26AA;N;⚪;;;;MEDIUM WHITE CIRCLE
26AB;N;⚫;;;;MEDIUM BLACK CIRCLE
26AC;N;⚬;;;;MEDIUM SMALL WHITE CIRCLE
26B2;N;⚲;;;;NEUTER
2713;N;✓;check;ISOPUB; tick ;CHECK MARK
N; ✗ ;cross;ISOPUB ; ballot cross ; BALLOT X
N; ✠ ;malt;ISOPUB;;MALTESE CROSS
272A;N;✪;;;;CIRCLED WHITE STAR
2736;N;✶;sext;ISOPUB;;SIX POINTED BLACK STAR
O;❲;lbbrk;ISOTECH ; left broken bracket;LIGHT LEFT TORTOISE SHELL BRACKET ORNAMENT
C;❳;rbbrk;ISOTECH ; right broken bracket ; LIGHT RIGHT TORTOISE SHELL BRACKET ORNAMENT
N;⟀;;;;THREE DIMENSIONAL ANGLE
27C1;N;⟁;;;;WHITE TRIANGLE CONTAINING SMALL WHITE TRIANGLE
27C2;R;⟂;perp;ISOTECH; perpendicular;PERPENDICULAR
27C3;R;⟃;;;;OPEN SUBSET
R;⟄;;;;OPEN
27C5;R;⟅;;;;LEFT S-SHAPED BAG DELIMITER
27C6;R;⟆;;;;RIGHT S-SHAPED BAG DELIMITER
27C7;R;⟇;;;;OR WITH DOT INSIDE
27C8;R;⟈;bsolsub;;;REVERSE SOLIDUS PRECEDING SUBSET
R;⟉;subsol;;;SUPERSET PRECEDING SOLIDUS
R;⟊;;;;VERTICAL BAR WITH HORIZONTAL STROKE
27CB;R;⟋;diagup;;;MATHEMATICAL RISING DIAGONAL
27CC;L;⟌;;;;LONG DIVISION
R;⟍;diagdown;;;MATHEMATICAL FALLING DIAGONAL
27CE;B;⟎;;;;SQUARED LOGICAL AND
27CF;B;⟏;;;;SQUARED LOGICAL OR
N;⟐;diamdot;;;WHITE DIAMOND WITH CENTRED DOT
B;⟑;;;;AND WITH DOT
27D2;R;⟒;;;;ELEMENT OF OPENING UPWARDS
R;⟓;;;;LOWER RIGHT CORNER WITH DOT
R;⟔;;;;UPPER LEFT CORNER WITH DOT
JOIN
27D6;L;⟖;;;;RIGHT OUTER JOIN
L;⟗;;;;FULL JOIN
L;⟘;;;;LARGE UP TACK
L;⟙;;;;LARGE DOWN
27DA;R;⟚;;;;LEFT AND RIGHT DOUBLE TURNSTILE
27DB;R;⟛;;;;LEFT AND RIGHT TACK
27DD;R;⟝;;;;LONG RIGHT TACK
27DE;R;⟞;;;;LONG LEFT TACK
TACK WITH CIRCLE ABOVE
B;⟠;;;;LOZENGE BY HORIZONTAL RULE
B;⟡;;;;WHITE CONCAVE - SIDED DIAMOND
B;⟢;;;;WHITE CONCAVE - SIDED DIAMOND WITH LEFTWARDS TICK
DIAMOND WITH RIGHTWARDS TICK
27E4;B;⟤;;;;WHITE SQUARE WITH LEFTWARDS TICK
TICK
O;⟦;lobrk;ISOTECH;;MATHEMATICAL LEFT WHITE SQUARE BRACKET
27E7;C;⟧;robrk;ISOTECH;;MATHEMATICAL RIGHT WHITE SQUARE BRACKET
27E8;O;⟨;lang;ISOTECH;;MATHEMATICAL LEFT ANGLE BRACKET
27E9;C;⟩;rang;ISOTECH;;MATHEMATICAL RIGHT ANGLE BRACKET
27EA;O;⟪;Lang;ISOTECH; left angle bracket, double ;MATHEMATICAL LEFT DOUBLE ANGLE BRACKET
C;⟫;Rang;ISOTECH ; right angle bracket , double;MATHEMATICAL RIGHT DOUBLE ANGLE BRACKET
27EC;O;⟬;loang;ISOTECK;;MATHEMATICAL LEFT WHITE TORTOISE SHELL BRACKET
C;⟭;roang;ISOTECH;;MATHEMATICAL RIGHT WHITE TORTOISE SHELL BRACKET
O;⟮ ; ; ; MATHEMATICAL LEFT FLATTENED PARENTHESIS
; ; ; rgroup ; MATHEMATICAL RIGHT FLATTENED PARENTHESIS
R;⟰;;;;UPWARDS QUADRUPLE ARROW
27F1;R;⟱;;;;DOWNWARDS QUADRUPLE ARROW
CIRCLE ARROW
CIRCLE ARROW
27F4;R;⟴;;;;RIGHT ARROW WITH CIRCLED PLUS
ARROW
ARROW
27F7;R;⟷;xharr;ISOAMSA;;LONG LEFT RIGHT ARROW
R;⟸;xlArr;ISOAMSA;;LONG LEFTWARDS DOUBLE ARROW
27F9;R;⟹;xrArr;ISOAMSA;;LONG RIGHTWARDS DOUBLE ARROW
27FA;R;⟺;xhArr;ISOAMSA;;LONG LEFT RIGHT DOUBLE ARROW
R;⟻;xmapfrom;;;LONG LEFTWARDS ARROW FROM BAR
R;⟼;xmap;ISOAMSA;;LONG RIGHTWARDS ARROW FROM BAR
R;⟽;xMapfrom;;;LONG LEFTWARDS DOUBLE ARROW FROM BAR
R;⟾;xMapto;;;LONG RIGHTWARDS DOUBLE ARROW FROM BAR
R;⟿;xzigrarr;ISOAMSA;;LONG RIGHTWARDS SQUIGGLE ARROW
R;⤀;;;;RIGHTWARDS TWO - HEADED ARROW WITH VERTICAL STROKE
R;⤁;;;;RIGHTWARDS TWO - HEADED ARROW WITH DOUBLE VERTICAL STROKE
2902;R;⤂;nvlArr;ISOAMSA;;LEFTWARDS DOUBLE ARROW WITH VERTICAL STROKE
R;⤃;nvrArr;ISOAMSA;;RIGHTWARDS DOUBLE ARROW WITH VERTICAL STROKE
2904;R;⤄;nvhArr;ISOAMSA;;LEFT RIGHT DOUBLE ARROW WITH VERTICAL STROKE
R;⤅;Map;ISOAMSA;;RIGHTWARDS TWO - HEADED ARROW FROM BAR
R;⤆;Mapfrom;;;LEFTWARDS DOUBLE ARROW FROM BAR
R;⤇;Mapto;;;RIGHTWARDS DOUBLE ARROW FROM BAR
2908;R;⤈;darrln;;;DOWNWARDS ARROW WITH HORIZONTAL STROKE
R;⤉;uarrln;;;UPWARDS ARROW WITH HORIZONTAL STROKE
290A;R;⤊;uAarr;;;UPWARDS TRIPLE ARROW
290B;R;⤋;dAarr;;;DOWNWARDS TRIPLE ARROW
290C;R;⤌;lbarr;ISOAMSA;;LEFTWARDS DOUBLE DASH ARROW
290D;R;⤍;rbarr;ISOAMSA;;RIGHTWARDS DOUBLE DASH ARROW
290E;R;⤎;lBarr;ISOAMSA;;LEFTWARDS TRIPLE DASH ARROW
R;⤏;rBarr;ISOAMSA;;RIGHTWARDS TRIPLE DASH ARROW
R;⤐;RBarr;ISOAMSA;;RIGHTWARDS TWO - HEADED TRIPLE DASH ARROW
R;⤑;DDotrahd;ISOAMSA;;RIGHTWARDS ARROW WITH DOTTED STEM
R;⤒;uarrb;;;UPWARDS ARROW TO BAR
2913;R;⤓;darrb;;;DOWNWARDS ARROW TO BAR
R;⤔;;;;RIGHTWARDS ARROW WITH TAIL WITH VERTICAL STROKE
2915;R;⤕;;;;RIGHTWARDS ARROW WITH TAIL WITH DOUBLE VERTICAL STROKE
R;⤖;Rarrtl;ISOAMSA;;RIGHTWARDS TWO - HEADED ARROW WITH TAIL
R;⤗;;;;RIGHTWARDS TWO - HEADED ARROW WITH TAIL WITH VERTICAL STROKE
R;⤘;;;;RIGHTWARDS TWO - HEADED ARROW WITH TAIL WITH DOUBLE VERTICAL STROKE
2919;R;⤙;latail;ISOAMSA;;LEFTWARDS ARROW-TAIL
291A;R;⤚;ratail;ISOAMSA;;RIGHTWARDS ARROW-TAIL
291B;R;⤛;lAtail;ISOAMSA;;LEFTWARDS DOUBLE ARROW-TAIL
291C;R;⤜;rAtail;ISOAMSA;;RIGHTWARDS DOUBLE ARROW-TAIL
291D;R;⤝;larrfs;ISOAMSA;;LEFTWARDS ARROW TO BLACK DIAMOND
R;⤞;rarrfs;ISOAMSA;;RIGHTWARDS ARROW TO BLACK DIAMOND
291F;R;⤟;larrbfs;ISOAMSA;;LEFTWARDS ARROW FROM BAR TO BLACK DIAMOND
2920;R;⤠;rarrbfs;ISOAMSA;;RIGHTWARDS ARROW FROM BAR TO BLACK DIAMOND
R;⤡;nwsesarr;;;NORTH WEST AND SOUTH EAST ARROW
R;⤢;neswsarr;;;NORTH EAST AND SOUTH WEST ARROW
2923;R;⤣;nwarhk;ISOAMSA;;NORTH WEST ARROW WITH HOOK
2924;R;⤤;nearhk;ISOAMSA;;NORTH EAST ARROW WITH HOOK
2925;R;⤥;searhk;ISOAMSA;;SOUTH EAST ARROW WITH HOOK
2926;R;⤦;swarhk;ISOAMSA;;SOUTH WEST ARROW WITH HOOK
R;⤧;nwnear;ISOAMSA;;NORTH WEST ARROW AND NORTH EAST ARROW
R;⤨;nesear;ISOAMSA;;NORTH EAST ARROW AND SOUTH EAST ARROW
R;⤩;seswar;ISOAMSA;;SOUTH EAST ARROW AND SOUTH WEST ARROW
R;⤪;swnwar;ISOAMSA;;SOUTH WEST ARROW AND NORTH WEST ARROW
R;⤫;rdiofdi;;;RISING DIAGONAL CROSSING FALLING DIAGONAL
R;⤬;fdiordi;;;FALLING DIAGONAL CROSSING RISING DIAGONAL
R;⤭;seonearr;;;SOUTH EAST ARROW CROSSING NORTH EAST ARROW
R;⤮;neosearr;;;NORTH EAST ARROW CROSSING SOUTH EAST ARROW
R;⤯;fdonearr;;;FALLING DIAGONAL CROSSING NORTH EAST ARROW
R;⤰;rdosearr;;;RISING DIAGONAL CROSSING SOUTH EAST ARROW
R;⤱;neonwarr;;;NORTH EAST ARROW CROSSING NORTH WEST ARROW
R;⤲;nwonearr;;;NORTH WEST ARROW CROSSING NORTH EAST ARROW
R;⤳;rarrc;ISOAMSA;;WAVE ARROW POINTING DIRECTLY RIGHT
2934;R;⤴;;;;ARROW POINTING RIGHTWARDS THEN CURVING UPWARDS
2935;R;⤵;;;;ARROW POINTING RIGHTWARDS THEN CURVING DOWNWARDS
R;⤶;ldca;ISOAMSA;;ARROW POINTING DOWNWARDS THEN CURVING LEFTWARDS
2937;R;⤷;rdca;ISOAMSA;;ARROW POINTING DOWNWARDS THEN CURVING RIGHTWARDS
2938;R;⤸;cudarrl;ISOAMSA;;RIGHT-SIDE ARC CLOCKWISE ARROW
2939;R;⤹;cudarrr;ISOAMSA;;LEFT-SIDE ARC ANTICLOCKWISE ARROW
R;⤺;;;;TOP ARC ANTICLOCKWISE ARROW
293B;R;⤻;;;;BOTTOM ARC ANTICLOCKWISE ARROW
CLOCKWISE ARROW WITH MINUS
R;⤽;cularrp;ISOAMSA;;TOP ARC ANTICLOCKWISE ARROW WITH PLUS
CLOCKWISE ARROW
R;⤿;;;;LOWER LEFT SEMICIRCULAR ARROW
R;⥀;olarr;ISOAMSA;;ANTICLOCKWISE CLOSED CIRCLE ARROW
R;⥁;orarr;ISOAMSA;;CLOCKWISE CLOSED CIRCLE ARROW
2942;R;⥂;arrlrsl;;;RIGHTWARDS ARROW ABOVE SHORT LEFTWARDS ARROW
2943;R;⥃;arrllsr;;;LEFTWARDS ARROW ABOVE SHORT RIGHTWARDS ARROW
R;⥄;arrsrll;;;SHORT RIGHTWARDS ARROW ABOVE LEFTWARDS ARROW
2945;R;⥅;rarrpl;ISOAMSA;;RIGHTWARDS ARROW WITH PLUS BELOW
2946;R;⥆;larrpl;ISOAMSA;;LEFTWARDS ARROW WITH PLUS BELOW
2947;R;⥇;rarrx;;;RIGHTWARDS ARROW THROUGH X
RIGHT ARROW THROUGH SMALL
R;⥉;Uarrocir;ISOAMSA;;UPWARDS TWO - HEADED ARROW FROM SMALL CIRCLE
R;⥊;lurdshar;ISOAMSA;;LEFT BARB UP RIGHT BARB DOWN HARPOON
294B;R;⥋;ldrushar;ISOAMSA;;LEFT BARB DOWN RIGHT BARB UP HARPOON
RIGHT DOWN BARB LEFT HARPOON
294D;R;⥍;uldrshar;;;UP BARB LEFT DOWN BARB RIGHT HARPOON
R;⥎;lurushar;;;LEFT BARB UP RIGHT BARB UP HARPOON
294F;R;⥏;urdrshar;;;UP BARB RIGHT DOWN BARB RIGHT HARPOON
2950;R;⥐;ldrdshar;;;LEFT BARB DOWN RIGHT BARB DOWN HARPOON
2951;R;⥑;uldlshar;;;UP BARB LEFT DOWN BARB LEFT HARPOON
2952;R;⥒;luharb;;;LEFTWARDS HARPOON WITH BARB UP TO BAR
2953;R;⥓;ruharb;;;RIGHTWARDS HARPOON WITH BARB UP TO BAR
2954;R;⥔;urharb;;;UPWARDS HARPOON WITH BARB RIGHT TO BAR
2955;R;⥕;drharb;;;DOWNWARDS HARPOON WITH BARB RIGHT TO BAR
2956;R;⥖;ldharb;;;LEFTWARDS HARPOON WITH BARB DOWN TO BAR
2957;R;⥗;rdharb;;;RIGHTWARDS HARPOON WITH BARB DOWN TO BAR
2958;R;⥘;ulharb;;;UPWARDS HARPOON WITH BARB LEFT TO BAR
2959;R;⥙;dlharb;;;DOWNWARDS HARPOON WITH BARB LEFT TO BAR
R;⥚;bluhar;;;LEFTWARDS HARPOON WITH BARB UP FROM BAR
R;⥛;bruhar;;;RIGHTWARDS HARPOON WITH BARB UP FROM BAR
295C;R;⥜;burhar;;;UPWARDS HARPOON WITH BARB RIGHT FROM BAR
295D;R;⥝;bdrhar;;;DOWNWARDS HARPOON WITH BARB RIGHT FROM BAR
R;⥞;bldhar;;;LEFTWARDS HARPOON WITH BARB DOWN FROM BAR
295F;R;⥟;brdhar;;;RIGHTWARDS HARPOON WITH BARB DOWN FROM BAR
R;⥠;bulhar;;;UPWARDS HARPOON WITH BARB LEFT FROM BAR
2961;R;⥡;bdlhar;;;DOWNWARDS HARPOON WITH BARB LEFT FROM BAR
2962;R;⥢;lHar;ISOAMSA;;LEFTWARDS HARPOON WITH BARB UP ABOVE LEFTWARDS HARPOON WITH BARB DOWN
R;⥣;uHar;ISOAMSA;;UPWARDS HARPOON WITH BARB LEFT BESIDE UPWARDS HARPOON WITH BARB RIGHT
2964;R;⥤;rHar;ISOAMSA;;RIGHTWARDS HARPOON WITH BARB UP ABOVE RIGHTWARDS HARPOON WITH BARB DOWN
R;⥥;dHar;ISOAMSA;;DOWNWARDS HARPOON WITH BARB LEFT BESIDE DOWNWARDS HARPOON WITH BARB RIGHT
R;⥦;luruhar;ISOAMSA;;LEFTWARDS HARPOON WITH BARB UP ABOVE RIGHTWARDS HARPOON WITH BARB UP
2967;R;⥧;ldrdhar;ISOAMSA;;LEFTWARDS HARPOON WITH BARB DOWN ABOVE RIGHTWARDS HARPOON WITH BARB DOWN
2968;R;⥨;ruluhar;ISOAMSA;;RIGHTWARDS HARPOON WITH BARB UP ABOVE LEFTWARDS HARPOON WITH BARB UP
R;⥩;rdldhar;ISOAMSA;;RIGHTWARDS HARPOON WITH BARB DOWN ABOVE LEFTWARDS HARPOON WITH BARB DOWN
296A;R;⥪;lharul;ISOAMSA;;LEFTWARDS HARPOON WITH BARB UP ABOVE LONG DASH
296B;R;⥫;llhard;ISOAMSA;;LEFTWARDS HARPOON WITH BARB DOWN BELOW LONG DASH
296C;R;⥬;rharul;ISOAMSA;;RIGHTWARDS HARPOON WITH BARB UP ABOVE LONG DASH
296D;R;⥭;lrhard;ISOAMSA;;RIGHTWARDS HARPOON WITH BARB DOWN BELOW LONG DASH
296E;R;⥮;udhar;ISOAMSA;;UPWARDS HARPOON WITH BARB LEFT BESIDE DOWNWARDS HARPOON WITH BARB RIGHT
296F;R;⥯;duhar;ISOAMSA;;DOWNWARDS HARPOON WITH BARB LEFT BESIDE UPWARDS HARPOON WITH BARB RIGHT
R;⥰;rimply;;;RIGHT DOUBLE ARROW WITH ROUNDED HEAD
2971;R;⥱;erarr;ISOAMSA;;EQUALS SIGN ABOVE RIGHTWARDS ARROW
2972;R;⥲;simrarr;ISOAMSA;;TILDE OPERATOR ABOVE RIGHTWARDS ARROW
2973;R;⥳;larrsim;ISOAMSA;;LEFTWARDS ARROW ABOVE TILDE OPERATOR
R;⥴;rarrsim;ISOAMSA;;RIGHTWARDS ARROW ABOVE TILDE OPERATOR
2975;R;⥵;rarrap;ISOAMSA;;RIGHTWARDS ARROW ABOVE ALMOST EQUAL TO
R;⥶;ltlarr;ISOAMSR;;LESS - THAN ABOVE LEFTWARDS ARROW
2977;R;⥷;;;;LEFTWARDS ARROW THROUGH LESS-THAN
2978;R;⥸;gtrarr;ISOAMSR;;GREATER-THAN ABOVE RIGHTWARDS ARROW
R;⥹;subrarr;ISOAMSR;;SUBSET ABOVE RIGHTWARDS ARROW
297A;R;⥺;;;;LEFTWARDS ARROW THROUGH SUBSET
R;⥻;suplarr;ISOAMSR;;SUPERSET ABOVE LEFTWARDS ARROW
297C;R;⥼;lfisht;ISOAMSA;;LEFT FISH TAIL
297D;R;⥽;rfisht;ISOAMSA;;RIGHT FISH TAIL
297E;R;⥾;ufisht;ISOAMSA;;UP FISH TAIL
297F;R;⥿;dfisht;ISOAMSA;;DOWN FISH TAIL
F;⦀;tverbar;;;TRIPLE VERTICAL BAR DELIMITER
N;⦁;scirclef;;;Z NOTATION SPOT
2982;F;⦂;;;;Z NOTATION TYPE COLON
2983;O;⦃;locub;;;LEFT WHITE CURLY BRACKET
2984;C;⦄;rocub;;;RIGHT WHITE CURLY BRACKET
2985;O;⦅;lopar;ISOTECH;;LEFT WHITE PARENTHESIS
2986;C;⦆;ropar;ISOTECH;;RIGHT WHITE PARENTHESIS
2987;O;⦇;;;;Z NOTATION LEFT IMAGE BRACKET
C;⦈;;;;Z NOTATION RIGHT IMAGE BRACKET
2989;O;⦉;;;;Z NOTATION LEFT BINDING BRACKET
NOTATION RIGHT BINDING BRACKET
O;⦋;lbrke;ISOAMSC;;LEFT BRACKET WITH UNDERBAR
BRACKET WITH UNDERBAR
O;⦍;lbrkslu;ISOAMSC;;LEFT SQUARE BRACKET WITH TICK IN TOP CORNER
298E;C;⦎;rbrksld;ISOAMSC;;RIGHT SQUARE BRACKET WITH TICK IN BOTTOM CORNER
WITH TICK IN BOTTOM CORNER
BRACKET WITH TICK IN TOP CORNER
O;⦑;langd;ISOAMSC;;LEFT ANGLE BRACKET WITH DOT
C;⦒;rangd;ISOAMSC;;RIGHT ANGLE BRACKET WITH DOT
O;⦓;lparlt;ISOAMSC;;LEFT ARC LESS - THAN BRACKET
2994;C;⦔;rpargt;ISOAMSC;;RIGHT ARC GREATER-THAN BRACKET
2995;O;⦕;gtlPar;ISOAMSC;;DOUBLE LEFT ARC GREATER-THAN BRACKET
RIGHT ARC LESS - THAN BRACKET
O;⦗;;;;LEFT BLACK TORTOISE SHELL BRACKET
2998;C;⦘;;;;RIGHT BLACK TORTOISE SHELL BRACKET
F;⦙;vellip4;;;DOTTED FENCE
299A;F;⦚;vzigzag;ISOAMSO;;VERTICAL ZIGZAG LINE
299B;N;⦛;;;;MEASURED ANGLE OPENING LEFT
299C;N;⦜;vangrt;ISOTECH;;RIGHT ANGLE VARIANT WITH SQUARE
N;⦝;angrtvbd;ISOAMSO;;MEASURED RIGHT ANGLE WITH DOT
N;⦞;angles;;;ANGLE WITH S INSIDE
299F;N;⦟;angdnr;;;ACUTE ANGLE
29A0;N;⦠;gtlpar;;;SPHERICAL ANGLE OPENING LEFT
29A1;N;⦡;;;;SPHERICAL ANGLE OPENING UP
29A2;N;⦢;angdnl;;;TURNED ANGLE
29A3;N;⦣;angupl;;;REVERSED ANGLE
N;⦤;ange;ISOAMSO;;ANGLE WITH UNDERBAR
ANGLE WITH UNDERBAR
29A6;N;⦦;dwangle;ISOTECH;;OBLIQUE ANGLE OPENING UP
29A7;N;⦧;uwangle;ISOTECH;;OBLIQUE ANGLE OPENING DOWN
29A8;N;⦨;angmsdaa;ISOAMSO;;MEASURED ANGLE WITH OPEN ARM ENDING IN ARROW POINTING UP AND RIGHT
29A9;N;⦩;angmsdab;ISOAMSO;;MEASURED ANGLE WITH OPEN ARM ENDING IN ARROW POINTING UP AND LEFT
29AA;N;⦪;angmsdac;ISOAMSO;;MEASURED ANGLE WITH OPEN ARM ENDING IN ARROW POINTING DOWN AND RIGHT
29AB;N;⦫;angmsdad;ISOAMSO;;MEASURED ANGLE WITH OPEN ARM ENDING IN ARROW POINTING DOWN AND LEFT
29AC;N;⦬;angmsdae;ISOAMSO;;MEASURED ANGLE WITH OPEN ARM ENDING IN ARROW POINTING RIGHT AND UP
29AD;N;⦭;angmsdaf;ISOAMSO;;MEASURED ANGLE WITH OPEN ARM ENDING IN ARROW POINTING LEFT AND UP
29AE;N;⦮;angmsdag;ISOAMSO;;MEASURED ANGLE WITH OPEN ARM ENDING IN ARROW POINTING RIGHT AND DOWN
29AF;N;⦯;angmsdah;ISOAMSO;;MEASURED ANGLE WITH OPEN ARM ENDING IN ARROW POINTING LEFT AND DOWN
29B0;N;⦰;bemptyv;ISOAMSO;;REVERSED EMPTY SET
N;⦱;demptyv;ISOAMSO;;EMPTY SET WITH OVERBAR
29B2;N;⦲;cemptyv;ISOAMSO;;EMPTY SET WITH SMALL CIRCLE ABOVE
29B3;N;⦳;raemptyv;ISOAMSO;;EMPTY SET WITH RIGHT ARROW ABOVE
29B4;N;⦴;laemptyv;ISOAMSO;;EMPTY SET WITH LEFT ARROW ABOVE
29B5;N;⦵;ohbar;ISOAMSB;;CIRCLE WITH HORIZONTAL BAR
29B6;B;⦶;omid;ISOAMSB;;CIRCLED VERTICAL BAR
B;⦷;opar;ISOAMSB;;CIRCLED PARALLEL
29B8;B;⦸;obsol;;;CIRCLED REVERSE SOLIDUS
29B9;B;⦹;operp;ISOAMSB;;CIRCLED PERPENDICULAR
N;⦺;;;;CIRCLE BY HORIZONTAL BAR AND TOP HALF BY VERTICAL BAR
N;⦻;olcross;ISOTECH;;CIRCLE WITH SUPERIMPOSED X
N;⦼;odsold;ISOAMSB;;CIRCLED - ROTATED DIVISION SIGN
N;⦽;oxuarr;;;UP ARROW THROUGH
29BE;N;⦾;olcir;ISOAMSB;;CIRCLED WHITE BULLET
29BF;N;⦿;ofcir;ISOAMSB;;CIRCLED BULLET
29C0;B;⧀;olt;ISOAMSB;;CIRCLED LESS-THAN
29C1;B;⧁;ogt;ISOAMSB;;CIRCLED GREATER-THAN
N;⧂;cirscir;ISOAMSO;;CIRCLE WITH SMALL CIRCLE TO THE RIGHT
N;⧃;cirE;ISOAMSO;;CIRCLE WITH TWO HORIZONTAL STROKES TO THE RIGHT
29C4;B;⧄;solb;ISOAMSB;;SQUARED RISING DIAGONAL SLASH
B;⧅;bsolb;ISOAMSB;;SQUARED FALLING DIAGONAL SLASH
B;⧆;astb;;;SQUARED ASTERISK
B;⧇;cirb;;;SQUARED SMALL CIRCLE
29C8;B;⧈;squb;;;SQUARED SQUARE
29C9;N;⧉;boxbox;ISOAMSO;;TWO JOINED SQUARES
N;⧊;tridoto;;;TRIANGLE WITH DOT ABOVE
N;⧋;tribar;;;TRIANGLE WITH UNDERBAR
29CC;N;⧌;triS;;;S IN TRIANGLE
N;⧍;trisb;ISOAMSB;;TRIANGLE WITH SERIFS AT BOTTOM
R;⧎;rtriltri;ISOAMSR;;RIGHT TRIANGLE ABOVE LEFT TRIANGLE
29CF;R;⧏;ltrivb;;;LEFT TRIANGLE BESIDE VERTICAL BAR
29D0;R;⧐;vbrtri;;;VERTICAL BAR BESIDE RIGHT TRIANGLE
R;⧑;lfbowtie;;;BOWTIE WITH LEFT HALF BLACK
29D2;R;⧒;rfbowtie;;;BOWTIE WITH RIGHT HALF BLACK
29D4;R;⧔;lftimes;;;TIMES WITH LEFT HALF BLACK
29D5;R;⧕;rftimes;;;TIMES WITH RIGHT HALF BLACK
29D6;B;⧖;hrglass;;;WHITE HOURGLASS
B;⧗;fhrglass;;;BLACK HOURGLASS
29D8;O;⧘;;;;LEFT WIGGLY FENCE
29D9;C;⧙;;;;RIGHT WIGGLY FENCE
29DA;O;⧚;;;;LEFT DOUBLE WIGGLY FENCE
29DB;C;⧛;;;;RIGHT DOUBLE WIGGLY FENCE
INFINITY
N;⧝;infintie;ISOTECH;;TIE OVER INFINITY
N;⧞;nvinfin;ISOTECH;;INFINITY NEGATED WITH VERTICAL BAR
R;⧟;dumap;;;DOUBLE - ENDED MULTIMAP
29E0;N;⧠;dalembrt;;;SQUARE WITH CONTOURED OUTLINE
R;⧡;lrtrieq;;;INCREASES AS
29E2;B;⧢;shuffle;;;SHUFFLE PRODUCT
SIGN AND SLANTED PARALLEL
29E4;R;⧤;smeparsl;ISOTECH;;EQUALS SIGN AND SLANTED PARALLEL WITH TILDE ABOVE
R;⧥;eqvparsl;ISOTECH;;IDENTICAL TO AND SLANTED PARALLEL
R;⧦;;;;GLEICH STARK
29E7;N;⧧;thermod;;;THERMODYNAMIC
N;⧨;dtrilf;;;DOWN - POINTING TRIANGLE WITH LEFT HALF BLACK
29E9;N;⧩;dtrirf;;;DOWN-POINTING TRIANGLE WITH RIGHT HALF BLACK
29EA;N;⧪;diamdarr;;;BLACK DIAMOND WITH DOWN ARROW
B;⧫;lozf;ISOPUB;;BLACK LOZENGE
N;⧬;cirdarr;;;WHITE WITH DOWN ARROW
29ED;N;⧭;cirfdarr;;;BLACK CIRCLE WITH DOWN ARROW
N;⧮;squerr;;;ERROR - BARRED WHITE SQUARE
N;⧯;squferr;;;ERROR - BARRED BLACK SQUARE
N;⧰;diamerr;;;ERROR - BARRED WHITE DIAMOND
N;⧱;diamerrf;;;ERROR - BARRED BLACK DIAMOND
29F2;N;⧲;cirerr;;;ERROR-BARRED WHITE CIRCLE
N;⧳;cirferr;;;ERROR - BARRED BLACK CIRCLE
29F4;R;⧴;;;;RULE-DELAYED
29F5;B;⧵;;;;REVERSE SOLIDUS OPERATOR
29F6;B;⧶;dsol;ISOTECH;;SOLIDUS WITH OVERBAR
B;⧷;rsolbar;;;REVERSE SOLIDUS WITH HORIZONTAL STROKE
29F8;L;⧸;xsol;;;BIG SOLIDUS
L;⧹;xbsol;;;BIG REVERSE SOLIDUS
29FA;B;⧺;;;;DOUBLE PLUS
B;⧻;;;;TRIPLE PLUS
29FC;O;⧼;;;;LEFT-POINTING CURVED ANGLE BRACKET
29FD;C;⧽;;;;RIGHT-POINTING CURVED ANGLE BRACKET
B;⧿;;;;MINY
L;⨀;xodot;ISOAMSB;;N - ARY CIRCLED DOT OPERATOR
2A01;L;⨁;xoplus;ISOAMSB;;N-ARY CIRCLED PLUS OPERATOR
2A02;L;⨂;xotime;ISOAMSB;;N-ARY CIRCLED TIMES OPERATOR
L;⨃;xcupdot;;;N - ARY UNION OPERATOR WITH DOT
2A04;L;⨄;xuplus;ISOAMSB;;N-ARY UNION OPERATOR WITH PLUS
L;⨅;xsqcap;ISOAMSB;;N - ARY SQUARE INTERSECTION OPERATOR
2A06;L;⨆;xsqcup;ISOAMSB;;N-ARY SQUARE UNION OPERATOR
2A07;L;⨇;xandand;;;TWO LOGICAL AND OPERATOR
2A08;L;⨈;xoror;;;TWO LOGICAL OR OPERATOR
L;⨉;xtimes;;;N - ARY TIMES OPERATOR
L;⨊;;;;MODULO TWO SUM
L;⨋;sumint;;;SUMMATION WITH INTEGRAL
L;⨌;qint;ISOTECH;;QUADRUPLE INTEGRAL OPERATOR
2A0D;L;⨍;fpartint;ISOTECH;;FINITE PART INTEGRAL
WITH DOUBLE STROKE
2A0F;L;⨏;slint;;;INTEGRAL AVERAGE WITH SLASH
2A10;L;⨐;cirfnint;ISOTECH;;CIRCULATION FUNCTION
2A11;L;⨑;awint;ISOTECH;;ANTICLOCKWISE INTEGRATION
2A12;L;⨒;rppolint;ISOTECH;;LINE INTEGRATION WITH RECTANGULAR PATH AROUND POLE
2A13;L;⨓;scpolint;ISOTECH;;LINE INTEGRATION WITH SEMICIRCULAR PATH AROUND POLE
2A14;L;⨔;npolint;ISOTECH;;LINE INTEGRATION NOT INCLUDING THE POLE
2A15;L;⨕;pointint;ISOTECH;;INTEGRAL AROUND A POINT OPERATOR
INTEGRAL OPERATOR
L;⨗;intlarhk;ISOTECH;;INTEGRAL WITH LEFTWARDS ARROW WITH HOOK
L;⨘;timeint;;;INTEGRAL WITH TIMES SIGN
L;⨙;capint;;;INTEGRAL WITH INTERSECTION
L;⨚;cupint;;;INTEGRAL WITH UNION
L;⨛;upint;;;INTEGRAL WITH OVERBAR
L;⨜;lowint;;;INTEGRAL WITH UNDERBAR
L;⨝;Join;;;JOIN
L;⨞;xltri;;;LARGE LEFT TRIANGLE OPERATOR
2A1F;L;⨟;;;;Z NOTATION SCHEMA COMPOSITION
2A20;L;⨠;;;;Z NOTATION SCHEMA PIPING
L;⨡;;;;Z NOTATION SCHEMA PROJECTION
2A22;B;⨢;pluscir;ISOAMSB;;PLUS SIGN WITH SMALL CIRCLE ABOVE
2A23;B;⨣;plusacir;ISOAMSB;;PLUS SIGN WITH CIRCUMFLEX ACCENT ABOVE
2A24;B;⨤;simplus;ISOAMSB;;PLUS SIGN WITH TILDE ABOVE
2A25;B;⨥;plusdu;ISOAMSB;;PLUS SIGN WITH DOT BELOW
B;⨦;plussim;ISOAMSB;;PLUS SIGN WITH TILDE BELOW
B;⨧;plustwo;ISOAMSB;;PLUS SIGN WITH SUBSCRIPT TWO
B;⨨;plustrif;;;PLUS SIGN WITH BLACK TRIANGLE
2A29;B;⨩;mcomma;ISOAMSR;;MINUS SIGN WITH COMMA ABOVE
2A2A;B;⨪;minusdu;ISOAMSB;;MINUS SIGN WITH DOT BELOW
2A2B;B;⨫;;;;MINUS SIGN WITH FALLING DOTS
2A2C;B;⨬;;;;MINUS SIGN WITH RISING DOTS
B;⨭;loplus;ISOAMSB;;PLUS SIGN IN LEFT HALF
B;⨮;roplus;ISOAMSB;;PLUS SIGN IN RIGHT HALF
OR CROSS PRODUCT
2A30;B;⨰;timesd;ISOAMSB;;MULTIPLICATION SIGN WITH DOT ABOVE
B;⨱;timesbar;ISOAMSB;;MULTIPLICATION SIGN WITH UNDERBAR
2A32;B;⨲;btimes;;;SEMIDIRECT PRODUCT WITH BOTTOM CLOSED
2A33;B;⨳;smashp;ISOAMSB;;SMASH PRODUCT
B;⨴;lotimes;ISOAMSB;;MULTIPLICATION SIGN IN LEFT HALF
B;⨵;rotimes;ISOAMSB;;MULTIPLICATION SIGN IN RIGHT HALF
B;⨶;otimesas;ISOAMSB;;CIRCLED SIGN WITH CIRCUMFLEX ACCENT
2A37;B;⨷;Otimes;ISOAMSB;;MULTIPLICATION SIGN IN DOUBLE CIRCLE
2A38;B;⨸;odiv;ISOAMSB;;CIRCLED DIVISION SIGN
B;⨹;triplus;ISOAMSB;;PLUS SIGN IN TRIANGLE
2A3A;B;⨺;triminus;ISOAMSB;;MINUS SIGN IN TRIANGLE
B;⨻;tritime;ISOAMSB;;MULTIPLICATION SIGN IN TRIANGLE
2A3C;B;⨼;iprod;ISOAMSB;;INTERIOR PRODUCT
B;⨽;iprodr;ISOAMSB;;RIGHTHAND INTERIOR PRODUCT
B;⨾;;;;Z NOTATION RELATIONAL COMPOSITION
OR
B;⩀;capdot;ISOAMSB;;INTERSECTION WITH DOT
2A41;B;⩁;;;;UNION WITH MINUS SIGN
2A42;B;⩂;ncup;ISOAMSB;;UNION WITH OVERBAR
2A43;B;⩃;ncap;ISOAMSB;;INTERSECTION WITH OVERBAR
2A44;B;⩄;capand;ISOAMSB;;INTERSECTION WITH LOGICAL AND
B;⩅;cupor;ISOAMSB;;UNION WITH LOGICAL OR
2A46;B;⩆;cupcap;ISOAMSB;;UNION ABOVE INTERSECTION
2A47;B;⩇;capcup;ISOAMSB;;INTERSECTION ABOVE UNION
2A48;B;⩈;cupbrcap;ISOAMSB;;UNION ABOVE BAR ABOVE INTERSECTION
2A49;B;⩉;capbrcup;ISOAMSB;;INTERSECTION ABOVE BAR ABOVE UNION
2A4A;B;⩊;cupcup;ISOAMSB;;UNION BESIDE AND JOINED WITH UNION
2A4B;B;⩋;capcap;ISOAMSB;;INTERSECTION BESIDE AND JOINED WITH INTERSECTION
B;⩌;ccups;ISOAMSB;;CLOSED UNION WITH SERIFS
INTERSECTION WITH SERIFS
B;⩎;;;;DOUBLE SQUARE INTERSECTION
2A4F;B;⩏;;;;DOUBLE SQUARE UNION
2A50;B;⩐;ccupssm;ISOAMSB;;CLOSED UNION WITH SERIFS AND SMASH PRODUCT
2A51;B;⩑;anddot;;;LOGICAL AND WITH DOT ABOVE
B;⩒;ordot;;;LOGICAL OR WITH DOT ABOVE
B;⩓;And;ISOTECH;;DOUBLE LOGICAL AND
2A54;B;⩔;Or;ISOTECH;;DOUBLE LOGICAL OR
B;⩕;andand;ISOTECH;;TWO INTERSECTING LOGICAL AND
2A56;B;⩖;oror;ISOTECH;;TWO INTERSECTING LOGICAL OR
2A57;B;⩗;orslope;ISOTECH;;SLOPING LARGE OR
2A58;B;⩘;andslope;ISOTECH;;SLOPING LARGE AND
2A59;R;⩙;;;;LOGICAL OR OVERLAPPING LOGICAL AND
2A5A;B;⩚;andv;ISOTECH;;LOGICAL AND WITH MIDDLE STEM
2A5B;B;⩛;orv;ISOTECH;;LOGICAL OR WITH MIDDLE STEM
2A5C;B;⩜;andd;ISOTECH;;LOGICAL AND WITH HORIZONTAL DASH
B;⩝;ord;ISOTECH;;LOGICAL OR WITH HORIZONTAL DASH
2A5E;B;⩞;Barwed;ISOAMSB;;LOGICAL AND WITH DOUBLE OVERBAR
B;⩟;wedbar;ISOAMSB;;LOGICAL AND WITH UNDERBAR
B;⩠;wedBar;;;LOGICAL AND WITH DOUBLE UNDERBAR
B;⩡;;ISOAMSB;;SMALL VEE WITH UNDERBAR
2A62;B;⩢;Barvee;;;LOGICAL OR WITH DOUBLE OVERBAR
B;⩣;veeBar;;;LOGICAL OR WITH DOUBLE UNDERBAR
B;⩤;;;;Z NOTATION DOMAIN
B;⩥;;;;Z NOTATION RANGE
SIGN WITH DOT BELOW
2A67;R;⩧;;;;IDENTICAL WITH DOT ABOVE
2A68;R;⩨;;;;TRIPLE HORIZONTAL BAR WITH DOUBLE VERTICAL STROKE
2A69;R;⩩;;;;TRIPLE HORIZONTAL BAR WITH TRIPLE VERTICAL STROKE
R;⩪;simdot;ISOTECH;;TILDE OPERATOR WITH DOT ABOVE
2A6B;R;⩫;;;;TILDE OPERATOR WITH RISING DOTS
R;⩬;;;;SIMILAR MINUS SIMILAR
2A6D;R;⩭;congdot;ISOAMSR;;CONGRUENT WITH DOT ABOVE
R;⩮;easter;ISOAMSR;;EQUALS WITH ASTERISK
R;⩯;apacir;ISOTECH;;ALMOST EQUAL TO WITH CIRCUMFLEX ACCENT
R;⩰;apE;ISOAMSR;;APPROXIMATELY EQUAL OR EQUAL TO
SIGN ABOVE PLUS SIGN
2A72;B;⩲;pluse;ISOAMSB;;PLUS SIGN ABOVE EQUALS SIGN
2A73;R;⩳;Esim;ISOAMSR;;EQUALS SIGN ABOVE TILDE OPERATOR
R;⩴;Colone;ISOAMSR;;DOUBLE COLON EQUAL
R;⩵;eqeq;;;TWO CONSECUTIVE EQUALS SIGNS
R;⩶;;;;THREE CONSECUTIVE EQUALS SIGNS
R;⩷;eDDot;ISOAMSR;;EQUALS SIGN WITH TWO DOTS ABOVE AND TWO DOTS BELOW
WITH FOUR DOTS ABOVE
R;⩹;ltcir;ISOAMSR;;LESS - THAN WITH CIRCLE INSIDE
2A7A;R;⩺;gtcir;ISOAMSR;;GREATER-THAN WITH CIRCLE INSIDE
2A7B;R;⩻;ltquest;ISOAMSR;;LESS-THAN WITH QUESTION MARK ABOVE
R;⩼;gtquest;ISOAMSR;;GREATER - THAN WITH QUESTION MARK ABOVE
2A7D;R;⩽;les;ISOAMSR;;LESS-THAN OR SLANTED EQUAL TO
R;⩾;ges;ISOAMSR;;GREATER - THAN OR SLANTED EQUAL TO
R;⩿;lesdot;ISOAMSR;;LESS - THAN OR SLANTED EQUAL TO WITH DOT INSIDE
R;⪀;gesdot;ISOAMSR;;GREATER - THAN OR SLANTED EQUAL TO WITH DOT INSIDE
R;⪁;lesdoto;ISOAMSR;;LESS - THAN OR SLANTED EQUAL TO WITH DOT ABOVE
R;⪂;gesdoto;ISOAMSR;;GREATER - THAN OR SLANTED EQUAL TO WITH DOT ABOVE
2A83;R;⪃;lesdotor;ISOAMSR;;LESS-THAN OR SLANTED EQUAL TO WITH DOT ABOVE RIGHT
R;⪄;gesdotol;ISOAMSR;;GREATER - THAN OR SLANTED EQUAL TO WITH DOT ABOVE LEFT
R;⪅;lap;ISOAMSR;;LESS - THAN OR APPROXIMATE
R;⪆;gap;ISOAMSR;;GREATER - THAN OR APPROXIMATE
2A87;R;⪇;lne;ISOAMSN;;LESS-THAN AND SINGLE-LINE NOT EQUAL TO
2A88;R;⪈;gne;ISOAMSN;;GREATER-THAN AND SINGLE-LINE NOT EQUAL TO
2A89;R;⪉;lnap;ISOAMSN;;LESS-THAN AND NOT APPROXIMATE
2A8A;R;⪊;gnap;ISOAMSN;;GREATER-THAN AND NOT APPROXIMATE
2A8B;R;⪋;lEg;ISOAMSR;;LESS-THAN ABOVE DOUBLE-LINE EQUAL ABOVE GREATER-THAN
2A8C;R;⪌;gEl;ISOAMSR;;GREATER-THAN ABOVE DOUBLE-LINE EQUAL ABOVE LESS-THAN
2A8D;R;⪍;lsime;ISOAMSR;;LESS-THAN ABOVE SIMILAR OR EQUAL
2A8E;R;⪎;gsime;ISOAMSR;;GREATER-THAN ABOVE SIMILAR OR EQUAL
2A8F;R;⪏;lsimg;ISOAMSR;;LESS-THAN ABOVE SIMILAR ABOVE GREATER-THAN
2A90;R;⪐;gsiml;ISOAMSR;;GREATER-THAN ABOVE SIMILAR ABOVE LESS-THAN
2A91;R;⪑;lgE;ISOAMSR;;LESS-THAN ABOVE GREATER-THAN ABOVE DOUBLE-LINE EQUAL
2A92;R;⪒;glE;ISOAMSR;;GREATER-THAN ABOVE LESS-THAN ABOVE DOUBLE-LINE EQUAL
2A93;R;⪓;lesges;ISOAMSR;;LESS-THAN ABOVE SLANTED EQUAL ABOVE GREATER-THAN ABOVE SLANTED EQUAL
2A94;R;⪔;gesles;ISOAMSR;;GREATER-THAN ABOVE SLANTED EQUAL ABOVE LESS-THAN ABOVE SLANTED EQUAL
R;⪕;els;ISOAMSR;;SLANTED EQUAL TO OR LESS - THAN
R;⪖;egs;ISOAMSR;;SLANTED EQUAL TO OR GREATER - THAN
R;⪗;elsdot;ISOAMSR;;SLANTED EQUAL TO OR LESS - THAN WITH DOT INSIDE
R;⪘;egsdot;ISOAMSR;;SLANTED EQUAL TO OR GREATER - THAN WITH DOT INSIDE
R;⪙;;;;DOUBLE - LINE EQUAL TO OR LESS - THAN
2A9A;R;⪚;;;;DOUBLE-LINE EQUAL TO OR GREATER-THAN
2A9B;R;⪛;;;;DOUBLE-LINE SLANTED EQUAL TO OR LESS-THAN
2A9C;R;⪜;;;;DOUBLE-LINE SLANTED EQUAL TO OR GREATER-THAN
2A9D;R;⪝;siml;ISOAMSR;;SIMILAR OR LESS-THAN
2A9E;R;⪞;simg;ISOAMSR;;SIMILAR OR GREATER-THAN
2A9F;R;⪟;simlE;ISOAMSR;;SIMILAR ABOVE LESS-THAN ABOVE EQUALS SIGN
R;⪠;simgE;ISOAMSR;;SIMILAR ABOVE GREATER - THAN ABOVE EQUALS SIGN
2AA1;R;⪡;;ISOAMSR;;DOUBLE NESTED LESS-THAN
2AA2;R;⪢;;ISOAMSR;;DOUBLE NESTED GREATER-THAN
R;⪣;Ltbar;;;DOUBLE NESTED LESS - THAN WITH UNDERBAR
2AA4;R;⪤;glj;ISOAMSR;;GREATER-THAN OVERLAPPING LESS-THAN
2AA5;R;⪥;gla;ISOAMSR;;GREATER-THAN BESIDE LESS-THAN
R;⪦;ltcc;ISOAMSR;;LESS - THAN CLOSED BY CURVE
R;⪧;gtcc;ISOAMSR;;GREATER - THAN CLOSED BY CURVE
R;⪨;lescc;ISOAMSR;;LESS - THAN CLOSED BY CURVE ABOVE SLANTED EQUAL
2AA9;R;⪩;gescc;ISOAMSR;;GREATER-THAN CLOSED BY CURVE ABOVE SLANTED EQUAL
2AAA;R;⪪;smt;ISOAMSR;;SMALLER THAN
2AAB;R;⪫;lat;ISOAMSR;;LARGER THAN
2AAC;R;⪬;smte;ISOAMSR;;SMALLER THAN OR EQUAL TO
2AAD;R;⪭;late;ISOAMSR;;LARGER THAN OR EQUAL TO
2AAE;R;⪮;bumpE;ISOAMSR;;EQUALS SIGN WITH BUMPY ABOVE
2AAF;R;⪯;pre;ISOAMSR;;PRECEDES ABOVE SINGLE-LINE EQUALS SIGN
2AB0;R;⪰;sce;ISOAMSR;;SUCCEEDS ABOVE SINGLE-LINE EQUALS SIGN
2AB1;R;⪱;;;;PRECEDES ABOVE SINGLE-LINE NOT EQUAL TO
2AB2;R;⪲;;;;SUCCEEDS ABOVE SINGLE-LINE NOT EQUAL TO
2AB3;R;⪳;prE;ISOAMSR;;PRECEDES ABOVE EQUALS SIGN
R;⪴;scE;ISOAMSR;;SUCCEEDS ABOVE EQUALS SIGN
2AB5;R;⪵;prnE;ISOAMSN;;PRECEDES ABOVE NOT EQUAL TO
2AB6;R;⪶;scnE;ISOAMSN;;SUCCEEDS ABOVE NOT EQUAL TO
2AB7;R;⪷;prap;ISOAMSR;;PRECEDES ABOVE ALMOST EQUAL TO
2AB8;R;⪸;scap;ISOAMSR;;SUCCEEDS ABOVE ALMOST EQUAL TO
2AB9;R;⪹;prnap;ISOAMSN;;PRECEDES ABOVE NOT ALMOST EQUAL TO
2ABA;R;⪺;scnap;ISOAMSN;;SUCCEEDS ABOVE NOT ALMOST EQUAL TO
2ABB;R;⪻;Pr;ISOAMSR;;DOUBLE PRECEDES
2ABC;R;⪼;Sc;ISOAMSR;;DOUBLE SUCCEEDS
R;⪽;subdot;ISOAMSB;;SUBSET WITH DOT
2ABE;R;⪾;supdot;ISOAMSB;;SUPERSET WITH DOT
R;⪿;subplus;ISOAMSR;;SUBSET WITH PLUS SIGN BELOW
SIGN BELOW
R;⫁;submult;ISOAMSR;;SUBSET WITH MULTIPLICATION SIGN BELOW
2AC2;R;⫂;supmult;ISOAMSR;;SUPERSET WITH MULTIPLICATION SIGN BELOW
R;⫃;subedot;ISOAMSR;;SUBSET OF OR EQUAL TO WITH DOT ABOVE
R;⫄;supedot;ISOAMSR;;SUPERSET OF OR EQUAL TO WITH DOT ABOVE
R;⫅;subE;ISOAMSR;;SUBSET OF ABOVE EQUALS SIGN
2AC6;R;⫆;supE;ISOAMSR;;SUPERSET OF ABOVE EQUALS SIGN
2AC7;R;⫇;subsim;ISOAMSR;;SUBSET OF ABOVE TILDE OPERATOR
2AC8;R;⫈;supsim;ISOAMSR;;SUPERSET OF ABOVE TILDE OPERATOR
2AC9;R;⫉;;;;SUBSET OF ABOVE ALMOST EQUAL TO
2ACA;R;⫊;;;;SUPERSET OF ABOVE ALMOST EQUAL TO
2ACB;R;⫋;subnE;ISOAMSN;;SUBSET OF ABOVE NOT EQUAL TO
R;⫌;supnE;ISOAMSN;;SUPERSET OF ABOVE NOT EQUAL TO
2ACD;R;⫍;;;;SQUARE LEFT OPEN BOX OPERATOR
2ACE;R;⫎;;;;SQUARE RIGHT OPEN BOX OPERATOR
2AD0;R;⫐;csup;ISOAMSR;;CLOSED SUPERSET
R;⫑;csube;ISOAMSR;;CLOSED SUBSET OR EQUAL TO
R;⫒;csupe;ISOAMSR;;CLOSED OR EQUAL TO
2AD3;R;⫓;subsup;ISOAMRR;;SUBSET ABOVE SUPERSET
2AD4;R;⫔;supsub;ISOAMSR;;SUPERSET ABOVE SUBSET
2AD5;R;⫕;subsub;ISOAMSR;;SUBSET ABOVE SUBSET
R;⫖;supsup;ISOAMSR;;SUPERSET ABOVE SUPERSET
2AD7;R;⫗;suphsub;ISOAMSR;;SUPERSET BESIDE SUBSET
R;⫘;supdsub;ISOAMSR;;SUPERSET BESIDE AND JOINED BY DASH WITH SUBSET
2AD9;R;⫙;forkv;ISOAMSR;;ELEMENT OF OPENING DOWNWARDS
2ADA;R;⫚;topfork;ISOAMSR;;PITCHFORK WITH TEE TOP
2ADB;R;⫛;mlcp;ISOAMSR;;TRANSVERSAL INTERSECTION
2ADC;R;⫝̸;;;;FORKING
2ADD;R;⫝;;;;NONFORKING
R;⫞;;;;SHORT LEFT TACK
R;⫟;;;;SHORT DOWN
2AE0;R;⫠;;;;SHORT UP TACK
2AE1;N;⫡;;;;PERPENDICULAR WITH S
R;⫢;vDdash;;;VERTICAL BAR TRIPLE RIGHT TURNSTILE
2AE3;R;⫣;dashV;;;DOUBLE VERTICAL BAR LEFT TURNSTILE
2AE4;R;⫤;Dashv;ISOAMSR;;VERTICAL BAR DOUBLE LEFT TURNSTILE
2AE5;R;⫥;;;;DOUBLE VERTICAL BAR DOUBLE LEFT TURNSTILE
2AE6;R;⫦;Vdashl;ISOAMSR;;LONG DASH FROM LEFT MEMBER OF DOUBLE VERTICAL
2AE7;R;⫧;Barv;ISOAMSR;;SHORT DOWN TACK WITH OVERBAR
R;⫨;vBar;ISOAMSR;;SHORT UP TACK WITH UNDERBAR
2AE9;R;⫩;vBarv;ISOAMSR;;SHORT UP TACK ABOVE SHORT DOWN TACK
R;⫪;barV;;;DOUBLE DOWN
R;⫫;Vbar;ISOAMSR;;DOUBLE UP TACK
2AEC;R;⫬;Not;ISOTECH;;DOUBLE STROKE NOT SIGN
2AED;R;⫭;bNot;ISOTECH;;REVERSED DOUBLE STROKE NOT SIGN
R;⫮;rnmid;ISOAMSN;;DOES NOT DIVIDE WITH REVERSED NEGATION SLASH
R;⫯;cirmid;ISOAMSA;;VERTICAL LINE WITH CIRCLE ABOVE
2AF0;R;⫰;midcir;ISOAMSA;;VERTICAL LINE WITH CIRCLE BELOW
N;⫱;topcir;ISOTECH;;DOWN TACK WITH CIRCLE BELOW
2AF2;R;⫲;nhpar;ISOTECH;;PARALLEL WITH HORIZONTAL STROKE
2AF3;R;⫳;parsim;ISOAMSN;;PARALLEL WITH TILDE OPERATOR
B;⫴;vert3;;;TRIPLE VERTICAL BAR BINARY RELATION
2AF5;B;⫵;;;;TRIPLE VERTICAL BAR WITH HORIZONTAL STROKE
B;⫶;vellipv;;;TRIPLE COLON OPERATOR
2AF7;R;⫷;;;;TRIPLE NESTED LESS-THAN
2AF9;R;⫹;;;;DOUBLE-LINE SLANTED LESS-THAN OR EQUAL TO
2AFA;R;⫺;;;;DOUBLE-LINE SLANTED GREATER-THAN OR EQUAL TO
BINARY RELATION
2AFC;L;⫼;;;;LARGE TRIPLE VERTICAL BAR OPERATOR
2AFD;B;⫽;parsl;ISOTECH;;DOUBLE SOLIDUS OPERATOR
2AFE;B;⫾;;;;WHITE VERTICAL BAR
WHITE VERTICAL BAR
R; ⬀ ;;;;NORTH EAST WHITE ARROW
R; ⬁ ;;;;NORTH WEST WHITE ARROW
R; ⬂ ;;;;SOUTH EAST WHITE ARROW
R; ⬃ ;;;;SOUTH WEST WHITE ARROW
2B04;R;⬄;;;;LEFT RIGHT WHITE ARROW
2B05;R;⬅;;;;LEFTWARDS BLACK ARROW
2B06;R;⬆;;;;UPWARDS BLACK ARROW
2B07;R;⬇;;;;DOWNWARDS BLACK ARROW
R; ⬈ ;;;;NORTH EAST BLACK ARROW
R; ⬉ ;;;;NORTH WEST BLACK ARROW
R; ⬊ ;;;;SOUTH EAST BLACK ARROW
2B0B;R;⬋;;;;SOUTH WEST BLACK ARROW
R; ⬌ ;;;;LEFT RIGHT BLACK ARROW
R; ⬍ ;;;;UP DOWN BLACK ARROW
R; ⬎ ;;;;RIGHTWARDS ARROW WITH TIP DOWNWARDS
2B0F;R;⬏;;;;RIGHTWARDS ARROW WITH TIP UPWARDS
R; ⬐ ;;;;LEFTWARDS ARROW WITH TIP DOWNWARDS
2B11;R;⬑;;;;LEFTWARDS ARROW WITH TIP UPWARDS
N; ⬒ ;squarft?;;;SQUARE WITH TOP HALF BLACK
N; ⬓ ;squarfb?;;;SQUARE WITH BOTTOM HALF BLACK
2B14;N;⬔;squarftr;ISOPUB;;SQUARE WITH UPPER RIGHT DIAGONAL HALF BLACK
2B15;N;⬕;squarfbl;ISOPUB;;SQUARE WITH LOWER LEFT DIAGONAL HALF BLACK
N; ;diamonfl;ISOPUB;;DIAMOND WITH LEFT HALF BLACK
2B17;N;⬗;diamonfr;ISOPUB;;DIAMOND WITH RIGHT HALF BLACK
2B18;N;⬘;diamonft;ISOPUB;;DIAMOND WITH TOP HALF BLACK
2B19;N;⬙;diamonfb;ISOPUB;;DIAMOND WITH BOTTOM HALF BLACK
2B1B..2B1F;N;⬛..⬟;;;;BLACK LARGE SQUARE..BLACK PENTAGON
N; ⬠ ;;;;WHITE PENTAGON
2B21;N;⬡;benzen;ISOCHEM;;WHITE HEXAGON
N; ⬢ ;;;;BLACK HEXAGON
N; ⬣ ;;;;HORIZONTAL BLACK HEXAGON
N; ⬤ .. ⬯ ;;;;BLACK LARGE CIRCLE .. WHITE VERTICAL ELLIPSE
R;⬰ .. ⭄;;;;LEFT ARROW WITH SMALL .. RIGHTWARDS ARROW THROUGH SUPERSET
2B45;R;⭅;;;;LEFTWARDS QUADRUPLE ARROW
2B46;R;⭆;;;;RIGHTWARDS QUADRUPLE ARROW
2B47..2B4C;R;⭇..⭌;;;;REVERSE TILDE OPERATOR ABOVE RIGHTWARDS ARROW..RIGHTWARDS ARROW ABOVE REVERSE TILDE OPERATOR
N; ⭐ .. ;;;;WHITE MEDIUM STAR .. WHITE RIGHT - POINTING PENTAGON
R; ⮕ ;;;;RIGHTWARDS BLACK ARROW
2BC2..2BC8;N;⯂..⯈;;;;TURNED BLACK PENTAGON..BLACK MEDIUM RIGHT-POINTING TRIANGLE CENTRED
2BCA;N;⯊;;;;TOP HALF BLACK CIRCLE
2BCB;N;⯋;;;;BOTTOM HALF BLACK CIRCLE
X ; 〈 ; ; ; ( deprecated for math use);LEFT ANGLE BRACKET
X ; 〉 ; ; ; ( deprecated for math use);RIGHT ANGLE BRACKET
O;;lbbrk;ISOTECH ; left broken bracket;LEFT TORTOISE SHELL BRACKET
C;;rbbrk;ISOTECH ; right broken bracket ; RIGHT TORTOISE SHELL BRACKET
#3018;O;;loang;ISOTECH;;LEFT WHITE TORTOISE SHELL BRACKET
C;;roang;ISOTECH;;RIGHT WHITE TORTOISE SHELL BRACKET
; ; ; ( deprecated for math use);LEFT WHITE SQUARE BRACKET
; ; ; ( deprecated for math use);RIGHT WHITE SQUARE BRACKET
306E;N;の;;;;HIRAGANA LETTER NO
FB29;X;﬩;;; (doesn't have cross shape) ;HEBREW LETTER ALTERNATIVE PLUS SIGN
D;︀;;;;VARIATION SELECTOR-1
#FE35;;;;;(deprecated for math use);PRESENTATION FORM FOR VERTICAL LEFT PARENTHESIS
#FE36;;;;;(deprecated for math use);PRESENTATION FORM FOR VERTICAL RIGHT PARENTHESIS
( deprecated for math use);PRESENTATION FORM FOR VERTICAL LEFT CURLY BRACKET
#FE38;;;;;(deprecated for math use);PRESENTATION FORM FOR VERTICAL RIGHT CURLY BRACKET
FE61..FE66;X;﹡..﹦;;; ("small" compatibility variants of ASCII math operators) ;SMALL ASTERISK..SMALL EQUALS SIGN
FE68;X;﹨;;; ("small" compatibility variant of ASCII math operators) ;SMALL REVERSE SOLIDUS
X;+ ; ; ; ( " wide " compatibility variant of ASCII math operators ) ; FULLWIDTH PLUS SIGN
X;< .. ; ; ; ( " wide " compatibility variants of ASCII math operators ) ; FULLWIDTH LESS - THAN SIGN .. FULLWIDTH GREATER - THAN SIGN
X;\ ; ; ; ( " wide " compatibility variant of ASCII math operators ) ; FULLWIDTH REVERSE SOLIDUS
X;^ ; ; ; ( " wide " compatibility variant of ASCII math operators ) ; FULLWIDTH CIRCUMFLEX ACCENT
X;| ; ; ; ( " wide " compatibility variant of ASCII math operators ) ; FULLWIDTH VERTICAL LINE
X ; ~ ; ; ; ( " wide " compatibility variant of math operator ) ; FULLWIDTH TILDE
FFE2;X;¬;;; ("wide" compatibility variant of ASCII math operators) ;FULLWIDTH NOT SIGN
X;← .. ↓ ; ; ; ( " wide " compatibility variants of arrows);HALFWIDTH LEFTWARDS ARROW .. HALFWIDTH DOWNWARDS ARROW
1D400..1D454;A;𝐀..𝑔;;;;MATHEMATICAL BOLD CAPITAL A..MATHEMATICAL ITALIC SMALL G
N;;;;;ITALIC SMALL H < reserved >
A;𝑖 .. 𝒛;;;;MATHEMATICAL ITALIC SMALL I .. MATHEMATICAL BOLD ITALIC SMALL Z
1D49C;A;𝒜;Ascr;ISOMSCR;;MATHEMATICAL SCRIPT CAPITAL A
#1D49D=212C;A;;Bscr;ISOMSCR;;SCRIPT CAPITAL B <reserved>
1D49E;A;𝒞;Cscr;ISOMSCR;;MATHEMATICAL SCRIPT CAPITAL C
1D49F;A;𝒟;Dscr;ISOMSCR;;MATHEMATICAL SCRIPT CAPITAL D
#1D4A0=2130;A;;Escr;ISOMSCR;;SCRIPT CAPITAL E <reserved>
< reserved >
1D4A2;A;𝒢;Gscr;ISOMSCR;;MATHEMATICAL SCRIPT CAPITAL G
#1D4A3=210B;A;;Hscr;ISOMSCR;;SCRIPT CAPITAL H <reserved>
#1D4A4=2110;A;;Iscr;ISOMSCR;;SCRIPT CAPITAL I <reserved>
1D4A5;A;𝒥;Jscr;ISOMSCR;;MATHEMATICAL SCRIPT CAPITAL J
1D4A6;A;𝒦;Kscr;ISOMSCR;;MATHEMATICAL SCRIPT CAPITAL K
#1D4A7=2112;A;;Lscr;ISOMSCR;;SCRIPT CAPITAL L <reserved>
#1D4A8=2133;A;;Mscr;ISOMSCR;;SCRIPT CAPITAL M <reserved>
1D4A9;A;𝒩;Nscr;ISOMSCR;;MATHEMATICAL SCRIPT CAPITAL N
1D4AA;A;𝒪;Oscr;ISOMSCR;;MATHEMATICAL SCRIPT CAPITAL O
1D4AB;A;𝒫;Pscr;ISOMSCR;;MATHEMATICAL SCRIPT CAPITAL P
1D4AC;A;𝒬;Qscr;ISOMSCR;;MATHEMATICAL SCRIPT CAPITAL Q
#1D4AD=211B;A;;Rscr;ISOMSCR;;SCRIPT CAPITAL R <reserved>
1D4AE;A;𝒮;Sscr;ISOMSCR;;MATHEMATICAL SCRIPT CAPITAL S
1D4AF;A;𝒯;Tscr;ISOMSCR;;MATHEMATICAL SCRIPT CAPITAL T
1D4B0;A;𝒰;Uscr;ISOMSCR;;MATHEMATICAL SCRIPT CAPITAL U
1D4B1;A;𝒱;Vscr;ISOMSCR;;MATHEMATICAL SCRIPT CAPITAL V
1D4B2;A;𝒲;Wscr;ISOMSCR;;MATHEMATICAL SCRIPT CAPITAL W
1D4B3;A;𝒳;Xscr;ISOMSCR;;MATHEMATICAL SCRIPT CAPITAL X
1D4B4;A;𝒴;Yscr;ISOMSCR;;MATHEMATICAL SCRIPT CAPITAL Y
1D4B5;A;𝒵;Zscr;ISOMSCR;;MATHEMATICAL SCRIPT CAPITAL Z
1D4B6;A;𝒶;ascr;ISOMSCR;;MATHEMATICAL SCRIPT SMALL A
1D4B7;A;𝒷;bscr;ISOMSCR;;MATHEMATICAL SCRIPT SMALL B
1D4B8;A;𝒸;cscr;ISOMSCR;;MATHEMATICAL SCRIPT SMALL C
1D4B9;A;𝒹;dscr;ISOMSCR;;MATHEMATICAL SCRIPT SMALL D
#1D4BA=212F;A;;escr;ISOMSCR;;SCRIPT SMALL E <reserved>
A;𝒻;fscr;ISOMSCR;;MATHEMATICAL SCRIPT SMALL F
#1D4BC=210A;A;;gscr;ISOMSCR;;SCRIPT SMALL G <reserved>
1D4BD;A;𝒽;hscr;ISOMSCR;;MATHEMATICAL SCRIPT SMALL H
1D4BE;A;𝒾;iscr;ISOMSCR;;MATHEMATICAL SCRIPT SMALL I
1D4BF;A;𝒿;jscr;ISOMSCR;;MATHEMATICAL SCRIPT SMALL J
1D4C0;A;𝓀;kscr;ISOMSCR;;MATHEMATICAL SCRIPT SMALL K
1D4C1;A;𝓁;lscr;ISOMSCR;;MATHEMATICAL SCRIPT SMALL L
1D4C2;A;𝓂;mscr;ISOMSCR;;MATHEMATICAL SCRIPT SMALL M
1D4C3;A;𝓃;nscr;ISOMSCR;;MATHEMATICAL SCRIPT SMALL N
#1D4C4=2134;A;;oscr;ISOMSCR;;SCRIPT SMALL O <reserved>
1D4C5;A;𝓅;pscr;ISOMSCR;;MATHEMATICAL SCRIPT SMALL P
1D4C6;A;𝓆;qscr;ISOMSCR;;MATHEMATICAL SCRIPT SMALL Q
1D4C7;A;𝓇;rscr;ISOMSCR;;MATHEMATICAL SCRIPT SMALL R
1D4C8;A;𝓈;sscr;ISOMSCR;;MATHEMATICAL SCRIPT SMALL S
1D4C9;A;𝓉;tscr;ISOMSCR;;MATHEMATICAL SCRIPT SMALL T
1D4CA;A;𝓊;uscr;ISOMSCR;;MATHEMATICAL SCRIPT SMALL U
SMALL V
1D4CC;A;𝓌;wscr;ISOMSCR;;MATHEMATICAL SCRIPT SMALL W
1D4CD;A;𝓍;xscr;ISOMSCR;;MATHEMATICAL SCRIPT SMALL X
1D4CE;A;𝓎;yscr;ISOMSCR;;MATHEMATICAL SCRIPT SMALL Y
1D4CF;A;𝓏;zscr;ISOMSCR;;MATHEMATICAL SCRIPT SMALL Z
1D4D0..1D503;A;𝓐..𝔃;;;;MATHEMATICAL BOLD SCRIPT CAPITAL A..MATHEMATICAL BOLD SCRIPT SMALL Z
1D504;A;𝔄;Afr;ISOMFRK;;MATHEMATICAL FRAKTUR CAPITAL A
1D505;A;𝔅;Bfr;ISOMFRK;;MATHEMATICAL FRAKTUR CAPITAL B
A;;Cfr;ISOMFRK;;FRAKTUR CAPITAL C < reserved >
1D507;A;𝔇;Dfr;ISOMFRK;;MATHEMATICAL FRAKTUR CAPITAL D
1D509;A;𝔉;Ffr;ISOMFRK;;MATHEMATICAL FRAKTUR CAPITAL F
1D50A;A;𝔊;Gfr;ISOMFRK;;MATHEMATICAL FRAKTUR CAPITAL G
#1D50B=210C;A;;Hfr;ISOMFRK;;FRAKTUR CAPITAL H <reserved>
#1D50C=2111;A;;Ifr;ISOMFRK;;FRAKTUR CAPITAL I <reserved>
1D50D;A;𝔍;Jfr;ISOMFRK;;MATHEMATICAL FRAKTUR CAPITAL J
1D50E;A;𝔎;Kfr;ISOMFRK;;MATHEMATICAL FRAKTUR CAPITAL K
1D50F;A;𝔏;Lfr;ISOMFRK;;MATHEMATICAL FRAKTUR CAPITAL L
1D510;A;𝔐;Mfr;ISOMFRK;;MATHEMATICAL FRAKTUR CAPITAL M
1D511;A;𝔑;Nfr;ISOMFRK;;MATHEMATICAL FRAKTUR CAPITAL N
A;𝔒;Ofr;ISOMFRK;;MATHEMATICAL FRAKTUR CAPITAL O
1D513;A;𝔓;Pfr;ISOMFRK;;MATHEMATICAL FRAKTUR CAPITAL P
1D514;A;𝔔;Qfr;ISOMFRK;;MATHEMATICAL FRAKTUR CAPITAL Q
#1D515=211C;A;;Rfr;ISOMFRK;;FRAKTUR CAPITAL R <reserved>
A;𝔖;Sfr;ISOMFRK;;MATHEMATICAL FRAKTUR CAPITAL S
1D517;A;𝔗;Tfr;ISOMFRK;;MATHEMATICAL FRAKTUR CAPITAL T
1D518;A;𝔘;Ufr;ISOMFRK;;MATHEMATICAL FRAKTUR CAPITAL U
A;𝔙;Vfr;ISOMFRK;;MATHEMATICAL FRAKTUR CAPITAL V
A;𝔚;Wfr;ISOMFRK;;MATHEMATICAL FRAKTUR CAPITAL W
A;𝔛;Xfr;ISOMFRK;;MATHEMATICAL FRAKTUR CAPITAL X
1D51C;A;𝔜;Yfr;ISOMFRK;;MATHEMATICAL FRAKTUR CAPITAL Y
#1D51D=2128;A;;Zfr;ISOMFRK;;FRAKTUR CAPITAL Z <reserved>
1D51E;A;𝔞;afr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL A
A;𝔟;bfr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL B
1D520;A;𝔠;cfr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL C
A;𝔡;dfr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL D
A;𝔢;efr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL E
1D523;A;𝔣;ffr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL F
A;𝔤;gfr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL G
1D525;A;𝔥;hfr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL H
1D526;A;𝔦;ifr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL I
1D527;A;𝔧;jfr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL J
1D528;A;𝔨;kfr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL K
A;𝔩;lfr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL L
1D52A;A;𝔪;mfr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL M
A;𝔫;nfr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL N
1D52C;A;𝔬;ofr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL O
1D52D;A;𝔭;pfr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL P
A;𝔮;qfr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL Q
A;𝔯;rfr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL R
1D530;A;𝔰;sfr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL S
A;𝔱;tfr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL T
1D532;A;𝔲;ufr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL U
1D533;A;𝔳;vfr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL V
1D534;A;𝔴;wfr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL W
A;𝔵;xfr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL X
A;𝔶;yfr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL Y
A;𝔷;zfr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL Z
1D538;A;𝔸;Aopf;ISOMOPF;;MATHEMATICAL DOUBLE-STRUCK CAPITAL A
1D539;A;𝔹;Bopf;ISOMOPF;;MATHEMATICAL DOUBLE-STRUCK CAPITAL B
A;;Copf;ISOMOPF;;DOUBLE - STRUCK CAPITAL C < reserved >
1D53B;A;𝔻;Dopf;ISOMOPF;;MATHEMATICAL DOUBLE-STRUCK CAPITAL D
1D53C;A;𝔼;Eopf;ISOMOPF;;MATHEMATICAL DOUBLE-STRUCK CAPITAL E
1D53D;A;𝔽;Fopf;ISOMOPF;;MATHEMATICAL DOUBLE-STRUCK CAPITAL F
1D53E;A;𝔾;Gopf;ISOMOPF;;MATHEMATICAL DOUBLE-STRUCK CAPITAL G
#1D53F=210D;A;;Hopf;ISOMOPF;;DOUBLE-STRUCK CAPITAL H <reserved>
1D540;A;𝕀;Iopf;ISOMOPF;;MATHEMATICAL DOUBLE-STRUCK CAPITAL I
1D541;A;𝕁;Jopf;ISOMOPF;;MATHEMATICAL DOUBLE-STRUCK CAPITAL J
1D542;A;𝕂;Kopf;ISOMOPF;;MATHEMATICAL DOUBLE-STRUCK CAPITAL K
1D543;A;𝕃;Lopf;ISOMOPF;;MATHEMATICAL DOUBLE-STRUCK CAPITAL L
1D544;A;𝕄;Mopf;ISOMOPF;;MATHEMATICAL DOUBLE-STRUCK CAPITAL M
#1D545=2115;A;;Nopf;ISOMOPF;;DOUBLE-STRUCK CAPITAL N <reserved>
1D546;A;𝕆;Oopf;ISOMOPF;;MATHEMATICAL DOUBLE-STRUCK CAPITAL O
A;;Popf;ISOMOPF;;DOUBLE - STRUCK CAPITAL P < reserved >
#1D548=211A;A;;Qopf;ISOMOPF;;DOUBLE-STRUCK CAPITAL Q <reserved>
#1D549=211D;A;;Ropf;ISOMOPF;;DOUBLE-STRUCK CAPITAL R <reserved>
A;𝕊;Sopf;ISOMOPF;;MATHEMATICAL DOUBLE - STRUCK CAPITAL S
1D54B;A;𝕋;Topf;ISOMOPF;;MATHEMATICAL DOUBLE-STRUCK CAPITAL T
1D54C;A;𝕌;Uopf;ISOMOPF;;MATHEMATICAL DOUBLE-STRUCK CAPITAL U
A;𝕍;Vopf;ISOMOPF;;MATHEMATICAL DOUBLE - STRUCK CAPITAL V
1D54E;A;𝕎;Wopf;ISOMOPF;;MATHEMATICAL DOUBLE-STRUCK CAPITAL W
1D54F;A;𝕏;Xopf;ISOMOPF;;MATHEMATICAL DOUBLE-STRUCK CAPITAL X
1D550;A;𝕐;Yopf;ISOMOPF;;MATHEMATICAL DOUBLE-STRUCK CAPITAL Y
A;;Zopf;ISOMOPF;;DOUBLE - STRUCK CAPITAL Z < reserved >
1D552;A;𝕒;aopf;;;MATHEMATICAL DOUBLE-STRUCK SMALL A
1D553;A;𝕓;bopf;;;MATHEMATICAL DOUBLE-STRUCK SMALL B
1D554;A;𝕔;copf;;;MATHEMATICAL DOUBLE-STRUCK SMALL C
A;𝕕;dopf;;;MATHEMATICAL DOUBLE - STRUCK SMALL D
1D556;A;𝕖;eopf;;;MATHEMATICAL DOUBLE-STRUCK SMALL E
1D557;A;𝕗;fopf;;;MATHEMATICAL DOUBLE-STRUCK SMALL F
1D558;A;𝕘;gopf;;;MATHEMATICAL DOUBLE-STRUCK SMALL G
A;𝕙;hopf;;;MATHEMATICAL DOUBLE - STRUCK SMALL H
1D55A;A;𝕚;iopf;;;MATHEMATICAL DOUBLE-STRUCK SMALL I
A;𝕛;jopf;;;MATHEMATICAL DOUBLE - STRUCK SMALL J
1D55C;A;𝕜;kopf;;;MATHEMATICAL DOUBLE-STRUCK SMALL K
A;𝕝;lopf;;;MATHEMATICAL DOUBLE - STRUCK SMALL L
1D55E;A;𝕞;mopf;;;MATHEMATICAL DOUBLE-STRUCK SMALL M
1D55F;A;𝕟;nopf;;;MATHEMATICAL DOUBLE-STRUCK SMALL N
1D560;A;𝕠;oopf;;;MATHEMATICAL DOUBLE-STRUCK SMALL O
1D561;A;𝕡;popf;;;MATHEMATICAL DOUBLE-STRUCK SMALL P
SMALL Q
1D563;A;𝕣;ropf;;;MATHEMATICAL DOUBLE-STRUCK SMALL R
A;𝕤;sopf;;;MATHEMATICAL DOUBLE - STRUCK SMALL S
1D565;A;𝕥;topf;;;MATHEMATICAL DOUBLE-STRUCK SMALL T
1D566;A;𝕦;uopf;;;MATHEMATICAL DOUBLE-STRUCK SMALL U
1D567;A;𝕧;vopf;;;MATHEMATICAL DOUBLE-STRUCK SMALL V
1D568;A;𝕨;wopf;;;MATHEMATICAL DOUBLE-STRUCK SMALL W
1D569;A;𝕩;xopf;;;MATHEMATICAL DOUBLE-STRUCK SMALL X
1D56A;A;𝕪;yopf;;;MATHEMATICAL DOUBLE-STRUCK SMALL Y
A;𝕫;zopf;;;MATHEMATICAL DOUBLE - STRUCK SMALL Z
1D56C..1D6A3;A;𝕬..𝚣;;;;MATHEMATICAL BOLD FRAKTUR CAPITAL A..MATHEMATICAL MONOSPACE SMALL Z
A;𝚤;;ISOAMSO;;MATHEMATICAL ITALIC SMALL DOTLESS I
A;𝚥;;ISOAMSO;;MATHEMATICAL ITALIC SMALL DOTLESS J
A;𝚨 .. 𝟉;;;;MATHEMATICAL BOLD CAPITAL ALPHA .. MATHEMATICAL SANS - BOLD ITALIC PI SYMBOL
1D7CA;A;𝟊;b.Gammad;ISOGRK4;;MATHEMATICAL BOLD CAPITAL DIGAMMA
1D7CB;A;𝟋;b.gammad;ISOGRK4;;MATHEMATICAL BOLD SMALL DIGAMMA
1D7CE..1D7FF;N;𝟎..𝟿;;;;MATHEMATICAL BOLD DIGIT ZERO..MATHEMATICAL MONOSPACE DIGIT NINE
1EE00..1EE03;A;𞸀..𞸃;;;;ARABIC MATHEMATICAL ALEF..ARABIC MATHEMATICAL DAL
A;𞸅 .. 𞸟;;;;ARABIC MATHEMATICAL WAW .. ARABIC MATHEMATICAL DOTLESS QAF
A;𞸡;;;;ARABIC MATHEMATICAL INITIAL BEH
1EE22;A;𞸢;;;;ARABIC MATHEMATICAL INITIAL JEEM
1EE24;A;𞸤;;;;ARABIC MATHEMATICAL INITIAL HEH
A;𞸧;;;;ARABIC MATHEMATICAL INITIAL HAH
A;𞸩 .. 𞸲;;;;ARABIC MATHEMATICAL INITIAL YEH .. ARABIC MATHEMATICAL INITIAL QAF
A;𞸴 .. 𞸷;;;;ARABIC MATHEMATICAL INITIAL SHEEN .. ARABIC MATHEMATICAL INITIAL KHAH
A;𞸹;;;;ARABIC MATHEMATICAL INITIAL
1EE3B;A;𞸻;;;;ARABIC MATHEMATICAL INITIAL GHAIN
1EE42;A;𞹂;;;;ARABIC MATHEMATICAL TAILED JEEM
TAILED HAH
TAILED YEH
A;𞹋;;;;ARABIC MATHEMATICAL TAILED LAM
A;𞹍 .. TAILED NOON .. ARABIC MATHEMATICAL TAILED AIN
1EE51;A;𞹑;;;;ARABIC MATHEMATICAL TAILED SAD
A;𞹒;;;;ARABIC MATHEMATICAL TAILED
A;𞹔;;;;ARABIC MATHEMATICAL TAILED
A;𞹗;;;;ARABIC MATHEMATICAL TAILED
A;𞹙;;;;ARABIC MATHEMATICAL TAILED
A;𞹛;;;;ARABIC MATHEMATICAL TAILED
TAILED DOTLESS NOON
1EE5F;A;𞹟;;;;ARABIC MATHEMATICAL TAILED DOTLESS QAF
A;𞹡;;;;ARABIC MATHEMATICAL STRETCHED BEH
1EE62;A;𞹢;;;;ARABIC MATHEMATICAL STRETCHED JEEM
1EE64;A;𞹤;;;;ARABIC MATHEMATICAL STRETCHED HEH
1EE67..1EE6A;A;𞹧..𞹪;;;;ARABIC MATHEMATICAL STRETCHED HAH..ARABIC MATHEMATICAL STRETCHED KAF
A;𞹬 .. 𞹲;;;;ARABIC MATHEMATICAL STRETCHED MEEM .. ARABIC MATHEMATICAL STRETCHED QAF
A;𞹴 .. 𞹷;;;;ARABIC MATHEMATICAL STRETCHED SHEEN .. ARABIC MATHEMATICAL STRETCHED KHAH
A;𞹹 .. 𞹼;;;;ARABIC MATHEMATICAL STRETCHED .. ARABIC MATHEMATICAL STRETCHED DOTLESS BEH
FEH
A;𞺀 .. 𞺉;;;;ARABIC MATHEMATICAL LOOPED ALEF .. ARABIC MATHEMATICAL LOOPED YEH
A;𞺋 .. 𞺛;;;;ARABIC MATHEMATICAL LOOPED LAM .. ARABIC MATHEMATICAL LOOPED GHAIN
A;𞺡 .. 𞺣;;;;ARABIC MATHEMATICAL DOUBLE - STRUCK BEH .. ARABIC MATHEMATICAL DOUBLE - STRUCK DAL
A;𞺥 .. 𞺩;;;;ARABIC MATHEMATICAL DOUBLE - STRUCK WAW .. ARABIC MATHEMATICAL DOUBLE - STRUCK YEH
A;𞺫 .. 𞺻;;;;ARABIC MATHEMATICAL DOUBLE - STRUCK LAM .. ARABIC MATHEMATICAL DOUBLE - STRUCK GHAIN
1EEF0;L;𞻰;;;;ARABIC MATHEMATICAL OPERATOR MEEM WITH HAH WITH TATWEEL
L;𞻱;;;;ARABIC MATHEMATICAL OPERATOR HAH WITH DAL
;;;;BLACK SLIGHTLY SMALL
N; ;;;;BLACK
1F78D;N;🞍;;;;BLACK SLIGHTLY SMALL SQUARE
N; 🞗 .. 🞙 ;;;;BLACK .. BLACK MEDIUM SMALL DIAMOND
N; 🞝 .. 🞟 ;;;;BLACK TINY LOZENGE .. BLACK MEDIUM SMALL LOZENGE
# EOF | null | https://raw.githubusercontent.com/mbutterick/quad/395447f35c2fb9fc7b6199ed185850906d80811d/quad/unicode/math.rkt | racket | '. Spaces adjacent to the delimiter or the '#' are
class;char;entity name;entity set;note/description;CHARACTER NAME
S; ;;;;SPACE
N;!;excl;ISONUM;Factorial spacing;EXCLAMATION MARK
N;!;fact;;;EXCLAMATION MARK
N;#;num;ISONUM;;NUMBER SIGN
N;$;dollar;ISONUM;;DOLLAR SIGN
N;%;percnt;ISONUM;;PERCENT SIGN
N;&;amp;ISONUM;;AMPERSAND
);rpar;ISONUM;;RIGHT PARENTHESIS
*;ast;ISONUM;[high , not /ast];ASTERISK
+;plus;;;PLUS SIGN
P;,;comma;ISONUM;;COMMA
N;-;;;(deprecated for math) ;HYPHEN-MINUS
P;.;period;ISONUM;period;FULL STOP
/;sol;ISONUM;No extra spacing , stretchy;SOLIDUS
N;0..9;;;;DIGIT ZERO..DIGIT NINE
:;colon;ISONUM;;COLON
P;;;semi;ISONUM;;SEMICOLON
=;equals;ISONUM;;EQUALS SIGN
R;>;gt;ISONUM;;GREATER-THAN SIGN
N;@;commat;ISONUM;;COMMERCIAL AT
A .. Z;;;;LATIN CAPITAL LETTER A .. LATIN CAPITAL LETTER Z
\;bsol;ISONUM;No extra spacing , stretchy;REVERSE SOLIDUS
C;];rsqb;ISONUM;;RIGHT SQUARE BRACKET
^;;;TeX superscript operator;CIRCUMFLEX ACCENT
N;_;;;TeX subscript operator;LOW LINE
GRAVE ACCENT
A;a..z;;;;LATIN SMALL LETTER A..LATIN SMALL LETTER Z
{;lcub;ISONUM;;LEFT CURLY BRACKET
|;verbar;ISONUM;;VERTICAL LINE
};rcub;ISONUM;;RIGHT BRACKET
N;~;;;;TILDE
; nbsp;;;NO - BREAK SPACE
¡;iexcl;ISONUM;;INVERTED EXCLAMATION MARK
N;¢;cent;ISONUM;;CENT SIGN
N;£;pound;ISONUM;;POUND SIGN
N;¤;curren;ISONUM;;CURRENCY SIGN
¥;yen;ISONUM;;YEN SIGN
N;¦;brvbar;ISONUM; (vertical) ;BROKEN BAR
§;sect;ISONUM;;SECTION SIGN
¨;Dot;;/die , for 0308;DIAERESIS
¬;not;ISONUM ; /neg /lnot ; NOT SIGN
¯;macr;;Alias for 0304 ;
N;°;deg;ISONUM;;DEGREE SIGN
V;±;plusmn;ISONUM;;PLUS-MINUS SIGN
N;²;sup2;ISONUM;;SUPERSCRIPT TWO
³;sup3;ISONUM;;SUPERSCRIPT THREE
´;acute;;Alias for 0301;ACUTE ACCENT
N;µ;micro;ISONUM;;MICRO SIGN
¶;para;ISONUM ; ( paragraph sign ) ; PILCROW SIGN
·;middot;ISONUM ; /centerdot ; MIDDLE DOT
N;¹;sup1;ISONUM;;SUPERSCRIPT ONE
N;¼;frac14;ISONUM;;VULGAR FRACTION ONE QUARTER
N;½;frac12;ISONUM;;VULGAR FRACTION ONE HALF
N;¾;frac34;ISONUM;;VULGAR FRACTION THREE QUARTERS
P;¿;iquest;ISONUM;;INVERTED QUESTION MARK
B;×;times;ISONUM;;MULTIPLICATION SIGN
B;÷;divide;ISONUM;;DIVISION SIGN
A;ı;imath;;;LATIN SMALL LETTER DOTLESS I
A;ȷ;jmath;;;LATIN SMALL LETTER DOTLESS J
MODIFIER LETTER CIRCUMFLEX ACCENT
ˇ;caron;;Alias for
BREVE
D;˙;dot;;Alias for 0307;DOT ABOVE
D;˚;ring;;Alias for 030A;RING ABOVE
˜;tilde;;Alias for 0303;SMALL TILDE
COMBINING GRAVE ACCENT
́;;ISODIA;MathML prefers 00B4;COMBINING ACUTE ACCENT
D;̂;;ISODIA;MathML prefers 02C6;COMBINING CIRCUMFLEX ACCENT
̃;;ISODIA;MathML prefers 02DC;COMBINING TILDE
̄;;ISODIA;MathML prefers 00AF;COMBINING MACRON
̅;;;;COMBINING OVERLINE
D;̆;;ISODIA;MathML prefers 02D8;COMBINING BREVE
̇;;ISODIA;MathML prefers 02D9;COMBINING DOT ABOVE
̈;;ISODIA;MathML prefers 00A8;COMBINING DIAERESIS
COMBINING RING ABOVE
̌;;ISODIA;MathML prefers 02C7;COMBINING CARON
̑;;;;COMBINING INVERTED BREVE
D;̣;;;;COMBINING DOT BELOW
D;̭;;;;COMBINING CIRCUMFLEX ACCENT BELOW
D;̮;;;;COMBINING BREVE BELOW
D;̰;;;;COMBINING TILDE BELOW
D;̱;;;;COMBINING MACRON BELOW
D;̲;;;;COMBINING LOW LINE
D;̳;2lowbar;;;COMBINING DOUBLE LOW LINE
; ; negation slash ; COMBINING LONG SOLIDUS OVERLAY
D;̺;;;;COMBINING INVERTED BRIDGE BELOW
̿;;;;COMBINING DOUBLE OVERLINE
D;͆;;;;COMBINING BRIDGE ABOVE
A;Α;Agr;ISOGRK1;;GREEK CAPITAL LETTER ALPHA
A;Β;Bgr;ISOGRK1;;GREEK CAPITAL LETTER BETA
A;Γ;Gamma;ISOGRK3;;GREEK CAPITAL LETTER GAMMA
A;Δ;Delta;ISOGRK3;;GREEK CAPITAL LETTER DELTA
A;Ε;Egr;ISOGRK1;;GREEK CAPITAL LETTER EPSILON
A;Ζ;Zgr;ISOGRK1;;GREEK CAPITAL LETTER ZETA
Η;EEgr;ISOGRK1;;GREEK CAPITAL LETTER ETA
Θ;Theta;ISOGRK3;;GREEK CAPITAL LETTER THETA
A;Ι;Igr;ISOGRK1;;GREEK CAPITAL LETTER IOTA
A;Κ;Kgr;ISOGRK1;;GREEK CAPITAL LETTER KAPPA
A;Λ;Lambda;ISOGRK3;;GREEK CAPITAL LETTER LAMDA
A;Μ;Mgr;ISOGRK1;;GREEK CAPITAL LETTER MU
A;Ν;Ngr;ISOGRK1;;GREEK CAPITAL LETTER NU
A;Ξ;Xi;ISOGRK3;;GREEK CAPITAL LETTER XI
A;Ο;Ogr;ISOGRK1;;GREEK CAPITAL LETTER OMICRON
A;Π;Pi;ISOGRK3;;GREEK CAPITAL LETTER PI
A;Ρ;Rgr;ISOGRK1;;GREEK CAPITAL LETTER RHO
Σ;Sigma;ISOGRK3;;GREEK CAPITAL LETTER SIGMA
A;Τ;Tgr;ISOGRK1;;GREEK CAPITAL LETTER TAU
Φ;Phi;ISOGRK3;;GREEK CAPITAL LETTER PHI
Χ;KHgr;ISOGRK1;;GREEK CAPITAL LETTER CHI
A;Ψ;Psi;ISOGRK3;;GREEK CAPITAL LETTER PSI
A;Ω;Omega;ISOGRK3;;GREEK CAPITAL LETTER OMEGA
A;α;alpha;ISOGRK3;;GREEK SMALL LETTER ALPHA
A;β;beta;ISOGRK3;;GREEK SMALL LETTER BETA
A;γ;gamma;ISOGRK3;;GREEK SMALL LETTER GAMMA
A;δ;delta;ISOGRK3;;GREEK SMALL LETTER DELTA
A;ε;epsiv;ISOGRK3; rounded;GREEK SMALL LETTER EPSILON
A;ζ;zeta;ISOGRK3;;GREEK SMALL LETTER ZETA
η;eta;ISOGRK3;;GREEK SMALL LETTER ETA
A;θ;theta;ISOGRK3; straight theta ;GREEK SMALL LETTER THETA
ι;iota;ISOGRK3;;GREEK SMALL LETTER IOTA
A;κ;kappa;ISOGRK3;;GREEK SMALL LETTER KAPPA
A;λ;lambda;ISOGRK3;;GREEK SMALL LETTER LAMDA
μ;mu;ISOGRK3;;GREEK SMALL LETTER MU
A;ν;nu;ISOGRK3;;GREEK SMALL LETTER NU
A;ξ;xi;ISOGRK3;;GREEK SMALL LETTER XI
ο;ogr;ISOGRK1;;GREEK SMALL LETTER OMICRON
A;π;pi;ISOGRK3;;GREEK SMALL LETTER PI
A;ρ;rho;ISOGRK3;;GREEK SMALL LETTER RHO
A;σ;sigma;ISOGRK3;;GREEK SMALL LETTER SIGMA
A;τ;tau;ISOGRK3;;GREEK SMALL LETTER TAU
A;υ;upsi;ISOGRK3;;GREEK SMALL LETTER UPSILON
φ;phiv;ISOGRK3 ; \varphi ; GREEK SMALL LETTER PHI
χ;chi;ISOGRK3;;GREEK SMALL LETTER CHI
A;ψ;psi;ISOGRK3;;GREEK SMALL LETTER PSI
A;ω;omega;ISOGRK3;;GREEK SMALL LETTER OMEGA
A;ϐ;;;;GREEK BETA SYMBOL
ϑ;thetav;ISOGRK3 ; \vartheta - curly or open small theta ; GREEK THETA SYMBOL
A;ϒ;Upsi;ISOGRK3;;GREEK UPSILON WITH HOOK SYMBOL
A;ϕ;phi;ISOGRK3;;GREEK PHI SYMBOL
ϖ;piv;ISOGRK3 ; rounded ( pomega ) ; GREEK PI SYMBOL
Ϙ;;;;GREEK LETTER ARCHAIC KOPPA
N;ϙ;;;;GREEK SMALL LETTER ARCHAIC KOPPA
A;Ϛ;;; capital;GREEK LETTER STIGMA
A;ϛ;stigma;;;GREEK SMALL LETTER STIGMA
Ϝ;Gammad;ISOGRK3 ; capital;GREEK LETTER DIGAMMA
A;ϝ;gammad;ISOGRK3; old;GREEK SMALL LETTER DIGAMMA
Ϡ ; ; ; capital;GREEK LETTER
ϡ;sampi;;;GREEK SMALL LETTER
A;ϰ;kappav;ISOGRK3; rounded;GREEK KAPPA SYMBOL
ϱ;rhov;ISOGRK3 ; rounded;GREEK RHO SYMBOL
A;ϴ;Thetav;;;GREEK CAPITAL THETA SYMBOL
ϵ;epsi;ISOGRK3;straight;GREEK LUNATE EPSILON SYMBOL
϶;bepsi;ISOAMSR;;GREEK REVERSED SYMBOL
Ш;Shcy;ISOCYR1;;CYRILLIC CAPITAL LETTER SHA
A;؈;;;;ARABIC RAY
; enquad;;;EN QUAD
; emquad;;;EM QUAD
; ensp;ISOPUB ; ( half an em ) ; EN SPACE
S; ;emsp;ISOPUB;;EM SPACE
; ; ; ; THREE - PER - EM SPACE
; ; ; mid space;FOUR - PER - EM SPACE
S; ;;;;SIX-PER-EM SPACE
S; ;;;;FIGURE SPACE
; ; ; ; THIN SPACE
S; ;;;;HAIR SPACE
;zwsp;;;ZERO WIDTH SPACE
P;‐;hyphen;ISOPUB; (true graphic) ;HYPHEN
‒;dash;ISOPUB;;FIGURE DASH
P;–;ndash;ISOPUB;;EN DASH
—;mdash;ISOPUB;;EM DASH
R;†;dagger;ISOAMSB;(N in ISOPUB);DAGGER
R;‡;Dagger;ISOAMSB;(N in ISOPUB);DOUBLE DAGGER
/bullet round bullet , filled ; BULLET
… ;hellip;ISOPUB;;HORIZONTAL ELLIPSIS
N;′;prime;ISOTECH; minute ;PRIME
″;Prime;ISOTECH ; second ; DOUBLE PRIME
‴;tprime;ISOTECH;;TRIPLE PRIME
‵;bprime;ISOAMSO;;REVERSED PRIME
‶;bPrime;;;REVERSED DOUBLE PRIME
‷;btprime;;;REVERSED TRIPLE PRIME
※;;;Japanese
N;‼;;;Factorial spacing;DOUBLE EXCLAMATION MARK
⁀;;;z notation sequence concatenation ; CHARACTER TIE
B;⁄;;;stretchy;FRACTION SLASH
B;⁎;lowast;ISOTECH;;LOW ASTERISK
R;⁐;closur;;;CLOSE UP
⁒;;;;COMMERCIAL MINUS SIGN
N;⁗;qprime;ISOTECH;;QUADRUPLE PRIME
; ; ; ; MEDIUM MATHEMATICAL SPACE
B;;;;;FUNCTION APPLICATION
B;;;;;INVISIBLE TIMES
P;;;;;INVISIBLE SEPARATOR
X;;;;;INVISIBLE PLUS
N;⁺..⁾;;; subscript operators;SUPERSCRIPT PLUS SIGN..SUPERSCRIPT RIGHT PARENTHESIS
; ; superscript operators;SUBSCRIPT PLUS SIGN .. SUBSCRIPT RIGHT PARENTHESIS
D;⃐;;;;COMBINING LEFT HARPOON ABOVE
D;⃑;;;;COMBINING RIGHT HARPOON ABOVE
D;⃒;;;;COMBINING LONG VERTICAL LINE OVERLAY
X;⃓;;;;COMBINING SHORT VERTICAL LINE OVERLAY
⃔;;;;COMBINING ANTICLOCKWISE ARROW ABOVE
D;⃖;;;;COMBINING LEFT ARROW ABOVE
D;⃗;;;;COMBINING RIGHT ARROW ABOVE
D;⃘;;;;COMBINING RING OVERLAY
D;⃙;;;;COMBINING CLOCKWISE RING OVERLAY
D;⃚;;;;COMBINING ANTICLOCKWISE RING OVERLAY
D;⃛;tdot;ISOTECH;;COMBINING THREE DOTS ABOVE
D;⃜;DotDot;ISOTECH;;COMBINING FOUR DOTS ABOVE
⃝;;;;COMBINING ENCLOSING CIRCLE
D;⃞;;;;COMBINING ENCLOSING SQUARE
⃟;;;;COMBINING ENCLOSING DIAMOND
D;⃡;;;;COMBINING LEFT RIGHT ARROW ABOVE
D;⃤;;;;COMBINING ENCLOSING UPWARD POINTING TRIANGLE
D;⃥;;;;COMBINING REVERSE SOLIDUS OVERLAY
D;⃦;;;;COMBINING DOUBLE VERTICAL STROKE OVERLAY
⃧ ; ; ; actuary;COMBINING ANNUITY SYMBOL
⃨;;;;COMBINING TRIPLE UNDERDOT
D;⃩;;;;COMBINING WIDE BRIDGE ABOVE
⃪;;;;COMBINING LEFTWARDS ARROW OVERLAY
⃫;;;;COMBINING LONG DOUBLE SOLIDUS OVERLAY
⃬;;;;COMBINING RIGHTWARDS HARPOON WITH BARB DOWNWARDS
⃭;;;;COMBINING LEFTWARDS HARPOON WITH BARB DOWNWARDS
D;⃮;;;;COMBINING LEFT ARROW BELOW
D;⃯;;;;COMBINING RIGHT ARROW BELOW
A;ℂ;Copf;ISOMOPF; /Bbb C, open face C;DOUBLE-STRUCK CAPITAL C
N;ℇ;;;;EULER CONSTANT
ℊ;gscr;ISOMSCR ; ; SCRIPT SMALL G
A;ℋ;Hscr;ISOMSCR; /scr H ;SCRIPT CAPITAL H
A;ℌ;Hfr;ISOMFRK; /frak H;BLACK-LETTER CAPITAL H
ℍ;Hopf;ISOMOPF ; /Bbb H ; DOUBLE - STRUCK CAPITAL H
N;ℎ;;;;PLANCK CONSTANT
N;ℏ;plankv;ISOAMSO; /hslash - variant;PLANCK CONSTANT OVER TWO PI
ℐ;Iscr;ISOMSCR ; /scr I ; SCRIPT CAPITAL I
A;ℑ;image;ISOAMSO; imaginary part ;BLACK-LETTER CAPITAL I
A;ℒ;lagran;ISOTECH; Lagrangian ;SCRIPT CAPITAL L
A;ℓ;ell;ISOAMSO; cursive small l;SCRIPT SMALL L
ℕ;Nopf;ISOMOPF ; /Bbb N , open face N;DOUBLE - STRUCK CAPITAL N
℘;weierp;ISOAMSO ; ( Unicode name is a misnomer ) ; SCRIPT CAPITAL P
A;ℙ;Popf;ISOMOPF; /Bbb P, open face P;DOUBLE-STRUCK CAPITAL P
A;ℚ;Qopf;ISOMOPF; /Bbb Q, open face Q;DOUBLE-STRUCK CAPITAL Q
A;ℛ;Rscr;ISOMSCR; /scr R ;SCRIPT CAPITAL R
A;ℜ;real;ISOAMSO;;BLACK-LETTER CAPITAL R
ℝ;Ropf;ISOMOPF ; /Bbb R , open face R;DOUBLE - STRUCK CAPITAL R
A;ℤ;Zopf;ISOMOPF; /Bbb Z, open face Z;DOUBLE-STRUCK CAPITAL Z
Ω;ohm;ISONUM ; ( deprecated in math , use greek letter ) ; OHM SIGN
℧ ;mho;ISOAMSO ; conductance;INVERTED OHM SIGN
A;ℨ;Zfr;ISOMFRK; /frak Z;BLACK-LETTER CAPITAL Z
℩ ;iiota;ISOAMSO ; inverted iota;TURNED GREEK SMALL LETTER IOTA
Å;angst;ISOTECH ; capital A , ring ( deprecated in math ) ; ANGSTROM SIGN
ℬ;bernou;ISOTECH ; function ; SCRIPT CAPITAL B
A;ℭ;Cfr;ISOMFRK;;BLACK-LETTER CAPITAL C
ℯ;escr;ISOMSCR ; /scr e ; SCRIPT SMALL E
A;ℰ;Escr;ISOMSCR; /scr E ;SCRIPT CAPITAL E
A;ℱ;Fscr;ISOMSCR; /scr F ;SCRIPT CAPITAL F
A;ℳ;phmmat;ISOTECH; physics M-matrix ;SCRIPT CAPITAL M
A;ℴ;order;ISOTECH; order of ;SCRIPT SMALL O
ℵ;aleph;ISOTECH ; aleph , Hebrew;ALEF SYMBOL
ℶ;beth;ISOAMSO ; , Hebrew ; BET SYMBOL
ℷ;gimel;ISOAMSO ; gimel , Hebrew;GIMEL SYMBOL
ℸ;daleth;ISOAMSO ; , Hebrew ; DALET SYMBOL
A;ℽ;opfgamma;;;DOUBLE-STRUCK SMALL GAMMA
N;ℾ;opfGam;;;DOUBLE-STRUCK CAPITAL GAMMA
A;ℿ;opfPi;;;DOUBLE-STRUCK CAPITAL PI
L;⅀;opfsum;;;DOUBLE-STRUCK N-ARY SUMMATION
⅃;;;;REVERSED SANS - SERIF CAPITAL L
ⅅ;;;;DOUBLE - STRUCK ITALIC CAPITAL D
ⅆ;;;;DOUBLE - STRUCK ITALIC SMALL D
ⅇ;;;;DOUBLE - STRUCK ITALIC SMALL E
N;ⅈ;;;;DOUBLE-STRUCK ITALIC SMALL I
N;ⅉ;;;;DOUBLE-STRUCK ITALIC SMALL J
⅋;turnamp;;;TURNED AMPERSAND
/leftarrow /gets ; LEFTWARDS ARROW
R;↑;uarr;ISONUM;;UPWARDS ARROW
→;rarr;ISONUM ; /rightarrow /to;RIGHTWARDS ARROW
R;↓;darr;ISONUM;;DOWNWARDS ARROW
R;↔;harr;ISOAMSA; left and right arrow ;LEFT RIGHT ARROW
R;↕;varr;ISOAMSA; up and down arrow;UP DOWN ARROW
↖ ;nwarr;ISOAMSA ; NW pointing arrow;NORTH WEST ARROW
; NE pointing arrow;NORTH EAST ARROW
↘ ;searr;ISOAMSA ; SE pointing arrow;SOUTH EAST ARROW
↙ ;swarr;ISOAMSA ; SW pointing arrow;SOUTH WEST ARROW
↚;nlarr;ISOAMSA ; not left arrow ; LEFTWARDS ARROW WITH STROKE
R;↛;nrarr;ISOAMSA; not right arrow;RIGHTWARDS ARROW WITH STROKE
↜ ;larrw ; ; left arrow - wavy;LEFTWARDS WAVE ARROW
↝ ;rarrw;ISOAMSA ; right arrow - wavy ; RIGHTWARDS WAVE ARROW
↞ ;Larr;ISOAMSA;;LEFTWARDS TWO HEADED ARROW
↟ ;Uarr;ISOAMSA;;UPWARDS TWO HEADED ARROW
↠;Rarr;ISOAMSA;;RIGHTWARDS TWO HEADED ARROW
↡ ;Darr;ISOAMSA;;DOWNWARDS TWO HEADED ARROW
↢ ;larrtl;ISOAMSA ; left arrow - tailed;LEFTWARDS ARROW WITH TAIL
↣;rarrtl;ISOAMSA ; right arrow - tailed ; RIGHTWARDS ARROW WITH TAIL
↤ ;mapstoleft ; ; maps to , leftward;LEFTWARDS ARROW FROM BAR
↥ ;mapstoup ; ; maps to , upward;UPWARDS ARROW FROM BAR
↦;map;ISOAMSA ; maps to , rightward ; RIGHTWARDS ARROW FROM BAR
↧ ;mapstodown ; ; maps to , downward;DOWNWARDS ARROW FROM BAR
↨ ;varrb ; ; up and down arrow , bar under ; UP DOWN ARROW WITH BASE
R;↩;larrhk;ISOAMSA;;LEFTWARDS ARROW WITH HOOK
↪ ;rarrhk;ISOAMSA;;RIGHTWARDS ARROW WITH HOOK
R;↫;larrlp;ISOAMSA;;LEFTWARDS ARROW WITH LOOP
↬ ;rarrlp;ISOAMSA;;RIGHTWARDS ARROW WITH LOOP
↭ ;harrw;ISOAMSA ; left and right arrow - wavy;LEFT RIGHT WAVE ARROW
R;↮;nharr;ISOAMSA; not left and right arrow ;LEFT RIGHT ARROW WITH STROKE
↯ ;zigdarr;;;DOWNWARDS ARROW
↰ ; /Lsh ; UPWARDS ARROW WITH TIP LEFTWARDS
R;↱;rsh;ISOAMSA; /Rsh ;UPWARDS ARROW WITH TIP RIGHTWARDS
↲ ;ldsh;ISOAMSA ; left down angled arrow ; DOWNWARDS ARROW WITH TIP LEFTWARDS
R;↳;rdsh;ISOAMSA; right down angled arrow;DOWNWARDS ARROW WITH TIP RIGHTWARDS
; left curved arrow;ANTICLOCKWISE TOP SEMICIRCLE ARROW
↷ ;curarr;ISOAMSA ; right curved arrow ; CLOCKWISE TOP SEMICIRCLE ARROW
R;↺;;;;ANTICLOCKWISE OPEN CIRCLE ARROW
↻ ;;;;CLOCKWISE OPEN CIRCLE ARROW
R;↼;lharu;ISOAMSA; left harpoon-up;LEFTWARDS HARPOON WITH BARB UPWARDS
↽ ;lhard;ISOAMSA ; left harpoon - down;LEFTWARDS HARPOON WITH BARB DOWNWARDS
↾ ;uharr;ISOAMSA ; /upharpoonright /restriction up harpoon - right ; UPWARDS HARPOON WITH BARB RIGHTWARDS
R;↿;uharl;ISOAMSA; up harpoon-left;UPWARDS HARPOON WITH BARB LEFTWARDS
⇀ ;rharu;ISOAMSA ; right harpoon - up ; RIGHTWARDS HARPOON WITH BARB UPWARDS
⇁ ;rhard;ISOAMSA ; right harpoon - down ; RIGHTWARDS HARPOON WITH BARB DOWNWARDS
⇂ ;dharr;ISOAMSA ; down harpoon - right ; DOWNWARDS HARPOON WITH BARB RIGHTWARDS
R;⇃;dharl;ISOAMSA; down harpoon-left;DOWNWARDS HARPOON WITH BARB LEFTWARDS
R;⇄;rlarr;ISOAMSA; right arrow over left arrow;RIGHTWARDS ARROW OVER LEFTWARDS ARROW
⇅ ;udarr;ISOAMSA ; up arrow , down arrow ; UPWARDS ARROW LEFTWARDS OF DOWNWARDS ARROW
⇆ ;lrarr;ISOAMSA ; left arrow over right arrow;LEFTWARDS ARROW OVER RIGHTWARDS ARROW
⇇ ;llarr;ISOAMSA ; two left arrows;LEFTWARDS PAIRED ARROWS
⇈ ;uuarr;ISOAMSA ; two up arrows;UPWARDS PAIRED ARROWS
⇉ ;rrarr;ISOAMSA ; two right arrows ; RIGHTWARDS PAIRED ARROWS
⇊ ;ddarr;ISOAMSA ; two down arrows;DOWNWARDS PAIRED ARROWS
R;⇋;lrhar;ISOAMSA; left harpoon over right;LEFTWARDS HARPOON OVER RIGHTWARDS HARPOON
R;⇌;rlhar;ISOAMSA; right harpoon over left;RIGHTWARDS HARPOON OVER LEFTWARDS HARPOON
⇍ ;nlArr;ISOAMSA ; not implied by ; LEFTWARDS DOUBLE ARROW WITH STROKE
R;⇎;nhArr;ISOAMSA; not left and right double arrows ;LEFT RIGHT DOUBLE ARROW WITH STROKE
⇏;nrArr;ISOAMSA ; not implies;RIGHTWARDS DOUBLE ARROW WITH STROKE
⇐ ;lArr;ISOTECH ; is implied by;LEFTWARDS DOUBLE ARROW
R;⇑;uArr;ISOAMSA; up double arrow;UPWARDS DOUBLE ARROW
⇒;rArr;ISOTECH ; implies;RIGHTWARDS DOUBLE ARROW
⇓ ;dArr;ISOAMSA ; down double arrow;DOWNWARDS DOUBLE ARROW
R;⇔;hArr;ISOAMSA; left and right double arrow;LEFT RIGHT DOUBLE ARROW
R;⇕;vArr;ISOAMSA; up and down double arrow ;UP DOWN DOUBLE ARROW
⇖ ;nwArr;ISOAMSA ; NW pointing double arrow ; NORTH WEST DOUBLE ARROW
⇗ ;neArr;ISOAMSA ; NE pointing double arrow ; NORTH EAST DOUBLE ARROW
⇘ ;seArr;ISOAMSA ; SE pointing double arrow ; SOUTH EAST DOUBLE ARROW
⇙ ;swArr;ISOAMSA ; SW pointing double arrow ; SOUTH WEST DOUBLE ARROW
;lAarr;ISOAMSA ; left triple arrow;LEFTWARDS TRIPLE ARROW
⇛ ;rAarr;ISOAMSA ; right triple arrow ; RIGHTWARDS TRIPLE ARROW
⇜ ;ziglarr ; ; left zig - zag arrow ; LEFTWARDS SQUIGGLE ARROW
R;⇝;zigrarr;ISOAMSA; right zig-zag arrow;RIGHTWARDS SQUIGGLE ARROW
R;⇞;;;;UPWARDS ARROW WITH DOUBLE STROKE
R;⇟;;;;DOWNWARDS ARROW WITH DOUBLE STROKE
R;⇠;;;;LEFTWARDS DASHED ARROW
R;⇡;;;;UPWARDS DASHED ARROW
⇢ ;;;;RIGHTWARDS DASHED ARROW
R;⇣;;;;DOWNWARDS DASHED ARROW
⇤ ;larrb;;;LEFTWARDS ARROW TO BAR
R;⇥;rarrb;;;RIGHTWARDS ARROW TO BAR
R;⇦;;;;LEFTWARDS WHITE ARROW
R;⇧;;;;UPWARDS WHITE ARROW
⇨ ;;;;RIGHTWARDS WHITE ARROW
⇩ ;;;;DOWNWARDS WHITE ARROW
.. ARROW FROM BAR .. SOUTH EAST ARROW TO CORNER
⇳ ;;;;UP DOWN WHITE ARROW
⇴;;;;RIGHT ARROW WITH SMALL
⇵;duarr;ISOAMSA;;DOWNWARDS ARROW LEFTWARDS OF UPWARDS ARROW
R;⇶;rarr3;;;THREE RIGHTWARDS ARROWS
R;⇷;nvlarr;;;LEFTWARDS ARROW WITH VERTICAL STROKE
R;⇸;nvrarr;;;RIGHTWARDS ARROW WITH VERTICAL STROKE
R;⇹;nvharr;;;LEFT RIGHT ARROW WITH VERTICAL STROKE
R;⇺;;;;LEFTWARDS ARROW WITH DOUBLE VERTICAL STROKE
R;⇻;;;;RIGHTWARDS ARROW WITH DOUBLE VERTICAL STROKE
R;⇼;;;;LEFT RIGHT ARROW WITH DOUBLE VERTICAL STROKE
R;⇽;loarr;ISOAMSA;;LEFTWARDS OPEN-HEADED ARROW
R;⇾;roarr;ISOAMSA;;RIGHTWARDS OPEN-HEADED ARROW
R;⇿;hoarr;ISOAMSA;;LEFT RIGHT OPEN-HEADED ARROW
U;∀;forall;ISOTECH;;FOR ALL
∁;comp;ISOAMSO;;COMPLEMENT
∃;exist;ISOTECH ; at least one exists;THERE EXISTS
U;∄;nexist;ISOAMSO; negated exists ;THERE DOES NOT EXIST
∅;emptyv;ISOAMSO ; circle , slash;EMPTY SET
∆ ; ; ; Laplacian ( Delta , nabla^2 ) ; INCREMENT
∇;nabla;ISOTECH ; nabla , del , operator;NABLA
R;∈;isin;ISOTECH; set membership, variant;ELEMENT OF
R;∉;notin;ISOTECH; negated set membership ;NOT AN ELEMENT OF
R;∊;isinv;ISOTECH; set membership ;SMALL ELEMENT OF
R;∋;ni;ISOTECH; contains, variant;CONTAINS AS MEMBER
∌;notni;ISOTECH ; negated contains , variant;DOES NOT CONTAIN AS MEMBER
/ni /owns contains ; SMALL CONTAINS AS MEMBER
∎;qed;;;END OF PROOF
∏;prod;ISOAMSO ; product operator ; N - ARY PRODUCT
∐;coprod;ISOAMSB ; coproduct operator ; N - ARY COPRODUCT
∑;sum;ISOAMSB ; summation operator ; N - ARY SUMMATION
V;−;minus;ISOTECH;;MINUS SIGN
V;∓;mnplus;;;MINUS-OR-PLUS SIGN
∔;plusdo;ISOAMSB ; plus sign , dot above ; DOT PLUS
∖;ssetmn;ISOAMSB ; small set minus ( cf . reverse solidus ) ; SET MINUS
∗;midast;ISOAMSB ; centered asterisk;ASTERISK OPERATOR
B;∘;compfn;ISOTECH; composite function (small circle);RING OPERATOR
B;∙;;;;BULLET OPERATOR
L;√;radic;ISOTECH; radical;SQUARE ROOT
L;∛;;;;CUBE ROOT
L;∜;;;;FOURTH ROOT
∝;prop;ISOTECH;;PROPORTIONAL TO
N;∞;infin;ISOTECH;;INFINITY
∟;angrt;ISOTECH ; ( 90 degree);RIGHT ANGLE
∠;ang;ISOAMSO;;ANGLE
N;∡;angmsd;ISOAMSO;;MEASURED ANGLE
∢;angsph;ISOTECH;;SPHERICAL ANGLE
∣;mid;ISOAMSR;/mid ; DIVIDES
R;∤;nmid;ISOAMSN;negated mid;DOES NOT DIVIDE
R;∥;par;ISOTECH;;PARALLEL TO
R;∦;npar;ISOAMSN;;NOT PARALLEL TO
B;∧;and;ISOTECH;/wedge /land ;LOGICAL AND
∨;or;ISOTECH;/vee /lor;LOGICAL OR
∩;cap;ISOTECH;;INTERSECTION
∪;cup;ISOTECH;logical sum;UNION
L;∫;int;ISOTECH;;INTEGRAL
L;∬;Int;ISOTECH;;DOUBLE INTEGRAL
L;∮;conint;ISOTECH;;CONTOUR INTEGRAL
L;∯;Conint;ISOTECH;double contour integral operator ;SURFACE INTEGRAL
∰;Cconint;ISOTECH;triple contour integral operator ; VOLUME INTEGRAL
L;∱;cwint;ISOTECH;;CLOCKWISE INTEGRAL
L;∲;cwconint;ISOTECH;;CLOCKWISE CONTOUR INTEGRAL
L;∳;awconint;ISOTECH;;ANTICLOCKWISE CONTOUR INTEGRAL
∴;there4;ISOTECH;;THEREFORE
R;∵;becaus;ISOTECH;;BECAUSE
R;∶;ratio;ISOAMSR;;RATIO
∷;Colon;ISOAMSR;two colons ; PROPORTION
∸;minusd;ISOAMSB;minus sign , dot above;DOT MINUS
R;∹;excess;; excess (-:);EXCESS
∺;mDDot;ISOAMSR;minus with four dots , geometric properties ; GEOMETRIC PROPORTION
∻;homtht;ISOAMSR;;HOMOTHETIC
R;∼;sim;ISOTECH; similar;TILDE OPERATOR
R;∽;bsim;ISOAMSR; reverse similar;REVERSED TILDE
∾;ac;ISOAMSB ; most positive;INVERTED LAZY S
∿;;;;SINE WAVE
B;≀;wreath;ISOAMSB;;WREATH PRODUCT
≁;nsim;ISOAMSO ; not
equals , similar;MINUS TILDE
≃;sime;ISOTECH ; similar , equals;ASYMPTOTICALLY EQUAL TO
R;≄;nsime;ISOAMSN; not similar, equals;NOT ASYMPTOTICALLY EQUAL TO
R;≅;cong;ISOTECH; congruent with ;APPROXIMATELY EQUAL TO
≆;simne;ISOAMSN;similar , not equals [ vert only for 9573 entity ] ; APPROXIMATELY BUT NOT ACTUALLY EQUAL TO
≇;ncong;ISOAMSN ; not congruent with ; NOR ACTUALLY EQUAL TO
R;≈;ap;ISOTECH; approximate;ALMOST EQUAL TO
≉;nap;ISOAMSN ; not EQUAL TO
R;≊;ape;ISOAMSR; approximate, equals;ALMOST EQUAL OR EQUAL TO
R;≋;apid;ISOAMSR; approximately identical to ;TRIPLE TILDE
R;≌;bcong;ISOAMSR;;ALL EQUAL TO
R;≍;asymp;ISOAMSR; asymptotically equal to;EQUIVALENT TO
≎;bump;ISOAMSR ; bumpy equals ; GEOMETRICALLY EQUIVALENT TO
R;≏;bumpe;ISOAMSR; bumpy equals, equals ;DIFFERENCE BETWEEN
equals , single dot above ; APPROACHES THE LIMIT
≑;eDot;ISOAMSR ; /doteqdot /Doteq equals , even dots ; GEOMETRICALLY EQUAL TO
≒;efDot;ISOAMSR ; equals , falling dots ; APPROXIMATELY EQUAL TO OR THE IMAGE OF
≓;erDot;ISOAMSR ; equals , rising dots;IMAGE OF OR APPROXIMATELY EQUAL TO
≔;colone;ISOAMSR;;COLON EQUALS
circle on equals sign;RING IN EQUAL TO
R;≗;cire;ISOAMSR; circle, equals ;RING EQUAL TO
R;≘;arceq;; arc, equals;CORRESPONDS TO
≙;wedgeq;ISOTECH ; corresponds to ( wedge , equals ) ; ESTIMATES
R;≚;veeeq;ISOTECH; logical or, equals ;EQUIANGULAR TO
≛;;;;STAR EQUALS
≜;trie;ISOAMSR ; triangle , equals ; DELTA EQUAL TO
R;≝;eqdef;;;EQUAL TO BY DEFINITION
R;≞;measeq;; (m over equals);MEASURED BY
equal with question mark;QUESTIONED EQUAL TO
≠;ne;ISOTECH ; /ne /neq ; NOT EQUAL TO
R;≡;equiv;ISOTECH;;IDENTICAL TO
≣;Equiv ; ; ( 4 lines);STRICTLY EQUIVALENT TO
≤;le;ISOTECH ; /leq /le ; LESS - THAN OR EQUAL TO
≥;ge;ISOTECH ; /geq /ge ; GREATER - THAN OR EQUAL TO
less , double equals;LESS - THAN OVER EQUAL TO
R;≧;gE;ISOAMSR; greater, double equals ;GREATER-THAN OVER EQUAL TO
≨;lnE;ISOAMSN ; less , not double equals;LESS - THAN BUT NOT EQUAL TO
R;≩;gnE;ISOAMSN; greater, not double equals ;GREATER-THAN BUT NOT EQUAL TO
≪;Lt ; ; much less than , type 2 ; MUCH LESS - THAN
≫;Gt ; ; much greater than , type 2;MUCH GREATER - THAN
R;≬;twixt;ISOAMSR;;BETWEEN
≭;nasymp ; ; not asymptotically equal EQUIVALENT TO
R;≰;nle;ISOAMSN;;NEITHER LESS-THAN NOR EQUAL TO
R;≲;lsim;ISOAMSR; less, similar;LESS-THAN OR EQUIVALENT TO
R;≳;gsim;ISOAMSR; greater, similar ;GREATER-THAN OR EQUIVALENT TO
R;≴;nlsim;ISOAMSN; not less, similar;NEITHER LESS-THAN NOR EQUIVALENT TO
≵;ngsim;ISOAMSN ; not greater , similar ; NOR EQUIVALENT TO
≶;lg;ISOAMSR ; less , greater;LESS - THAN OR GREATER - THAN
R;≷;gl;ISOAMSR; greater, less;GREATER-THAN OR LESS-THAN
R;≺;pr;ISOAMSR;;PRECEDES
R;≻;sc;ISOAMSR;;SUCCEEDS
≼;prcue;ISOAMSR ; precedes , curly equals ; PRECEDES OR EQUAL TO
R;≽;sccue;ISOAMSR; succeeds, curly equals ;SUCCEEDS OR EQUAL TO
≾;prsim;ISOAMSR ; precedes , similar;PRECEDES OR EQUIVALENT TO
R;≿;scsim;ISOAMSR; succeeds, similar;SUCCEEDS OR EQUIVALENT TO
⊀;npr;ISOAMSN ; not precedes ; DOES NOT PRECEDE
R;⊁;nsc;ISOAMSN; not succeeds ;DOES NOT SUCCEED
⊂;sub;ISOTECH ; subset or is implied by;SUBSET OF
⊃;sup;ISOTECH ; superset or implies;SUPERSET OF
⊄;nsub;ISOAMSN;;NOT A SUBSET OF
⊆;sube;ISOTECH ; subset , equals ; SUBSET OF OR EQUAL TO
⊇;supe;ISOTECH ; superset , equals ; OF OR EQUAL TO
⊈;nsube;ISOAMSN ; not subset , equals ; NEITHER A SUBSET OF NOR EQUAL TO
⊉;nsupe;ISOAMSN ; not superset , equals ; NEITHER A SUPERSET OF NOR EQUAL TO
⊊;subne;ISOAMSN ; subset , not equals ; SUBSET OF WITH NOT EQUAL TO
⊋;supne;ISOAMSN ; superset , not equals ; OF WITH NOT EQUAL TO
B;⊌;;;;MULTISET
⊍;cupdot;ISOAMSB ; union , with dot;MULTISET
⊎;uplus;ISOAMSB ; plus sign in union ; MULTISET UNION
R;⊏;sqsub;ISOAMSR; square subset;SQUARE IMAGE OF
⊐;sqsup;ISOAMSR ; square superset;SQUARE ORIGINAL OF
R;⊑;sqsube;ISOAMSR; square subset, equals;SQUARE IMAGE OF OR EQUAL TO
⊒;sqsupe;ISOAMSR ; square superset , equals;SQUARE ORIGINAL OF OR EQUAL TO
⊓;sqcap;ISOAMSB ; square intersection;SQUARE CAP
⊔;sqcup;ISOAMSB ; square union ; SQUARE CUP
⊕;oplus;ISOAMSB ; plus sign in circle;CIRCLED PLUS
⊖;ominus;ISOAMSB ; minus sign in circle ; CIRCLED MINUS
⊗;otimes;ISOAMSB ; multiply sign in circle;CIRCLED TIMES
⊘;osol;ISOAMSB ; solidus in circle;CIRCLED DIVISION SLASH
⊙;odot;ISOAMSB ; middle dot in circle ; CIRCLED DOT OPERATOR
⊚;ocir;ISOAMSB ; small circle in circle ; CIRCLED RING OPERATOR
asterisk in circle ; CIRCLED ASTERISK OPERATOR
B;⊜;oeq;; equal in circle;CIRCLED EQUALS
B;⊝;odash;ISOAMSB; hyphen in circle ;CIRCLED DASH
B;⊞;plusb;ISOAMSB; plus sign in box \boxplus;SQUARED PLUS
⊟;minusb;ISOAMSB ; minus sign in box \boxminus;SQUARED MINUS
⊠;timesb;ISOAMSB ; multiply sign in box \boxtimes;SQUARED TIMES
⊡;sdotb;ISOAMSB ; small dot in box \dotsquare \boxdot ; SQUARED DOT OPERATOR
R;⊢;vdash;ISOAMSR; vertical, dash ;RIGHT TACK
dash , vertical ; LEFT TACK
⊤;top;ISOTECH ; top;DOWN
R;⊥;bottom;ISOTECH; bottom ;UP TACK
⊦ ; ; ; ( vertical , short dash ) ;
R;⊧;models;ISOAMSR; (vertical, short double dash);MODELS
R;⊨;vDash;ISOAMSR; vertical, double dash;TRUE
R;⊩;Vdash;ISOAMSR; double vertical, dash;FORCES
⊪;Vvdash;ISOAMSR ; triple vertical , dash;TRIPLE VERTICAL BAR RIGHT TURNSTILE
R;⊫;VDash;ISOAMSR; double vert, double dash ;DOUBLE VERTICAL BAR DOUBLE RIGHT TURNSTILE
R;⊬;nvdash;ISOAMSN; not vertical, dash ;DOES NOT PROVE
⊭;nvDash;ISOAMSN ; not vertical , double dash;NOT TRUE
R;⊮;nVdash;ISOAMSN; not double vertical, dash;DOES NOT FORCE
⊯;nVDash;ISOAMSN ; not double vert , double dash ; NEGATED DOUBLE VERTICAL BAR DOUBLE RIGHT TURNSTILE
⊰;prurel;ISOAMSR ; element precedes under relation;PRECEDES UNDER RELATION
R;⊱;scurel;;;SUCCEEDS UNDER RELATION
R;⊲;vltri;ISOAMSR; left triangle , open , variant ; NORMAL SUBGROUP OF
R;⊳;vrtri;ISOAMSR; right triangle, open, variant;CONTAINS AS NORMAL SUBGROUP
R;⊴;ltrie;ISOAMSR; left triangle, equals;NORMAL SUBGROUP OF OR EQUAL TO
⊵;rtrie;ISOAMSR ; right triangle , equals ; CONTAINS AS NORMAL SUBGROUP OR EQUAL TO
R;⊶;origof;ISOAMSA;;ORIGINAL OF
R;⊸;mumap;ISOAMSA; /multimap;MULTIMAP
B;⊹;hercon;ISOAMSB;;HERMITIAN CONJUGATE MATRIX
⊺;intcal;ISOAMSB ; intercal ; INTERCALATE
B;⊻;veebar;ISOAMSB; logical or , bar below ( large vee ) , exclusive disjunction ; XOR
B;⊼;barwed;ISOAMSB; bar, wedge (large wedge) ;NAND
⊽;;ISOAMSB ; bar , vee ( large vee ) ; NOR
N;⊾;angrtvb;ISOAMSO; right angle-measured [with arc];RIGHT ANGLE WITH ARC
⊿;;;;RIGHT TRIANGLE
L;⋀;xwedge;ISOAMSB; logical or operator;N-ARY LOGICAL AND
⋁;xvee;ISOAMSB ; logical and operator ; N - ARY LOGICAL OR
⋂;xcap;ISOAMSB ; intersection operator;N - ARY INTERSECTION
⋃;xcup;ISOAMSB ; union operator ; N - ARY UNION
⋄;diam;ISOAMSB ; white diamond;DIAMOND OPERATOR
⋅;sdot;ISOAMSB ; small middle dot ; DOT OPERATOR
B;⋆;sstarf;ISOAMSB; small star, filled, low;STAR OPERATOR
⋇;divonx;ISOAMSB ; division on times;DIVISION TIMES
R;⋈;bowtie;ISOAMSR;;BOWTIE
⋉;ltimes;ISOAMSB ; times sign , left closed;LEFT NORMAL FACTOR SEMIDIRECT PRODUCT
⋊;rtimes;ISOAMSB ; times sign , right closed ; RIGHT NORMAL FACTOR SEMIDIRECT PRODUCT
⋋;lthree;ISOAMSB;;LEFT SEMIDIRECT PRODUCT
⋌;rthree;ISOAMSB;;RIGHT SEMIDIRECT PRODUCT
⋍;bsime;ISOAMSR ; reverse similar , equals;REVERSED TILDE EQUALS
B;⋎;cuvee;ISOAMSB;;CURLY LOGICAL OR
B;⋏;cuwed;ISOAMSB;;CURLY LOGICAL AND
R;⋐;Sub;ISOAMSR;;DOUBLE SUBSET
⋑;Sup;ISOAMSR;;DOUBLE SUPERSET
⋒;Cap;ISOAMSB ; /Cap /doublecap;DOUBLE INTERSECTION
⋓;Cup;ISOAMSB ; /Cup /doublecup;DOUBLE UNION
R;⋔;fork;ISOAMSR;;PITCHFORK
⋕;epar;ISOTECH ; parallel , equal;EQUAL AND PARALLEL TO
⋖;ltdot;ISOAMSR;;LESS - THAN WITH DOT
⋗;gtdot;ISOAMSR;;GREATER - THAN WITH DOT
R;⋘;Ll;ISOAMSR; /Ll /lll /llless triple less-than;VERY MUCH LESS-THAN
R;⋙;Gg;ISOAMSR; /ggg /Gg /gggtr triple greater-than;VERY MUCH GREATER-THAN
⋚;leg;ISOAMSR ; less , equals , greater;LESS - THAN EQUAL TO OR GREATER - THAN
⋛;gel;ISOAMSR ; greater , equals , less;GREATER - THAN EQUAL TO OR LESS - THAN
R;⋜;el;ISOAMSR; equal - or - less;EQUAL TO OR LESS - THAN
⋝;eg;ISOAMSR ; equal - or - greater ; EQUAL TO OR GREATER - THAN
⋞;cuepr;ISOAMSR ; curly equals , precedes ; EQUAL TO OR PRECEDES
⋟;cuesc;ISOAMSR ; curly equals , succeeds ; EQUAL TO OR SUCCEEDS
R;⋠;nprcue;ISOAMSN; not precedes, curly equals ;DOES NOT PRECEDE OR EQUAL
R;⋡;nsccue;ISOAMSN; not succeeds, curly equals ;DOES NOT SUCCEED OR EQUAL
R;⋢;nsqsube;ISOAMSN; not, square subset, equals ;NOT SQUARE IMAGE OF OR EQUAL TO
R;⋣;nsqsupe;ISOAMSN; not, square superset, equals ;NOT SQUARE ORIGINAL OF OR EQUAL TO
R;⋤;sqsubne;; square subset, not equals;SQUARE IMAGE OF OR NOT EQUAL TO
⋥;sqsupne ; ; square superset , not equals;SQUARE ORIGINAL OF OR NOT EQUAL TO
⋦;lnsim;ISOAMSN ; less , not similar;LESS - THAN BUT NOT EQUIVALENT TO
R;⋧;gnsim;ISOAMSN; greater, not similar ;GREATER-THAN BUT NOT EQUIVALENT TO
⋨;prnsim;ISOAMSN ; precedes , not similar;PRECEDES BUT NOT EQUIVALENT TO
⋩;scnsim;ISOAMSN ; succeeds , not similar;SUCCEEDS BUT NOT EQUIVALENT TO
⋪;nltri;ISOAMSN ; not left triangle;NOT NORMAL SUBGROUP OF
⋫;nrtri;ISOAMSN ; not right triangle ; DOES NOT CONTAIN AS NORMAL SUBGROUP
⋬;nltrie;ISOAMSN ; not left triangle , equals;NOT NORMAL SUBGROUP OF OR EQUAL TO
R;⋭;nrtrie;ISOAMSN; not right triangle, equals;DOES NOT CONTAIN AS NORMAL SUBGROUP OR EQUAL
⋮;vellip;ISOPUB ; vertical ellipsis;VERTICAL ELLIPSIS
⋯;ctdot;ISOTECH ; three dots , centered ; MIDLINE HORIZONTAL ELLIPSIS
⋰;utdot;ISOTECH ; three dots , ascending;UP RIGHT DIAGONAL ELLIPSIS
⋱;dtdot;ISOTECH ; three dots , descending ; DOWN RIGHT DIAGONAL ELLIPSIS
R;⋲;disin;ISOTECH;;ELEMENT OF WITH LONG HORIZONTAL STROKE
R;⋳;isinsv;ISOTECH;;ELEMENT OF WITH VERTICAL BAR AT END OF HORIZONTAL STROKE
R;⋴;isins;ISOTECH;;SMALL ELEMENT OF WITH VERTICAL BAR AT END OF HORIZONTAL STROKE
R;⋵;isindot;ISOTECH;;ELEMENT OF WITH DOT ABOVE
R;⋶;notinvc;ISOTECH;;ELEMENT OF WITH OVERBAR
R;⋷;notinvb;ISOTECH;;SMALL ELEMENT OF WITH OVERBAR
⋸;isinvb;;;ELEMENT OF WITH UNDERBAR
R;⋺;nisd;ISOTECH;;CONTAINS WITH LONG HORIZONTAL STROKE
R;⋻;xnis;ISOTECH;;CONTAINS WITH VERTICAL BAR AT END OF HORIZONTAL STROKE
R;⋼;nis;ISOTECH;;SMALL CONTAINS WITH VERTICAL BAR AT END OF HORIZONTAL STROKE
R;⋽;notnivc;ISOTECH;;CONTAINS WITH OVERBAR
R;⋾;notnivb;ISOTECH;;SMALL CONTAINS WITH OVERBAR
R;⋿;;;;Z NOTATION BAG MEMBERSHIP
⌀ ;diameter ; ; 2205 diameter sign ; DIAMETER SIGN
N;⌂;;;;HOUSE
⌅ ;;ISOAMSB;;PROJECTIVE
⌆ ;;ISOAMSB;;PERSPECTIVE
O;⌈;lceil;ISOAMSC;;LEFT CEILING
C;⌉;rceil;ISOAMSC;;RIGHT CEILING
C;⌋;rfloor;ISOAMSC;;RIGHT FLOOR
N;⌐;bnot;ISOTECH;;REVERSED NOT SIGN
⌑ ;;;;SQUARE LOZENGE
N;⌙;;;;TURNED NOT SIGN
⌜ LEFT CORNER
C;⌝;urcorn;ISOAMSC;;TOP RIGHT CORNER
O;⌞;dlcorn;ISOAMSC;;BOTTOM LEFT CORNER
C;⌟;drcorn;ISOAMSC;;BOTTOM RIGHT CORNER
⌠ .. ⌡ ; ; ; ( integral parts ) ; TOP HALF INTEGRAL .. BOTTOM HALF INTEGRAL
R;⌢;frown;ISOAMSR; down curve ;FROWN
⌣ ;smile;ISOAMSR ; up curve ; SMILE
O;〈;;; left angle bracket ( deprecated for math use , use 27E8 ) ; LEFT - POINTING ANGLE BRACKET
C;〉;;; right angle bracket (deprecated for math use, use 27E9);RIGHT-POINTING ANGLE BRACKET
N;⌶;topbot;ISOTECH; top and bottom ;APL FUNCTIONAL SYMBOL I-BEAM
⌽ ;ovbar;ISOAMSB ; circle with vertical bar ; APL FUNCTIONAL SYMBOL CIRCLE STILE
R;⌿;solbar;ISOAMSN; solidus, bar through ;APL FUNCTIONAL SYMBOL SLASH BAR
⍼;;;;RIGHT ANGLE WITH DOWNWARDS ZIGZAG ARROW
N;⎔;hbenzen;ISOCHEM; horizontal benzene ring [hexagon flat open] ;SOFTWARE-FUNCTION SYMBOL
⎛ .. ⎯ ; ; ; ( bracket parts);LEFT PARENTHESIS UPPER HOOK .. HORIZONTAL LINE EXTENSION
R;⎰;lmoust;ISOAMSC;;UPPER LEFT OR LOWER RIGHT CURLY BRACKET SECTION
⎱;rmoust;ISOAMSC;;UPPER RIGHT OR LOWER LEFT CURLY BRACKET SECTION
G;⎲..⎳;;; (summation parts);SUMMATION TOP..SUMMATION BOTTOM
N;⎴;tbrk;ISOAMSO;;TOP SQUARE BRACKET
N;⎵;bbrk;ISOAMSO;;BOTTOM SQUARE BRACKET
⎶ BRACKET OVER TOP SQUARE BRACKET
⎷ ; ; ; ( square root part ) ; RADICAL SYMBOL BOTTOM
; ; ; ( vertical line extension);VERTICAL LINE EXTENSION
⏜;ovrpar ; ; over parenthesis ; TOP PARENTHESIS
⏝;udrpar ; ; under parenthesis;BOTTOM PARENTHESIS
⏞;ovrcub ; ; over brace ; TOP CURLY BRACKET
N;⏟;udrcub;; under brace;BOTTOM CURLY BRACKET
⏠;;;;TOP TORTOISE SHELL BRACKET
N;⏡;;;;BOTTOM TORTOISE SHELL BRACKET
N;⏢;;ISOTECH;;WHITE TRAPEZIUM
⏣ ;;ISOCHEM;;BENZENE RING WITH
⏤ ;;ISOTECH;;STRAIGHTNESS
⏦ ;;ISOTECH;;AC CURRENT
N;⏧;;ISOTECH;;ELECTRICAL INTERSECTION
N;Ⓢ;oS;ISOAMSO; capital S in circle;CIRCLED LATIN CAPITAL LETTER S
■ ;squarf;ISOPUB ; square , filled ; BLACK SQUARE
□ ;square;ISOPUB ; square , open ; WHITE SQUARE
N;▪;squf;ISOPUB;? /blacksquare - sq bullet, filled ;BLACK SMALL SQUARE
N;▫;;;;WHITE SMALL SQUARE
N;▭;rect;; horizontal rectangle, open;WHITE RECTANGLE
N;▮;marker;ISOPUB; histogram marker ; BLACK VERTICAL RECTANGLE
N;▯;;ISOPUB;;WHITE VERTICAL RECTANGLE
▰ ;;;;BLACK PARALLELOGRAM
; ; parallelogram , open;WHITE PARALLELOGRAM
▲ ;;;;BLACK UP - POINTING TRIANGLE
△ ;xutri;ISOAMSB ; big up triangle , open;WHITE UP - POINTING TRIANGLE
▴ ;utrif;ISOPUB ; up triangle , filled;BLACK UP - POINTING SMALL TRIANGLE
▵ ;utri;ISOPUB ; /triangle - up triangle , open;WHITE UP - POINTING SMALL TRIANGLE
B;▶;vrtrif;; (large) right triangle, filled ;BLACK RIGHT-POINTING TRIANGLE
▷;vrtri ; ; ( large ) right triangle , open , Z notation range restriction ; WHITE RIGHT - POINTING TRIANGLE
B;▸;rtrif;ISOPUB; right triangle, filled ;BLACK RIGHT-POINTING SMALL TRIANGLE
▹ ; right triangle , open ; WHITE RIGHT - POINTING SMALL TRIANGLE
B;▼;;; big down triangle, filled;BLACK DOWN-POINTING TRIANGLE
B;▽;xdtri;ISOAMSB; big down triangle, open;WHITE DOWN-POINTING TRIANGLE
;dtrif;ISOPUB ; down triangle , filled;BLACK DOWN - POINTING SMALL TRIANGLE
▿ ;dtri;ISOPUB ; down triangle , open;WHITE DOWN - POINTING SMALL TRIANGLE
◀ ;vltrif ; ; ( large ) left triangle , filled;BLACK LEFT - POINTING TRIANGLE
B;◁;vltri;; (large) left triangle, open, Z notation domain restriction ;WHITE LEFT-POINTING TRIANGLE
◂ ;ltrif;ISOPUB ; left triangle , filled;BLACK LEFT - POINTING SMALL TRIANGLE
◃ ;ltri;ISOPUB ; left triangle , open;WHITE LEFT - POINTING SMALL TRIANGLE
◄ ;;;;BLACK LEFT - POINTING POINTER
B;◅;;;;WHITE LEFT-POINTING POINTER
N;◆;diamondf;ISOPUB;;BLACK DIAMOND
;;;;WHITE DIAMOND
N;◈;;;;WHITE DIAMOND CONTAINING BLACK SMALL DIAMOND
N;◉;;;;FISHEYE
B;◊;loz;ISOPUB; lozenge or total mark;LOZENGE
B;○;;ISOAMSB; large circle ;WHITE CIRCLE
◎ ;;;;BULLSEYE
N;●;circlef;ISOPUB; circle, filled ;BLACK CIRCLE
N;◐..◓;;;;CIRCLE WITH LEFT HALF BLACK..CIRCLE WITH UPPER HALF BLACK
◖ ;;;;LEFT HALF BLACK CIRCLE
N;◗;;;;RIGHT HALF BLACK CIRCLE
N;◢;lrtrif;; lower right triangle, filled ;BLACK LOWER RIGHT TRIANGLE
◣ ;lltrif ; ; lower left triangle , filled;BLACK LOWER LEFT TRIANGLE
N;◤;ultrif;; upper left triangle, filled;BLACK UPPER LEFT TRIANGLE
◥ ;urtrif ; ; upper right triangle , filled ; BLACK UPPER RIGHT TRIANGLE
◦ ;;;;WHITE BULLET
N;◧..◪;;;;SQUARE WITH LEFT HALF BLACK..SQUARE WITH LOWER RIGHT DIAGONAL HALF BLACK
◫ ;midb ; ; vertical bar in box;WHITE SQUARE WITH VERTICAL BISECTING LINE
◬ ;tridot;ISOAMSB ; triangle with centered dot ; WHITE UP - POINTING TRIANGLE WITH DOT
◯ ;xcirc;;;LARGE CIRCLE
◸;ultri;ISOAMSO;;UPPER LEFT TRIANGLE
B;◹;urtri;ISOAMSO;;UPPER RIGHT TRIANGLE
B;◺;lltri;ISOAMSO;;LOWER LEFT TRIANGLE
B;◻;xsqu;;;WHITE MEDIUM SQUARE
◼;xsquf;;;BLACK MEDIUM SQUARE
B;◽;vssqu;;;WHITE MEDIUM SMALL SQUARE
◾;vssquf;;;BLACK MEDIUM SMALL SQUARE
B;◿;lrtri;ISOAMSO;;LOWER RIGHT TRIANGLE
B;★;starf;ISOPUB; star, filled ;BLACK STAR
☆ ;star;ISOPUB ; star , open ; WHITE STAR
;;;;SUN
N;☌;;;;CONJUNCTION
☽ ;;;;FIRST QUARTER MOON
☾ ;;;;LAST QUARTER MOON
N;☿;;;;MERCURY
N;♀;female;ISOPUB; Venus;FEMALE SIGN
♁ ;;;;EARTH
♂ ;male;ISOPUB ; Mars ; MALE SIGN
N;♃;;;;JUPITER
N;♄;;;;SATURN
N;♆;;;;NEPTUNE
N;♇;;;;PLUTO
N;♈;;;;ARIES
♉ ;;;;TAURUS
♠ ;spades;ISOPUB ; spades suit symbol ; BLACK SPADE SUIT
♡ ;hearts;ISOPUB ; heart suit symbol;WHITE HEART SUIT
♢ ; diamond suit symbol;WHITE DIAMOND SUIT
♣ ;clubs;ISOPUB ; club suit symbol ; BLACK CLUB SUIT
♤ ;spadeso ; ; spade , white ( card suit ) ; WHITE SPADE SUIT
♥ ;heartsf ; ; filled heart ( card suit ) ; BLACK HEART SUIT
N;♦;diamsf;; filled diamond (card suit) ;BLACK DIAMOND SUIT
♧ ; ; club , white ( card suit);WHITE CLUB SUIT
N;♩;sung;ISONUM; music note (sung text sign);QUARTER NOTE
N;♭;flat;ISOPUB;;MUSIC FLAT SIGN
♮ ;natur;ISOPUB;;MUSIC NATURAL SIGN
N;♯;sharp;ISOPUB;;MUSIC SHARP SIGN
N;⚀;;;;DIE FACE-1
N;⚁;;;;DIE FACE-2
N;⚂;;;;DIE FACE-3
N;⚃;;;;DIE FACE-4
N;⚅;;;;DIE FACE-6
⚆ ;;;;WHITE CIRCLE WITH DOT RIGHT
⚇ ;;;;WHITE CIRCLE WITH TWO DOTS
⚈ ;;;;BLACK CIRCLE WITH WHITE DOT RIGHT
⚉ ;;;;BLACK CIRCLE WITH TWO WHITE DOTS
N;⚪;;;;MEDIUM WHITE CIRCLE
N;⚫;;;;MEDIUM BLACK CIRCLE
N;⚬;;;;MEDIUM SMALL WHITE CIRCLE
N;⚲;;;;NEUTER
N;✓;check;ISOPUB; tick ;CHECK MARK
✗ ;cross;ISOPUB ; ballot cross ; BALLOT X
✠ ;malt;ISOPUB;;MALTESE CROSS
N;✪;;;;CIRCLED WHITE STAR
N;✶;sext;ISOPUB;;SIX POINTED BLACK STAR
❲;lbbrk;ISOTECH ; left broken bracket;LIGHT LEFT TORTOISE SHELL BRACKET ORNAMENT
❳;rbbrk;ISOTECH ; right broken bracket ; LIGHT RIGHT TORTOISE SHELL BRACKET ORNAMENT
⟀;;;;THREE DIMENSIONAL ANGLE
N;⟁;;;;WHITE TRIANGLE CONTAINING SMALL WHITE TRIANGLE
R;⟂;perp;ISOTECH; perpendicular;PERPENDICULAR
R;⟃;;;;OPEN SUBSET
⟄;;;;OPEN SUPERSET
R;⟅;;;;LEFT S-SHAPED BAG DELIMITER
R;⟆;;;;RIGHT S-SHAPED BAG DELIMITER
R;⟇;;;;OR WITH DOT INSIDE
R;⟈;bsolsub;;;REVERSE SOLIDUS PRECEDING SUBSET
⟉;subsol;;;SUPERSET PRECEDING SOLIDUS
⟊;;;;VERTICAL BAR WITH HORIZONTAL STROKE
R;⟋;diagup;;;MATHEMATICAL RISING DIAGONAL
L;⟌;;;;LONG DIVISION
⟍;diagdown;;;MATHEMATICAL FALLING DIAGONAL
B;⟎;;;;SQUARED LOGICAL AND
B;⟏;;;;SQUARED LOGICAL OR
⟐;diamdot;;;WHITE DIAMOND WITH CENTRED DOT
⟑;;;;AND WITH DOT
R;⟒;;;;ELEMENT OF OPENING UPWARDS
⟓;;;;LOWER RIGHT CORNER WITH DOT
⟔;;;;UPPER LEFT CORNER WITH DOT
L;⟖;;;;RIGHT OUTER JOIN
⟗;;;;FULL JOIN
⟘;;;;LARGE UP TACK
⟙;;;;LARGE DOWN TACK
R;⟚;;;;LEFT AND RIGHT DOUBLE TURNSTILE
R;⟛;;;;LEFT AND RIGHT TACK
R;⟝;;;;LONG RIGHT TACK
R;⟞;;;;LONG LEFT TACK
⟠;;;;LOZENGE BY HORIZONTAL RULE
⟡;;;;WHITE CONCAVE - SIDED DIAMOND
⟢;;;;WHITE CONCAVE - SIDED DIAMOND WITH LEFTWARDS TICK
B;⟤;;;;WHITE SQUARE WITH LEFTWARDS TICK
⟦;lobrk;ISOTECH;;MATHEMATICAL LEFT WHITE SQUARE BRACKET
C;⟧;robrk;ISOTECH;;MATHEMATICAL RIGHT WHITE SQUARE BRACKET
O;⟨;lang;ISOTECH;;MATHEMATICAL LEFT ANGLE BRACKET
C;⟩;rang;ISOTECH;;MATHEMATICAL RIGHT ANGLE BRACKET
O;⟪;Lang;ISOTECH; left angle bracket, double ;MATHEMATICAL LEFT DOUBLE ANGLE BRACKET
⟫;Rang;ISOTECH ; right angle bracket , double;MATHEMATICAL RIGHT DOUBLE ANGLE BRACKET
O;⟬;loang;ISOTECK;;MATHEMATICAL LEFT WHITE TORTOISE SHELL BRACKET
⟭;roang;ISOTECH;;MATHEMATICAL RIGHT WHITE TORTOISE SHELL BRACKET
⟮ ; ; ; MATHEMATICAL LEFT FLATTENED PARENTHESIS
; ; rgroup ; MATHEMATICAL RIGHT FLATTENED PARENTHESIS
⟰;;;;UPWARDS QUADRUPLE ARROW
R;⟱;;;;DOWNWARDS QUADRUPLE ARROW
R;⟴;;;;RIGHT ARROW WITH CIRCLED PLUS
R;⟷;xharr;ISOAMSA;;LONG LEFT RIGHT ARROW
⟸;xlArr;ISOAMSA;;LONG LEFTWARDS DOUBLE ARROW
R;⟹;xrArr;ISOAMSA;;LONG RIGHTWARDS DOUBLE ARROW
R;⟺;xhArr;ISOAMSA;;LONG LEFT RIGHT DOUBLE ARROW
⟻;xmapfrom;;;LONG LEFTWARDS ARROW FROM BAR
⟼;xmap;ISOAMSA;;LONG RIGHTWARDS ARROW FROM BAR
⟽;xMapfrom;;;LONG LEFTWARDS DOUBLE ARROW FROM BAR
⟾;xMapto;;;LONG RIGHTWARDS DOUBLE ARROW FROM BAR
⟿;xzigrarr;ISOAMSA;;LONG RIGHTWARDS SQUIGGLE ARROW
⤀;;;;RIGHTWARDS TWO - HEADED ARROW WITH VERTICAL STROKE
⤁;;;;RIGHTWARDS TWO - HEADED ARROW WITH DOUBLE VERTICAL STROKE
R;⤂;nvlArr;ISOAMSA;;LEFTWARDS DOUBLE ARROW WITH VERTICAL STROKE
⤃;nvrArr;ISOAMSA;;RIGHTWARDS DOUBLE ARROW WITH VERTICAL STROKE
R;⤄;nvhArr;ISOAMSA;;LEFT RIGHT DOUBLE ARROW WITH VERTICAL STROKE
⤅;Map;ISOAMSA;;RIGHTWARDS TWO - HEADED ARROW FROM BAR
⤆;Mapfrom;;;LEFTWARDS DOUBLE ARROW FROM BAR
⤇;Mapto;;;RIGHTWARDS DOUBLE ARROW FROM BAR
R;⤈;darrln;;;DOWNWARDS ARROW WITH HORIZONTAL STROKE
⤉;uarrln;;;UPWARDS ARROW WITH HORIZONTAL STROKE
R;⤊;uAarr;;;UPWARDS TRIPLE ARROW
R;⤋;dAarr;;;DOWNWARDS TRIPLE ARROW
R;⤌;lbarr;ISOAMSA;;LEFTWARDS DOUBLE DASH ARROW
R;⤍;rbarr;ISOAMSA;;RIGHTWARDS DOUBLE DASH ARROW
R;⤎;lBarr;ISOAMSA;;LEFTWARDS TRIPLE DASH ARROW
⤏;rBarr;ISOAMSA;;RIGHTWARDS TRIPLE DASH ARROW
⤐;RBarr;ISOAMSA;;RIGHTWARDS TWO - HEADED TRIPLE DASH ARROW
⤑;DDotrahd;ISOAMSA;;RIGHTWARDS ARROW WITH DOTTED STEM
⤒;uarrb;;;UPWARDS ARROW TO BAR
R;⤓;darrb;;;DOWNWARDS ARROW TO BAR
⤔;;;;RIGHTWARDS ARROW WITH TAIL WITH VERTICAL STROKE
R;⤕;;;;RIGHTWARDS ARROW WITH TAIL WITH DOUBLE VERTICAL STROKE
⤖;Rarrtl;ISOAMSA;;RIGHTWARDS TWO - HEADED ARROW WITH TAIL
⤗;;;;RIGHTWARDS TWO - HEADED ARROW WITH TAIL WITH VERTICAL STROKE
⤘;;;;RIGHTWARDS TWO - HEADED ARROW WITH TAIL WITH DOUBLE VERTICAL STROKE
R;⤚;ratail;ISOAMSA;;RIGHTWARDS ARROW-TAIL
R;⤛;lAtail;ISOAMSA;;LEFTWARDS DOUBLE ARROW-TAIL
R;⤜;rAtail;ISOAMSA;;RIGHTWARDS DOUBLE ARROW-TAIL
R;⤝;larrfs;ISOAMSA;;LEFTWARDS ARROW TO BLACK DIAMOND
⤞;rarrfs;ISOAMSA;;RIGHTWARDS ARROW TO BLACK DIAMOND
R;⤟;larrbfs;ISOAMSA;;LEFTWARDS ARROW FROM BAR TO BLACK DIAMOND
R;⤠;rarrbfs;ISOAMSA;;RIGHTWARDS ARROW FROM BAR TO BLACK DIAMOND
⤡;nwsesarr;;;NORTH WEST AND SOUTH EAST ARROW
⤢;neswsarr;;;NORTH EAST AND SOUTH WEST ARROW
R;⤥;searhk;ISOAMSA;;SOUTH EAST ARROW WITH HOOK
R;⤦;swarhk;ISOAMSA;;SOUTH WEST ARROW WITH HOOK
⤧;nwnear;ISOAMSA;;NORTH WEST ARROW AND NORTH EAST ARROW
⤨;nesear;ISOAMSA;;NORTH EAST ARROW AND SOUTH EAST ARROW
⤩;seswar;ISOAMSA;;SOUTH EAST ARROW AND SOUTH WEST ARROW
⤪;swnwar;ISOAMSA;;SOUTH WEST ARROW AND NORTH WEST ARROW
⤫;rdiofdi;;;RISING DIAGONAL CROSSING FALLING DIAGONAL
⤬;fdiordi;;;FALLING DIAGONAL CROSSING RISING DIAGONAL
⤭;seonearr;;;SOUTH EAST ARROW CROSSING NORTH EAST ARROW
⤮;neosearr;;;NORTH EAST ARROW CROSSING SOUTH EAST ARROW
⤯;fdonearr;;;FALLING DIAGONAL CROSSING NORTH EAST ARROW
⤰;rdosearr;;;RISING DIAGONAL CROSSING SOUTH EAST ARROW
⤱;neonwarr;;;NORTH EAST ARROW CROSSING NORTH WEST ARROW
⤲;nwonearr;;;NORTH WEST ARROW CROSSING NORTH EAST ARROW
⤳;rarrc;ISOAMSA;;WAVE ARROW POINTING DIRECTLY RIGHT
R;⤴;;;;ARROW POINTING RIGHTWARDS THEN CURVING UPWARDS
R;⤵;;;;ARROW POINTING RIGHTWARDS THEN CURVING DOWNWARDS
⤶;ldca;ISOAMSA;;ARROW POINTING DOWNWARDS THEN CURVING LEFTWARDS
R;⤷;rdca;ISOAMSA;;ARROW POINTING DOWNWARDS THEN CURVING RIGHTWARDS
R;⤸;cudarrl;ISOAMSA;;RIGHT-SIDE ARC CLOCKWISE ARROW
R;⤹;cudarrr;ISOAMSA;;LEFT-SIDE ARC ANTICLOCKWISE ARROW
⤺;;;;TOP ARC ANTICLOCKWISE ARROW
R;⤻;;;;BOTTOM ARC ANTICLOCKWISE ARROW
⤽;cularrp;ISOAMSA;;TOP ARC ANTICLOCKWISE ARROW WITH PLUS
⤿;;;;LOWER LEFT SEMICIRCULAR ARROW
⥀;olarr;ISOAMSA;;ANTICLOCKWISE CLOSED CIRCLE ARROW
⥁;orarr;ISOAMSA;;CLOCKWISE CLOSED CIRCLE ARROW
R;⥂;arrlrsl;;;RIGHTWARDS ARROW ABOVE SHORT LEFTWARDS ARROW
R;⥃;arrllsr;;;LEFTWARDS ARROW ABOVE SHORT RIGHTWARDS ARROW
⥄;arrsrll;;;SHORT RIGHTWARDS ARROW ABOVE LEFTWARDS ARROW
R;⥅;rarrpl;ISOAMSA;;RIGHTWARDS ARROW WITH PLUS BELOW
R;⥆;larrpl;ISOAMSA;;LEFTWARDS ARROW WITH PLUS BELOW
R;⥇;rarrx;;;RIGHTWARDS ARROW THROUGH X
⥉;Uarrocir;ISOAMSA;;UPWARDS TWO - HEADED ARROW FROM SMALL CIRCLE
⥊;lurdshar;ISOAMSA;;LEFT BARB UP RIGHT BARB DOWN HARPOON
R;⥋;ldrushar;ISOAMSA;;LEFT BARB DOWN RIGHT BARB UP HARPOON
R;⥍;uldrshar;;;UP BARB LEFT DOWN BARB RIGHT HARPOON
⥎;lurushar;;;LEFT BARB UP RIGHT BARB UP HARPOON
R;⥏;urdrshar;;;UP BARB RIGHT DOWN BARB RIGHT HARPOON
R;⥐;ldrdshar;;;LEFT BARB DOWN RIGHT BARB DOWN HARPOON
R;⥑;uldlshar;;;UP BARB LEFT DOWN BARB LEFT HARPOON
R;⥒;luharb;;;LEFTWARDS HARPOON WITH BARB UP TO BAR
R;⥓;ruharb;;;RIGHTWARDS HARPOON WITH BARB UP TO BAR
R;⥔;urharb;;;UPWARDS HARPOON WITH BARB RIGHT TO BAR
R;⥕;drharb;;;DOWNWARDS HARPOON WITH BARB RIGHT TO BAR
R;⥖;ldharb;;;LEFTWARDS HARPOON WITH BARB DOWN TO BAR
R;⥗;rdharb;;;RIGHTWARDS HARPOON WITH BARB DOWN TO BAR
R;⥘;ulharb;;;UPWARDS HARPOON WITH BARB LEFT TO BAR
R;⥙;dlharb;;;DOWNWARDS HARPOON WITH BARB LEFT TO BAR
⥚;bluhar;;;LEFTWARDS HARPOON WITH BARB UP FROM BAR
⥛;bruhar;;;RIGHTWARDS HARPOON WITH BARB UP FROM BAR
R;⥜;burhar;;;UPWARDS HARPOON WITH BARB RIGHT FROM BAR
R;⥝;bdrhar;;;DOWNWARDS HARPOON WITH BARB RIGHT FROM BAR
⥞;bldhar;;;LEFTWARDS HARPOON WITH BARB DOWN FROM BAR
R;⥟;brdhar;;;RIGHTWARDS HARPOON WITH BARB DOWN FROM BAR
⥠;bulhar;;;UPWARDS HARPOON WITH BARB LEFT FROM BAR
R;⥡;bdlhar;;;DOWNWARDS HARPOON WITH BARB LEFT FROM BAR
R;⥢;lHar;ISOAMSA;;LEFTWARDS HARPOON WITH BARB UP ABOVE LEFTWARDS HARPOON WITH BARB DOWN
⥣;uHar;ISOAMSA;;UPWARDS HARPOON WITH BARB LEFT BESIDE UPWARDS HARPOON WITH BARB RIGHT
R;⥤;rHar;ISOAMSA;;RIGHTWARDS HARPOON WITH BARB UP ABOVE RIGHTWARDS HARPOON WITH BARB DOWN
⥥;dHar;ISOAMSA;;DOWNWARDS HARPOON WITH BARB LEFT BESIDE DOWNWARDS HARPOON WITH BARB RIGHT
⥦;luruhar;ISOAMSA;;LEFTWARDS HARPOON WITH BARB UP ABOVE RIGHTWARDS HARPOON WITH BARB UP
R;⥧;ldrdhar;ISOAMSA;;LEFTWARDS HARPOON WITH BARB DOWN ABOVE RIGHTWARDS HARPOON WITH BARB DOWN
R;⥨;ruluhar;ISOAMSA;;RIGHTWARDS HARPOON WITH BARB UP ABOVE LEFTWARDS HARPOON WITH BARB UP
⥩;rdldhar;ISOAMSA;;RIGHTWARDS HARPOON WITH BARB DOWN ABOVE LEFTWARDS HARPOON WITH BARB DOWN
R;⥪;lharul;ISOAMSA;;LEFTWARDS HARPOON WITH BARB UP ABOVE LONG DASH
R;⥫;llhard;ISOAMSA;;LEFTWARDS HARPOON WITH BARB DOWN BELOW LONG DASH
R;⥬;rharul;ISOAMSA;;RIGHTWARDS HARPOON WITH BARB UP ABOVE LONG DASH
R;⥭;lrhard;ISOAMSA;;RIGHTWARDS HARPOON WITH BARB DOWN BELOW LONG DASH
R;⥮;udhar;ISOAMSA;;UPWARDS HARPOON WITH BARB LEFT BESIDE DOWNWARDS HARPOON WITH BARB RIGHT
R;⥯;duhar;ISOAMSA;;DOWNWARDS HARPOON WITH BARB LEFT BESIDE UPWARDS HARPOON WITH BARB RIGHT
⥰;rimply;;;RIGHT DOUBLE ARROW WITH ROUNDED HEAD
R;⥱;erarr;ISOAMSA;;EQUALS SIGN ABOVE RIGHTWARDS ARROW
R;⥲;simrarr;ISOAMSA;;TILDE OPERATOR ABOVE RIGHTWARDS ARROW
R;⥳;larrsim;ISOAMSA;;LEFTWARDS ARROW ABOVE TILDE OPERATOR
⥴;rarrsim;ISOAMSA;;RIGHTWARDS ARROW ABOVE TILDE OPERATOR
R;⥵;rarrap;ISOAMSA;;RIGHTWARDS ARROW ABOVE ALMOST EQUAL TO
⥶;ltlarr;ISOAMSR;;LESS - THAN ABOVE LEFTWARDS ARROW
R;⥷;;;;LEFTWARDS ARROW THROUGH LESS-THAN
R;⥸;gtrarr;ISOAMSR;;GREATER-THAN ABOVE RIGHTWARDS ARROW
⥹;subrarr;ISOAMSR;;SUBSET ABOVE RIGHTWARDS ARROW
R;⥺;;;;LEFTWARDS ARROW THROUGH SUBSET
⥻;suplarr;ISOAMSR;;SUPERSET ABOVE LEFTWARDS ARROW
R;⥼;lfisht;ISOAMSA;;LEFT FISH TAIL
R;⥽;rfisht;ISOAMSA;;RIGHT FISH TAIL
R;⥾;ufisht;ISOAMSA;;UP FISH TAIL
R;⥿;dfisht;ISOAMSA;;DOWN FISH TAIL
⦀;tverbar;;;TRIPLE VERTICAL BAR DELIMITER
⦁;scirclef;;;Z NOTATION SPOT
F;⦂;;;;Z NOTATION TYPE COLON
O;⦃;locub;;;LEFT WHITE CURLY BRACKET
C;⦄;rocub;;;RIGHT WHITE CURLY BRACKET
O;⦅;lopar;ISOTECH;;LEFT WHITE PARENTHESIS
C;⦆;ropar;ISOTECH;;RIGHT WHITE PARENTHESIS
O;⦇;;;;Z NOTATION LEFT IMAGE BRACKET
⦈;;;;Z NOTATION RIGHT IMAGE BRACKET
O;⦉;;;;Z NOTATION LEFT BINDING BRACKET
⦋;lbrke;ISOAMSC;;LEFT BRACKET WITH UNDERBAR
⦍;lbrkslu;ISOAMSC;;LEFT SQUARE BRACKET WITH TICK IN TOP CORNER
C;⦎;rbrksld;ISOAMSC;;RIGHT SQUARE BRACKET WITH TICK IN BOTTOM CORNER
⦑;langd;ISOAMSC;;LEFT ANGLE BRACKET WITH DOT
⦒;rangd;ISOAMSC;;RIGHT ANGLE BRACKET WITH DOT
⦓;lparlt;ISOAMSC;;LEFT ARC LESS - THAN BRACKET
C;⦔;rpargt;ISOAMSC;;RIGHT ARC GREATER-THAN BRACKET
O;⦕;gtlPar;ISOAMSC;;DOUBLE LEFT ARC GREATER-THAN BRACKET
⦗;;;;LEFT BLACK TORTOISE SHELL BRACKET
C;⦘;;;;RIGHT BLACK TORTOISE SHELL BRACKET
⦙;vellip4;;;DOTTED FENCE
F;⦚;vzigzag;ISOAMSO;;VERTICAL ZIGZAG LINE
N;⦛;;;;MEASURED ANGLE OPENING LEFT
N;⦜;vangrt;ISOTECH;;RIGHT ANGLE VARIANT WITH SQUARE
⦝;angrtvbd;ISOAMSO;;MEASURED RIGHT ANGLE WITH DOT
⦞;angles;;;ANGLE WITH S INSIDE
N;⦟;angdnr;;;ACUTE ANGLE
N;⦠;gtlpar;;;SPHERICAL ANGLE OPENING LEFT
N;⦡;;;;SPHERICAL ANGLE OPENING UP
N;⦢;angdnl;;;TURNED ANGLE
N;⦣;angupl;;;REVERSED ANGLE
⦤;ange;ISOAMSO;;ANGLE WITH UNDERBAR
N;⦦;dwangle;ISOTECH;;OBLIQUE ANGLE OPENING UP
N;⦧;uwangle;ISOTECH;;OBLIQUE ANGLE OPENING DOWN
N;⦨;angmsdaa;ISOAMSO;;MEASURED ANGLE WITH OPEN ARM ENDING IN ARROW POINTING UP AND RIGHT
N;⦩;angmsdab;ISOAMSO;;MEASURED ANGLE WITH OPEN ARM ENDING IN ARROW POINTING UP AND LEFT
N;⦪;angmsdac;ISOAMSO;;MEASURED ANGLE WITH OPEN ARM ENDING IN ARROW POINTING DOWN AND RIGHT
N;⦫;angmsdad;ISOAMSO;;MEASURED ANGLE WITH OPEN ARM ENDING IN ARROW POINTING DOWN AND LEFT
N;⦬;angmsdae;ISOAMSO;;MEASURED ANGLE WITH OPEN ARM ENDING IN ARROW POINTING RIGHT AND UP
N;⦭;angmsdaf;ISOAMSO;;MEASURED ANGLE WITH OPEN ARM ENDING IN ARROW POINTING LEFT AND UP
N;⦮;angmsdag;ISOAMSO;;MEASURED ANGLE WITH OPEN ARM ENDING IN ARROW POINTING RIGHT AND DOWN
N;⦯;angmsdah;ISOAMSO;;MEASURED ANGLE WITH OPEN ARM ENDING IN ARROW POINTING LEFT AND DOWN
N;⦰;bemptyv;ISOAMSO;;REVERSED EMPTY SET
⦱;demptyv;ISOAMSO;;EMPTY SET WITH OVERBAR
N;⦲;cemptyv;ISOAMSO;;EMPTY SET WITH SMALL CIRCLE ABOVE
N;⦳;raemptyv;ISOAMSO;;EMPTY SET WITH RIGHT ARROW ABOVE
N;⦴;laemptyv;ISOAMSO;;EMPTY SET WITH LEFT ARROW ABOVE
N;⦵;ohbar;ISOAMSB;;CIRCLE WITH HORIZONTAL BAR
B;⦶;omid;ISOAMSB;;CIRCLED VERTICAL BAR
⦷;opar;ISOAMSB;;CIRCLED PARALLEL
B;⦸;obsol;;;CIRCLED REVERSE SOLIDUS
B;⦹;operp;ISOAMSB;;CIRCLED PERPENDICULAR
⦺;;;;CIRCLE BY HORIZONTAL BAR AND TOP HALF BY VERTICAL BAR
⦻;olcross;ISOTECH;;CIRCLE WITH SUPERIMPOSED X
⦼;odsold;ISOAMSB;;CIRCLED - ROTATED DIVISION SIGN
⦽;oxuarr;;;UP ARROW THROUGH
N;⦾;olcir;ISOAMSB;;CIRCLED WHITE BULLET
N;⦿;ofcir;ISOAMSB;;CIRCLED BULLET
B;⧀;olt;ISOAMSB;;CIRCLED LESS-THAN
B;⧁;ogt;ISOAMSB;;CIRCLED GREATER-THAN
⧂;cirscir;ISOAMSO;;CIRCLE WITH SMALL CIRCLE TO THE RIGHT
⧃;cirE;ISOAMSO;;CIRCLE WITH TWO HORIZONTAL STROKES TO THE RIGHT
B;⧄;solb;ISOAMSB;;SQUARED RISING DIAGONAL SLASH
⧅;bsolb;ISOAMSB;;SQUARED FALLING DIAGONAL SLASH
⧆;astb;;;SQUARED ASTERISK
⧇;cirb;;;SQUARED SMALL CIRCLE
B;⧈;squb;;;SQUARED SQUARE
N;⧉;boxbox;ISOAMSO;;TWO JOINED SQUARES
⧊;tridoto;;;TRIANGLE WITH DOT ABOVE
⧋;tribar;;;TRIANGLE WITH UNDERBAR
N;⧌;triS;;;S IN TRIANGLE
⧍;trisb;ISOAMSB;;TRIANGLE WITH SERIFS AT BOTTOM
⧎;rtriltri;ISOAMSR;;RIGHT TRIANGLE ABOVE LEFT TRIANGLE
R;⧏;ltrivb;;;LEFT TRIANGLE BESIDE VERTICAL BAR
R;⧐;vbrtri;;;VERTICAL BAR BESIDE RIGHT TRIANGLE
⧑;lfbowtie;;;BOWTIE WITH LEFT HALF BLACK
R;⧒;rfbowtie;;;BOWTIE WITH RIGHT HALF BLACK
R;⧔;lftimes;;;TIMES WITH LEFT HALF BLACK
R;⧕;rftimes;;;TIMES WITH RIGHT HALF BLACK
B;⧖;hrglass;;;WHITE HOURGLASS
⧗;fhrglass;;;BLACK HOURGLASS
O;⧘;;;;LEFT WIGGLY FENCE
C;⧙;;;;RIGHT WIGGLY FENCE
O;⧚;;;;LEFT DOUBLE WIGGLY FENCE
C;⧛;;;;RIGHT DOUBLE WIGGLY FENCE
⧝;infintie;ISOTECH;;TIE OVER INFINITY
⧞;nvinfin;ISOTECH;;INFINITY NEGATED WITH VERTICAL BAR
⧟;dumap;;;DOUBLE - ENDED MULTIMAP
N;⧠;dalembrt;;;SQUARE WITH CONTOURED OUTLINE
⧡;lrtrieq;;;INCREASES AS
B;⧢;shuffle;;;SHUFFLE PRODUCT
R;⧤;smeparsl;ISOTECH;;EQUALS SIGN AND SLANTED PARALLEL WITH TILDE ABOVE
⧥;eqvparsl;ISOTECH;;IDENTICAL TO AND SLANTED PARALLEL
⧦;;;;GLEICH STARK
N;⧧;thermod;;;THERMODYNAMIC
⧨;dtrilf;;;DOWN - POINTING TRIANGLE WITH LEFT HALF BLACK
N;⧩;dtrirf;;;DOWN-POINTING TRIANGLE WITH RIGHT HALF BLACK
N;⧪;diamdarr;;;BLACK DIAMOND WITH DOWN ARROW
⧫;lozf;ISOPUB;;BLACK LOZENGE
⧬;cirdarr;;;WHITE WITH DOWN ARROW
N;⧭;cirfdarr;;;BLACK CIRCLE WITH DOWN ARROW
⧮;squerr;;;ERROR - BARRED WHITE SQUARE
⧯;squferr;;;ERROR - BARRED BLACK SQUARE
⧰;diamerr;;;ERROR - BARRED WHITE DIAMOND
⧱;diamerrf;;;ERROR - BARRED BLACK DIAMOND
N;⧲;cirerr;;;ERROR-BARRED WHITE CIRCLE
⧳;cirferr;;;ERROR - BARRED BLACK CIRCLE
R;⧴;;;;RULE-DELAYED
B;⧵;;;;REVERSE SOLIDUS OPERATOR
B;⧶;dsol;ISOTECH;;SOLIDUS WITH OVERBAR
⧷;rsolbar;;;REVERSE SOLIDUS WITH HORIZONTAL STROKE
L;⧸;xsol;;;BIG SOLIDUS
⧹;xbsol;;;BIG REVERSE SOLIDUS
B;⧺;;;;DOUBLE PLUS
⧻;;;;TRIPLE PLUS
O;⧼;;;;LEFT-POINTING CURVED ANGLE BRACKET
C;⧽;;;;RIGHT-POINTING CURVED ANGLE BRACKET
⧿;;;;MINY
⨀;xodot;ISOAMSB;;N - ARY CIRCLED DOT OPERATOR
L;⨁;xoplus;ISOAMSB;;N-ARY CIRCLED PLUS OPERATOR
L;⨂;xotime;ISOAMSB;;N-ARY CIRCLED TIMES OPERATOR
⨃;xcupdot;;;N - ARY UNION OPERATOR WITH DOT
L;⨄;xuplus;ISOAMSB;;N-ARY UNION OPERATOR WITH PLUS
⨅;xsqcap;ISOAMSB;;N - ARY SQUARE INTERSECTION OPERATOR
L;⨆;xsqcup;ISOAMSB;;N-ARY SQUARE UNION OPERATOR
L;⨇;xandand;;;TWO LOGICAL AND OPERATOR
L;⨈;xoror;;;TWO LOGICAL OR OPERATOR
⨉;xtimes;;;N - ARY TIMES OPERATOR
⨊;;;;MODULO TWO SUM
⨋;sumint;;;SUMMATION WITH INTEGRAL
⨌;qint;ISOTECH;;QUADRUPLE INTEGRAL OPERATOR
L;⨍;fpartint;ISOTECH;;FINITE PART INTEGRAL
L;⨏;slint;;;INTEGRAL AVERAGE WITH SLASH
L;⨐;cirfnint;ISOTECH;;CIRCULATION FUNCTION
L;⨑;awint;ISOTECH;;ANTICLOCKWISE INTEGRATION
L;⨒;rppolint;ISOTECH;;LINE INTEGRATION WITH RECTANGULAR PATH AROUND POLE
L;⨓;scpolint;ISOTECH;;LINE INTEGRATION WITH SEMICIRCULAR PATH AROUND POLE
L;⨔;npolint;ISOTECH;;LINE INTEGRATION NOT INCLUDING THE POLE
L;⨕;pointint;ISOTECH;;INTEGRAL AROUND A POINT OPERATOR
⨗;intlarhk;ISOTECH;;INTEGRAL WITH LEFTWARDS ARROW WITH HOOK
⨘;timeint;;;INTEGRAL WITH TIMES SIGN
⨙;capint;;;INTEGRAL WITH INTERSECTION
⨚;cupint;;;INTEGRAL WITH UNION
⨛;upint;;;INTEGRAL WITH OVERBAR
⨜;lowint;;;INTEGRAL WITH UNDERBAR
⨝;Join;;;JOIN
⨞;xltri;;;LARGE LEFT TRIANGLE OPERATOR
L;⨟;;;;Z NOTATION SCHEMA COMPOSITION
L;⨠;;;;Z NOTATION SCHEMA PIPING
⨡;;;;Z NOTATION SCHEMA PROJECTION
B;⨢;pluscir;ISOAMSB;;PLUS SIGN WITH SMALL CIRCLE ABOVE
B;⨣;plusacir;ISOAMSB;;PLUS SIGN WITH CIRCUMFLEX ACCENT ABOVE
B;⨤;simplus;ISOAMSB;;PLUS SIGN WITH TILDE ABOVE
B;⨥;plusdu;ISOAMSB;;PLUS SIGN WITH DOT BELOW
⨦;plussim;ISOAMSB;;PLUS SIGN WITH TILDE BELOW
⨧;plustwo;ISOAMSB;;PLUS SIGN WITH SUBSCRIPT TWO
⨨;plustrif;;;PLUS SIGN WITH BLACK TRIANGLE
B;⨩;mcomma;ISOAMSR;;MINUS SIGN WITH COMMA ABOVE
B;⨪;minusdu;ISOAMSB;;MINUS SIGN WITH DOT BELOW
B;⨫;;;;MINUS SIGN WITH FALLING DOTS
B;⨬;;;;MINUS SIGN WITH RISING DOTS
⨭;loplus;ISOAMSB;;PLUS SIGN IN LEFT HALF
⨮;roplus;ISOAMSB;;PLUS SIGN IN RIGHT HALF
B;⨰;timesd;ISOAMSB;;MULTIPLICATION SIGN WITH DOT ABOVE
⨱;timesbar;ISOAMSB;;MULTIPLICATION SIGN WITH UNDERBAR
B;⨲;btimes;;;SEMIDIRECT PRODUCT WITH BOTTOM CLOSED
B;⨳;smashp;ISOAMSB;;SMASH PRODUCT
⨴;lotimes;ISOAMSB;;MULTIPLICATION SIGN IN LEFT HALF
⨵;rotimes;ISOAMSB;;MULTIPLICATION SIGN IN RIGHT HALF
⨶;otimesas;ISOAMSB;;CIRCLED SIGN WITH CIRCUMFLEX ACCENT
B;⨷;Otimes;ISOAMSB;;MULTIPLICATION SIGN IN DOUBLE CIRCLE
B;⨸;odiv;ISOAMSB;;CIRCLED DIVISION SIGN
⨹;triplus;ISOAMSB;;PLUS SIGN IN TRIANGLE
B;⨺;triminus;ISOAMSB;;MINUS SIGN IN TRIANGLE
⨻;tritime;ISOAMSB;;MULTIPLICATION SIGN IN TRIANGLE
B;⨼;iprod;ISOAMSB;;INTERIOR PRODUCT
⨽;iprodr;ISOAMSB;;RIGHTHAND INTERIOR PRODUCT
⨾;;;;Z NOTATION RELATIONAL COMPOSITION
⩀;capdot;ISOAMSB;;INTERSECTION WITH DOT
B;⩁;;;;UNION WITH MINUS SIGN
B;⩂;ncup;ISOAMSB;;UNION WITH OVERBAR
B;⩃;ncap;ISOAMSB;;INTERSECTION WITH OVERBAR
B;⩄;capand;ISOAMSB;;INTERSECTION WITH LOGICAL AND
⩅;cupor;ISOAMSB;;UNION WITH LOGICAL OR
B;⩆;cupcap;ISOAMSB;;UNION ABOVE INTERSECTION
B;⩇;capcup;ISOAMSB;;INTERSECTION ABOVE UNION
B;⩈;cupbrcap;ISOAMSB;;UNION ABOVE BAR ABOVE INTERSECTION
B;⩉;capbrcup;ISOAMSB;;INTERSECTION ABOVE BAR ABOVE UNION
B;⩊;cupcup;ISOAMSB;;UNION BESIDE AND JOINED WITH UNION
B;⩋;capcap;ISOAMSB;;INTERSECTION BESIDE AND JOINED WITH INTERSECTION
⩌;ccups;ISOAMSB;;CLOSED UNION WITH SERIFS
⩎;;;;DOUBLE SQUARE INTERSECTION
B;⩏;;;;DOUBLE SQUARE UNION
B;⩐;ccupssm;ISOAMSB;;CLOSED UNION WITH SERIFS AND SMASH PRODUCT
B;⩑;anddot;;;LOGICAL AND WITH DOT ABOVE
⩒;ordot;;;LOGICAL OR WITH DOT ABOVE
⩓;And;ISOTECH;;DOUBLE LOGICAL AND
B;⩔;Or;ISOTECH;;DOUBLE LOGICAL OR
⩕;andand;ISOTECH;;TWO INTERSECTING LOGICAL AND
B;⩖;oror;ISOTECH;;TWO INTERSECTING LOGICAL OR
B;⩗;orslope;ISOTECH;;SLOPING LARGE OR
B;⩘;andslope;ISOTECH;;SLOPING LARGE AND
R;⩙;;;;LOGICAL OR OVERLAPPING LOGICAL AND
B;⩚;andv;ISOTECH;;LOGICAL AND WITH MIDDLE STEM
B;⩛;orv;ISOTECH;;LOGICAL OR WITH MIDDLE STEM
B;⩜;andd;ISOTECH;;LOGICAL AND WITH HORIZONTAL DASH
⩝;ord;ISOTECH;;LOGICAL OR WITH HORIZONTAL DASH
B;⩞;Barwed;ISOAMSB;;LOGICAL AND WITH DOUBLE OVERBAR
⩟;wedbar;ISOAMSB;;LOGICAL AND WITH UNDERBAR
⩠;wedBar;;;LOGICAL AND WITH DOUBLE UNDERBAR
⩡;;ISOAMSB;;SMALL VEE WITH UNDERBAR
B;⩢;Barvee;;;LOGICAL OR WITH DOUBLE OVERBAR
⩣;veeBar;;;LOGICAL OR WITH DOUBLE UNDERBAR
⩤;;;;Z NOTATION DOMAIN
⩥;;;;Z NOTATION RANGE
R;⩧;;;;IDENTICAL WITH DOT ABOVE
R;⩨;;;;TRIPLE HORIZONTAL BAR WITH DOUBLE VERTICAL STROKE
R;⩩;;;;TRIPLE HORIZONTAL BAR WITH TRIPLE VERTICAL STROKE
⩪;simdot;ISOTECH;;TILDE OPERATOR WITH DOT ABOVE
R;⩫;;;;TILDE OPERATOR WITH RISING DOTS
⩬;;;;SIMILAR MINUS SIMILAR
R;⩭;congdot;ISOAMSR;;CONGRUENT WITH DOT ABOVE
⩮;easter;ISOAMSR;;EQUALS WITH ASTERISK
⩯;apacir;ISOTECH;;ALMOST EQUAL TO WITH CIRCUMFLEX ACCENT
⩰;apE;ISOAMSR;;APPROXIMATELY EQUAL OR EQUAL TO
B;⩲;pluse;ISOAMSB;;PLUS SIGN ABOVE EQUALS SIGN
R;⩳;Esim;ISOAMSR;;EQUALS SIGN ABOVE TILDE OPERATOR
⩴;Colone;ISOAMSR;;DOUBLE COLON EQUAL
⩵;eqeq;;;TWO CONSECUTIVE EQUALS SIGNS
⩶;;;;THREE CONSECUTIVE EQUALS SIGNS
⩷;eDDot;ISOAMSR;;EQUALS SIGN WITH TWO DOTS ABOVE AND TWO DOTS BELOW
⩹;ltcir;ISOAMSR;;LESS - THAN WITH CIRCLE INSIDE
R;⩺;gtcir;ISOAMSR;;GREATER-THAN WITH CIRCLE INSIDE
R;⩻;ltquest;ISOAMSR;;LESS-THAN WITH QUESTION MARK ABOVE
⩼;gtquest;ISOAMSR;;GREATER - THAN WITH QUESTION MARK ABOVE
R;⩽;les;ISOAMSR;;LESS-THAN OR SLANTED EQUAL TO
⩾;ges;ISOAMSR;;GREATER - THAN OR SLANTED EQUAL TO
⩿;lesdot;ISOAMSR;;LESS - THAN OR SLANTED EQUAL TO WITH DOT INSIDE
⪀;gesdot;ISOAMSR;;GREATER - THAN OR SLANTED EQUAL TO WITH DOT INSIDE
⪁;lesdoto;ISOAMSR;;LESS - THAN OR SLANTED EQUAL TO WITH DOT ABOVE
⪂;gesdoto;ISOAMSR;;GREATER - THAN OR SLANTED EQUAL TO WITH DOT ABOVE
R;⪃;lesdotor;ISOAMSR;;LESS-THAN OR SLANTED EQUAL TO WITH DOT ABOVE RIGHT
⪄;gesdotol;ISOAMSR;;GREATER - THAN OR SLANTED EQUAL TO WITH DOT ABOVE LEFT
⪅;lap;ISOAMSR;;LESS - THAN OR APPROXIMATE
⪆;gap;ISOAMSR;;GREATER - THAN OR APPROXIMATE
R;⪇;lne;ISOAMSN;;LESS-THAN AND SINGLE-LINE NOT EQUAL TO
R;⪈;gne;ISOAMSN;;GREATER-THAN AND SINGLE-LINE NOT EQUAL TO
R;⪉;lnap;ISOAMSN;;LESS-THAN AND NOT APPROXIMATE
R;⪊;gnap;ISOAMSN;;GREATER-THAN AND NOT APPROXIMATE
R;⪋;lEg;ISOAMSR;;LESS-THAN ABOVE DOUBLE-LINE EQUAL ABOVE GREATER-THAN
R;⪌;gEl;ISOAMSR;;GREATER-THAN ABOVE DOUBLE-LINE EQUAL ABOVE LESS-THAN
R;⪍;lsime;ISOAMSR;;LESS-THAN ABOVE SIMILAR OR EQUAL
R;⪎;gsime;ISOAMSR;;GREATER-THAN ABOVE SIMILAR OR EQUAL
R;⪏;lsimg;ISOAMSR;;LESS-THAN ABOVE SIMILAR ABOVE GREATER-THAN
R;⪐;gsiml;ISOAMSR;;GREATER-THAN ABOVE SIMILAR ABOVE LESS-THAN
R;⪑;lgE;ISOAMSR;;LESS-THAN ABOVE GREATER-THAN ABOVE DOUBLE-LINE EQUAL
R;⪒;glE;ISOAMSR;;GREATER-THAN ABOVE LESS-THAN ABOVE DOUBLE-LINE EQUAL
R;⪓;lesges;ISOAMSR;;LESS-THAN ABOVE SLANTED EQUAL ABOVE GREATER-THAN ABOVE SLANTED EQUAL
R;⪔;gesles;ISOAMSR;;GREATER-THAN ABOVE SLANTED EQUAL ABOVE LESS-THAN ABOVE SLANTED EQUAL
⪕;els;ISOAMSR;;SLANTED EQUAL TO OR LESS - THAN
⪖;egs;ISOAMSR;;SLANTED EQUAL TO OR GREATER - THAN
⪗;elsdot;ISOAMSR;;SLANTED EQUAL TO OR LESS - THAN WITH DOT INSIDE
⪘;egsdot;ISOAMSR;;SLANTED EQUAL TO OR GREATER - THAN WITH DOT INSIDE
⪙;;;;DOUBLE - LINE EQUAL TO OR LESS - THAN
R;⪚;;;;DOUBLE-LINE EQUAL TO OR GREATER-THAN
R;⪛;;;;DOUBLE-LINE SLANTED EQUAL TO OR LESS-THAN
R;⪜;;;;DOUBLE-LINE SLANTED EQUAL TO OR GREATER-THAN
R;⪝;siml;ISOAMSR;;SIMILAR OR LESS-THAN
R;⪞;simg;ISOAMSR;;SIMILAR OR GREATER-THAN
R;⪟;simlE;ISOAMSR;;SIMILAR ABOVE LESS-THAN ABOVE EQUALS SIGN
⪠;simgE;ISOAMSR;;SIMILAR ABOVE GREATER - THAN ABOVE EQUALS SIGN
R;⪡;;ISOAMSR;;DOUBLE NESTED LESS-THAN
R;⪢;;ISOAMSR;;DOUBLE NESTED GREATER-THAN
⪣;Ltbar;;;DOUBLE NESTED LESS - THAN WITH UNDERBAR
R;⪤;glj;ISOAMSR;;GREATER-THAN OVERLAPPING LESS-THAN
R;⪥;gla;ISOAMSR;;GREATER-THAN BESIDE LESS-THAN
⪦;ltcc;ISOAMSR;;LESS - THAN CLOSED BY CURVE
⪧;gtcc;ISOAMSR;;GREATER - THAN CLOSED BY CURVE
⪨;lescc;ISOAMSR;;LESS - THAN CLOSED BY CURVE ABOVE SLANTED EQUAL
R;⪩;gescc;ISOAMSR;;GREATER-THAN CLOSED BY CURVE ABOVE SLANTED EQUAL
R;⪪;smt;ISOAMSR;;SMALLER THAN
R;⪫;lat;ISOAMSR;;LARGER THAN
R;⪬;smte;ISOAMSR;;SMALLER THAN OR EQUAL TO
R;⪭;late;ISOAMSR;;LARGER THAN OR EQUAL TO
R;⪮;bumpE;ISOAMSR;;EQUALS SIGN WITH BUMPY ABOVE
R;⪯;pre;ISOAMSR;;PRECEDES ABOVE SINGLE-LINE EQUALS SIGN
R;⪰;sce;ISOAMSR;;SUCCEEDS ABOVE SINGLE-LINE EQUALS SIGN
R;⪱;;;;PRECEDES ABOVE SINGLE-LINE NOT EQUAL TO
R;⪲;;;;SUCCEEDS ABOVE SINGLE-LINE NOT EQUAL TO
R;⪳;prE;ISOAMSR;;PRECEDES ABOVE EQUALS SIGN
⪴;scE;ISOAMSR;;SUCCEEDS ABOVE EQUALS SIGN
R;⪵;prnE;ISOAMSN;;PRECEDES ABOVE NOT EQUAL TO
R;⪶;scnE;ISOAMSN;;SUCCEEDS ABOVE NOT EQUAL TO
R;⪷;prap;ISOAMSR;;PRECEDES ABOVE ALMOST EQUAL TO
R;⪸;scap;ISOAMSR;;SUCCEEDS ABOVE ALMOST EQUAL TO
R;⪹;prnap;ISOAMSN;;PRECEDES ABOVE NOT ALMOST EQUAL TO
R;⪺;scnap;ISOAMSN;;SUCCEEDS ABOVE NOT ALMOST EQUAL TO
R;⪻;Pr;ISOAMSR;;DOUBLE PRECEDES
R;⪼;Sc;ISOAMSR;;DOUBLE SUCCEEDS
⪽;subdot;ISOAMSB;;SUBSET WITH DOT
R;⪾;supdot;ISOAMSB;;SUPERSET WITH DOT
⪿;subplus;ISOAMSR;;SUBSET WITH PLUS SIGN BELOW
⫁;submult;ISOAMSR;;SUBSET WITH MULTIPLICATION SIGN BELOW
R;⫂;supmult;ISOAMSR;;SUPERSET WITH MULTIPLICATION SIGN BELOW
⫃;subedot;ISOAMSR;;SUBSET OF OR EQUAL TO WITH DOT ABOVE
⫄;supedot;ISOAMSR;;SUPERSET OF OR EQUAL TO WITH DOT ABOVE
⫅;subE;ISOAMSR;;SUBSET OF ABOVE EQUALS SIGN
R;⫆;supE;ISOAMSR;;SUPERSET OF ABOVE EQUALS SIGN
R;⫇;subsim;ISOAMSR;;SUBSET OF ABOVE TILDE OPERATOR
R;⫈;supsim;ISOAMSR;;SUPERSET OF ABOVE TILDE OPERATOR
R;⫉;;;;SUBSET OF ABOVE ALMOST EQUAL TO
R;⫊;;;;SUPERSET OF ABOVE ALMOST EQUAL TO
R;⫋;subnE;ISOAMSN;;SUBSET OF ABOVE NOT EQUAL TO
⫌;supnE;ISOAMSN;;SUPERSET OF ABOVE NOT EQUAL TO
R;⫍;;;;SQUARE LEFT OPEN BOX OPERATOR
R;⫎;;;;SQUARE RIGHT OPEN BOX OPERATOR
R;⫐;csup;ISOAMSR;;CLOSED SUPERSET
⫑;csube;ISOAMSR;;CLOSED SUBSET OR EQUAL TO
⫒;csupe;ISOAMSR;;CLOSED OR EQUAL TO
R;⫓;subsup;ISOAMRR;;SUBSET ABOVE SUPERSET
R;⫔;supsub;ISOAMSR;;SUPERSET ABOVE SUBSET
R;⫕;subsub;ISOAMSR;;SUBSET ABOVE SUBSET
⫖;supsup;ISOAMSR;;SUPERSET ABOVE SUPERSET
R;⫗;suphsub;ISOAMSR;;SUPERSET BESIDE SUBSET
⫘;supdsub;ISOAMSR;;SUPERSET BESIDE AND JOINED BY DASH WITH SUBSET
R;⫙;forkv;ISOAMSR;;ELEMENT OF OPENING DOWNWARDS
R;⫚;topfork;ISOAMSR;;PITCHFORK WITH TEE TOP
R;⫛;mlcp;ISOAMSR;;TRANSVERSAL INTERSECTION
R;⫝̸;;;;FORKING
R;⫝;;;;NONFORKING
⫞;;;;SHORT LEFT TACK
⫟;;;;SHORT DOWN
R;⫠;;;;SHORT UP TACK
N;⫡;;;;PERPENDICULAR WITH S
⫢;vDdash;;;VERTICAL BAR TRIPLE RIGHT TURNSTILE
R;⫣;dashV;;;DOUBLE VERTICAL BAR LEFT TURNSTILE
R;⫤;Dashv;ISOAMSR;;VERTICAL BAR DOUBLE LEFT TURNSTILE
R;⫥;;;;DOUBLE VERTICAL BAR DOUBLE LEFT TURNSTILE
R;⫦;Vdashl;ISOAMSR;;LONG DASH FROM LEFT MEMBER OF DOUBLE VERTICAL
R;⫧;Barv;ISOAMSR;;SHORT DOWN TACK WITH OVERBAR
⫨;vBar;ISOAMSR;;SHORT UP TACK WITH UNDERBAR
R;⫩;vBarv;ISOAMSR;;SHORT UP TACK ABOVE SHORT DOWN TACK
⫪;barV;;;DOUBLE DOWN
⫫;Vbar;ISOAMSR;;DOUBLE UP TACK
R;⫬;Not;ISOTECH;;DOUBLE STROKE NOT SIGN
R;⫭;bNot;ISOTECH;;REVERSED DOUBLE STROKE NOT SIGN
⫮;rnmid;ISOAMSN;;DOES NOT DIVIDE WITH REVERSED NEGATION SLASH
⫯;cirmid;ISOAMSA;;VERTICAL LINE WITH CIRCLE ABOVE
R;⫰;midcir;ISOAMSA;;VERTICAL LINE WITH CIRCLE BELOW
⫱;topcir;ISOTECH;;DOWN TACK WITH CIRCLE BELOW
R;⫲;nhpar;ISOTECH;;PARALLEL WITH HORIZONTAL STROKE
R;⫳;parsim;ISOAMSN;;PARALLEL WITH TILDE OPERATOR
⫴;vert3;;;TRIPLE VERTICAL BAR BINARY RELATION
B;⫵;;;;TRIPLE VERTICAL BAR WITH HORIZONTAL STROKE
⫶;vellipv;;;TRIPLE COLON OPERATOR
R;⫷;;;;TRIPLE NESTED LESS-THAN
R;⫹;;;;DOUBLE-LINE SLANTED LESS-THAN OR EQUAL TO
R;⫺;;;;DOUBLE-LINE SLANTED GREATER-THAN OR EQUAL TO
L;⫼;;;;LARGE TRIPLE VERTICAL BAR OPERATOR
B;⫽;parsl;ISOTECH;;DOUBLE SOLIDUS OPERATOR
B;⫾;;;;WHITE VERTICAL BAR
⬀ ;;;;NORTH EAST WHITE ARROW
⬁ ;;;;NORTH WEST WHITE ARROW
⬂ ;;;;SOUTH EAST WHITE ARROW
⬃ ;;;;SOUTH WEST WHITE ARROW
R;⬄;;;;LEFT RIGHT WHITE ARROW
R;⬅;;;;LEFTWARDS BLACK ARROW
R;⬆;;;;UPWARDS BLACK ARROW
R;⬇;;;;DOWNWARDS BLACK ARROW
⬈ ;;;;NORTH EAST BLACK ARROW
⬉ ;;;;NORTH WEST BLACK ARROW
⬊ ;;;;SOUTH EAST BLACK ARROW
R;⬋;;;;SOUTH WEST BLACK ARROW
⬌ ;;;;LEFT RIGHT BLACK ARROW
⬍ ;;;;UP DOWN BLACK ARROW
⬎ ;;;;RIGHTWARDS ARROW WITH TIP DOWNWARDS
R;⬏;;;;RIGHTWARDS ARROW WITH TIP UPWARDS
⬐ ;;;;LEFTWARDS ARROW WITH TIP DOWNWARDS
R;⬑;;;;LEFTWARDS ARROW WITH TIP UPWARDS
⬒ ;squarft?;;;SQUARE WITH TOP HALF BLACK
⬓ ;squarfb?;;;SQUARE WITH BOTTOM HALF BLACK
N;⬔;squarftr;ISOPUB;;SQUARE WITH UPPER RIGHT DIAGONAL HALF BLACK
N;⬕;squarfbl;ISOPUB;;SQUARE WITH LOWER LEFT DIAGONAL HALF BLACK
;diamonfl;ISOPUB;;DIAMOND WITH LEFT HALF BLACK
N;⬗;diamonfr;ISOPUB;;DIAMOND WITH RIGHT HALF BLACK
N;⬘;diamonft;ISOPUB;;DIAMOND WITH TOP HALF BLACK
N;⬙;diamonfb;ISOPUB;;DIAMOND WITH BOTTOM HALF BLACK
N;⬛..⬟;;;;BLACK LARGE SQUARE..BLACK PENTAGON
⬠ ;;;;WHITE PENTAGON
N;⬡;benzen;ISOCHEM;;WHITE HEXAGON
⬢ ;;;;BLACK HEXAGON
⬣ ;;;;HORIZONTAL BLACK HEXAGON
⬤ .. ⬯ ;;;;BLACK LARGE CIRCLE .. WHITE VERTICAL ELLIPSE
⬰ .. ⭄;;;;LEFT ARROW WITH SMALL .. RIGHTWARDS ARROW THROUGH SUPERSET
R;⭅;;;;LEFTWARDS QUADRUPLE ARROW
R;⭆;;;;RIGHTWARDS QUADRUPLE ARROW
R;⭇..⭌;;;;REVERSE TILDE OPERATOR ABOVE RIGHTWARDS ARROW..RIGHTWARDS ARROW ABOVE REVERSE TILDE OPERATOR
⭐ .. ;;;;WHITE MEDIUM STAR .. WHITE RIGHT - POINTING PENTAGON
⮕ ;;;;RIGHTWARDS BLACK ARROW
N;⯂..⯈;;;;TURNED BLACK PENTAGON..BLACK MEDIUM RIGHT-POINTING TRIANGLE CENTRED
N;⯊;;;;TOP HALF BLACK CIRCLE
N;⯋;;;;BOTTOM HALF BLACK CIRCLE
〈 ; ; ; ( deprecated for math use);LEFT ANGLE BRACKET
〉 ; ; ; ( deprecated for math use);RIGHT ANGLE BRACKET
lbbrk;ISOTECH ; left broken bracket;LEFT TORTOISE SHELL BRACKET
rbbrk;ISOTECH ; right broken bracket ; RIGHT TORTOISE SHELL BRACKET
O;;loang;ISOTECH;;LEFT WHITE TORTOISE SHELL BRACKET
roang;ISOTECH;;RIGHT WHITE TORTOISE SHELL BRACKET
; ; ( deprecated for math use);LEFT WHITE SQUARE BRACKET
; ; ( deprecated for math use);RIGHT WHITE SQUARE BRACKET
N;の;;;;HIRAGANA LETTER NO
X;﬩;;; (doesn't have cross shape) ;HEBREW LETTER ALTERNATIVE PLUS SIGN
︀;;;;VARIATION SELECTOR-1
(deprecated for math use);PRESENTATION FORM FOR VERTICAL LEFT PARENTHESIS
(deprecated for math use);PRESENTATION FORM FOR VERTICAL RIGHT PARENTHESIS
PRESENTATION FORM FOR VERTICAL LEFT CURLY BRACKET
(deprecated for math use);PRESENTATION FORM FOR VERTICAL RIGHT CURLY BRACKET
X;﹡..﹦;;; ("small" compatibility variants of ASCII math operators) ;SMALL ASTERISK..SMALL EQUALS SIGN
X;﹨;;; ("small" compatibility variant of ASCII math operators) ;SMALL REVERSE SOLIDUS
+ ; ; ; ( " wide " compatibility variant of ASCII math operators ) ; FULLWIDTH PLUS SIGN
< .. ; ; ; ( " wide " compatibility variants of ASCII math operators ) ; FULLWIDTH LESS - THAN SIGN .. FULLWIDTH GREATER - THAN SIGN
\ ; ; ; ( " wide " compatibility variant of ASCII math operators ) ; FULLWIDTH REVERSE SOLIDUS
^ ; ; ; ( " wide " compatibility variant of ASCII math operators ) ; FULLWIDTH CIRCUMFLEX ACCENT
| ; ; ; ( " wide " compatibility variant of ASCII math operators ) ; FULLWIDTH VERTICAL LINE
~ ; ; ; ( " wide " compatibility variant of math operator ) ; FULLWIDTH TILDE
X;¬;;; ("wide" compatibility variant of ASCII math operators) ;FULLWIDTH NOT SIGN
← .. ↓ ; ; ; ( " wide " compatibility variants of arrows);HALFWIDTH LEFTWARDS ARROW .. HALFWIDTH DOWNWARDS ARROW
A;𝐀..𝑔;;;;MATHEMATICAL BOLD CAPITAL A..MATHEMATICAL ITALIC SMALL G
ITALIC SMALL H < reserved >
𝑖 .. 𝒛;;;;MATHEMATICAL ITALIC SMALL I .. MATHEMATICAL BOLD ITALIC SMALL Z
A;𝒜;Ascr;ISOMSCR;;MATHEMATICAL SCRIPT CAPITAL A
A;;Bscr;ISOMSCR;;SCRIPT CAPITAL B <reserved>
A;𝒞;Cscr;ISOMSCR;;MATHEMATICAL SCRIPT CAPITAL C
A;𝒟;Dscr;ISOMSCR;;MATHEMATICAL SCRIPT CAPITAL D
A;;Escr;ISOMSCR;;SCRIPT CAPITAL E <reserved>
A;𝒢;Gscr;ISOMSCR;;MATHEMATICAL SCRIPT CAPITAL G
A;;Hscr;ISOMSCR;;SCRIPT CAPITAL H <reserved>
A;;Iscr;ISOMSCR;;SCRIPT CAPITAL I <reserved>
A;𝒥;Jscr;ISOMSCR;;MATHEMATICAL SCRIPT CAPITAL J
A;𝒦;Kscr;ISOMSCR;;MATHEMATICAL SCRIPT CAPITAL K
A;;Lscr;ISOMSCR;;SCRIPT CAPITAL L <reserved>
A;;Mscr;ISOMSCR;;SCRIPT CAPITAL M <reserved>
A;𝒩;Nscr;ISOMSCR;;MATHEMATICAL SCRIPT CAPITAL N
A;𝒪;Oscr;ISOMSCR;;MATHEMATICAL SCRIPT CAPITAL O
A;𝒫;Pscr;ISOMSCR;;MATHEMATICAL SCRIPT CAPITAL P
A;𝒬;Qscr;ISOMSCR;;MATHEMATICAL SCRIPT CAPITAL Q
A;;Rscr;ISOMSCR;;SCRIPT CAPITAL R <reserved>
A;𝒮;Sscr;ISOMSCR;;MATHEMATICAL SCRIPT CAPITAL S
A;𝒯;Tscr;ISOMSCR;;MATHEMATICAL SCRIPT CAPITAL T
A;𝒰;Uscr;ISOMSCR;;MATHEMATICAL SCRIPT CAPITAL U
A;𝒱;Vscr;ISOMSCR;;MATHEMATICAL SCRIPT CAPITAL V
A;𝒲;Wscr;ISOMSCR;;MATHEMATICAL SCRIPT CAPITAL W
A;𝒳;Xscr;ISOMSCR;;MATHEMATICAL SCRIPT CAPITAL X
A;𝒴;Yscr;ISOMSCR;;MATHEMATICAL SCRIPT CAPITAL Y
A;𝒵;Zscr;ISOMSCR;;MATHEMATICAL SCRIPT CAPITAL Z
A;𝒶;ascr;ISOMSCR;;MATHEMATICAL SCRIPT SMALL A
A;𝒷;bscr;ISOMSCR;;MATHEMATICAL SCRIPT SMALL B
A;𝒸;cscr;ISOMSCR;;MATHEMATICAL SCRIPT SMALL C
A;𝒹;dscr;ISOMSCR;;MATHEMATICAL SCRIPT SMALL D
A;;escr;ISOMSCR;;SCRIPT SMALL E <reserved>
𝒻;fscr;ISOMSCR;;MATHEMATICAL SCRIPT SMALL F
A;;gscr;ISOMSCR;;SCRIPT SMALL G <reserved>
A;𝒽;hscr;ISOMSCR;;MATHEMATICAL SCRIPT SMALL H
A;𝒾;iscr;ISOMSCR;;MATHEMATICAL SCRIPT SMALL I
A;𝒿;jscr;ISOMSCR;;MATHEMATICAL SCRIPT SMALL J
A;𝓀;kscr;ISOMSCR;;MATHEMATICAL SCRIPT SMALL K
A;𝓁;lscr;ISOMSCR;;MATHEMATICAL SCRIPT SMALL L
A;𝓂;mscr;ISOMSCR;;MATHEMATICAL SCRIPT SMALL M
A;𝓃;nscr;ISOMSCR;;MATHEMATICAL SCRIPT SMALL N
A;;oscr;ISOMSCR;;SCRIPT SMALL O <reserved>
A;𝓅;pscr;ISOMSCR;;MATHEMATICAL SCRIPT SMALL P
A;𝓆;qscr;ISOMSCR;;MATHEMATICAL SCRIPT SMALL Q
A;𝓇;rscr;ISOMSCR;;MATHEMATICAL SCRIPT SMALL R
A;𝓈;sscr;ISOMSCR;;MATHEMATICAL SCRIPT SMALL S
A;𝓉;tscr;ISOMSCR;;MATHEMATICAL SCRIPT SMALL T
A;𝓊;uscr;ISOMSCR;;MATHEMATICAL SCRIPT SMALL U
A;𝓌;wscr;ISOMSCR;;MATHEMATICAL SCRIPT SMALL W
A;𝓍;xscr;ISOMSCR;;MATHEMATICAL SCRIPT SMALL X
A;𝓎;yscr;ISOMSCR;;MATHEMATICAL SCRIPT SMALL Y
A;𝓏;zscr;ISOMSCR;;MATHEMATICAL SCRIPT SMALL Z
A;𝓐..𝔃;;;;MATHEMATICAL BOLD SCRIPT CAPITAL A..MATHEMATICAL BOLD SCRIPT SMALL Z
A;𝔄;Afr;ISOMFRK;;MATHEMATICAL FRAKTUR CAPITAL A
A;𝔅;Bfr;ISOMFRK;;MATHEMATICAL FRAKTUR CAPITAL B
Cfr;ISOMFRK;;FRAKTUR CAPITAL C < reserved >
A;𝔇;Dfr;ISOMFRK;;MATHEMATICAL FRAKTUR CAPITAL D
A;𝔉;Ffr;ISOMFRK;;MATHEMATICAL FRAKTUR CAPITAL F
A;𝔊;Gfr;ISOMFRK;;MATHEMATICAL FRAKTUR CAPITAL G
A;;Hfr;ISOMFRK;;FRAKTUR CAPITAL H <reserved>
A;;Ifr;ISOMFRK;;FRAKTUR CAPITAL I <reserved>
A;𝔍;Jfr;ISOMFRK;;MATHEMATICAL FRAKTUR CAPITAL J
A;𝔎;Kfr;ISOMFRK;;MATHEMATICAL FRAKTUR CAPITAL K
A;𝔏;Lfr;ISOMFRK;;MATHEMATICAL FRAKTUR CAPITAL L
A;𝔐;Mfr;ISOMFRK;;MATHEMATICAL FRAKTUR CAPITAL M
A;𝔑;Nfr;ISOMFRK;;MATHEMATICAL FRAKTUR CAPITAL N
𝔒;Ofr;ISOMFRK;;MATHEMATICAL FRAKTUR CAPITAL O
A;𝔓;Pfr;ISOMFRK;;MATHEMATICAL FRAKTUR CAPITAL P
A;𝔔;Qfr;ISOMFRK;;MATHEMATICAL FRAKTUR CAPITAL Q
A;;Rfr;ISOMFRK;;FRAKTUR CAPITAL R <reserved>
𝔖;Sfr;ISOMFRK;;MATHEMATICAL FRAKTUR CAPITAL S
A;𝔗;Tfr;ISOMFRK;;MATHEMATICAL FRAKTUR CAPITAL T
A;𝔘;Ufr;ISOMFRK;;MATHEMATICAL FRAKTUR CAPITAL U
𝔙;Vfr;ISOMFRK;;MATHEMATICAL FRAKTUR CAPITAL V
𝔚;Wfr;ISOMFRK;;MATHEMATICAL FRAKTUR CAPITAL W
𝔛;Xfr;ISOMFRK;;MATHEMATICAL FRAKTUR CAPITAL X
A;𝔜;Yfr;ISOMFRK;;MATHEMATICAL FRAKTUR CAPITAL Y
A;;Zfr;ISOMFRK;;FRAKTUR CAPITAL Z <reserved>
A;𝔞;afr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL A
𝔟;bfr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL B
A;𝔠;cfr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL C
𝔡;dfr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL D
𝔢;efr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL E
A;𝔣;ffr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL F
𝔤;gfr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL G
A;𝔥;hfr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL H
A;𝔦;ifr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL I
A;𝔧;jfr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL J
A;𝔨;kfr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL K
𝔩;lfr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL L
A;𝔪;mfr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL M
𝔫;nfr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL N
A;𝔬;ofr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL O
A;𝔭;pfr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL P
𝔮;qfr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL Q
𝔯;rfr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL R
A;𝔰;sfr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL S
𝔱;tfr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL T
A;𝔲;ufr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL U
A;𝔳;vfr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL V
A;𝔴;wfr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL W
𝔵;xfr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL X
𝔶;yfr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL Y
𝔷;zfr;ISOMFRK;;MATHEMATICAL FRAKTUR SMALL Z
A;𝔸;Aopf;ISOMOPF;;MATHEMATICAL DOUBLE-STRUCK CAPITAL A
A;𝔹;Bopf;ISOMOPF;;MATHEMATICAL DOUBLE-STRUCK CAPITAL B
Copf;ISOMOPF;;DOUBLE - STRUCK CAPITAL C < reserved >
A;𝔻;Dopf;ISOMOPF;;MATHEMATICAL DOUBLE-STRUCK CAPITAL D
A;𝔼;Eopf;ISOMOPF;;MATHEMATICAL DOUBLE-STRUCK CAPITAL E
A;𝔽;Fopf;ISOMOPF;;MATHEMATICAL DOUBLE-STRUCK CAPITAL F
A;𝔾;Gopf;ISOMOPF;;MATHEMATICAL DOUBLE-STRUCK CAPITAL G
A;;Hopf;ISOMOPF;;DOUBLE-STRUCK CAPITAL H <reserved>
A;𝕀;Iopf;ISOMOPF;;MATHEMATICAL DOUBLE-STRUCK CAPITAL I
A;𝕁;Jopf;ISOMOPF;;MATHEMATICAL DOUBLE-STRUCK CAPITAL J
A;𝕂;Kopf;ISOMOPF;;MATHEMATICAL DOUBLE-STRUCK CAPITAL K
A;𝕃;Lopf;ISOMOPF;;MATHEMATICAL DOUBLE-STRUCK CAPITAL L
A;𝕄;Mopf;ISOMOPF;;MATHEMATICAL DOUBLE-STRUCK CAPITAL M
A;;Nopf;ISOMOPF;;DOUBLE-STRUCK CAPITAL N <reserved>
A;𝕆;Oopf;ISOMOPF;;MATHEMATICAL DOUBLE-STRUCK CAPITAL O
Popf;ISOMOPF;;DOUBLE - STRUCK CAPITAL P < reserved >
A;;Qopf;ISOMOPF;;DOUBLE-STRUCK CAPITAL Q <reserved>
A;;Ropf;ISOMOPF;;DOUBLE-STRUCK CAPITAL R <reserved>
𝕊;Sopf;ISOMOPF;;MATHEMATICAL DOUBLE - STRUCK CAPITAL S
A;𝕋;Topf;ISOMOPF;;MATHEMATICAL DOUBLE-STRUCK CAPITAL T
A;𝕌;Uopf;ISOMOPF;;MATHEMATICAL DOUBLE-STRUCK CAPITAL U
𝕍;Vopf;ISOMOPF;;MATHEMATICAL DOUBLE - STRUCK CAPITAL V
A;𝕎;Wopf;ISOMOPF;;MATHEMATICAL DOUBLE-STRUCK CAPITAL W
A;𝕏;Xopf;ISOMOPF;;MATHEMATICAL DOUBLE-STRUCK CAPITAL X
A;𝕐;Yopf;ISOMOPF;;MATHEMATICAL DOUBLE-STRUCK CAPITAL Y
Zopf;ISOMOPF;;DOUBLE - STRUCK CAPITAL Z < reserved >
A;𝕒;aopf;;;MATHEMATICAL DOUBLE-STRUCK SMALL A
A;𝕓;bopf;;;MATHEMATICAL DOUBLE-STRUCK SMALL B
A;𝕔;copf;;;MATHEMATICAL DOUBLE-STRUCK SMALL C
𝕕;dopf;;;MATHEMATICAL DOUBLE - STRUCK SMALL D
A;𝕖;eopf;;;MATHEMATICAL DOUBLE-STRUCK SMALL E
A;𝕗;fopf;;;MATHEMATICAL DOUBLE-STRUCK SMALL F
A;𝕘;gopf;;;MATHEMATICAL DOUBLE-STRUCK SMALL G
𝕙;hopf;;;MATHEMATICAL DOUBLE - STRUCK SMALL H
A;𝕚;iopf;;;MATHEMATICAL DOUBLE-STRUCK SMALL I
𝕛;jopf;;;MATHEMATICAL DOUBLE - STRUCK SMALL J
A;𝕜;kopf;;;MATHEMATICAL DOUBLE-STRUCK SMALL K
𝕝;lopf;;;MATHEMATICAL DOUBLE - STRUCK SMALL L
A;𝕞;mopf;;;MATHEMATICAL DOUBLE-STRUCK SMALL M
A;𝕟;nopf;;;MATHEMATICAL DOUBLE-STRUCK SMALL N
A;𝕠;oopf;;;MATHEMATICAL DOUBLE-STRUCK SMALL O
A;𝕡;popf;;;MATHEMATICAL DOUBLE-STRUCK SMALL P
A;𝕣;ropf;;;MATHEMATICAL DOUBLE-STRUCK SMALL R
𝕤;sopf;;;MATHEMATICAL DOUBLE - STRUCK SMALL S
A;𝕥;topf;;;MATHEMATICAL DOUBLE-STRUCK SMALL T
A;𝕦;uopf;;;MATHEMATICAL DOUBLE-STRUCK SMALL U
A;𝕧;vopf;;;MATHEMATICAL DOUBLE-STRUCK SMALL V
A;𝕨;wopf;;;MATHEMATICAL DOUBLE-STRUCK SMALL W
A;𝕩;xopf;;;MATHEMATICAL DOUBLE-STRUCK SMALL X
A;𝕪;yopf;;;MATHEMATICAL DOUBLE-STRUCK SMALL Y
𝕫;zopf;;;MATHEMATICAL DOUBLE - STRUCK SMALL Z
A;𝕬..𝚣;;;;MATHEMATICAL BOLD FRAKTUR CAPITAL A..MATHEMATICAL MONOSPACE SMALL Z
𝚤;;ISOAMSO;;MATHEMATICAL ITALIC SMALL DOTLESS I
𝚥;;ISOAMSO;;MATHEMATICAL ITALIC SMALL DOTLESS J
𝚨 .. 𝟉;;;;MATHEMATICAL BOLD CAPITAL ALPHA .. MATHEMATICAL SANS - BOLD ITALIC PI SYMBOL
A;𝟊;b.Gammad;ISOGRK4;;MATHEMATICAL BOLD CAPITAL DIGAMMA
A;𝟋;b.gammad;ISOGRK4;;MATHEMATICAL BOLD SMALL DIGAMMA
N;𝟎..𝟿;;;;MATHEMATICAL BOLD DIGIT ZERO..MATHEMATICAL MONOSPACE DIGIT NINE
A;𞸀..𞸃;;;;ARABIC MATHEMATICAL ALEF..ARABIC MATHEMATICAL DAL
𞸅 .. 𞸟;;;;ARABIC MATHEMATICAL WAW .. ARABIC MATHEMATICAL DOTLESS QAF
𞸡;;;;ARABIC MATHEMATICAL INITIAL BEH
A;𞸢;;;;ARABIC MATHEMATICAL INITIAL JEEM
A;𞸤;;;;ARABIC MATHEMATICAL INITIAL HEH
𞸧;;;;ARABIC MATHEMATICAL INITIAL HAH
𞸩 .. 𞸲;;;;ARABIC MATHEMATICAL INITIAL YEH .. ARABIC MATHEMATICAL INITIAL QAF
𞸴 .. 𞸷;;;;ARABIC MATHEMATICAL INITIAL SHEEN .. ARABIC MATHEMATICAL INITIAL KHAH
𞸹;;;;ARABIC MATHEMATICAL INITIAL
A;𞸻;;;;ARABIC MATHEMATICAL INITIAL GHAIN
A;𞹂;;;;ARABIC MATHEMATICAL TAILED JEEM
𞹋;;;;ARABIC MATHEMATICAL TAILED LAM
𞹍 .. TAILED NOON .. ARABIC MATHEMATICAL TAILED AIN
A;𞹑;;;;ARABIC MATHEMATICAL TAILED SAD
𞹒;;;;ARABIC MATHEMATICAL TAILED
𞹔;;;;ARABIC MATHEMATICAL TAILED
𞹗;;;;ARABIC MATHEMATICAL TAILED
𞹙;;;;ARABIC MATHEMATICAL TAILED
𞹛;;;;ARABIC MATHEMATICAL TAILED
A;𞹟;;;;ARABIC MATHEMATICAL TAILED DOTLESS QAF
𞹡;;;;ARABIC MATHEMATICAL STRETCHED BEH
A;𞹢;;;;ARABIC MATHEMATICAL STRETCHED JEEM
A;𞹤;;;;ARABIC MATHEMATICAL STRETCHED HEH
A;𞹧..𞹪;;;;ARABIC MATHEMATICAL STRETCHED HAH..ARABIC MATHEMATICAL STRETCHED KAF
𞹬 .. 𞹲;;;;ARABIC MATHEMATICAL STRETCHED MEEM .. ARABIC MATHEMATICAL STRETCHED QAF
𞹴 .. 𞹷;;;;ARABIC MATHEMATICAL STRETCHED SHEEN .. ARABIC MATHEMATICAL STRETCHED KHAH
𞹹 .. 𞹼;;;;ARABIC MATHEMATICAL STRETCHED .. ARABIC MATHEMATICAL STRETCHED DOTLESS BEH
𞺀 .. 𞺉;;;;ARABIC MATHEMATICAL LOOPED ALEF .. ARABIC MATHEMATICAL LOOPED YEH
𞺋 .. 𞺛;;;;ARABIC MATHEMATICAL LOOPED LAM .. ARABIC MATHEMATICAL LOOPED GHAIN
𞺡 .. 𞺣;;;;ARABIC MATHEMATICAL DOUBLE - STRUCK BEH .. ARABIC MATHEMATICAL DOUBLE - STRUCK DAL
𞺥 .. 𞺩;;;;ARABIC MATHEMATICAL DOUBLE - STRUCK WAW .. ARABIC MATHEMATICAL DOUBLE - STRUCK YEH
𞺫 .. 𞺻;;;;ARABIC MATHEMATICAL DOUBLE - STRUCK LAM .. ARABIC MATHEMATICAL DOUBLE - STRUCK GHAIN
L;𞻰;;;;ARABIC MATHEMATICAL OPERATOR MEEM WITH HAH WITH TATWEEL
𞻱;;;;ARABIC MATHEMATICAL OPERATOR HAH WITH DAL
BLACK SLIGHTLY SMALL
;;;;BLACK
N;🞍;;;;BLACK SLIGHTLY SMALL SQUARE
🞗 .. 🞙 ;;;;BLACK .. BLACK MEDIUM SMALL DIAMOND
🞝 .. 🞟 ;;;;BLACK TINY LOZENGE .. BLACK MEDIUM SMALL LOZENGE | #lang quad/unicode/unicode-class-prep
math?
# downloaded from
# -15/MathClassEx-15.txt
# and used under license
# File: MathClassEx.txt
# Revision: 15
# Date: 2017-06-01, 12:35:00 GMT
#
# © 2017 Unicode®, Inc.
# Unicode and the Unicode Logo are registered trademarks of Unicode, Inc. in the U.S. and other countries.
# For terms of use, see
# For documentation, see /
#
# ------------------------------------------------
# This file is a classification of characters based on their usage in
# mathematical notation and providing a mapping to standard entity
# sets commonly used for SGML and MathML documents.
#
# While the contents of this file represent the best information
# available to the authors and the Unicode Technical Committee as
# of the date referenced above, it is likely that the information
# in this file will change from time to time. Most importantly,
# the mapping of characters to the ISO standard SGML entity sets
# is under review by the relevant ISO committee and may therefore
# change.
#
# This file is *NOT* formally part of the Unicode Character Database
# at this time.
#
# The data consists of 7 fields. The number and type of fields may change
# in future versions of this file.
#
# The character encoding of this plain-text file is UTF-8.
#
# 1: code point or range
#
# 2: class, one of:
#
# N - Normal - includes all digits and symbols requiring only one form
# A - Alphabetic
# B - Binary
# C - Closing - usually paired with opening delimiter
# D - Diacritic
# F - Fence - unpaired delimiter (often used as opening or closing)
# G - Glyph_Part - piece of large operator
# L - Large - n-ary or large operator, often takes limits
# O - Opening - usually paired with closing delimiter
# P - Punctuation
# R - Relation - includes arrows
# S - Space
# U - Unary - operators that are only unary
# V - Vary - operators that can be unary or binary depending on context
# X - Special - characters not covered by other classes
#
# The C, O, and F operators are stretchy. In addition, some binary operators such
# as U+002F are stretchy as noted in the descriptive comments. The classes are
# also useful in determining extra spacing around the operators as discussed
# in UTR #25.
#
# 3: Unicode character (UTF-8)
#
# 4: ISO entity name
#
# 5: ISO entity set
#
# 6: descriptive comments (of various types)
# The descriptive comments provide more information about a character,
# or its specific appearance. Some descriptions contain common macro
# names (with slash) but in the majority of cases, the description is
# simply the description of the entity in the published entity set, if
# different from the formal Unicode character name. Minor differences
# in word order, punctuation and verb forms have been ignored, but not
# systematic differences in terminology, such as filled vs. black.
# In principle this allows location of entities by their description.
#
# 7: Unicode character name or names
# Character names are provided for ease of reference only.
#
# not significant. Future versions of this file may use different amounts of
# whitespace.
#
# Some character positions in the Mathematical Alphanumeric Symbols block are
# reserved and have been mapped to the Letterlike Symbols block in Unicode.
# This is indicated in 24 special purpose comments.
#
# The character repertoire of this revision is the repertoire of Unicode
# Version 9.0. For more information see Revision 15 or later of UTR #25.
# ------------------------------------------------
SIGN
BRACKET
BELOW
BELOW
CUBE ROOT
ROOT
VERTICAL LINE
VERTICALLY
ARROW ABOVE
SMALL PI
CAPITAL G
CAPITAL L
CAPITAL Y
DIFFERENTIAL
IDENTICAL TO
LESS - THAN
NOR EQUAL TO
NOR GREATER - THAN
NOR LESS - THAN
A SUPERSET OF
OF
OF WITH TWO HORIZONTAL STROKES
FLOOR
JOIN
TACK WITH CIRCLE ABOVE
DIAMOND WITH RIGHTWARDS TICK
TICK
CIRCLE ARROW
CIRCLE ARROW
ARROW
ARROW
ARROW - TAIL
WITH HOOK
WITH HOOK
CLOCKWISE ARROW WITH MINUS
CLOCKWISE ARROW
RIGHT ARROW THROUGH SMALL
RIGHT DOWN BARB LEFT HARPOON
NOTATION RIGHT BINDING BRACKET
BRACKET WITH UNDERBAR
WITH TICK IN BOTTOM CORNER
BRACKET WITH TICK IN TOP CORNER
RIGHT ARC LESS - THAN BRACKET
ANGLE WITH UNDERBAR
INFINITY
SIGN AND SLANTED PARALLEL
WITH DOUBLE STROKE
INTEGRAL OPERATOR
OR CROSS PRODUCT
OR
INTERSECTION WITH SERIFS
SIGN WITH DOT BELOW
SIGN ABOVE PLUS SIGN
WITH FOUR DOTS ABOVE
SIGN BELOW
BINARY RELATION
WHITE VERTICAL BAR
< reserved >
SMALL V
SMALL Q
TAILED HAH
TAILED YEH
TAILED DOTLESS NOON
FEH
# EOF |
20d119639414cd4564a756a608fa5ae3fd0b99746d57a747e7d334979c14a623 | ghc/packages-Cabal | cabal.test.hs | import Test.Cabal.Prelude
main = cabalTest $
cabal' "v2-run" ["pkg-abc:program"] >>= assertOutputContains "pkg-def:publib"
| null | https://raw.githubusercontent.com/ghc/packages-Cabal/6f22f2a789fa23edb210a2591d74ea6a5f767872/cabal-testsuite/PackageTests/MultipleLibraries/Successful/cabal.test.hs | haskell | import Test.Cabal.Prelude
main = cabalTest $
cabal' "v2-run" ["pkg-abc:program"] >>= assertOutputContains "pkg-def:publib"
|
|
f9ae66b364ded16e12cb57d3f0fd9e022f94a1d2d12736a102bf9ba140a31a50 | opencog/learn | gen-dict.scm | #! /usr/bin/env guile
!#
;
; gen-dict.scm - Generate a random artificial grammar
;
Usage : ` ./gen - dict.scm < paramaters-file.scm > < output - dir > `
;
This expects two arguments : a file containing configuration parameters
; and the directory where the generated files should be written.
;
(use-modules (opencog) (opencog nlp fake))
; Get the program arguments
(define param-file (cadr (program-arguments)))
(define dict-dir (caddr (program-arguments)))
Program parameters . Define these so that guile compilation
; does not spew errors. (Maybe these should be #<unspecified>?)
(define num-link-types #f)
(define link-type-exp #f)
(define max-disjunct-size #f)
(define disjunct-exp #f)
(define section-size #f)
(define section-exp #f)
(define num-pos #f)
(define num-classes #f)
(define class-size #f)
(define class-exp #f)
(define num-wall-types #f)
(define num-to-wall #f)
(define sentence-enders #f)
(define sense-frac #f)
(define sense-frac #f)
(define num-senses #f)
(define sense-exp #f)
(define num-synonyms #f)
(define synonym-exp #f)
; Load the run-time configuration parameters.
; The check-and-load sequence is wrapped in a dummy top-level define
; (see the note above about guile compilation) so the two steps are
; evaluated in order; exits with -1 when the file is unreadable.
(define x
	(begin
		(if (not (access? param-file R_OK))
			(begin
				(format #t "Error: unable to access parameters file '~A'\n" param-file)
				(exit -1)))
		(load param-file)
	))
; ----------------------------------------------------------
; Generators for each of the different parts of the grammar.
; Section generator: builds random sections (disjuncts) using
; `num-link-types` link types, bounded by `max-disjunct-size` and
; `section-size`, shaped by the three exponents.
(define secgen
	(make-section-generator
		num-link-types
		max-disjunct-size
		section-size
		link-type-exp
		disjunct-exp
		section-exp))

; Part-of-speech generator: attaches the generated sections to
; `num-pos` parts of speech.
(define posgen
	(make-pos-generator
		num-pos
		secgen))

; Class generator: groups the parts of speech into `num-classes`
; word classes of size `class-size`, distribution exponent `class-exp`.
(define classgen
	(make-class-generator
		num-classes
		num-pos
		class-size
		class-exp))

; Wall generator: wires `num-to-wall` of the classes to the wall
; using `num-wall-types` connector types, and declares the
; sentence-ending punctuation.
(define wallgen
	(make-wall-generator
		num-classes
		num-wall-types
		num-to-wall
		sentence-enders))

; Word-sense generator: gives a fraction `sense-frac` of the classes
; up to `num-senses` senses, distribution exponent `sense-exp`.
(define sensegen
	(make-sense-generator
		sense-frac
		num-classes
		num-senses
		sense-exp))

; Word generator: expands each class into up to `num-synonyms`
; synonymous words, distribution exponent `synonym-exp`.
(define wordgen
	(make-word-generator
		num-classes
		num-synonyms
		synonym-exp))
; Make a copy of the link-grammar boilerplate
; This copies the boilerplate files from the source dir
; to the target dir.
; Copy the link-grammar boilerplate files into the target directory.
;
; Reads the template files from $COMMON_DIR/fake-lang and copies every
; regular file found there into `dict-dir`.  Also saves a copy of the
; parameters file as `dict-conf.scm`, as a log of what was done.
; Exits the process with status -1 when any precondition fails.
(define (copy-boilerplate)
	(define x
		(if (not (getenv "COMMON_DIR"))
			(begin
				(format #t "Error: Environment variable $COMMON_DIR is not defined.\n")
				(format #t "This directory needed for its template files.\n")
				(exit -1))))

	; Location of the boilerplate files.
	(define source-dir (string-append (getenv "COMMON_DIR") "/fake-lang"))

	; Recursively copy every regular file in source-dir to dict-dir.
	(define DIR_STREAM (opendir source-dir))
	(define (copy-dir)
		(define DIRENT (readdir DIR_STREAM))
		(if (not (eof-object? DIRENT))
			(let ((lgfi (string-append source-dir "/" DIRENT))
					(tofi (string-append dict-dir "/" DIRENT)))
				(if (equal? 'regular (stat:type (stat lgfi)))
					(copy-file lgfi tofi))
				(copy-dir)
			)))

	; Does the source directory exist?
	; NOTE(review): when $COMMON_DIR is set but the directory is
	; unreadable, this `or` lets the check pass and the failure only
	; surfaces later in opendir -- looks like it was meant to be an
	; `and`; confirm before changing, since behavior would differ.
	(if (not (or (getenv "COMMON_DIR") (access? source-dir R_OK)))
		(begin
			; Bug fix: `source-dir` is a string, not a procedure; the
			; original `(source-dir)` would crash if this branch ran.
			(format #t "Error: unable to access '~A'\n" source-dir)
			(format #t "This directory needed for its template files\n")
			(exit -1)))

	; Does the target directory exist already?
	; If so, do not over-write it.
	(if (access? dict-dir R_OK)
		(begin
			(format #t "Error: target directory exists: ~A\n" dict-dir)
			(format #t "Remove or rename this directory and try again\n")
			(exit -1)))

	(mkdir dict-dir)
	(copy-dir)

	; Copy the parameters file so that we have a log of what was done.
	(copy-file param-file (string-append dict-dir "/dict-conf.scm"))
)
Do the actual copy , first
(define xx (copy-boilerplate))
(define dict-file (string-append dict-dir "/4.0.dict"))
(define port (open-file dict-file "w"))
(format port "%\n% Randomly generated dictionary\n%\n")
(format port "% Version: 0.1\n")
(format port "% Num link types: ~A\n" num-link-types)
(format port "% Link type exponent: ~A\n" link-type-exp)
(format port "% Disjunct size: ~A\n" max-disjunct-size)
(format port "% Disjunct exponent: ~A\n" disjunct-exp)
(format port "% Section size: ~A\n" section-size)
(format port "% Number of POS: ~A\n" num-pos)
(format port "% Number of classes: ~A\n" num-classes)
(format port "%\n")
(format port "% Class size: ~A\n" class-size)
(format port "% Class exp: ~A\n" class-exp)
(format port "%\n")
(format port "% Wall connector types: ~A\n" num-wall-types)
(format port "% Wall connections: ~A\n" num-to-wall)
(format port "% Sentence-ending punctuation: ~A\n" sentence-enders)
(format port "%\n")
(format port "% Word-sense fraction: ~A\n" sense-frac)
(format port "% Number of word-senses: ~A\n" num-senses)
(format port "% Word-sense exponent: ~A\n" sense-exp)
(format port "%\n")
(format port "% Number of synonyms: ~A\n" num-synonyms)
(format port "% Synonym exponent: ~A\n" synonym-exp)
(format port "%\n")
(format port "#define dictionary-version-number 5.9.0;\n")
(format port "#define dictionary-locale C;\n")
(print-LG-flat port (posgen))
(print-LG-flat port (classgen))
(print-LG-flat port (wallgen))
(print-LG-flat port (sensegen))
(print-LG-flat port (wordgen))
(format port "\n<UNKNOWN-WORD>: XXXXXX+;\n")
(close port)
; If we got to here, then everything must have worked.
(format #t "Created dictionary at ~A\n" dict-dir)
(exit 0)
| null | https://raw.githubusercontent.com/opencog/learn/fb038fd29848bee57ebb1c80ad3f8310ac7f3075/run-common/attic/gen-dict.scm | scheme |
gen-dict.scm - Generate a random artificial grammar
and the directory where the generated files should be written.
Get the program arguments
does not spew errors. (Maybe these should be #<unspecified>?)
----------------------------------------------------------
Generators for each of the different parts of the grammar.
Make a copy of the link-grammar boilerplate
This copies the boilerplate files from the source dir
to the target dir.
Location of the boilerplate files.
Recursive copy
Does the source directory exist?
Does the target directory exist already?
If so, do not over-write it.
Copy the parameters file so that we have a log of what was done.
If we got to here, then everything must have worked. | #! /usr/bin/env guile
!#
Usage : ` ./gen - dict.scm < paramaters-file.scm > < output - dir > `
This expects two arguments : a file containing configuration parameters
(use-modules (opencog) (opencog nlp fake))
(define param-file (cadr (program-arguments)))
(define dict-dir (caddr (program-arguments)))
Program parameters . Define these so that guile compilation
(define num-link-types #f)
(define link-type-exp #f)
(define max-disjunct-size #f)
(define disjunct-exp #f)
(define section-size #f)
(define section-exp #f)
(define num-pos #f)
(define num-classes #f)
(define class-size #f)
(define class-exp #f)
(define num-wall-types #f)
(define num-to-wall #f)
(define sentence-enders #f)
(define sense-frac #f)
(define sense-frac #f)
(define num-senses #f)
(define sense-exp #f)
(define num-synonyms #f)
(define synonym-exp #f)
(define x
(begin
(if (not (access? param-file R_OK))
(begin
(format #t "Error: unable to access parameters file '~A'\n" param-file)
(exit -1)))
(load param-file)
))
(define secgen
(make-section-generator
num-link-types
max-disjunct-size
section-size
link-type-exp
disjunct-exp
section-exp))
(define posgen
(make-pos-generator
num-pos
secgen))
(define classgen
(make-class-generator
num-classes
num-pos
class-size
class-exp))
(define wallgen
(make-wall-generator
num-classes
num-wall-types
num-to-wall
sentence-enders))
(define sensegen
(make-sense-generator
sense-frac
num-classes
num-senses
sense-exp))
(define wordgen
(make-word-generator
num-classes
num-synonyms
synonym-exp))
; Copy the link-grammar boilerplate files from $COMMON_DIR/fake-lang
; into `dict-dir`, then save the parameters file there as
; `dict-conf.scm`.  Exits with -1 when a precondition fails.
(define (copy-boilerplate)
	(define x
		(if (not (getenv "COMMON_DIR"))
			(begin
				(format #t "Error: Environment variable $COMMON_DIR is not defined.\n")
				(format #t "This directory needed for its template files.\n")
				(exit -1))))
	(define source-dir (string-append (getenv "COMMON_DIR") "/fake-lang"))
	(define DIR_STREAM (opendir source-dir))
	; Recursively copy every regular file in source-dir to dict-dir.
	(define (copy-dir)
		(define DIRENT (readdir DIR_STREAM))
		(if (not (eof-object? DIRENT))
			(let ((lgfi (string-append source-dir "/" DIRENT))
					(tofi (string-append dict-dir "/" DIRENT)))
				(if (equal? 'regular (stat:type (stat lgfi)))
					(copy-file lgfi tofi))
				(copy-dir)
			)))
	(if (not (or (getenv "COMMON_DIR") (access? source-dir R_OK)))
		(begin
			; Bug fix: `source-dir` is a string, not a procedure; the
			; original `(source-dir)` would crash if this branch ran.
			(format #t "Error: unable to access '~A'\n" source-dir)
			(format #t "This directory needed for its template files\n")
			(exit -1)))
	(if (access? dict-dir R_OK)
		(begin
			(format #t "Error: target directory exists: ~A\n" dict-dir)
			(format #t "Remove or rename this directory and try again\n")
			(exit -1)))
	(mkdir dict-dir)
	(copy-dir)
	(copy-file param-file (string-append dict-dir "/dict-conf.scm"))
)
Do the actual copy , first
(define xx (copy-boilerplate))
(define dict-file (string-append dict-dir "/4.0.dict"))
(define port (open-file dict-file "w"))
(format port "%\n% Randomly generated dictionary\n%\n")
(format port "% Version: 0.1\n")
(format port "% Num link types: ~A\n" num-link-types)
(format port "% Link type exponent: ~A\n" link-type-exp)
(format port "% Disjunct size: ~A\n" max-disjunct-size)
(format port "% Disjunct exponent: ~A\n" disjunct-exp)
(format port "% Section size: ~A\n" section-size)
(format port "% Number of POS: ~A\n" num-pos)
(format port "% Number of classes: ~A\n" num-classes)
(format port "%\n")
(format port "% Class size: ~A\n" class-size)
(format port "% Class exp: ~A\n" class-exp)
(format port "%\n")
(format port "% Wall connector types: ~A\n" num-wall-types)
(format port "% Wall connections: ~A\n" num-to-wall)
(format port "% Sentence-ending punctuation: ~A\n" sentence-enders)
(format port "%\n")
(format port "% Word-sense fraction: ~A\n" sense-frac)
(format port "% Number of word-senses: ~A\n" num-senses)
(format port "% Word-sense exponent: ~A\n" sense-exp)
(format port "%\n")
(format port "% Number of synonyms: ~A\n" num-synonyms)
(format port "% Synonym exponent: ~A\n" synonym-exp)
(format port "%\n")
(format port "#define dictionary-version-number 5.9.0;\n")
(format port "#define dictionary-locale C;\n")
(print-LG-flat port (posgen))
(print-LG-flat port (classgen))
(print-LG-flat port (wallgen))
(print-LG-flat port (sensegen))
(print-LG-flat port (wordgen))
(format port "\n<UNKNOWN-WORD>: XXXXXX+;\n")
(close port)
(format #t "Created dictionary at ~A\n" dict-dir)
(exit 0)
|
620fd0f5bfa0902942a744cf124980f5819976ec4be94374e14c03d3a198bd1e | GregoryTravis/rhythmr | Graph.hs | # LANGUAGE DeriveGeneric #
# LANGUAGE TupleSections #
module Graph
( Graph
, empty
, nullGraph
, connectedTo
, add
, addMulti
, nodes
, components
, fromComponents
, showGraphAsComponents
, longestPathComponent
, thresholdedWalks
, MetaGraph
, buildMetaGraph
, graphInfo
, graphStruct
, graphTest ) where
---- Really dumb undirected graph: extremely slow!!
import Control.DeepSeq
import GHC.Generics (Generic, Generic1)
import Data.Containers.ListUtils (nubOrd)
import Data.List (intercalate, intersect, maximum, nub)
import qualified Data.Map.Strict as M
import qualified Data.Set as S
import Util
-- An unordered graph expressed as a map from each node to all the nodes it
-- shares an edge with. Each edge is represented twice, once for each of its
-- nodes. This representation does not permit nodes without any edges attached
-- to them.
data Graph a = Graph (M.Map a (S.Set a))
deriving (Eq, Generic)
instance NFData a => NFData (Graph a)
instance (Eq a, Ord a, Show a) => Show (Graph a) where
show g = show $ edges g
empty :: Graph a
empty = Graph (M.empty)
nullGraph :: Graph a -> Bool
nullGraph (Graph m) = M.null m
graphInfo :: (Show a, Ord a) => Graph a -> (Int, Int, Double, Int)
graphInfo g = (length $ nodes g, length $ edges g, density, length $ components g)
where density = (fromIntegral $ length $ edges g) / (fromIntegral $ numPairs $ length $ nodes g)
numPairs x = x * (x-1)
graphStruct :: Ord a => Graph a -> [(Int, [Int])]
graphStruct g =
let ns = S.toList (nodes g)
elemToInt = M.fromList (zip ns [0..])
lup x = elemToInt M.! x
showNode n = (lup n, map lup (connectedTo g n))
in map showNode ns
-- Add an edge (x, y). Adds y to the adjacency list for x, and vice versa,
-- because I'm a jerk.
add :: (Ord a, Show a) => Graph a -> a -> a -> Graph a
add g x y =
let g' = addKeyIfMissing g x
g'' = addKeyIfMissing g' y
(Graph m) = g''
m' = M.adjust (S.insert x) y m
m'' = M.adjust (S.insert y) x m'
in eesp ( show ( " hoy " , m , m ' , m '' ) ) $ Graph m ''
in Graph m''
-- Add multiple edges.
-- | Add every edge in the list to the graph. This is the left fold
-- over 'add' that the old explicit recursion spelled out by hand
-- (resolving the former TODO); traversal order is unchanged.
addMulti :: (Ord a, Show a) => Graph a -> [(a, a)] -> Graph a
addMulti = foldl (\g (x, y) -> add g x y)
-- Add the given elements as a connected component: given (x:ys), add (x, y)
-- for each y in ys.
addComponent :: (Ord a, Show a) => Graph a -> [a] -> Graph a
addComponent g (x:xs) = addMulti g (zip (repeat x) xs)
addComponent g [] = g
edges :: (Eq a, Ord a) => Graph a -> [(a, a)]
edges g = nubOrd $ map sortEdge $ directedEdges g
-- | Put an edge's endpoints into ascending order, so that (x, y) and
-- (y, x) normalise to the same representative pair.
sortEdge :: Ord a => (a, a) -> (a, a)
sortEdge (a, b) = if a > b then (b, a) else (a, b)
-- Return each edge twice, in each ordering
directedEdges :: Ord a => Graph a -> [(a, a)]
directedEdges g@(Graph m) = concat (Prelude.map (nodeEdges g) (M.keys m))
-- Return all nodes connected by an edge to the given node
connectedTo :: Ord a => Graph a -> a -> [a]
connectedTo (Graph m) x = S.toList (m M.! x)
-- Return all edges (x, y) for the given x
nodeEdges :: Ord a => Graph a -> a -> [(a, a)]
nodeEdges g x = map (x,) $ (connectedTo g x)
-- Return connected components of the graph
-- This is extremely inefficient; it constructs a size-n component n times
components :: (Eq a, Ord a, Show a) => Graph a -> [S.Set a]
components g = nub $ Prelude.map (closure g) (S.toList (nodes g))
showComponents :: Show a => [S.Set a] -> String
showComponents sets = intercalate " " $ map show (map S.toList sets)
showGraphAsComponents :: (Eq a, Ord a, Show a) => Graph a -> String
showGraphAsComponents = showComponents . components
Construct a graph from the given connected components .
They do n't have to be disjoint , so ( components . ) /= i d
fromComponents :: (Show a, Ord a) => [[a]] -> Graph a
fromComponents [] = Graph M.empty
fromComponents (c:cs) = addComponent (fromComponents cs) c
nodes :: Ord a => Graph a -> S.Set a
nodes (Graph m) = flatten (M.elems m)
closure :: (Ord a, Show a) => Graph a -> a -> S.Set a
closure g x = converge (closure' g) (S.singleton x)
closure' :: (Ord a, Show a) => Graph a -> S.Set a -> S.Set a
closure' (Graph m) xs = xs `S.union` (flatten $ Prelude.map (m M.!) (S.toList xs))
-- | Collapse a list of sets into their union.
flatten :: Ord a => [S.Set a] -> S.Set a
flatten = S.unions
-- | Iterate @f@ from @x@ until a fixed point is reached.
-- The old version really did recompute @f x@ (up to three times per
-- step, as its comment suspected); here it is computed once per step
-- via a shared @where@ binding. Behaviour is otherwise identical.
converge :: Eq a => (a -> a) -> a -> a
converge f x
  | fx == x   = x
  | otherwise = converge f fx
  where
    fx = f x
addKeyIfMissing :: Ord a => Graph a -> a -> Graph a
addKeyIfMissing g x | graphMember x g = g
addKeyIfMissing (Graph m) x | otherwise = Graph $ M.insert x S.empty m
graphMember :: Ord a => a -> Graph a -> Bool
graphMember x (Graph m) = M.member x m
-- -- Starting at the given element, walk the connectivity tree emanating from it,
-- -- avoiding cycles.
walkAndCount : : ( Show a , a ) = > Graph a - > a - > [ ( a , Int ) ]
walkAndCount g x = walk S.empty 0 g x
where walk : : ( Show a , a ) = > S.Set a - > Int - > Graph a - > a - > [ ( a , Int ) ]
walk seen n g x = ( x , n ) : ( concat ( map ( walk seen ' ( n+1 ) g ) ) )
where nexts = S.toList ( ( S.fromList ( connectedTo g x ) ) ` S.difference ` seen )
-- seen' = S.insert x seen
-- Starting at the given element, walk the connectivity tree emanating from it,
-- avoiding cycles. Returns all paths.
pathsFrom :: (Show a, Ord a) => Graph a -> a -> [[a]]
pathsFrom g x = walk S.empty g x
where walk :: (Show a, Ord a) => S.Set a -> Graph a -> a -> [[a]]
walk seen g x = map (x:) ([] : (concat (map (walk seen' g) nexts)))
where nexts = S.toList ((S.fromList (connectedTo g x)) `S.difference` seen)
seen' = S.insert x seen
-- Return a longest path from a. If there are multiple paths of that length,
the first one found is returned .
longestPathFrom :: (NFData a, Show a, Ord a) => Graph a -> a -> [a]
longestPathFrom g x =
let paths = tv "pathsFrom " $ pathsFrom g x
maxLength = maximum (map length (check paths))
check paths = assertM "longestPathFrom" (not (null paths)) paths
longestPaths = filter ((== maxLength) . length) paths
in head longestPaths
-- Find the longest path in a connected component. If there are multiple paths
of that length , the first one found is returned .
longestPathComponent :: (NFData a, Show a, Ord a) => Graph a -> a -> [a]
longestPathComponent g x =
let firstPath = longestPathFrom g x
secondStartingPoint = last firstPath
secondPath = longestPathFrom g secondStartingPoint
in secondPath
-- Find a longest path through each component, and concatenate them.
-- If the argument is an empty graph, then an empty list is returned.
allLongestPathComponents :: (NFData a, Show a, Ord a) => Graph a -> [a]
allLongestPathComponents g =
let cs = map S.toList (tv "components" $ components g)
startingPoints = map head cs
longestPaths = map (longestPathComponent g) startingPoints
in concat longestPaths
-- Separate module?
type MetaGraph a = Graph [a]
Construct a k - metagraph .
-- Such a graph has an edge (x, y) if the intersection of x and y is of size k or greater.
buildMetaGraph :: (Eq a, Show a, Ord a) => [[a]] -> Int -> MetaGraph a
buildMetaGraph xses k = addMulti empty (findOverlappingBy k xses)
-- Return pairs of lists that overlap by the specified number of elements
-- (excluding self-overlapping)
findOverlappingBy :: (Eq a, Show a, Ord a) => Int -> [[a]] -> [([a], [a])]
findOverlappingBy k xses | k >= 1 = filter (uncurry ok) (findOverlapping xses)
| otherwise = error ("findOverlapping: k must be >= 1, is " ++ (show k))
where ok xs ys = overlapBy k xs ys && (xs /= ys)
Return pairs of lists that overlap by at least one element .
findOverlapping :: (Eq a, Show a, Ord a) => [[a]] -> [([a], [a])]
findOverlapping xses =
let e2l = elementToListsMap xses
allPairs xs = [(x, y) | x <- xs, y <- xs]
in sfesp "uniq" length $ nubOrd $ sfesp "all" length $ concat $ map allPairs (M.elems e2l)
-- Build a map from each element to the lists that contain it
elementToListsMap :: (Eq a, Ord a) => [[a]] -> M.Map a [[a]]
elementToListsMap xses = mapFromListAccum $ concat (map kvs xses)
where kvs :: [a] -> [(a, [a])]
kvs xs = map (,xs) xs
Nonempty intersection ?
-- | True when the two lists share at least @k@ elements, with
-- multiplicity counted as by 'Data.List.intersect'.
overlapBy :: Eq a => Int -> [a] -> [a] -> Bool
overlapBy k xs ys = k <= length (xs `intersect` ys)
For each k > = 1 , build the k - metagraph and return ( k , walk ) . Stop when the
-- walks become empty.
thresholdedWalks :: (NFData a, Show a, Ord a) => [[a]] -> [(Int, [[a]])]
thresholdedWalks xses = nonEmpty
where walks = map walk [0..]
walk k = (k, tv "allLongestPathComponents" $ allLongestPathComponents (mg k))
mg k = tv "buildMetaGraph" $ buildMetaGraph xses k
nonEmpty = tv "nonEmpty" $ takeWhile (\x -> tv "x" $ ((\(_, walk) -> not (null walk)) x)) walks
evaled = unsafeTime " thresholdedWalks " nonEmpty
debug = map d [ 0 .. 4 ]
-- where d k =
-- let mg = buildMetaGraph xses k
-- cs = components mg
adjs = case mg of Graph m - > map length ( M.elems m )
-- in ("debug", k, adjs, map length cs)
debug = map ( \(k , walk ) - > ( k , length walk ) ) $ take 20 walks
debug = map graphInfo ( map ( buildMetaGraph xses ) [ 0 .. 14 ] )
debug = allLongestPathComponents ( : : Graph Int )
graphInfo ( m ) = ( " gi " , map length ( M.elems m ) )
graphTest :: IO ()
graphTest = do
let likes =
Int here for NFData while debugging performance
, [1, 3, 4]
, [3, 2, 5]
, [5, 1, 3]
, [10, 11, 12]
, [10, 13, 14] ]
mg = buildMetaGraph likes 1
walked = pathsFrom mg [0, 1, 2]
msp $ length walked
msp $ length $ nubOrd walked
msp $ map length walked
msp $ longestPathComponent mg [0, 1, 2]
msp $ nubOrd walked
msp mg
-- let m = case mg of (Graph m) -> m
msp ( length ( M.keys m ) )
mapM _ ( \k - > msp ( k , ( S.size ( m M. ! k ) ) ) ) ( M.keys m )
| null | https://raw.githubusercontent.com/GregoryTravis/rhythmr/55fd5b5af6a52da16b7d730c27fb408aa3c61538/src/Graph.hs | haskell | -- Really dumb undirected graph: extremely slow!!
An unordered graph expressed as a map from each node to all the nodes it
shares an edge with. Each edge is represented twice, once for each of its
nodes. This representation does not permit nodes without any edges attached
to them.
Add an edge (x, y). Adds y to the adjacency list for x, and vice versa,
because I'm a jerk.
Add multiple edges.
Add the given elements as a connected component: given (x:ys), add (x, y)
for each y in ys.
Return each edge twice, in each ordering
Return all nodes connected by an edge to the given node
Return all edges (x, y) for the given x
Return connected components of the graph
This is extremely inefficient; it constructs a size-n component n times
I wonder if this doesn't recompute (f x)
-- Starting at the given element, walk the connectivity tree emanating from it,
-- avoiding cycles.
seen' = S.insert x seen
Starting at the given element, walk the connectivity tree emanating from it,
avoiding cycles. Returns all paths.
Return a longest path from a. If there are multiple paths of that length,
Find the longest path in a connected component. If there are multiple paths
Find a longest path through each component, and concatenate them.
If the argument is an empty graph, then an empty list is returned.
Separate module?
Such a graph has an edge (x, y) if the intersection of x and y is of size k or greater.
Return pairs of lists that overlap by the specified number of elements
(excluding self-overlapping)
Build a map from each element to the lists that contain it
walks become empty.
where d k =
let mg = buildMetaGraph xses k
cs = components mg
in ("debug", k, adjs, map length cs)
let m = case mg of (Graph m) -> m | # LANGUAGE DeriveGeneric #
# LANGUAGE TupleSections #
module Graph
( Graph
, empty
, nullGraph
, connectedTo
, add
, addMulti
, nodes
, components
, fromComponents
, showGraphAsComponents
, longestPathComponent
, thresholdedWalks
, MetaGraph
, buildMetaGraph
, graphInfo
, graphStruct
, graphTest ) where
import Control.DeepSeq
import GHC.Generics (Generic, Generic1)
import Data.Containers.ListUtils (nubOrd)
import Data.List (intercalate, intersect, maximum, nub)
import qualified Data.Map.Strict as M
import qualified Data.Set as S
import Util
data Graph a = Graph (M.Map a (S.Set a))
deriving (Eq, Generic)
instance NFData a => NFData (Graph a)
instance (Eq a, Ord a, Show a) => Show (Graph a) where
show g = show $ edges g
empty :: Graph a
empty = Graph (M.empty)
nullGraph :: Graph a -> Bool
nullGraph (Graph m) = M.null m
graphInfo :: (Show a, Ord a) => Graph a -> (Int, Int, Double, Int)
graphInfo g = (length $ nodes g, length $ edges g, density, length $ components g)
where density = (fromIntegral $ length $ edges g) / (fromIntegral $ numPairs $ length $ nodes g)
numPairs x = x * (x-1)
graphStruct :: Ord a => Graph a -> [(Int, [Int])]
graphStruct g =
let ns = S.toList (nodes g)
elemToInt = M.fromList (zip ns [0..])
lup x = elemToInt M.! x
showNode n = (lup n, map lup (connectedTo g n))
in map showNode ns
add :: (Ord a, Show a) => Graph a -> a -> a -> Graph a
add g x y =
let g' = addKeyIfMissing g x
g'' = addKeyIfMissing g' y
(Graph m) = g''
m' = M.adjust (S.insert x) y m
m'' = M.adjust (S.insert y) x m'
in eesp ( show ( " hoy " , m , m ' , m '' ) ) $ Graph m ''
in Graph m''
TODO this is a fold
addMulti :: (Ord a, Show a) => Graph a -> [(a, a)] -> Graph a
addMulti g ((x, y) : ps) = addMulti (add g x y) ps
addMulti g [] = g
addComponent :: (Ord a, Show a) => Graph a -> [a] -> Graph a
addComponent g (x:xs) = addMulti g (zip (repeat x) xs)
addComponent g [] = g
edges :: (Eq a, Ord a) => Graph a -> [(a, a)]
edges g = nubOrd $ map sortEdge $ directedEdges g
sortEdge :: Ord a => (a, a) -> (a, a)
sortEdge (a, b) | a > b = (b, a)
sortEdge (a, b) | otherwise = (a, b)
directedEdges :: Ord a => Graph a -> [(a, a)]
directedEdges g@(Graph m) = concat (Prelude.map (nodeEdges g) (M.keys m))
connectedTo :: Ord a => Graph a -> a -> [a]
connectedTo (Graph m) x = S.toList (m M.! x)
nodeEdges :: Ord a => Graph a -> a -> [(a, a)]
nodeEdges g x = map (x,) $ (connectedTo g x)
components :: (Eq a, Ord a, Show a) => Graph a -> [S.Set a]
components g = nub $ Prelude.map (closure g) (S.toList (nodes g))
showComponents :: Show a => [S.Set a] -> String
showComponents sets = intercalate " " $ map show (map S.toList sets)
showGraphAsComponents :: (Eq a, Ord a, Show a) => Graph a -> String
showGraphAsComponents = showComponents . components
Construct a graph from the given connected components .
They do n't have to be disjoint , so ( components . ) /= i d
fromComponents :: (Show a, Ord a) => [[a]] -> Graph a
fromComponents [] = Graph M.empty
fromComponents (c:cs) = addComponent (fromComponents cs) c
nodes :: Ord a => Graph a -> S.Set a
nodes (Graph m) = flatten (M.elems m)
closure :: (Ord a, Show a) => Graph a -> a -> S.Set a
closure g x = converge (closure' g) (S.singleton x)
closure' :: (Ord a, Show a) => Graph a -> S.Set a -> S.Set a
closure' (Graph m) xs = xs `S.union` (flatten $ Prelude.map (m M.!) (S.toList xs))
flatten :: Ord a => [S.Set a] -> S.Set a
flatten sets = S.fromList (concat (Prelude.map S.toList sets))
converge :: Eq a => (a -> a) -> a -> a
converge f x | (f x) == x = x
converge f x | otherwise = converge f (f x)
addKeyIfMissing :: Ord a => Graph a -> a -> Graph a
addKeyIfMissing g x | graphMember x g = g
addKeyIfMissing (Graph m) x | otherwise = Graph $ M.insert x S.empty m
graphMember :: Ord a => a -> Graph a -> Bool
graphMember x (Graph m) = M.member x m
walkAndCount : : ( Show a , a ) = > Graph a - > a - > [ ( a , Int ) ]
walkAndCount g x = walk S.empty 0 g x
where walk : : ( Show a , a ) = > S.Set a - > Int - > Graph a - > a - > [ ( a , Int ) ]
walk seen n g x = ( x , n ) : ( concat ( map ( walk seen ' ( n+1 ) g ) ) )
where nexts = S.toList ( ( S.fromList ( connectedTo g x ) ) ` S.difference ` seen )
pathsFrom :: (Show a, Ord a) => Graph a -> a -> [[a]]
pathsFrom g x = walk S.empty g x
where walk :: (Show a, Ord a) => S.Set a -> Graph a -> a -> [[a]]
walk seen g x = map (x:) ([] : (concat (map (walk seen' g) nexts)))
where nexts = S.toList ((S.fromList (connectedTo g x)) `S.difference` seen)
seen' = S.insert x seen
the first one found is returned .
longestPathFrom :: (NFData a, Show a, Ord a) => Graph a -> a -> [a]
longestPathFrom g x =
let paths = tv "pathsFrom " $ pathsFrom g x
maxLength = maximum (map length (check paths))
check paths = assertM "longestPathFrom" (not (null paths)) paths
longestPaths = filter ((== maxLength) . length) paths
in head longestPaths
of that length , the first one found is returned .
longestPathComponent :: (NFData a, Show a, Ord a) => Graph a -> a -> [a]
longestPathComponent g x =
let firstPath = longestPathFrom g x
secondStartingPoint = last firstPath
secondPath = longestPathFrom g secondStartingPoint
in secondPath
allLongestPathComponents :: (NFData a, Show a, Ord a) => Graph a -> [a]
allLongestPathComponents g =
let cs = map S.toList (tv "components" $ components g)
startingPoints = map head cs
longestPaths = map (longestPathComponent g) startingPoints
in concat longestPaths
type MetaGraph a = Graph [a]
Construct a k - metagraph .
buildMetaGraph :: (Eq a, Show a, Ord a) => [[a]] -> Int -> MetaGraph a
buildMetaGraph xses k = addMulti empty (findOverlappingBy k xses)
findOverlappingBy :: (Eq a, Show a, Ord a) => Int -> [[a]] -> [([a], [a])]
findOverlappingBy k xses | k >= 1 = filter (uncurry ok) (findOverlapping xses)
| otherwise = error ("findOverlapping: k must be >= 1, is " ++ (show k))
where ok xs ys = overlapBy k xs ys && (xs /= ys)
Return pairs of lists that overlap by at least one element .
findOverlapping :: (Eq a, Show a, Ord a) => [[a]] -> [([a], [a])]
findOverlapping xses =
let e2l = elementToListsMap xses
allPairs xs = [(x, y) | x <- xs, y <- xs]
in sfesp "uniq" length $ nubOrd $ sfesp "all" length $ concat $ map allPairs (M.elems e2l)
elementToListsMap :: (Eq a, Ord a) => [[a]] -> M.Map a [[a]]
elementToListsMap xses = mapFromListAccum $ concat (map kvs xses)
where kvs :: [a] -> [(a, [a])]
kvs xs = map (,xs) xs
Nonempty intersection ?
overlapBy :: Eq a => Int -> [a] -> [a] -> Bool
overlapBy k xs ys = length (intersect xs ys) >= k
For each k > = 1 , build the k - metagraph and return ( k , walk ) . Stop when the
thresholdedWalks :: (NFData a, Show a, Ord a) => [[a]] -> [(Int, [[a]])]
thresholdedWalks xses = nonEmpty
where walks = map walk [0..]
walk k = (k, tv "allLongestPathComponents" $ allLongestPathComponents (mg k))
mg k = tv "buildMetaGraph" $ buildMetaGraph xses k
nonEmpty = tv "nonEmpty" $ takeWhile (\x -> tv "x" $ ((\(_, walk) -> not (null walk)) x)) walks
evaled = unsafeTime " thresholdedWalks " nonEmpty
debug = map d [ 0 .. 4 ]
adjs = case mg of Graph m - > map length ( M.elems m )
debug = map ( \(k , walk ) - > ( k , length walk ) ) $ take 20 walks
debug = map graphInfo ( map ( buildMetaGraph xses ) [ 0 .. 14 ] )
debug = allLongestPathComponents ( : : Graph Int )
graphInfo ( m ) = ( " gi " , map length ( M.elems m ) )
graphTest :: IO ()
graphTest = do
let likes =
Int here for NFData while debugging performance
, [1, 3, 4]
, [3, 2, 5]
, [5, 1, 3]
, [10, 11, 12]
, [10, 13, 14] ]
mg = buildMetaGraph likes 1
walked = pathsFrom mg [0, 1, 2]
msp $ length walked
msp $ length $ nubOrd walked
msp $ map length walked
msp $ longestPathComponent mg [0, 1, 2]
msp $ nubOrd walked
msp mg
msp ( length ( M.keys m ) )
mapM _ ( \k - > msp ( k , ( S.size ( m M. ! k ) ) ) ) ( M.keys m )
|
a04b26007d29ce9840afbf2c389536e62f2191ca100805ad103fc18d5877551a | music-suite/music-suite | Time.hs | module Music.Time
( module Music.Time.Types,
module Music.Time.Transform,
module Music.Time.Duration,
module Music.Time.Position,
module Music.Time.Split,
module Music.Time.Juxtapose,
module Music.Time.Aligned,
module Music.Time.Rest,
module Music.Time.Note,
module Music.Time.Voice,
module Music.Time.Event,
module Music.Time.Score,
module Music.Time.Pattern,
module Music.Time.Reactive,
module Music.Time.Impulses,
module Music.Time.Behavior,
module Data.AffineSpace.Point.Offsets,
)
where
import Data.AffineSpace.Point.Offsets
import Music.Time.Aligned
import Music.Time.Behavior
import Music.Time.Duration
import Music.Time.Event
import Music.Time.Impulses
import Music.Time.Juxtapose
import Music.Time.Note
import Music.Time.Pattern
import Music.Time.Position
import Music.Time.Reactive
import Music.Time.Rest
import Music.Time.Score
import Music.Time.Split
import Music.Time.Transform
import Music.Time.Types
import Music.Time.Voice hiding (map, mapWithSpan, traverse)
| null | https://raw.githubusercontent.com/music-suite/music-suite/7f01fd62334c66418043b7a2d662af127f98685d/src/Music/Time.hs | haskell | module Music.Time
( module Music.Time.Types,
module Music.Time.Transform,
module Music.Time.Duration,
module Music.Time.Position,
module Music.Time.Split,
module Music.Time.Juxtapose,
module Music.Time.Aligned,
module Music.Time.Rest,
module Music.Time.Note,
module Music.Time.Voice,
module Music.Time.Event,
module Music.Time.Score,
module Music.Time.Pattern,
module Music.Time.Reactive,
module Music.Time.Impulses,
module Music.Time.Behavior,
module Data.AffineSpace.Point.Offsets,
)
where
import Data.AffineSpace.Point.Offsets
import Music.Time.Aligned
import Music.Time.Behavior
import Music.Time.Duration
import Music.Time.Event
import Music.Time.Impulses
import Music.Time.Juxtapose
import Music.Time.Note
import Music.Time.Pattern
import Music.Time.Position
import Music.Time.Reactive
import Music.Time.Rest
import Music.Time.Score
import Music.Time.Split
import Music.Time.Transform
import Music.Time.Types
import Music.Time.Voice hiding (map, mapWithSpan, traverse)
|
|
a653de0304707a695e9242f3039c5c095c041ffc4535f93a3b748180646c6a26 | borodust/bodge-ui-window | drawing.lisp | (cl:defpackage :bodge-ui-window.example.drawing
(:use :cl :bodge-ui :bodge-host :bodge-canvas :bodge-math)
(:export #:run))
(cl:in-package :bodge-ui-window.example.drawing)
(defvar *active-layer* :layer-1)
(defvar *active-text* "Board")
;; Return a click handler that sets *active-layer* to NEW-LAYER.
;; Used as the :on-click callback for the layer radio buttons; the
;; PANEL argument required by the callback protocol is ignored.
(defun layer-updater (new-layer)
  (lambda (panel)
    (declare (ignore panel))
    (setf *active-layer* new-layer)))
(defun refresh-text (panel)
(declare (ignore panel))
(setf *active-text* (text-of (find-element :text))))
(defpanel (main-panel
(:title "Control Panel")
(:origin 25 355)
(:width 200) (:height 220)
(:options :movable :resizable
:minimizable :scrollable
:closable))
(radio-group
(radio :label "All" :on-click (layer-updater nil))
(radio :label "Layer 1" :activated t :on-click (layer-updater :layer-1))
(radio :label "Layer 2" :on-click (layer-updater :layer-2))
(radio :label "Layer 3" :on-click (layer-updater :layer-3)))
(horizontal-layout
(text-edit :name :text :text "Board")
(button :label "Refresh" :expandable nil :width 70 :on-click #'refresh-text)))
(cl:in-package :bodge-ui-window.example.drawing)
(defparameter *window-width* 800)
(defparameter *window-height* 600)
;; Define main window
(defclass main-window (bodge-ui-window:ui-window) ()
(:default-initargs
:title "Bodge 2D Drawing Example"
:width *window-width*
:height *window-height*
:panels '(main-panel)
:floating t
:opengl-version #+bodge-gl2 '(2 1)
#-bodge-gl2 '(3 3)))
(cl:in-package :bodge-ui-window.example.drawing)
(defun draw-layer-1 ()
(with-retained-canvas
(translate-canvas 10 10)
(loop for i from 0 below 5
do (draw-rect (vec2 (* i 125) 0) 80 180 :fill-paint (vec4 0.4 0.2 0.2 1)))))
(defun draw-layer-2 ()
(with-retained-canvas
(translate-canvas 10 10)
(loop for i from 0 below 5
do (draw-rect (vec2 0 (* i 40)) 580 20 :fill-paint (vec4 0.2 0.4 0.2 1)))))
(defun draw-layer-3 ()
(with-retained-canvas
(translate-canvas 112 50)
(loop for i from 0 below 4
do (loop for j from 0 below 2
do (draw-circle (vec2 (* i 125) (* j 100)) 40 :fill-paint (vec4 0.2 0.2 0.4 1))))))
;; Draw all three layers, rendering ACTIVE-LAYER fully opaque and the
;; other layers faded (alpha 0.2). When ACTIVE-LAYER is nil,
;; defocused-alpha is 1, so every layer is drawn opaque.
(defun draw-focused (active-layer)
  (let ((defocused-alpha (if active-layer 0.2 1)))
    (flet ((select-alpha (layer)
             (if (eq active-layer layer) 1 defocused-alpha)))
      (with-alpha ((select-alpha :layer-1))
        (draw-layer-1))
      (with-alpha ((select-alpha :layer-2))
        (draw-layer-2))
      (with-alpha ((select-alpha :layer-3))
        (draw-layer-3)))))
(defun draw-all ()
(with-alpha (1)
(draw-layer-1))
(with-alpha (0.75)
(draw-layer-2))
(with-alpha (0.5)
(draw-layer-3)))
(defun draw-board (active-layer)
(draw-rect (vec2 0 0) 600 200 :fill-paint (vec4 0.3 0.3 0.3 1))
(if active-layer
(draw-focused active-layer)
(draw-all)))
(defmethod bodge-ui-window:on-draw ((this main-window))
(with-retained-canvas
(translate-canvas 100 100)
(draw-board *active-layer*))
(translate-canvas 100 50)
(scale-canvas 2 2)
(draw-text (vec2 0 0) *active-text* (vec4 0.7 0.7 0.7 1)))
(cl:in-package :bodge-ui-window.example.drawing)
(export 'run)
(defun run ()
(bodge-host:open-window (make-instance 'main-window)))
| null | https://raw.githubusercontent.com/borodust/bodge-ui-window/f560998c5fcade7cda195cd9d5410d46aba32ada/examples/drawing.lisp | lisp | Define main window | (cl:defpackage :bodge-ui-window.example.drawing
(:use :cl :bodge-ui :bodge-host :bodge-canvas :bodge-math)
(:export #:run))
(cl:in-package :bodge-ui-window.example.drawing)
(defvar *active-layer* :layer-1)
(defvar *active-text* "Board")
(defun layer-updater (new-layer)
(lambda (panel)
(declare (ignore panel))
(setf *active-layer* new-layer)))
(defun refresh-text (panel)
(declare (ignore panel))
(setf *active-text* (text-of (find-element :text))))
(defpanel (main-panel
(:title "Control Panel")
(:origin 25 355)
(:width 200) (:height 220)
(:options :movable :resizable
:minimizable :scrollable
:closable))
(radio-group
(radio :label "All" :on-click (layer-updater nil))
(radio :label "Layer 1" :activated t :on-click (layer-updater :layer-1))
(radio :label "Layer 2" :on-click (layer-updater :layer-2))
(radio :label "Layer 3" :on-click (layer-updater :layer-3)))
(horizontal-layout
(text-edit :name :text :text "Board")
(button :label "Refresh" :expandable nil :width 70 :on-click #'refresh-text)))
(cl:in-package :bodge-ui-window.example.drawing)
(defparameter *window-width* 800)
(defparameter *window-height* 600)
(defclass main-window (bodge-ui-window:ui-window) ()
(:default-initargs
:title "Bodge 2D Drawing Example"
:width *window-width*
:height *window-height*
:panels '(main-panel)
:floating t
:opengl-version #+bodge-gl2 '(2 1)
#-bodge-gl2 '(3 3)))
(cl:in-package :bodge-ui-window.example.drawing)
(defun draw-layer-1 ()
(with-retained-canvas
(translate-canvas 10 10)
(loop for i from 0 below 5
do (draw-rect (vec2 (* i 125) 0) 80 180 :fill-paint (vec4 0.4 0.2 0.2 1)))))
(defun draw-layer-2 ()
(with-retained-canvas
(translate-canvas 10 10)
(loop for i from 0 below 5
do (draw-rect (vec2 0 (* i 40)) 580 20 :fill-paint (vec4 0.2 0.4 0.2 1)))))
(defun draw-layer-3 ()
(with-retained-canvas
(translate-canvas 112 50)
(loop for i from 0 below 4
do (loop for j from 0 below 2
do (draw-circle (vec2 (* i 125) (* j 100)) 40 :fill-paint (vec4 0.2 0.2 0.4 1))))))
(defun draw-focused (active-layer)
(let ((defocused-alpha (if active-layer 0.2 1)))
(flet ((select-alpha (layer)
(if (eq active-layer layer) 1 defocused-alpha)))
(with-alpha ((select-alpha :layer-1))
(draw-layer-1))
(with-alpha ((select-alpha :layer-2))
(draw-layer-2))
(with-alpha ((select-alpha :layer-3))
(draw-layer-3)))))
(defun draw-all ()
(with-alpha (1)
(draw-layer-1))
(with-alpha (0.75)
(draw-layer-2))
(with-alpha (0.5)
(draw-layer-3)))
(defun draw-board (active-layer)
(draw-rect (vec2 0 0) 600 200 :fill-paint (vec4 0.3 0.3 0.3 1))
(if active-layer
(draw-focused active-layer)
(draw-all)))
(defmethod bodge-ui-window:on-draw ((this main-window))
(with-retained-canvas
(translate-canvas 100 100)
(draw-board *active-layer*))
(translate-canvas 100 50)
(scale-canvas 2 2)
(draw-text (vec2 0 0) *active-text* (vec4 0.7 0.7 0.7 1)))
(cl:in-package :bodge-ui-window.example.drawing)
(export 'run)
(defun run ()
(bodge-host:open-window (make-instance 'main-window)))
|
ee5a2abf3fe945d4ec000a75a224a5a59d21a24f9b3c14cb35ad94084792b671 | maximedenes/native-coq | mod_checking.mli | (************************************************************************)
(*  v      *   The Coq Proof Assistant  /  The Coq Development Team     *)
(* <O___,, *   INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2010     *)
(*   \VV/  **************************************************************)
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
val check_module : Environ.env -> Names.module_path -> Declarations.module_body -> unit
| null | https://raw.githubusercontent.com/maximedenes/native-coq/3623a4d9fe95c165f02f7119c0e6564a83a9f4c9/checker/mod_checking.mli | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
********************************************************************** | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2010
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
val check_module : Environ.env -> Names.module_path -> Declarations.module_body -> unit
|
c75d61479e5acda19341fd01dcde14be8ff11ee11fe849d0460ce4a8657165cc | runexec/Static-Prime | handler.clj | (ns static-prime.handler
(:use compojure.core)
(:require [compojure.handler :as handler]
[compojure.route :as route]
[hiccup.core :as hc]
[hiccup.form :as hf]
[noir.response :as nr]
[noir.session :as ns]
[clojure.java.io :as io]))
;; Website Settings

;; Title used for the generated index page.
(def index-title "My Website")

;; Username -> plaintext password map accepted by the admin login.
;; NOTE(review): credentials are hard-coded and unhashed in source —
;; replace with a proper credential store before real deployment.
(def auth-set
  {"user1" "password1"
   "admin" "admin"})
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; Edit at your own risk!!
;; URL of the admin editor page.
(def admin-path "/sp")
;; URL that receives login form posts.
(def auth-path "/auth")
;; URL that receives page-save posts.
(def save-path "/save")
;; URL prefix for reading a rendered page: /r/{html-file-path}
(def read-url-prefix "/r/")
;; Directory where rendered HTML pages are stored on disk.
(def html-path "resources/public/html")
;; Directory containing the site template.
(def template-path "resources/")
;; Default template file used by load-template when none is supplied.
(def template-default
  (str template-path "site.template"))
;; Relative page name of the generated index.
(def index-page "/index")
;; On-disk location of the generated index page.
(def index-path (str html-path index-page))
;; Public URL of the generated index page.
(def index-page-url (str read-url-prefix index-page))
;; Pre-rendered HTML fragment with Edit/Delete buttons, injected into
;; pages via the {{{admin}}} template slot for logged-in admins.
;; The onclick handlers call ClojureScript functions, passing the read
;; URL prefix and the admin editor path as arguments.
(def admin-actions-panel
  (let [edit (format
              "static_prime.core.adminPanelEdit('%s','%s');"
              read-url-prefix
              admin-path)
        delete (format
                "static_prime.core.adminPanelDelete('%s','%s');"
                read-url-prefix
                admin-path)]
    (hc/html
     [:div
      [:h3 "Admin Actions"]
      [:button
       {:id "admin-edit"
        :onclick edit}
       "Edit"]
      " "
      [:button
       {:id "admin-delete"
        :onclick delete}
       "Delete"]
      " "])))
(def ^:dynamic *display-admin* true)
;; Website Template Loader
(defn load-template
  "Render a page by filling the site template's {{{title}}}, {{{body}}}
  and {{{admin}}} slots. Reads template-default unless template-path is
  given. The admin panel is injected only when the session belongs to a
  logged-in admin and *display-admin* is true; otherwise the slot is
  replaced with an empty string."
  [title body & [template-path]]
  (let [path     (or template-path template-default)
        template (slurp path :encoding "UTF-8")
        admin    (if (and (ns/get :admin) *display-admin*)
                   admin-actions-panel
                   "")]
    (-> template
        (.replace "{{{title}}}" (hc/h title))
        (.replace "{{{body}}}" body)
        (.replace "{{{admin}}}" admin))))
;; Index Generator
(defn load-index []
(let [categories (atom {})
path->header #(-> (io/file %)
.getParent
(.split html-path)
last)
path->link #(let [link-path (-> % (.split html-path) last)
link-path (str read-url-prefix link-path)
link-text (-> link-path (.split "/") last hc/h)]
[:a {:href link-path} link-text])
path->header-and-link (fn [p]
[(path->header p)
(path->link p)])
links (->> html-path
io/file
file-seq
(filter #(-> % .isDirectory not))
(map (memfn getPath))
(map path->header-and-link)
;; sort by header
(sort-by first)
;; least to greatest
reverse)]
;; insert links in their proper categories
(doseq [x links
:let [[catg links] x]]
(swap! categories
update-in
[catg]
#(into (or (seq %) []) [links])))
;; category is the header and the links go under the category
(apply str
(for [c @categories
:let [[c links] c
header-text (-> c str (.replace "/" " ") hc/h)
links (sort-by #(get-in % [2]) links)]
:when c]
(hc/html
[:div
[:h3 header-text]
(map #(into [:p] [%]) links)])))))
;; Static Prime Editor
(defn static-prime []
(let [csrf (ns/get :csrf)
csrf (if-not csrf
(let [u (str (java.util.UUID/randomUUID))]
(ns/put! :csrf u)
u)
csrf)]
(.. (slurp "./static-prime.html" :encoding "UTF-8")
(replace "{{{csrf}}}"
(hc/html
[:input {:type "hidden"
:name "csrf"
:id "csrf"
:value csrf}])))))
Route Path Finder
(defn route-html-path
  "Resolve FILE-PATH to a filesystem path under `html-path`. Strips
  every \"..\" occurrence to prevent directory traversal and ensures
  the result is rooted with a leading slash before prefixing
  `html-path`."
  [file-path]
  (let [fp (clojure.string/replace file-path #"\.\." "")
        fp (if (.. fp (startsWith "/")) fp (str "/" fp))
        fp (str html-path fp)]
    fp))
(defn write-index!
  "Regenerate the static index page: renders the category/link index
  into the site template (with the admin panel suppressed via
  `*display-admin*`) and writes the result to `index-path`."
  []
  (binding [*display-admin* false]
    (spit index-path
          (load-template index-title
                         (load-index)))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; Routes
(defroutes app-routes
(GET "/" []
(println "GET /index")
(slurp index-path :encoding "UTF-8"))
;; Create new
(GET admin-path []
(if (ns/get :admin)
(static-prime)
(nr/redirect auth-path)))
(GET "/admin" [] (nr/redirect admin-path))
;; Edit existing
(GET [(str admin-path "/:file-path/edit")
:file-path #".*"]
[file-path]
(if-not (ns/get :admin)
(nr/redirect auth-path)
(let [file (route-html-path file-path)
page (static-prime)
file-path (.. file-path (replace "\"" "\\\""))
[title & body] (-> file
(slurp :encoding "UTF-8")
(.split "\n"))
formats [["<div id=\"preview-page\"></div>"
(format
"<div id=\"preview-page\">%s</div>"
(apply str body))]
["<input name=\"title\" type=\"text\" value=\"\"/>"
(format
"<input name=\"title\" type=\"text\" value=\"%s\"/>"
title)]
["<input name=\"route\" type=\"text\" value=\"\"/>"
(format
"<input name=\"route\" type=\"text\" value=\"%s\"/>"
file-path)]]]
(loop [f formats
p page]
(if-not (seq f)
p
(let [[original replace] (first f)]
(recur
(rest f)
(clojure.string/replace
p
(re-pattern original)
replace))))))))
;; Delete Existing
(GET [(str admin-path "/:file-path/delete")
:file-path #".*"]
[file-path]
(if-not (ns/get :admin)
(nr/redirect auth-path)
(let [file (route-html-path file-path)
key (str (java.util.UUID/randomUUID))]
(ns/put! :delete-path file)
(ns/put! :delete-key key)
(hc/html
(hf/form-to
[:post (str admin-path "/delete")]
[:p "Removing File: " (hc/h file)]
[:input {:type "hidden"
:value key
:name "key"}]
(hf/submit-button "Delete Forever!"))))))
(POST (str admin-path "/delete")
{{:keys [key]} :params}
(if-not (ns/get :admin)
(nr/redirect auth-path)
(let [k :delete-path
k2 :delete-key
path (ns/get k)
valid-key (ns/get k2)
error (str "Couldn't delete " path)]
(ns/remove! k)
(ns/remove! k2)
(if-not (= valid-key key)
"Invalid Delete Token!"
(do
(try
(if-not (-> path io/file .delete)
(println error)
(do (println "Deleted =>" path)
(write-index!)))
(catch Exception ex
(println "admin delete Exception => " ex)))
(nr/redirect admin-path))))))
(GET [(str read-url-prefix ":file-path")
:file-path #".*"]
[file-path]
;; remove potential directory traversal
(let [fp (route-html-path file-path)]
(println "GET" fp)
first line is the html title and the rest is the body
(try
(let [[title & html] (clojure.string/split-lines
(slurp fp :encoding "UTF-8"))
html (apply str html)]
(load-template title html))
(catch Exception ex
(println ex)
{:status 404
:headers {}
:body "Not Found"}))))
(POST save-path
{{:keys [title
html
route
csrf]} :params}
(if-not (and (ns/get :admin)
(= (ns/get :csrf) csrf))
"Not Authorized!"
(let [url (str read-url-prefix route)
route (-> (route-html-path route)
;; remove potential directory traversal
(clojure.string/replace #"\.\." "")
io/file
.getAbsolutePath
;; remove any whitespace at the end
clojure.string/trimr
io/file)
dirs (-> route .getParent io/file)]
;; dirs must exists before files
(if-not (.exists dirs)
(.mkdirs dirs))
;; write file
(spit (.getAbsolutePath route)
(str title "\n\n" html))
(write-index!)
(nr/redirect url))))
(GET auth-path []
(if (ns/get :admin)
(nr/redirect admin-path)
(hc/html
(hf/form-to
[:post auth-path]
[:p (hf/label "label-admin-user" "Admin User")]
(hf/text-field "username")
[:p (hf/label "label-admin-password" "Admin Password")]
(hf/password-field "password")
[:p (hf/submit-button "Login")]))))
(POST auth-path
{{:keys [username
password]} :params}
(if (= password (get auth-set username))
(do (ns/put! :admin true)
(nr/redirect admin-path))
(nr/redirect auth-path)))
(route/resources "/")
(route/not-found "Not Found"))
(def app
(-> app-routes
handler/site
ns/wrap-noir-session))
| null | https://raw.githubusercontent.com/runexec/Static-Prime/8ca74e513372fece1ac93a22da62f86aedd1736b/src/static_prime/handler.clj | clojure | Website Settings
Edit at your own risk!!
/read/ { html file path }
Website Template Loader
Add admin panel
Index Generator
sort by header
least to greatest
insert links in their proper categories
category is the header and the links go under the category
Static Prime Editor
Routes
Create new
Edit existing
Delete Existing
remove potential directory traversal
remove potential directory traversal
remove any whitespace at the end
dirs must exists before files
write file | (ns static-prime.handler
(:use compojure.core)
(:require [compojure.handler :as handler]
[compojure.route :as route]
[hiccup.core :as hc]
[hiccup.form :as hf]
[noir.response :as nr]
[noir.session :as ns]
[clojure.java.io :as io]))
(def index-title "My Website")
(def auth-set
{"user1" "password1"
"admin" "admin"})
(def admin-path "/sp")
(def auth-path "/auth")
(def save-path "/save")
(def read-url-prefix "/r/")
(def html-path "resources/public/html")
(def template-path "resources/")
(def template-default
(str template-path "site.template"))
(def index-page "/index")
(def index-path (str html-path index-page))
(def index-page-url (str read-url-prefix index-page))
(def admin-actions-panel
(let [edit (format
"static_prime.core.adminPanelEdit('%s','%s');"
read-url-prefix
admin-path)
delete (format
"static_prime.core.adminPanelDelete('%s','%s');"
read-url-prefix
admin-path)]
(hc/html
[:div
[:h3 "Admin Actions"]
[:button
{:id "admin-edit"
:onclick edit}
"Edit"]
" "
[:button
{:id "admin-delete"
:onclick delete}
"Delete"]
" "])))
(def ^:dynamic *display-admin* true)
(defn load-template [title body & [template-path]]
(-> (or template-path
template-default)
(slurp :encoding "UTF-8")
(.replace "{{{title}}}" (hc/h title))
(.replace "{{{body}}}" body)
(.replace "{{{admin}}}"
(if-not (ns/get :admin)
""
(if-not *display-admin*
""
admin-actions-panel)))))
(defn load-index []
(let [categories (atom {})
path->header #(-> (io/file %)
.getParent
(.split html-path)
last)
path->link #(let [link-path (-> % (.split html-path) last)
link-path (str read-url-prefix link-path)
link-text (-> link-path (.split "/") last hc/h)]
[:a {:href link-path} link-text])
path->header-and-link (fn [p]
[(path->header p)
(path->link p)])
links (->> html-path
io/file
file-seq
(filter #(-> % .isDirectory not))
(map (memfn getPath))
(map path->header-and-link)
(sort-by first)
reverse)]
(doseq [x links
:let [[catg links] x]]
(swap! categories
update-in
[catg]
#(into (or (seq %) []) [links])))
(apply str
(for [c @categories
:let [[c links] c
header-text (-> c str (.replace "/" " ") hc/h)
links (sort-by #(get-in % [2]) links)]
:when c]
(hc/html
[:div
[:h3 header-text]
(map #(into [:p] [%]) links)])))))
(defn static-prime []
(let [csrf (ns/get :csrf)
csrf (if-not csrf
(let [u (str (java.util.UUID/randomUUID))]
(ns/put! :csrf u)
u)
csrf)]
(.. (slurp "./static-prime.html" :encoding "UTF-8")
(replace "{{{csrf}}}"
(hc/html
[:input {:type "hidden"
:name "csrf"
:id "csrf"
:value csrf}])))))
Route Path Finder
(defn route-html-path [file-path]
(let [fp (clojure.string/replace file-path #"\.\." "")
fp (if (.. fp (startsWith "/")) fp (str "/" fp))
fp (str html-path fp)]
fp))
(defn write-index! []
(binding [*display-admin* false]
(spit index-path
(load-template index-title
(load-index)))))
(defroutes app-routes
(GET "/" []
(println "GET /index")
(slurp index-path :encoding "UTF-8"))
(GET admin-path []
(if (ns/get :admin)
(static-prime)
(nr/redirect auth-path)))
(GET "/admin" [] (nr/redirect admin-path))
(GET [(str admin-path "/:file-path/edit")
:file-path #".*"]
[file-path]
(if-not (ns/get :admin)
(nr/redirect auth-path)
(let [file (route-html-path file-path)
page (static-prime)
file-path (.. file-path (replace "\"" "\\\""))
[title & body] (-> file
(slurp :encoding "UTF-8")
(.split "\n"))
formats [["<div id=\"preview-page\"></div>"
(format
"<div id=\"preview-page\">%s</div>"
(apply str body))]
["<input name=\"title\" type=\"text\" value=\"\"/>"
(format
"<input name=\"title\" type=\"text\" value=\"%s\"/>"
title)]
["<input name=\"route\" type=\"text\" value=\"\"/>"
(format
"<input name=\"route\" type=\"text\" value=\"%s\"/>"
file-path)]]]
(loop [f formats
p page]
(if-not (seq f)
p
(let [[original replace] (first f)]
(recur
(rest f)
(clojure.string/replace
p
(re-pattern original)
replace))))))))
(GET [(str admin-path "/:file-path/delete")
:file-path #".*"]
[file-path]
(if-not (ns/get :admin)
(nr/redirect auth-path)
(let [file (route-html-path file-path)
key (str (java.util.UUID/randomUUID))]
(ns/put! :delete-path file)
(ns/put! :delete-key key)
(hc/html
(hf/form-to
[:post (str admin-path "/delete")]
[:p "Removing File: " (hc/h file)]
[:input {:type "hidden"
:value key
:name "key"}]
(hf/submit-button "Delete Forever!"))))))
(POST (str admin-path "/delete")
{{:keys [key]} :params}
(if-not (ns/get :admin)
(nr/redirect auth-path)
(let [k :delete-path
k2 :delete-key
path (ns/get k)
valid-key (ns/get k2)
error (str "Couldn't delete " path)]
(ns/remove! k)
(ns/remove! k2)
(if-not (= valid-key key)
"Invalid Delete Token!"
(do
(try
(if-not (-> path io/file .delete)
(println error)
(do (println "Deleted =>" path)
(write-index!)))
(catch Exception ex
(println "admin delete Exception => " ex)))
(nr/redirect admin-path))))))
(GET [(str read-url-prefix ":file-path")
:file-path #".*"]
[file-path]
(let [fp (route-html-path file-path)]
(println "GET" fp)
first line is the html title and the rest is the body
(try
(let [[title & html] (clojure.string/split-lines
(slurp fp :encoding "UTF-8"))
html (apply str html)]
(load-template title html))
(catch Exception ex
(println ex)
{:status 404
:headers {}
:body "Not Found"}))))
(POST save-path
{{:keys [title
html
route
csrf]} :params}
(if-not (and (ns/get :admin)
(= (ns/get :csrf) csrf))
"Not Authorized!"
(let [url (str read-url-prefix route)
route (-> (route-html-path route)
(clojure.string/replace #"\.\." "")
io/file
.getAbsolutePath
clojure.string/trimr
io/file)
dirs (-> route .getParent io/file)]
(if-not (.exists dirs)
(.mkdirs dirs))
(spit (.getAbsolutePath route)
(str title "\n\n" html))
(write-index!)
(nr/redirect url))))
(GET auth-path []
(if (ns/get :admin)
(nr/redirect admin-path)
(hc/html
(hf/form-to
[:post auth-path]
[:p (hf/label "label-admin-user" "Admin User")]
(hf/text-field "username")
[:p (hf/label "label-admin-password" "Admin Password")]
(hf/password-field "password")
[:p (hf/submit-button "Login")]))))
(POST auth-path
{{:keys [username
password]} :params}
(if (= password (get auth-set username))
(do (ns/put! :admin true)
(nr/redirect admin-path))
(nr/redirect auth-path)))
(route/resources "/")
(route/not-found "Not Found"))
(def app
(-> app-routes
handler/site
ns/wrap-noir-session))
|
4a807790d48e739d8f1f50819b932e61b367c550602c99a630bdc3093b64b69a | hverr/haskell-obd | Car.hs | # LANGUAGE FlexibleContexts #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE NoImplicitPrelude #
{-# LANGUAGE RankNTypes #-}
-- | A generic car data type
module System.Hardware.ELM327.Car (
-- * Base car structure
Car
, defaultCar
* transformer for cashing
, CarT(..)
, runCarT
, flushCache
, cache
-- * Lenses for 'Car'
, engineCoolantTemperature
, engineFuelRate
, engineRPM
, intakeAirTemperature
, intakeManifoldAbsolutePressure
, massAirFlowRate
, throttlePosition
, vehicleSpeed
-- * Internal structure
, CarState
, emptyState
) where
import Control.Concurrent.STM (TVar, atomically, newTVarIO, readTVar, writeTVar, modifyTVar)
import Control.Lens (Lens', lens, (^.), (.~))
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Monad.Reader (ReaderT, MonadReader, runReaderT, ask)
import Control.Monad.Trans.Class (MonadTrans, lift)
import Control.Monad.Trans.Maybe (MaybeT(..))
import Numeric.Units.Dimensional.Prelude
import System.Hardware.ELM327.Connection (ConT)
import qualified System.Hardware.ELM327.Connection.OBD as OBD
-- | A car that has some properties, see lenses documentation below.
data Car m = Car { _engineCoolantTemperature :: CarT m (ThermodynamicTemperature Double)
, _engineFuelRate :: CarT m (VolumeFlow Double)
, _engineRPM :: CarT m (Frequency Double)
, _intakeAirTemperature :: CarT m (ThermodynamicTemperature Double)
, _intakeManifoldAbsolutePressure :: CarT m (Pressure Double)
, _massAirFlowRate :: CarT m (MassFlow Double)
, _throttlePosition :: CarT m Double
, _vehicleSpeed :: CarT m (Velocity Double) }
-- | A monad transformer for 'Car' where requested data is cached until 'flushCache' is called.
newtype CarT m a = CarT { runCarT' :: ReaderT (TVar CarState) m a }
deriving (Functor, Applicative, Monad, MonadIO, MonadReader (TVar CarState), MonadTrans)
-- | Run a 'CarT' with an initial empty state
runCarT :: MonadIO m => CarT m a -> m a
runCarT action = liftIO (newTVarIO emptyState) >>= runReaderT (runCarT' action)
-- | Flush the cache of a 'CarT'
flushCache :: MonadIO m => CarT m ()
flushCache = ask >>= liftIO . atomically . flip writeTVar emptyState
-- | Make an action cachable in 'CarT'.
--
-- The first access runs the given action and stores its result under
-- the supplied lens in the 'CarState' 'TVar'; subsequent accesses
-- (until 'flushCache') return the stored value without re-running it.
--
-- NOTE(review): the cache read and the cache write happen in two
-- separate STM transactions, so two concurrent callers may both observe
-- a miss and both run the action; the state stays consistent but work
-- may be duplicated — confirm this is acceptable for the intended use.
cache :: MonadIO m => (forall n . Lens' (Car n) (CarT n a)) -> CarT m a -> CarT m a
cache property action = do
  -- Fetch the cached computation for this property and try to run it.
  mv <- runCarT . (^. property) <$> (ask >>= liftIO . atomically . readTVar)
  v <- liftIO $ runMaybeT mv
  case v of
    Just x -> return x
    Nothing -> do
      v' <- action
      -- Store the freshly computed value as an always-succeeding action.
      ask >>= liftIO . atomically . flip modifyTVar (property .~ lift (MaybeT . return $ Just v'))
      return v'
| The default car , that uses straight forward OBD commands to get
-- most of the data.
defaultCar :: MonadIO m => Car (ConT m)
defaultCar = Car { _engineCoolantTemperature = cache engineCoolantTemperature (lift OBD.engineCoolantTemperature)
, _engineFuelRate = cache engineFuelRate (lift OBD.engineFuelRate)
, _engineRPM = cache engineRPM (lift OBD.engineRPM)
, _intakeAirTemperature = cache intakeAirTemperature (lift OBD.intakeAirTemperature)
, _intakeManifoldAbsolutePressure = cache intakeManifoldAbsolutePressure (lift OBD.intakeManifoldAbsolutePressure)
, _massAirFlowRate = cache massAirFlowRate (lift OBD.massAirFlowRate)
, _throttlePosition = cache throttlePosition (lift OBD.throttlePosition)
, _vehicleSpeed = cache vehicleSpeed (lift OBD.vehicleSpeed) }
-- | The engine coolant temperature of the car.
engineCoolantTemperature :: Lens' (Car m) (CarT m (ThermodynamicTemperature Double))
engineCoolantTemperature = lens _engineCoolantTemperature $ \c x -> c { _engineCoolantTemperature = x }
-- | The engine fuel rate of the car.
engineFuelRate :: Lens' (Car m) (CarT m (VolumeFlow Double))
engineFuelRate = lens _engineFuelRate $ \c x -> c { _engineFuelRate = x }
-- | The engine RPM of the car.
engineRPM :: Lens' (Car m) (CarT m (Frequency Double))
engineRPM = lens _engineRPM $ \c x -> c { _engineRPM = x }
-- | The intake air temperature of the car.
intakeAirTemperature :: Lens' (Car m) (CarT m (ThermodynamicTemperature Double))
intakeAirTemperature = lens _intakeAirTemperature $ \c x -> c { _intakeAirTemperature = x }
-- | The intake manifold absolute pressure of the car.
intakeManifoldAbsolutePressure :: Lens' (Car m) (CarT m (Pressure Double))
intakeManifoldAbsolutePressure = lens _intakeManifoldAbsolutePressure $ \c x -> c { _intakeManifoldAbsolutePressure = x }
-- | The mass air flow rate of the car.
massAirFlowRate :: Lens' (Car m) (CarT m (MassFlow Double))
massAirFlowRate = lens _massAirFlowRate $ \c x -> c { _massAirFlowRate = x }
-- | The throttle position of the car.
throttlePosition :: Lens' (Car m) (CarT m Double)
throttlePosition = lens _throttlePosition $ \c x -> c { _throttlePosition = x }
-- | The throttle position of the car.
vehicleSpeed :: Lens' (Car m) (CarT m (Velocity Double))
vehicleSpeed = lens _vehicleSpeed $ \c x -> c { _vehicleSpeed = x }
-- | The pure state of the car, with possibly missing values.
type CarState = Car (MaybeT IO)
| The empty ' CarState '
emptyState :: CarState
emptyState = Car { _engineCoolantTemperature = lift (MaybeT $ return Nothing)
, _engineFuelRate = lift (MaybeT $ return Nothing)
, _engineRPM = lift (MaybeT $ return Nothing)
, _intakeAirTemperature = lift (MaybeT $ return Nothing)
, _intakeManifoldAbsolutePressure = lift (MaybeT $ return Nothing)
, _massAirFlowRate = lift (MaybeT $ return Nothing)
, _throttlePosition = lift (MaybeT $ return Nothing)
, _vehicleSpeed = lift (MaybeT $ return Nothing) }
| null | https://raw.githubusercontent.com/hverr/haskell-obd/f7de2d2c392ad491671fda60552563af181c15dd/src/System/Hardware/ELM327/Car.hs | haskell | # LANGUAGE RankNTypes #
| A generic car data type
* Base car structure
* Lenses for 'Car'
* Internal structure
| A car that has some properties, see lenses documentation below.
| A monad transformer for 'Car' where requested data is cached until 'flushCache' is called.
| Run a 'CarT' with an initial empty state
| Flush the cache of a 'CarT'
| Make an action cachable in 'CarT'
most of the data.
| The engine coolant temperature of the car.
| The engine fuel rate of the car.
| The engine RPM of the car.
| The intake air temperature of the car.
| The intake manifold absolute pressure of the car.
| The mass air flow rate of the car.
| The throttle position of the car.
| The throttle position of the car.
| The pure state of the car, with possibly missing values. | # LANGUAGE FlexibleContexts #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE NoImplicitPrelude #
module System.Hardware.ELM327.Car (
Car
, defaultCar
* transformer for cashing
, CarT(..)
, runCarT
, flushCache
, cache
, engineCoolantTemperature
, engineFuelRate
, engineRPM
, intakeAirTemperature
, intakeManifoldAbsolutePressure
, massAirFlowRate
, throttlePosition
, vehicleSpeed
, CarState
, emptyState
) where
import Control.Concurrent.STM (TVar, atomically, newTVarIO, readTVar, writeTVar, modifyTVar)
import Control.Lens (Lens', lens, (^.), (.~))
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Monad.Reader (ReaderT, MonadReader, runReaderT, ask)
import Control.Monad.Trans.Class (MonadTrans, lift)
import Control.Monad.Trans.Maybe (MaybeT(..))
import Numeric.Units.Dimensional.Prelude
import System.Hardware.ELM327.Connection (ConT)
import qualified System.Hardware.ELM327.Connection.OBD as OBD
data Car m = Car { _engineCoolantTemperature :: CarT m (ThermodynamicTemperature Double)
, _engineFuelRate :: CarT m (VolumeFlow Double)
, _engineRPM :: CarT m (Frequency Double)
, _intakeAirTemperature :: CarT m (ThermodynamicTemperature Double)
, _intakeManifoldAbsolutePressure :: CarT m (Pressure Double)
, _massAirFlowRate :: CarT m (MassFlow Double)
, _throttlePosition :: CarT m Double
, _vehicleSpeed :: CarT m (Velocity Double) }
newtype CarT m a = CarT { runCarT' :: ReaderT (TVar CarState) m a }
deriving (Functor, Applicative, Monad, MonadIO, MonadReader (TVar CarState), MonadTrans)
runCarT :: MonadIO m => CarT m a -> m a
runCarT action = liftIO (newTVarIO emptyState) >>= runReaderT (runCarT' action)
flushCache :: MonadIO m => CarT m ()
flushCache = ask >>= liftIO . atomically . flip writeTVar emptyState
cache :: MonadIO m => (forall n . Lens' (Car n) (CarT n a)) -> CarT m a -> CarT m a
cache property action = do
mv <- runCarT . (^. property) <$> (ask >>= liftIO . atomically . readTVar)
v <- liftIO $ runMaybeT mv
case v of
Just x -> return x
Nothing -> do
v' <- action
ask >>= liftIO . atomically . flip modifyTVar (property .~ lift (MaybeT . return $ Just v'))
return v'
| The default car , that uses straight forward OBD commands to get
defaultCar :: MonadIO m => Car (ConT m)
defaultCar = Car { _engineCoolantTemperature = cache engineCoolantTemperature (lift OBD.engineCoolantTemperature)
, _engineFuelRate = cache engineFuelRate (lift OBD.engineFuelRate)
, _engineRPM = cache engineRPM (lift OBD.engineRPM)
, _intakeAirTemperature = cache intakeAirTemperature (lift OBD.intakeAirTemperature)
, _intakeManifoldAbsolutePressure = cache intakeManifoldAbsolutePressure (lift OBD.intakeManifoldAbsolutePressure)
, _massAirFlowRate = cache massAirFlowRate (lift OBD.massAirFlowRate)
, _throttlePosition = cache throttlePosition (lift OBD.throttlePosition)
, _vehicleSpeed = cache vehicleSpeed (lift OBD.vehicleSpeed) }
engineCoolantTemperature :: Lens' (Car m) (CarT m (ThermodynamicTemperature Double))
engineCoolantTemperature = lens _engineCoolantTemperature $ \c x -> c { _engineCoolantTemperature = x }
engineFuelRate :: Lens' (Car m) (CarT m (VolumeFlow Double))
engineFuelRate = lens _engineFuelRate $ \c x -> c { _engineFuelRate = x }
engineRPM :: Lens' (Car m) (CarT m (Frequency Double))
engineRPM = lens _engineRPM $ \c x -> c { _engineRPM = x }
intakeAirTemperature :: Lens' (Car m) (CarT m (ThermodynamicTemperature Double))
intakeAirTemperature = lens _intakeAirTemperature $ \c x -> c { _intakeAirTemperature = x }
intakeManifoldAbsolutePressure :: Lens' (Car m) (CarT m (Pressure Double))
intakeManifoldAbsolutePressure = lens _intakeManifoldAbsolutePressure $ \c x -> c { _intakeManifoldAbsolutePressure = x }
massAirFlowRate :: Lens' (Car m) (CarT m (MassFlow Double))
massAirFlowRate = lens _massAirFlowRate $ \c x -> c { _massAirFlowRate = x }
throttlePosition :: Lens' (Car m) (CarT m Double)
throttlePosition = lens _throttlePosition $ \c x -> c { _throttlePosition = x }
vehicleSpeed :: Lens' (Car m) (CarT m (Velocity Double))
vehicleSpeed = lens _vehicleSpeed $ \c x -> c { _vehicleSpeed = x }
type CarState = Car (MaybeT IO)
| The empty ' CarState '
emptyState :: CarState
emptyState = Car { _engineCoolantTemperature = lift (MaybeT $ return Nothing)
, _engineFuelRate = lift (MaybeT $ return Nothing)
, _engineRPM = lift (MaybeT $ return Nothing)
, _intakeAirTemperature = lift (MaybeT $ return Nothing)
, _intakeManifoldAbsolutePressure = lift (MaybeT $ return Nothing)
, _massAirFlowRate = lift (MaybeT $ return Nothing)
, _throttlePosition = lift (MaybeT $ return Nothing)
, _vehicleSpeed = lift (MaybeT $ return Nothing) }
|
56918ea780bc744a74828d0f091213589c34ec6cf239696746bfd31e7dd20cca | SquidDev/urn | list.lisp | "List manipulation functions.
These include several often-used functions for manipulation of lists,
including functional programming classics such as [[map]] and [[reduce]]
and useful patterns such as [[accumulate-with]].
Most of these functions are tail-recursive unless noted, which means
they will not blow up the stack. Along with the property of
tail-recursiveness, these functions also have favourable performance
characteristics.
## Glossary:
- **Constant time** The function runs in the same time regardless of the
size of the input list.
- **Linear time** The runtime of the function is a linear function of
the size of the input list.
- **Logarithmic time** The runtime of the function grows logarithmically
in proportion to the size of the input list.
- **Exponential time** The runtime of the function grows exponentially
in proportion to the size of the input list. This is generally a bad
thing."
(import core/base (defun defmacro when unless let* set-idx! get-idx for gensym -or
slice /= mod else print error tostring -and if n + - >= > =
not with apply and progn .. * while <= < or values-list first list
second for-pairs))
(import core/base b)
(import core/demand (assert-type!))
(import core/method (pretty eq? neq?))
(import core/type (nil? list? empty? exists? falsey? type))
(import lua/math (min max huge))
(import lua/string)
(import lua/table)
(defun car (x)
  "Return the first element of the list X. This is a constant-time
   operation.

   ### Example:
   ```cl
   > (car '(1 2 3))
   out = 1
   ```"
  (assert-type! x list)
  ;; The head of an Urn list always lives at index 1.
  (get-idx x 1))
(define slicing-view
  "Return a mutable reference to the list LIST, with indexing offset
   (positively) by OFFSET. Mutation in the original list is reflected in
   the view, and updates to the view are reflected in the original. In
   this, a sliced view resembles an (offset) pointer. Note that trying
   to access a key that doesn't make sense in a list (e.g., not its
   `:tag`, its `:n`, or a numerical index) will blow up with an arithmetic
   error.

   **Note** that the behaviour of a sliced view when the underlying list
   changes length may be confusing: accessing elements will still work,
   but the reported length of the slice will be off. Furthermore, If the
   original list shrinks, the view will maintain its length, but will
   have an adequate number of `nil`s at the end.

   ```cl
   > (define foo '(1 2 3 4 5))
   out = (1 2 3 4 5)
   > (define foo-view (cdr foo))
   out = (2 3 4 5)
   > (remove-nth! foo 5)
   out = 5
   > foo-view
   out = (2 3 4 nil)
   ```

   Also **note** that functions that modify a list in-place, like
   `insert-nth!`, `remove-nth!`, `pop-last!` and `push!` will not
   modify the view *or* the original list.

   ```cl :no-test
   > (define bar '(1 2 3 4 5))
   out = (1 2 3 4 5)
   > (define bar-view (cdr bar))
   out = (2 3 4 5)
   > (remove-nth! bar-view 4)
   out = nil
   > bar
   out = (1 2 3 4 5)
   ```

   ### Example:
   ```cl
   > (define baz '(1 2 3))
   out = (1 2 3)
   > (slicing-view baz 1)
   out = (2 3)
   > (.<! (slicing-view baz 1) 1 5)
   out = nil
   > baz
   out = (1 5 3)
   ```"
  ;; A single shared metatable forwards reads and writes to the parent
  ;; list, shifted by the view's :offset. The (+ k offset) arithmetic
  ;; assumes K is numeric (:n and :tag are stored on the view itself, so
  ;; they never reach these handlers) — hence the "arithmetic error"
  ;; caveat in the docstring.
  (let* [(ref-mt { :__index (lambda (t k)
                              (get-idx (get-idx t :parent) (+ k (get-idx t :offset))))
                   :__newindex (lambda (t k v)
                                 (set-idx! (get-idx t :parent) (+ k (get-idx t :offset)) v)) })]
    (lambda (list offset)
      (cond
        ;; Offsetting at or past the end yields a fresh empty list, not a view.
        [(<= (n list) offset) '()]
        ;; Slicing an existing slice: re-target the original parent and sum
        ;; the offsets, so chains of views stay one indirection deep.
        [(and (get-idx list :parent)
              (get-idx list :offset))
         (b/setmetatable { :parent (get-idx list :parent)
                           :offset (+ (get-idx list :offset) offset)
                           :n (- (n list) offset)
                           :tag (type list) }
                         ref-mt)]
        ;; Plain list: wrap it directly. :n and :tag live on the view itself
        ;; so length and type checks need no indirection.
        [else (b/setmetatable { :parent list
                                :offset offset
                                :n (- (n list) offset)
                                :tag (type list) }
                              ref-mt)]))))
(defun cdr (x)
  "Return a reference to the list X without the first element present.
   In the case that X is nil, the empty list is returned. Note that the
   result shares storage with X (see [[slicing-view]]): mutating the
   reference also mutates the original list.

   ### Example:
   ```cl
   > (cdr '(1 2 3))
   out = (2 3)
   ```"
  ;; Constant time: this builds a view over X, not a copy.
  (slicing-view x 1))
(defun take (xs n)
  "Take the first N elements of the list XS.

   ### Example:
   ```cl
   > (take '(1 2 3 4 5) 2)
   out = (1 2)
   ```"
  ;; Clamp N to the real length. b/n is used because the length function
  ;; `n` is shadowed by the parameter N in this scope.
  (slice xs 1 (min n (b/n xs))))
(defun drop (xs n)
  "Remove the first N elements of the list XS.

   ### Example:
   ```cl
   > (drop '(1 2 3 4 5) 2)
   out = (3 4 5)
   ```"
  ;; A nil end index makes `slice` copy through to the end of XS.
  (slice xs (+ n 1) nil))
(defun snoc (xss &xs)
  "Return a copy of the list XSS with the elements XS added to its end.
   This function runs in linear time over the two input lists: That is,
   it runs in O(n+k) time proportional both to `(n XSS)` and `(n XS)`.

   ### Example:
   ```cl
   > (snoc '(1 2 3) 4 5 6)
   out = (1 2 3 4 5 6)
   ```"
  ;; Unquote-splicing copies both lists into a fresh one; neither input
  ;; is mutated.
  `(,@xss ,@xs))
(defun cons (&xs xss)
  "Return a copy of the list XSS with the elements XS added to its head.

   ### Example:
   ```cl
   > (cons 1 2 3 '(4 5 6))
   out = (1 2 3 4 5 6)
   ```"
  ;; Unquote-splicing copies both lists into a fresh one; neither input
  ;; is mutated.
  `(,@xs ,@xss))
(defun reduce (f z xs)
  "Accumulate the list XS using the binary function F and the zero
   element Z, folding from the left (what other languages call `foldl`).
   Conceptually, `(reduce f z xs)` replaces every [[cons]] in XS with F
   and the trailing empty list with Z:

   - `'(1 2 3)` is equivalent to `(cons 1 (cons 2 (cons 3 '())))`
   - `(reduce + 0 '(1 2 3))` is equivalent to `(+ 1 (+ 2 (+ 3 0)))`.

   When called with only two arguments, Z is taken to be the list and
   its first element is used as the starting accumulator.

   ### Example:
   ```cl
   > (reduce append '() '((1 2) (3 4)))
   out = (1 2 3 4)
   ```"
  (assert-type! f function)
  (let* [(start 1)]
    ;; Two-argument form: Z is really the list, so seed the accumulator
    ;; with its first element and fold from the second element onwards.
    (when (and (nil? xs)
               (list? z))
      (set! start 2)
      (set! xs z)
      (set! z (car z)))
    (assert-type! xs list)
    (let* [(acc z)]
      (for i start (n xs) 1
        (set! acc (f acc (nth xs i))))
      acc)))
(defun map (fn &xss)
  "Apply FN to the successive elements of every list in XSS, collecting
   the results into a single list. Iteration stops at the length of the
   shortest input list.

   ### Example:
   ```cl
   > (map list '(1 2 3) '(4 5 6) '(7 8 9))
   out = ((1 4 7) (2 5 8) (3 6 9))
   > (map succ '(1 2 3))
   out = (2 3 4)
   ```"
  (let* [(lens '())]
    ;; Validate every argument up front, remembering each length.
    (for-each xs xss
      (unless (list? xs)
        (error (.. "that's no list! " (pretty xs)
                   " (it's a " (type xs) "!)")))
      (push! lens (n xs)))
    (let* [(out '())]
      ;; Only walk as far as the shortest input list reaches.
      (for i 1 (apply min lens) 1
        (push! out (apply fn (nths xss i))))
      out)))
(defun maybe-map (fn &xss)
  "Apply FN to the successive elements of every list in XSS, collecting
   every non-`nil` result into a single list.

   ### Example:
   ```cl
   > (maybe-map (lambda (x)
   .              (if (even? x)
   .                nil
   .                (succ x)))
   .   (range :from 1 :to 10))
   out = (2 4 6 8 10)
   ```"
  (let* [(lens '())]
    ;; Validate every argument up front, remembering each length.
    (for-each xs xss
      (unless (list? xs)
        (error (.. "that's no list! " (pretty xs)
                   " (it's a " (type xs) "!)")))
      (push! lens (n xs)))
    (let* [(out '())]
      (for i 1 (apply min lens) 1
        ;; nil results are simply dropped from the output.
        (with (v (apply fn (nths xss i)))
          (unless (nil? v)
            (push! out v))))
      out)))
(defun flat-map (fn &xss)
  "Map the function FN over the lists XSS, then flatten the result
   lists.

   ### Example:
   ```cl
   > (flat-map list '(1 2 3) '(4 5 6))
   out = (1 4 2 5 3 6)
   ```"
  ;; Only one level of flattening is applied to the mapped results.
  (flatten (apply map fn xss)))
(defun partition (p xs)
  "Split XS into two lists using the predicate P: the first holds every
   element for which P returns true, the second every element for which
   it does not. Both lists preserve the original order.

   ### Example:
   ```cl
   > (list (partition even? '(1 2 3 4 5 6)))
   out = ((2 4 6) (1 3 5))
   ```"
  (assert-type! p function)
  (assert-type! xs list)
  (let* [(yes '())
         (no '())]
    (for-each x xs
      ;; Route each element to the matching bucket.
      (push! (if (p x) yes no) x))
    (values-list yes no)))
(defun filter (p xs)
  "Return a list of every element of XS which matches the predicate P,
   keeping the original order.

   ### Example:
   ```cl
   > (filter even? '(1 2 3 4 5 6))
   out = (2 4 6)
   ```"
  (assert-type! p function)
  (assert-type! xs list)
  (let* [(out '())]
    (for-each x xs
      (when (p x)
        (push! out x)))
    out))
(defun exclude (p xs)
  "Return a list of every element of XS which does *not* match the
   predicate P, keeping the original order.

   ### Example:
   ```cl
   > (exclude even? '(1 2 3 4 5 6))
   out = (1 3 5)
   ```"
  (assert-type! p function)
  ;; Keeping the complement of P is the same as filtering on (not (p x)).
  (filter (lambda (x) (not (p x))) xs))
(defun any (p xs)
  "Check whether at least one element of XS satisfies the predicate P.

   ### Example:
   ```cl
   > (any exists? '(nil 1 \"foo\"))
   out = true
   ```"
  (assert-type! p function)
  (assert-type! xs list)
  (let* [(len (n xs))
         (i 1)
         (found false)]
    ;; Scan until a witness is found or the list is exhausted; P is not
    ;; called again once it has succeeded.
    (while (and (not found) (<= i len))
      (when (p (nth xs i))
        (set! found true))
      (set! i (+ i 1)))
    found))
(defun none (p xs)
  "Check that no element of XS satisfies the predicate P.

   ### Example:
   ```cl
   > (none nil? '(\"foo\" \"bar\" \"baz\"))
   out = true
   ```"
  ;; Simply the negation of [[any]].
  (if (any p xs) false true))
(defun \\ (xs ys)
  "The difference between XS and YS (non-associative.)

   ### Example:
   ```cl
   > (\\\\ '(1 2 3) '(1 3 5 7))
   out = (2)
   ```"
  ;; Keep only the elements of XS with no [[eq?]]-equal counterpart in
  ;; YS. Each element is searched for linearly in YS, so this is
  ;; O(n * k) over the two input lengths.
  (filter (lambda (x)
            (not (elem? x ys)))
          xs))
(defun nub (xs)
  "Remove duplicate elements from XS in linear time, keeping the first
   occurrence of each element.

   ### Example:
   ```cl
   > (nub '(1 1 2 2 3 3))
   out = (1 2 3)
   ```"
  (let* [(seen {})
         (out '[])]
    (for-each x xs
      ;; Elements are keyed by their printed representation, so two
      ;; values count as duplicates when they pretty-print identically.
      (with (key (pretty x))
        (unless (get-idx seen key)
          (set-idx! seen key x)
          (push! out x))))
    out))
(defun union (&xss)
"Set-like union of all the lists in XSS. Note that this function does
not preserve the lists' orders.
### Example:
```cl
> (union '(1 2 3 4) '(1 2 3 4 5))
out = (1 2 3 4 5)
```"
(let* [(set {})
(out '())]
(do [(xs xss)]
(if (list? xs)
(do [(x xs)]
(set-idx! set x x))
(set-idx! set xs xs)))
(for-pairs (k v) set
(push! out v))
out))
(defun all (p xs)
"Test if all elements of XS match the predicate P.
### Example:
```cl
> (all symbol? '(foo bar baz))
out = true
> (all number? '(1 2 foo))
out = false
```"
(assert-type! p function)
(assert-type! xs list)
(let* [(len (n xs))
(fun nil)]
(set! fun (lambda (i)
(cond
[(> i len) true]
[(p (nth xs i)) (fun (+ i 1))]
[else false])))
(fun 1)))
(defun elem? (x xs)
"Test if X is present in the list XS.
### Example:
```cl
> (elem? 1 '(1 2 3))
out = true
> (elem? 'foo '(1 2 3))
out = false
```"
(assert-type! xs list)
(any (lambda (y) (eq? x y)) xs))
(defun find-index (p xs)
"Finds the first index in XS where the item matches the predicate
P. Returns `nil` if no such item exists.
### Example:
```cl
> (find-index even? '(3 4 5))
out = 2
> (find-index even? '(1 3 5))
out = nil
```"
(assert-type! p function)
(assert-type! xs list)
(let* [(len (n xs))
(fun nil)]
(set! fun (lambda (i)
(cond
[(> i len) nil]
[(p (nth xs i)) i]
[else (fun (+ i 1))])))
(fun 1)))
(defun element-index (x xs)
"Finds the first index in XS where the item matches X. Returns `nil` if
no such item exists.
### Example:
```cl
> (element-index 4 '(3 4 5))
out = 2
> (element-index 2 '(1 3 5))
out = nil
```"
(assert-type! xs list)
(find-index (lambda (y) (eq? x y)) xs))
(defun prune (xs)
"Remove values matching the predicates [[empty?]] or [[nil?]] from
the list XS.
### Example:
```cl
> (prune (list '() nil 1 nil '() 2))
out = (1 2)
```"
(assert-type! xs list)
(filter (lambda (x) (and (not (nil? x)) (not (empty? x)))) xs))
(defun traverse (xs f)
:deprecated "Use [[map]] instead."
"An alias for [[map]] with the arguments XS and F flipped.
### Example:
```cl
> (traverse '(1 2 3) succ)
out = (2 3 4)
```"
(map f xs))
(defun last (xs)
"Return the last element of the list XS.
Counterintutively, this function runs in constant time.
### Example:
```cl
> (last (range :from 1 :to 100))
out = 100
```"
(assert-type! xs list)
(get-idx xs (n xs)))
(defun init (xs)
"Return the list XS with the last element removed.
This is the dual of LAST.
### Example:
```cl
> (init (range :from 1 :to 10))
out = (1 2 3 4 5 6 7 8 9)
```"
(assert-type! xs list)
(slice xs 1 (- (n xs) 1)))
(defun nth (xs idx)
"Get the IDX th element in the list XS. The first element is 1.
This function runs in constant time.
### Example:
```cl
> (nth (range :from 1 :to 100) 10)
out = 10
```"
(if (>= idx 0)
(get-idx xs idx)
(get-idx xs (+ (get-idx xs :n) 1 idx))))
(defun nths (xss idx)
"Get the IDX-th element in all the lists given at XSS. The first
element is1.
### Example:
```cl
> (nths '((1 2 3) (4 5 6) (7 8 9)) 2)
out = (2 5 8)
```"
(let* [(out '())]
(for i 1 (n xss) 1
(push! out (nth (nth xss i) idx)))
out))
(defun push! (xs &vals)
"Mutate the list XS, adding VALS to its end.
### Example:
```cl
> (define list '(1 2 3))
> (push! list 4)
out = (1 2 3 4)
> list
out = (1 2 3 4)
```"
(assert-type! xs list)
(let* [(nxs (n xs))
(len (+ nxs (n vals)))]
(set-idx! xs "n" len)
(for i 1 (n vals) 1
(set-idx! xs (+ nxs i) (get-idx vals i)))
xs))
(define push-cdr!
"Mutate the list XS, adding VALS to its end.
### Example:
```cl
> (define list '(1 2 3))
> (push-cdr! list 4)
out = (1 2 3 4)
> list
out = (1 2 3 4)
```"
:deprecated "Use [[push!]] instead."
push!)
(defun pop-last! (xs)
"Mutate the list XS, removing and returning its last element.
### Example:
```cl
> (define list '(1 2 3))
> (pop-last! list)
out = 3
> list
out = (1 2)
``` "
(assert-type! xs list)
(with (x (get-idx xs (n xs)))
(set-idx! xs (n xs) nil)
(set-idx! xs "n" (- (n xs) 1))
x))
(defun remove-nth! (li idx)
"Mutate the list LI, removing the value at IDX and returning it.
### Example:
```cl
> (define list '(1 2 3))
> (remove-nth! list 2)
out = 2
> list
out = (1 3)
``` "
(assert-type! li list)
(set-idx! li "n" (- (get-idx li "n") 1))
(lua/table/remove li idx))
(defun insert-nth! (li idx val)
"Mutate the list LI, inserting VAL at IDX.
### Example:
```cl
> (define list '(1 2 3))
> (insert-nth! list 2 5)
> list
out = (1 5 2 3)
``` "
(assert-type! li list)
(set-idx! li "n" (+ (get-idx li "n") 1))
(lua/table/insert li idx val))
(defmacro for-each (var lst &body)
:deprecated "Use [[do]]/[[dolist]] instead"
"Perform the set of actions BODY for all values in LST, binding the current value to VAR.
### Example:
```cl
> (for-each var '(1 2 3)
. (print! var))
1
2
3
out = nil
```"
`(do [(,var ,lst)]
,@body))
(defmacro dolist (vars &stmts)
"Iterate over all given VARS, running STMTS and collecting the results.
### Example:
```cl
> (dolist [(a '(1 2 3))
. (b '(1 2 3))]
. (list a b))
out = ((1 1) (1 2) (1 3) (2 1) (2 2) (2 3) (3 1) (3 2) (3 3))
```"
(let* [(collect (gensym 'list))
(arg (gensym 'val))
(yield (gensym 'yield))
(out `(,yield (progn ,@stmts)))]
(for i (n vars) 1 -1
(let* [(var (nth vars i))
(cur-list (gensym))
(i (gensym 'i))]
(set! out
`(let* [(,cur-list ,(cadr var))]
(for ,i 1 (n ,cur-list) 1
(let* [(,(car var) (get-idx ,cur-list ,i))]
,out))))))
`(let* [(,collect '())
(,yield (lambda (,arg)
(when (/= ,arg nil)
(push! ,collect ,arg))))]
,out
,collect)))
(defmacro do (vars &stmts)
"Iterate over all given VARS, running STMTS **without** collecting the
results.
### Example:
```cl
> (do [(a '(1 2))
. (b '(1 2))]
. (print! $\"a = ${a}, b = ${b}\"))
a = 1, b = 1
a = 1, b = 2
a = 2, b = 1
a = 2, b = 2
out = nil
```"
(let* [(out `(progn ,@stmts))]
(for i (n vars) 1 -1
(let* [(var (nth vars i))
(cur-list (gensym))
(i (gensym 'i))]
(set! out
`(let* [(,cur-list ,(cadr var))]
(for ,i 1 (n ,cur-list) 1
(let* [(,(car var) (get-idx ,cur-list ,i))]
,out))))))
out))
(defun append (xs ys)
"Concatenate XS and YS.
### Example:
```cl
> (append '(1 2) '(3 4))
out = (1 2 3 4)
``` "
`(,@xs ,@ys))
(defun flatten (xss)
"Concatenate all the lists in XSS. XSS must not contain elements which
are not lists.
### Example:
```cl
> (flatten '((1 2) (3 4)))
out = (1 2 3 4)
```"
(reduce append '() xss))
(defun range (&args)
  "Build a list from :FROM to :TO, optionally passing by :BY.

   Note that :BY names the *second* element of the range, not the step
   size: the step is (- by from).

   ### Example:
   ```cl
   > (range :from 1 :to 10)
   out = (1 2 3 4 5 6 7 8 9 10)
   > (range :from 1 :to 10 :by 3)
   out = (1 3 5 7 9)
   ```"
  ;; Gather the keyword arguments into a lookup table.
  (let* [(x (let* [(out {})]
              (when (= (mod (n args) 2) 1)
                (error "Expected an even number of arguments to range" 2))
              (for i 1 (n args) 2
                (set-idx! out (get-idx args i) (get-idx args (+ i 1))))
              out))
         (st (or (get-idx x :from) 1))
         ;; Check for :to *before* doing arithmetic on it: the previous
         ;; version computed (+ 1 nil) first, which raised a Lua
         ;; arithmetic error instead of this message.
         (to (or (get-idx x :to)
                 (error "Expected end index, got nothing")))
         ;; Step is the distance from :from to :by, defaulting to 1.
         (inc (- (or (get-idx x :by) (+ 1 st)) st))
         ;; The exclusive bound lies one step *past* :to in the direction
         ;; of travel, so descending ranges terminate at :to as well.
         (ed (if (< inc 0) (- to 1) (+ to 1)))
         (tst (if (< inc 0) > <))]
    (let* [(c st)
           (out '())]
      (while (tst c ed)
        (push! out c)
        (set! c (+ c inc)))
      out)))
(defun reverse (xs)
"Reverse the list XS, using the accumulator ACC.
### Example:
```cl
> (reverse (range :from 1 :to 10))
out = (10 9 8 7 6 5 4 3 2 1)
```"
(let* [(out '())]
(for i (n xs) 1 -1
(push! out (nth xs i)))
out))
(defun accumulate-with (f ac z xs)
"A composition of [[reduce]] and [[map]].
Transform the values of XS using the function F, then accumulate them
starting form Z using the function AC.
This function behaves as if it were folding over the list XS with the
monoid described by (F, AC, Z), that is, F constructs the monoid, AC
is the binary operation, and Z is the zero element.
### Example:
```cl
> (accumulate-with tonumber + 0 '(1 2 3 4 5))
out = 15
```"
(assert-type! f function)
(assert-type! ac function)
(reduce ac z (map f xs)))
(defun sum (xs)
"Return the sum of all elements in XS.
### Example:
```cl
> (sum '(1 2 3 4))
out = 10
```"
(reduce + 0 xs))
(defun prod (xs)
"Return the product of all elements in XS.
### Example:
```cl
> (prod '(1 2 3 4))
out = 24
```"
(reduce * 1 xs))
(defun take-while (p xs idx)
"Takes elements from the list XS while the predicate P is true,
starting at index IDX. Works like `filter`, but stops after the
first non-matching element.
### Example:
```cl
> (define list '(2 2 4 3 9 8 4 6))
> (define p (lambda (x) (= (mod x 2) 0)))
> (filter p list)
out = (2 2 4 8 4 6)
> (take-while p list 1)
out = (2 2 4)
```"
(assert-type! p function)
(assert-type! xs list)
(unless (= (type idx) "number")
(set! idx 1))
(let* [(l '())
(ln (n xs))
(x (nth xs idx))]
(unless (nil? x)
(while (and (<= idx ln) (p x))
(push! l x)
(set! idx (+ idx 1))
(set! x (nth xs idx))))
l))
(defun split (xs y)
"Splits a list into sub-lists by the separator Y.
### Example:
```cl
> (split '(1 2 3 4) 3)
out = ((1 2) (4))
```"
(assert-type! xs list)
(let* [(l '())
(p (lambda (x) (neq? x y)))
(idx 1)
(b (take-while p xs idx))]
(while (not (empty? b))
(push! l b)
(set! idx (+ idx (n b) 1))
(set! b (take-while p xs idx)))
l))
(defun groups-of (xs num)
"Splits the list XS into sub-lists of size NUM.
### Example:
```cl
> (groups-of '(1 2 3 4 5 6) 3)
out = ((1 2 3) (4 5 6))
```"
(assert-type! xs list)
(let* [(result '())
(group nil)]
(for idx 1 (n xs) 1
(when (= (mod (- idx 1) num) 0)
(set! group '())
(push! result group))
(push! group (nth xs idx)))
result))
(defun sort (xs f)
"Sort the list XS, non-destructively, optionally using F as a
comparator. A sorted version of the list is returned, while the
original remains untouched.
### Example:
```cl
> (define li '(9 5 7 2 1))
out = (9 5 7 2 1)
> (sort li)
out = (1 2 5 7 9)
> li
out = (9 5 7 2 1)
```"
(let* [(copy (map (lambda (x) x) xs))]
(lua/table/sort copy f)
copy))
(defun sort! (xs f)
  "Sort the list XS in place, optionally using F as a comparator.
   Returns XS itself, which has been mutated.
   ### Example:
   ```cl
   > (define li '(9 5 7 2 1))
   out = (9 5 7 2 1)
   > (sort! li)
   out = (1 2 5 7 9)
   > li
   out = (1 2 5 7 9)
   ```"
  ;; lua/table/sort sorts in place; return the list to allow chaining.
  (lua/table/sort xs f)
  xs)
;; Auto-generate all `c[ad]r`/`c[ad]rs` methods.
,@(let* [(out '())
(symb (lambda (x) { :tag "symbol" :contents x }))
(depth-symb (lambda (idx mode) (symb (.. "c" mode (lua/string/rep "d" (- idx 1)) "r"))))
(pair (lambda (x y) (list y x)))
(generate nil)]
(set! generate (lambda (name stack do-idx idx depth)
(when (> (n name) 1)
(with (head (if do-idx `(get-idx ,'xs ,idx) `(slicing-view ,'xs ,idx)))
(push! out `(define ,(symb (.. "c" name "r"))
(lambda (,'xs)
(assert-type! ,'xs ,'list)
,(reduce pair head stack))))))
(when (> (n name) 0)
(push! out `(define ,(symb (.. "c" name "rs")) (lambda (,'xs) (map ,(symb (.. "c" name "r")) ,'xs)))))
(cond
[(<= depth 0)]
[do-idx
(generate (.. name "a") (cons (depth-symb idx "a") stack) true 1 (- depth 1))
(generate (.. name "d") stack true (+ idx 1) (- depth 1))]
[else
(generate (.. name "a") (cons (depth-symb idx "d") stack) true 1 (- depth 1))
(generate (.. name "d") stack false (+ idx 1) (- depth 1))])))
(generate "a" '() true 1 3)
(generate "d" '() false 1 3)
out)
| null | https://raw.githubusercontent.com/SquidDev/urn/6e6717cf1376b0950e569e3771cb7e287aed291d/lib/core/list.lisp | lisp | Auto-generate all `c[ad]r`/`c[ad]rs` methods. | "List manipulation functions.
These include several often-used functions for manipulation of lists,
including functional programming classics such as [[map]] and [[reduce]]
and useful patterns such as [[accumulate-with]].
Most of these functions are tail-recursive unless noted, which means
they will not blow up the stack. Along with the property of
tail-recursiveness, these functions also have favourable performance
characteristics.
## Glossary:
- **Constant time** The function runs in the same time regardless of the
size of the input list.
- **Linear time** The runtime of the function is a linear function of
the size of the input list.
- **Logarithmic time** The runtime of the function grows logarithmically
in proportion to the size of the input list.
- **Exponential time** The runtime of the function grows exponentially
in proportion to the size of the input list. This is generally a bad
thing."
(import core/base (defun defmacro when unless let* set-idx! get-idx for gensym -or
slice /= mod else print error tostring -and if n + - >= > =
not with apply and progn .. * while <= < or values-list first list
second for-pairs))
(import core/base b)
(import core/demand (assert-type!))
(import core/method (pretty eq? neq?))
(import core/type (nil? list? empty? exists? falsey? type))
(import lua/math (min max huge))
(import lua/string)
(import lua/table)
(defun car (x)
"Return the first element present in the list X. This function operates
in constant time.
### Example:
```cl
> (car '(1 2 3))
out = 1
```"
(assert-type! x list)
(b/car x))
(define slicing-view
"Return a mutable reference to the list LIST, with indexing offset
(positively) by OFFSET. Mutation in the original list is reflected in
the view, and updates to the view are reflected in the original. In
this, a sliced view resembles an (offset) pointer. Note that trying
to access a key that doesn't make sense in a list (e.g., not its
`:tag`, its `:n`, or a numerical index) will blow up with an arithmetic
error.
**Note** that the behaviour of a sliced view when the underlying list
changes length may be confusing: accessing elements will still work,
but the reported length of the slice will be off. Furthermore, If the
original list shrinks, the view will maintain its length, but will
have an adequate number of `nil`s at the end.
```cl
> (define foo '(1 2 3 4 5))
out = (1 2 3 4 5)
> (define foo-view (cdr foo))
out = (2 3 4 5)
> (remove-nth! foo 5)
out = 5
> foo-view
out = (2 3 4 nil)
```
Also **note** that functions that modify a list in-place, like
`insert-nth!', `remove-nth!`, `pop-last!` and `push!` will not
modify the view *or* the original list.
```cl :no-test
> (define bar '(1 2 3 4 5))
out = (1 2 3 4 5)
> (define bar-view (cdr bar))
out = (2 3 4 5)
> (remove-nth! bar-view 4)
out = nil
> bar
out = (1 2 3 4 5)
```
### Example:
```cl
> (define baz '(1 2 3))
out = (1 2 3)
> (slicing-view baz 1)
out = (2 3)
> (.<! (slicing-view baz 1) 1 5)
out = nil
> baz
out = (1 5 3)
```"
(let* [(ref-mt { :__index (lambda (t k)
(get-idx (get-idx t :parent) (+ k (get-idx t :offset))))
:__newindex (lambda (t k v)
(set-idx! (get-idx t :parent) (+ k (get-idx t :offset)) v)) })]
(lambda (list offset)
(cond
[(<= (n list) offset) '()]
[(and (get-idx list :parent)
(get-idx list :offset))
(b/setmetatable { :parent (get-idx list :parent)
:offset (+ (get-idx list :offset) offset)
:n (- (n list) offset)
:tag (type list) }
ref-mt)]
[else (b/setmetatable { :parent list
:offset offset
:n (- (n list) offset)
:tag (type list) }
ref-mt)]))))
(defun cdr (x)
"Return a reference the list X without the first element present. In
the case that X is nil, the empty list is returned. Note that
mutating the reference will not mutate the
### Example:
```cl
> (cdr '(1 2 3))
out = (2 3)
```"
(slicing-view x 1))
(defun take (xs n)
"Take the first N elements of the list XS.
### Example:
```cl
> (take '(1 2 3 4 5) 2)
out = (1 2)
```"
(slice xs 1 (min n (b/n xs))))
(defun drop (xs n)
"Remove the first N elements of the list XS.
### Example:
```cl
> (drop '(1 2 3 4 5) 2)
out = (3 4 5)
```"
(slice xs (+ n 1) nil))
(defun snoc (xss &xs)
"Return a copy of the list XS with the element XS added to its end.
This function runs in linear time over the two input lists: That is,
it runs in O(n+k) time proportional both to `(n XSS)` and `(n XS)`.
### Example:
```cl
> (snoc '(1 2 3) 4 5 6)
out = (1 2 3 4 5 6)
``` "
`(,@xss ,@xs))
(defun cons (&xs xss)
"Return a copy of the list XSS with the elements XS added to its head.
### Example:
```cl
> (cons 1 2 3 '(4 5 6))
out = (1 2 3 4 5 6)
```"
`(,@xs ,@xss))
(defun reduce (f z xs)
"Accumulate the list XS using the binary function F and the zero
element Z. This function is also called `foldl` by some authors. One
can visualise `(reduce f z xs)` as replacing the [[cons]] operator in
building lists with F, and the empty list with Z.
Consider:
- `'(1 2 3)` is equivalent to `(cons 1 (cons 2 (cons 3 '())))`
- `(reduce + 0 '(1 2 3))` is equivalent to `(+ 1 (+ 2 (+ 3 0)))`.
### Example:
```cl
> (reduce append '() '((1 2) (3 4)))
out = (1 2 3 4)
equivalent to ( append ' ( 1 2 ) ( append ' ( 3 4 ) ' ( ) ) )
```"
(assert-type! f function)
(let* [(start 1)]
(if (and (nil? xs)
(list? z))
(progn
(set! start 2)
(set! xs z)
(set! z (car z)))
nil)
(assert-type! xs list)
(let* [(accum z)]
(for i start (n xs) 1
(set! accum (f accum (nth xs i))))
accum)))
(defun map (fn &xss)
"Iterate over all the successive cars of XSS, producing a single list
by applying FN to all of them. For example:
### Example:
```cl
> (map list '(1 2 3) '(4 5 6) '(7 8 9))
out = ((1 4 7) (2 5 8) (3 6 9))
> (map succ '(1 2 3))
out = (2 3 4)
```"
(let* [(ns (let* [(out '())]
(for i 1 (n xss) 1
(if (not (list? (nth xss i)))
(error (.. "that's no list! " (pretty (nth xss i))
" (it's a " (type (nth xss i)) "!)"))
true)
(push! out (n (nth xss i))))
out))
(out '())]
(for i 1 (apply min ns) 1
(push! out (apply fn (nths xss i))))
out))
(defun maybe-map (fn &xss)
"Iterate over all successive cars of XSS, producing a single list by
applying FN to all of them, while discarding any `nil`s.
### Example:
```cl
> (maybe-map (lambda (x)
. (if (even? x)
. nil
. (succ x)))
. (range :from 1 :to 10))
out = (2 4 6 8 10)
```"
(let* [(lengths (let* [(out '())]
(for i 1 (n xss) 1
(if (not (list? (nth xss i)))
(error (.. "that's no list! " (pretty (nth xss i))
" (it's a " (type (nth xss i)) "!)"))
true)
(push! out (n (nth xss i))))
out))
(out '())]
(for i 1 (apply min lengths) 1
(let* [(vl (apply fn (nths xss i)))]
(if (/= vl nil)
(push! out vl)
nil)))
out))
(defun flat-map (fn &xss)
"Map the function FN over the lists XSS, then flatten the result
lists.
### Example:
```cl
> (flat-map list '(1 2 3) '(4 5 6))
out = (1 4 2 5 3 6)
```"
(flatten (apply map fn xss)))
(defun partition (p xs)
"Split XS based on the predicate P. Values for which the predicate
returns true are returned in the first list, whereas values which
don't pass the predicate are returned in the second list.
### Example:
```cl
> (list (partition even? '(1 2 3 4 5 6)))
out = ((2 4 6) (1 3 5))
```"
(assert-type! p function)
(assert-type! xs list)
(let* [(passed '())
(failed '())]
(for i 1 (n xs) 1
(with (x (nth xs i))
(push! (if (p x) passed failed) x)))
(values-list passed failed)))
(defun filter (p xs)
"Return a list with only the elements of XS that match the predicate
P.
### Example:
```cl
> (filter even? '(1 2 3 4 5 6))
out = (2 4 6)
```"
(first (partition p xs)))
(defun exclude (p xs)
"Return a list with only the elements of XS that don't match the
predicate P.
### Example:
```cl
> (exclude even? '(1 2 3 4 5 6))
out = (1 3 5)
```"
(second (partition p xs)))
(defun any (p xs)
"Check for the existence of an element in XS that matches the predicate
P.
### Example:
```cl
> (any exists? '(nil 1 \"foo\"))
out = true
```"
(assert-type! p function)
(assert-type! xs list)
(let* [(len (n xs))
(fun nil)]
(set! fun (lambda (i)
(cond
[(> i len) false]
[(p (nth xs i)) true]
[else (fun (+ i 1))])))
(fun 1)))
(defun none (p xs)
"Check that no elements in XS match the predicate P.
### Example:
```cl
> (none nil? '(\"foo\" \"bar\" \"baz\"))
out = true
```"
(not (any p xs)))
(defun \\ (xs ys)
"The difference between XS and YS (non-associative.)
### Example:
```cl
> (\\\\ '(1 2 3) '(1 3 5 7))
out = (2)
```"
(filter (lambda (x)
(not (elem? x ys)))
xs))
(defun nub (xs)
  "Remove duplicate elements from XS. This runs in linear time.

   Two elements count as duplicates when their pretty-printed forms are
   equal. The first occurrence of each element is kept, and the input
   order is preserved.

   ### Example:
   ```cl
   > (nub '(1 1 2 2 3 3))
   out = (1 2 3)
   ```"
  (let* [(seen {})
         (out '())]
    ;; Key the seen-table on the pretty-printed form, since table keys
    ;; compare structured values by identity rather than by contents.
    (for i 1 (n xs) 1
      (let* [(elm (nth xs i))
             (szd (pretty elm))]
        (when (nil? (get-idx seen szd))
          (push! out elm)
          (set-idx! seen szd elm))))
    out))
(defun union (&xss)
"Set-like union of all the lists in XSS. Note that this function does
not preserve the lists' orders.
### Example:
```cl
> (union '(1 2 3 4) '(1 2 3 4 5))
out = (1 2 3 4 5)
```"
(let* [(set {})
(out '())]
(do [(xs xss)]
(if (list? xs)
(do [(x xs)]
(set-idx! set x x))
(set-idx! set xs xs)))
(for-pairs (k v) set
(push! out v))
out))
(defun all (p xs)
"Test if all elements of XS match the predicate P.
### Example:
```cl
> (all symbol? '(foo bar baz))
out = true
> (all number? '(1 2 foo))
out = false
```"
(assert-type! p function)
(assert-type! xs list)
(let* [(len (n xs))
(fun nil)]
(set! fun (lambda (i)
(cond
[(> i len) true]
[(p (nth xs i)) (fun (+ i 1))]
[else false])))
(fun 1)))
(defun elem? (x xs)
"Test if X is present in the list XS.
### Example:
```cl
> (elem? 1 '(1 2 3))
out = true
> (elem? 'foo '(1 2 3))
out = false
```"
(assert-type! xs list)
(any (lambda (y) (eq? x y)) xs))
(defun find-index (p xs)
"Finds the first index in XS where the item matches the predicate
P. Returns `nil` if no such item exists.
### Example:
```cl
> (find-index even? '(3 4 5))
out = 2
> (find-index even? '(1 3 5))
out = nil
```"
(assert-type! p function)
(assert-type! xs list)
(let* [(len (n xs))
(fun nil)]
(set! fun (lambda (i)
(cond
[(> i len) nil]
[(p (nth xs i)) i]
[else (fun (+ i 1))])))
(fun 1)))
(defun element-index (x xs)
"Finds the first index in XS where the item matches X. Returns `nil` if
no such item exists.
### Example:
```cl
> (element-index 4 '(3 4 5))
out = 2
> (element-index 2 '(1 3 5))
out = nil
```"
(assert-type! xs list)
(find-index (lambda (y) (eq? x y)) xs))
(defun prune (xs)
"Remove values matching the predicates [[empty?]] or [[nil?]] from
the list XS.
### Example:
```cl
> (prune (list '() nil 1 nil '() 2))
out = (1 2)
```"
(assert-type! xs list)
(filter (lambda (x) (and (not (nil? x)) (not (empty? x)))) xs))
(defun traverse (xs f)
:deprecated "Use [[map]] instead."
"An alias for [[map]] with the arguments XS and F flipped.
### Example:
```cl
> (traverse '(1 2 3) succ)
out = (2 3 4)
```"
(map f xs))
(defun last (xs)
  "Return the final element of the list XS.
   Counterintuitively, this function runs in constant time.
   ### Example:
   ```cl
   > (last (range :from 1 :to 100))
   out = 100
   ```"
  (assert-type! xs list)
  ;; A negative index into nth counts back from the end of the list.
  (nth xs -1))
(defun init (xs)
  "Return a copy of the list XS without its final element.
   This is the dual of [[last]].
   ### Example:
   ```cl
   > (init (range :from 1 :to 10))
   out = (1 2 3 4 5 6 7 8 9)
   ```"
  (assert-type! xs list)
  ;; Taking all but one element is exactly a slice from 1 to (n - 1).
  (take xs (- (n xs) 1)))
(defun nth (xs idx)
"Get the IDX th element in the list XS. The first element is 1.
This function runs in constant time.
### Example:
```cl
> (nth (range :from 1 :to 100) 10)
out = 10
```"
(if (>= idx 0)
(get-idx xs idx)
(get-idx xs (+ (get-idx xs :n) 1 idx))))
(defun nths (xss idx)
"Get the IDX-th element in all the lists given at XSS. The first
element is1.
### Example:
```cl
> (nths '((1 2 3) (4 5 6) (7 8 9)) 2)
out = (2 5 8)
```"
(let* [(out '())]
(for i 1 (n xss) 1
(push! out (nth (nth xss i) idx)))
out))
(defun push! (xs &vals)
"Mutate the list XS, adding VALS to its end.
### Example:
```cl
> (define list '(1 2 3))
> (push! list 4)
out = (1 2 3 4)
> list
out = (1 2 3 4)
```"
(assert-type! xs list)
(let* [(nxs (n xs))
(len (+ nxs (n vals)))]
(set-idx! xs "n" len)
(for i 1 (n vals) 1
(set-idx! xs (+ nxs i) (get-idx vals i)))
xs))
(define push-cdr!
"Mutate the list XS, adding VALS to its end.
### Example:
```cl
> (define list '(1 2 3))
> (push-cdr! list 4)
out = (1 2 3 4)
> list
out = (1 2 3 4)
```"
:deprecated "Use [[push!]] instead."
push!)
(defun pop-last! (xs)
"Mutate the list XS, removing and returning its last element.
### Example:
```cl
> (define list '(1 2 3))
> (pop-last! list)
out = 3
> list
out = (1 2)
``` "
(assert-type! xs list)
(with (x (get-idx xs (n xs)))
(set-idx! xs (n xs) nil)
(set-idx! xs "n" (- (n xs) 1))
x))
(defun remove-nth! (li idx)
"Mutate the list LI, removing the value at IDX and returning it.
### Example:
```cl
> (define list '(1 2 3))
> (remove-nth! list 2)
out = 2
> list
out = (1 3)
``` "
(assert-type! li list)
(set-idx! li "n" (- (get-idx li "n") 1))
(lua/table/remove li idx))
(defun insert-nth! (li idx val)
"Mutate the list LI, inserting VAL at IDX.
### Example:
```cl
> (define list '(1 2 3))
> (insert-nth! list 2 5)
> list
out = (1 5 2 3)
``` "
(assert-type! li list)
(set-idx! li "n" (+ (get-idx li "n") 1))
(lua/table/insert li idx val))
(defmacro for-each (var lst &body)
:deprecated "Use [[do]]/[[dolist]] instead"
"Perform the set of actions BODY for all values in LST, binding the current value to VAR.
### Example:
```cl
> (for-each var '(1 2 3)
. (print! var))
1
2
3
out = nil
```"
`(do [(,var ,lst)]
,@body))
(defmacro dolist (vars &stmts)
"Iterate over all given VARS, running STMTS and collecting the results.
### Example:
```cl
> (dolist [(a '(1 2 3))
. (b '(1 2 3))]
. (list a b))
out = ((1 1) (1 2) (1 3) (2 1) (2 2) (2 3) (3 1) (3 2) (3 3))
```"
(let* [(collect (gensym 'list))
(arg (gensym 'val))
(yield (gensym 'yield))
(out `(,yield (progn ,@stmts)))]
(for i (n vars) 1 -1
(let* [(var (nth vars i))
(cur-list (gensym))
(i (gensym 'i))]
(set! out
`(let* [(,cur-list ,(cadr var))]
(for ,i 1 (n ,cur-list) 1
(let* [(,(car var) (get-idx ,cur-list ,i))]
,out))))))
`(let* [(,collect '())
(,yield (lambda (,arg)
(when (/= ,arg nil)
(push! ,collect ,arg))))]
,out
,collect)))
(defmacro do (vars &stmts)
"Iterate over all given VARS, running STMTS **without** collecting the
results.
### Example:
```cl
> (do [(a '(1 2))
. (b '(1 2))]
. (print! $\"a = ${a}, b = ${b}\"))
a = 1, b = 1
a = 1, b = 2
a = 2, b = 1
a = 2, b = 2
out = nil
```"
(let* [(out `(progn ,@stmts))]
(for i (n vars) 1 -1
(let* [(var (nth vars i))
(cur-list (gensym))
(i (gensym 'i))]
(set! out
`(let* [(,cur-list ,(cadr var))]
(for ,i 1 (n ,cur-list) 1
(let* [(,(car var) (get-idx ,cur-list ,i))]
,out))))))
out))
(defun append (xs ys)
"Concatenate XS and YS.
### Example:
```cl
> (append '(1 2) '(3 4))
out = (1 2 3 4)
``` "
`(,@xs ,@ys))
(defun flatten (xss)
"Concatenate all the lists in XSS. XSS must not contain elements which
are not lists.
### Example:
```cl
> (flatten '((1 2) (3 4)))
out = (1 2 3 4)
```"
(reduce append '() xss))
(defun range (&args)
  "Build a list from :FROM to :TO, optionally passing by :BY.

   Note that :BY names the *second* element of the range, not the step
   size: the step is (- by from).

   ### Example:
   ```cl
   > (range :from 1 :to 10)
   out = (1 2 3 4 5 6 7 8 9 10)
   > (range :from 1 :to 10 :by 3)
   out = (1 3 5 7 9)
   ```"
  ;; Gather the keyword arguments into a lookup table.
  (let* [(x (let* [(out {})]
              (when (= (mod (n args) 2) 1)
                (error "Expected an even number of arguments to range" 2))
              (for i 1 (n args) 2
                (set-idx! out (get-idx args i) (get-idx args (+ i 1))))
              out))
         (st (or (get-idx x :from) 1))
         ;; Check for :to *before* doing arithmetic on it: the previous
         ;; version computed (+ 1 nil) first, which raised a Lua
         ;; arithmetic error instead of this message.
         (to (or (get-idx x :to)
                 (error "Expected end index, got nothing")))
         ;; Step is the distance from :from to :by, defaulting to 1.
         (inc (- (or (get-idx x :by) (+ 1 st)) st))
         ;; The exclusive bound lies one step *past* :to in the direction
         ;; of travel, so descending ranges terminate at :to as well.
         (ed (if (< inc 0) (- to 1) (+ to 1)))
         (tst (if (< inc 0) > <))]
    (let* [(c st)
           (out '())]
      (while (tst c ed)
        (push! out c)
        (set! c (+ c inc)))
      out)))
(defun reverse (xs)
"Reverse the list XS, using the accumulator ACC.
### Example:
```cl
> (reverse (range :from 1 :to 10))
out = (10 9 8 7 6 5 4 3 2 1)
```"
(let* [(out '())]
(for i (n xs) 1 -1
(push! out (nth xs i)))
out))
(defun accumulate-with (f ac z xs)
"A composition of [[reduce]] and [[map]].
Transform the values of XS using the function F, then accumulate them
starting form Z using the function AC.
This function behaves as if it were folding over the list XS with the
monoid described by (F, AC, Z), that is, F constructs the monoid, AC
is the binary operation, and Z is the zero element.
### Example:
```cl
> (accumulate-with tonumber + 0 '(1 2 3 4 5))
out = 15
```"
(assert-type! f function)
(assert-type! ac function)
(reduce ac z (map f xs)))
(defun sum (xs)
"Return the sum of all elements in XS.
### Example:
```cl
> (sum '(1 2 3 4))
out = 10
```"
(reduce + 0 xs))
(defun prod (xs)
"Return the product of all elements in XS.
### Example:
```cl
> (prod '(1 2 3 4))
out = 24
```"
(reduce * 1 xs))
(defun take-while (p xs idx)
"Takes elements from the list XS while the predicate P is true,
starting at index IDX. Works like `filter`, but stops after the
first non-matching element.
### Example:
```cl
> (define list '(2 2 4 3 9 8 4 6))
> (define p (lambda (x) (= (mod x 2) 0)))
> (filter p list)
out = (2 2 4 8 4 6)
> (take-while p list 1)
out = (2 2 4)
```"
(assert-type! p function)
(assert-type! xs list)
(unless (= (type idx) "number")
(set! idx 1))
(let* [(l '())
(ln (n xs))
(x (nth xs idx))]
(unless (nil? x)
(while (and (<= idx ln) (p x))
(push! l x)
(set! idx (+ idx 1))
(set! x (nth xs idx))))
l))
(defun split (xs y)
"Splits a list into sub-lists by the separator Y.
### Example:
```cl
> (split '(1 2 3 4) 3)
out = ((1 2) (4))
```"
(assert-type! xs list)
(let* [(l '())
(p (lambda (x) (neq? x y)))
(idx 1)
(b (take-while p xs idx))]
(while (not (empty? b))
(push! l b)
(set! idx (+ idx (n b) 1))
(set! b (take-while p xs idx)))
l))
(defun groups-of (xs num)
  "Splits the list XS into sub-lists of size NUM.
   The final group may hold fewer than NUM elements when (n XS) is not
   a multiple of NUM.
   ### Example:
   ```cl
   > (groups-of '(1 2 3 4 5 6) 3)
   out = ((1 2 3) (4 5 6))
   ```"
  (assert-type! xs list)
  (let* [(result '())
         (group nil)]
    (for idx 1 (n xs) 1
      ;; Start a fresh group every NUM elements. The empty group is pushed
      ;; into RESULT immediately; because lists are mutable references,
      ;; later pushes into GROUP remain visible through RESULT.
      (when (= (mod (- idx 1) num) 0)
        (set! group '())
        (push! result group))
      (push! group (nth xs idx)))
    result))
(defun sort (xs f)
"Sort the list XS, non-destructively, optionally using F as a
comparator. A sorted version of the list is returned, while the
original remains untouched.
### Example:
```cl
> (define li '(9 5 7 2 1))
out = (9 5 7 2 1)
> (sort li)
out = (1 2 5 7 9)
> li
out = (9 5 7 2 1)
```"
(let* [(copy (map (lambda (x) x) xs))]
(lua/table/sort copy f)
copy))
(defun sort! (xs f)
  "Sort the list XS in place, optionally using F as a comparator.
   Returns XS itself, which has been mutated.
   ### Example:
   ```cl
   > (define li '(9 5 7 2 1))
   out = (9 5 7 2 1)
   > (sort! li)
   out = (1 2 5 7 9)
   > li
   out = (1 2 5 7 9)
   ```"
  ;; lua/table/sort sorts in place; return the list to allow chaining.
  (lua/table/sort xs f)
  xs)
;; Auto-generate all `c[ad]r`/`c[ad]rs` accessors (`cadr`, `cddr`, ...)
;; to a fixed depth by enumerating every name over the alphabet {a, d}.
,@(let* [(out '())
         ;; Build a symbol node for use in generated code.
         (symb (lambda (x) { :tag "symbol" :contents x }))
         ;; Symbol for the primitive accessor applied at offset IDX, where
         ;; MODE is "a" (take an element) or "d" (drop a prefix).
         (depth-symb (lambda (idx mode) (symb (.. "c" mode (lua/string/rep "d" (- idx 1)) "r"))))
         ;; Wrap Y in a call to X; used with reduce to nest accessors.
         (pair (lambda (x y) (list y x)))
         (generate nil)]
  (set! generate (lambda (name stack do-idx idx depth)
    ;; Emit the scalar accessor c<NAME>r once NAME is non-trivial.
    ;; DO-IDX distinguishes "fetch one element" from "return a tail view".
    (when (> (n name) 1)
      (with (head (if do-idx `(get-idx ,'xs ,idx) `(slicing-view ,'xs ,idx)))
        (push! out `(define ,(symb (.. "c" name "r"))
          (lambda (,'xs)
            (assert-type! ,'xs ,'list)
            ,(reduce pair head stack))))))
    ;; Emit the mapped variant c<NAME>rs alongside every accessor.
    (when (> (n name) 0)
      (push! out `(define ,(symb (.. "c" name "rs")) (lambda (,'xs) (map ,(symb (.. "c" name "r")) ,'xs)))))
    ;; Recurse, extending NAME with "a" (index into the current list) or
    ;; "d" (advance the offset), until DEPTH is exhausted.
    (cond
      [(<= depth 0)]
      [do-idx
       (generate (.. name "a") (cons (depth-symb idx "a") stack) true 1 (- depth 1))
       (generate (.. name "d") stack true (+ idx 1) (- depth 1))]
      [else
       (generate (.. name "a") (cons (depth-symb idx "d") stack) true 1 (- depth 1))
       (generate (.. name "d") stack false (+ idx 1) (- depth 1))])))
  (generate "a" '() true 1 3)
  (generate "d" '() false 1 3)
  out)
|
a4215c3415a0196934d78de5e2cc91b95640a46633f31697193de2e31d301e7d | BioHaskell/hPDB | OpenAnyFile.hs | # LANGUAGE OverloadedStrings , CPP #
-- | Opening and reading a either normal or gzipped file in an efficient way -
either using strict ' ByteString ' or mmap
module Bio.PDB.IO.OpenAnyFile(readFile, writeFile)
where
import Prelude hiding (readFile, writeFile)
import System.Directory (doesFileExist, getPermissions, Permissions(..))
import System.IO.Error (userError, IOError)
import System.IO (withFile, IOMode(..))
import Control.Monad (void)
-- if we have zlib:
import qualified Codec.Compression.GZip as GZip
import qualified Data.ByteString.Lazy as BSL
import qualified Control.Exception as Exc
-- if we have bzip2:
--import qualified Codec.Compression.BZip as BZip
-- if we have mmap:
#ifdef HAVE_MMAP
import System.IO.MMap (mmapFileByteString)
#endif
-- otherwise:
import qualified Data.ByteString.Char8 as BS
-- | Read file contents as a strict 'ByteString'. Uses mmap if possible. May decompress file contents, if needed.
-- | Read the contents of the named file as a strict 'ByteString',
-- decompressing them if necessary; raises an 'IOError' (via
-- 'throwNotFound') when the file is absent or unreadable.
readFile fname = do
  ok <- isReadable fname
  if ok then readFile' fname else throwNotFound fname
-- Slurp the raw file bytes (mmap'd when available) and pipe them through
-- the codec selected by 'getCodec' for this filename, converting between
-- strict and lazy 'ByteString's around the (lazy) decompression step.
readFile' fname = do content <- simpleRead fname
                     let r = let codec = getCodec fname content
                             in BS.concat $ BSL.toChunks $ codec $ BSL.fromChunks [content]
                     return r
throwNotFound :: String -> IO a
throwNotFound fname = ioError $ userError $ concat ["Cannot read ", show fname, "!"]
-- | Choose a decompression codec from the filename extension: gzip for
-- @.gz@ and @.Z@, otherwise the identity (bytes passed through untouched).
getCodec fname c | (".gz" `BS.isSuffixOf` BS.pack fname) ||
                   (".Z" `BS.isSuffixOf` BS.pack fname) = GZip.decompressWith (gzipParams c)
-- bzip2 support stays disabled (clause must remain commented out: it
-- references the disabled BZip import and bzipParams): DOESN'T WORK!!!
-- getCodec fname c | ".bz2" `BS.isSuffixOf` BS.pack fname = BZip.decompressWith (bzipParams c)
getCodec _ _ = id
gzipParams c = GZip.DecompressParams GZip.defaultWindowBits (fromIntegral (BS.length c * 5))
#if MIN_VERSION_zlib(0,5,4)
Nothing
#endif
#if MIN_VERSION_zlib(0,6,1)
True
#endif
Upper bound : compression rate never exceeded 4.7 for big test files .
c = BZip . DecompressParams BZip . ( fromIntegral ( BS.length c * 7 + 4 * 1024 * 1024 ) ) -- Upper bound : compression rate never exceeded 6.7 for big test files + 4MiB buffering .
isReadable fname = do exists <- doesFileExist fname
if exists
then do perms <- getPermissions fname
return $! readable perms
else return False
#ifndef HAVE_MMAP
simpleRead = BS.readFile
#else
simpleRead fname = mmapFileByteString fname Nothing `Exc.catch` \e -> do reportError (e :: IOError) -- cannot mmap
BS.readFile fname
where
reportError e = do putStrLn $ concat [show e, "while trying to mmap('", fname, "')"]
#endif
| Write file contents as strict ' ByteString ' .
writeFile fname writer = void $ withFile fname WriteMode writer
| null | https://raw.githubusercontent.com/BioHaskell/hPDB/5be747e2f2c57370b498f4c11f9f1887fdab0418/Bio/PDB/IO/OpenAnyFile.hs | haskell | | Opening and reading a either normal or gzipped file in an efficient way -
if we have zlib:
import qualified Codec.Compression.BZip as BZip
otherwise:
DOESN'T WORK ! ! !
Upper bound : compression rate never exceeded 6.7 for big test files + 4MiB buffering .
cannot mmap | # LANGUAGE OverloadedStrings , CPP #
either using strict ' ByteString ' or mmap
module Bio.PDB.IO.OpenAnyFile(readFile, writeFile)
where
import Prelude hiding (readFile, writeFile)
import System.Directory (doesFileExist, getPermissions, Permissions(..))
import System.IO.Error (userError, IOError)
import System.IO (withFile, IOMode(..))
import Control.Monad (void)
import qualified Codec.Compression.GZip as GZip
import qualified Data.ByteString.Lazy as BSL
import qualified Control.Exception as Exc
if we have
if we have MMap :
#ifdef HAVE_MMAP
import System.IO.MMap (mmapFileByteString)
#endif
import qualified Data.ByteString.Char8 as BS
| Read file contents as strict ' ByteString ' . Uses mmap if possible . May decompress file contents , if needed .
readFile fname = do r <- isReadable fname
if r
then
readFile' fname
else
throwNotFound fname
readFile' fname = do content <- simpleRead fname
let r = let codec = getCodec fname content
in BS.concat $ BSL.toChunks $ codec $ BSL.fromChunks [content]
return r
throwNotFound :: String -> IO a
throwNotFound fname = ioError $ userError $ concat ["Cannot read ", show fname, "!"]
getCodec fname c | (".gz" `BS.isSuffixOf` BS.pack fname) ||
(".Z" `BS.isSuffixOf` BS.pack fname) = GZip.decompressWith (gzipParams c)
getCodec fname c = id
gzipParams c = GZip.DecompressParams GZip.defaultWindowBits (fromIntegral (BS.length c * 5))
#if MIN_VERSION_zlib(0,5,4)
Nothing
#endif
#if MIN_VERSION_zlib(0,6,1)
True
#endif
Upper bound : compression rate never exceeded 4.7 for big test files .
isReadable fname = do exists <- doesFileExist fname
if exists
then do perms <- getPermissions fname
return $! readable perms
else return False
#ifndef HAVE_MMAP
simpleRead = BS.readFile
#else
BS.readFile fname
where
reportError e = do putStrLn $ concat [show e, "while trying to mmap('", fname, "')"]
#endif
| Write file contents as strict ' ByteString ' .
writeFile fname writer = void $ withFile fname WriteMode writer
|
ea94bc50c183d520cf794ce010a6a179cff08cf88a35b6e2b0189e30552133ac | hexlet-codebattle/battle_asserts | leaders_in_an_array.clj | (ns battle-asserts.issues.leaders-in-an-array
(:require [clojure.test.check.generators :as gen]))
(def level :easy)
(def tags ["collections"])
(def description
{:en "Print all the leaders in the array.
An element is a leader if it is greater than all the elements to its right side.
And the rightmost element is always a leader. For example in the array [16 17 4 3 5 2],
leaders are 17, 5 and 2."
:ru "Выведите всех лидеров в массиве.
Элемент является лидером, если он больше всех элементов расположенных правее него.
Самый правый элемент - всегда лидер. Например в массиве [16 17 4 3 5 2]
лидерами являются 17, 5 и 2."})
(def signature
{:input [{:argument-name "arr" :type {:name "array" :nested {:name "integer"}}}]
:output {:type {:name "array" :nested {:name "integer"}}}})
(defn arguments-generator []
(gen/tuple (gen/vector (gen/choose 1 70) 5 10)))
(def test-data
[{:expected [17 5 2]
:arguments [[16 17 4 3 5 2]]}
{:expected [67 45 35 8]
:arguments [[4 3 7 12 6 67 5 45 34 35 2 8]]}
{:expected [12 8 7 6]
:arguments [[12 10 12 8 7 6]]}
{:expected [5 4]
:arguments [[1 2 3 4 5 4]]}])
(defn first-is-biggest [[head & tail]]
(every? #(> head %) tail))
(defn solution [array]
(->>
array
(map-indexed vector)
(filter
#(first-is-biggest (subvec array (first %))))
(mapv second)))
| null | https://raw.githubusercontent.com/hexlet-codebattle/battle_asserts/7ad6006443614addd35d0f85df3abd075566c215/src/battle_asserts/issues/leaders_in_an_array.clj | clojure | (ns battle-asserts.issues.leaders-in-an-array
(:require [clojure.test.check.generators :as gen]))
(def level :easy)
(def tags ["collections"])
(def description
{:en "Print all the leaders in the array.
An element is a leader if it is greater than all the elements to its right side.
And the rightmost element is always a leader. For example in the array [16 17 4 3 5 2],
leaders are 17, 5 and 2."
:ru "Выведите всех лидеров в массиве.
Элемент является лидером, если он больше всех элементов расположенных правее него.
Самый правый элемент - всегда лидер. Например в массиве [16 17 4 3 5 2]
лидерами являются 17, 5 и 2."})
(def signature
{:input [{:argument-name "arr" :type {:name "array" :nested {:name "integer"}}}]
:output {:type {:name "array" :nested {:name "integer"}}}})
(defn arguments-generator []
(gen/tuple (gen/vector (gen/choose 1 70) 5 10)))
(def test-data
[{:expected [17 5 2]
:arguments [[16 17 4 3 5 2]]}
{:expected [67 45 35 8]
:arguments [[4 3 7 12 6 67 5 45 34 35 2 8]]}
{:expected [12 8 7 6]
:arguments [[12 10 12 8 7 6]]}
{:expected [5 4]
:arguments [[1 2 3 4 5 4]]}])
(defn first-is-biggest [[head & tail]]
(every? #(> head %) tail))
(defn solution [array]
(->>
array
(map-indexed vector)
(filter
#(first-is-biggest (subvec array (first %))))
(mapv second)))
|
|
d4f847430a294771bbc08578ac66d305132a4e84547a4faa53e4ccc1e1470026 | cyverse-archive/DiscoveryEnvironmentBackend | seen.clj | (ns notification-agent.seen
"This namespace provides the endpoint processing logic for marking messages as received or seen."
(:use [notification-agent.common]
[slingshot.slingshot :only [throw+]])
(:require [notification-agent.db :as db]))
(defn- validate-uuids
"Validates the list of UUIDs that was passed in."
[uuids body]
(when (or (nil? uuids)
(not (coll? uuids)))
(throw+ {:type :clojure-commons.exception/bad-request-field
:field_name :uuids
:body body})))
(defn mark-messages-seen
"Marks one or more notification messages as seen."
[body {:keys [user]}]
(validate-user user)
(let [uuids (:uuids (parse-body body))]
(validate-uuids uuids body)
(db/mark-notifications-seen user uuids)
{:count (str (db/count-matching-messages user {:seen false}))}))
(defn mark-all-messages-seen
"Marks all notification messages as seen."
[body]
(let [user (validate-user (:user (parse-body body)))]
(db/mark-matching-notifications-seen user {:seen false})
{:count (str (db/count-matching-messages user {:seen false}))}))
(defn mark-system-messages-received
"Marks one or more system notifications as being received by a given user.
Parameters:
body - The body of the HTTP post as formatted by ring
params - The query parameters as formatted by ring
Returns:
It returns the number of system notifications that have not be marked as received by the
given user. The return is formatted as a map that ring can use to format an HTTP response."
[body {:keys [user]}]
(validate-user user)
(let [uuids (:uuids (parse-body body))]
(validate-uuids uuids body)
(db/mark-system-notifications-received user uuids)
{:count (str (db/count-new-system-notifications user))}))
(defn mark-all-system-messages-received
"Marks all system messages as being received by a given user.
Parameters:
body - The body of the HTTP post as formatted by ring
Returns:
It returns the number of system notifications that have not be marked as received by the
given user. The return is formatted as a map that ring can use to format an HTTP response."
[body]
(let [user (validate-user (:user (parse-body body)))]
(db/mark-all-system-notifications-received user)
{:count (str (db/count-new-system-notifications user))}))
(defn mark-system-messages-seen
"Marks one or more system notifications as seen."
[body {:keys [user]}]
(validate-user user)
(let [uuids (:uuids (parse-body body))]
(validate-uuids uuids body)
(db/mark-system-notifications-seen user uuids)
{:count (str (db/count-unseen-system-notifications user))}))
(defn mark-all-system-messages-seen
"Marks all system notifications as seen for a user."
[body]
(let [user (validate-user (:user (parse-body body)))]
(db/mark-all-system-notifications-seen user)
{:count (str (db/count-unseen-system-notifications user))}))
| null | https://raw.githubusercontent.com/cyverse-archive/DiscoveryEnvironmentBackend/7f6177078c1a1cb6d11e62f12cfe2e22d669635b/services/NotificationAgent/src/notification_agent/seen.clj | clojure | (ns notification-agent.seen
"This namespace provides the endpoint processing logic for marking messages as received or seen."
(:use [notification-agent.common]
[slingshot.slingshot :only [throw+]])
(:require [notification-agent.db :as db]))
(defn- validate-uuids
"Validates the list of UUIDs that was passed in."
[uuids body]
(when (or (nil? uuids)
(not (coll? uuids)))
(throw+ {:type :clojure-commons.exception/bad-request-field
:field_name :uuids
:body body})))
(defn mark-messages-seen
"Marks one or more notification messages as seen."
[body {:keys [user]}]
(validate-user user)
(let [uuids (:uuids (parse-body body))]
(validate-uuids uuids body)
(db/mark-notifications-seen user uuids)
{:count (str (db/count-matching-messages user {:seen false}))}))
(defn mark-all-messages-seen
"Marks all notification messages as seen."
[body]
(let [user (validate-user (:user (parse-body body)))]
(db/mark-matching-notifications-seen user {:seen false})
{:count (str (db/count-matching-messages user {:seen false}))}))
(defn mark-system-messages-received
"Marks one or more system notifications as being received by a given user.
Parameters:
body - The body of the HTTP post as formatted by ring
params - The query parameters as formatted by ring
Returns:
It returns the number of system notifications that have not be marked as received by the
given user. The return is formatted as a map that ring can use to format an HTTP response."
[body {:keys [user]}]
(validate-user user)
(let [uuids (:uuids (parse-body body))]
(validate-uuids uuids body)
(db/mark-system-notifications-received user uuids)
{:count (str (db/count-new-system-notifications user))}))
(defn mark-all-system-messages-received
"Marks all system messages as being received by a given user.
Parameters:
body - The body of the HTTP post as formatted by ring
Returns:
It returns the number of system notifications that have not be marked as received by the
given user. The return is formatted as a map that ring can use to format an HTTP response."
[body]
(let [user (validate-user (:user (parse-body body)))]
(db/mark-all-system-notifications-received user)
{:count (str (db/count-new-system-notifications user))}))
(defn mark-system-messages-seen
"Marks one or more system notifications as seen."
[body {:keys [user]}]
(validate-user user)
(let [uuids (:uuids (parse-body body))]
(validate-uuids uuids body)
(db/mark-system-notifications-seen user uuids)
{:count (str (db/count-unseen-system-notifications user))}))
(defn mark-all-system-messages-seen
"Marks all system notifications as seen for a user."
[body]
(let [user (validate-user (:user (parse-body body)))]
(db/mark-all-system-notifications-seen user)
{:count (str (db/count-unseen-system-notifications user))}))
|
|
ccee593119c9e6ad0d4b2d81795421c13647854970ae8ffee9dfb02edf8b1ec8 | tqtezos/minter-sdk | Burn.hs | -- | Lorentz bindings for the allowlisted swaps contract.
module Lorentz.Contracts.Swaps.Burn where
import Lorentz
import Lorentz.Contracts.NonPausableSimpleAdmin
import Tezos.Address (unsafeParseAddress)
import Lorentz.Contracts.Swaps.Allowlisted
import Lorentz.Contracts.Swaps.Basic
-- Types
----------------------------------------------------------------------------
data BurnSwapStorage = BurnSwapStorage
{ nextSwapId :: SwapId
, swaps :: BigMap SwapId SwapInfo
, burnAddress :: Address
}
customGeneric "BurnSwapStorage" ligoLayout
deriving anyclass instance IsoValue BurnSwapStorage
deriving anyclass instance HasAnnotation BurnSwapStorage
nullAddress :: Address
nullAddress = unsafeParseAddress "tz1Ke2h7sDdakHJQh8WX4Z372du1KChsksyU"
altBurnAddress :: Address
altBurnAddress = unsafeParseAddress "tz1burnburnburnburnburnburnburjAYjjX"
initBurnSwapStorage :: BurnSwapStorage
initBurnSwapStorage = BurnSwapStorage
{ nextSwapId = initSwapId
, swaps = mempty
, burnAddress = nullAddress
}
data AllowlistedBurnSwapStorage = AllowlistedBurnSwapStorage
{ burnSwapStorage :: BurnSwapStorage
, admin :: AdminStorage
, allowlist :: Allowlist
}
customGeneric "AllowlistedBurnSwapStorage" ligoLayout
deriving anyclass instance IsoValue AllowlistedBurnSwapStorage
deriving anyclass instance HasAnnotation AllowlistedBurnSwapStorage
initAllowlistedBurnSwapStorage :: Address -> AllowlistedBurnSwapStorage
initAllowlistedBurnSwapStorage admin = AllowlistedBurnSwapStorage
{ burnSwapStorage = initBurnSwapStorage
, admin = initAdminStorage admin
, allowlist = mempty
}
data AllowlistedBurnSwapEntrypoints
= Swap SwapEntrypoints
| Admin AdminEntrypoints
| Update_allowed (BigMap Address ())
customGeneric "AllowlistedBurnSwapEntrypoints" ligoLayout
deriving anyclass instance IsoValue AllowlistedBurnSwapEntrypoints
deriving anyclass instance HasAnnotation AllowlistedBurnSwapEntrypoints
instance ParameterHasEntrypoints AllowlistedBurnSwapEntrypoints where
type ParameterEntrypointsDerivation AllowlistedBurnSwapEntrypoints = EpdDelegate
-- Used for testing
data ChangeBurnAddressSwapEntrypoints
= Swap' SwapEntrypoints
| Admin' AdminEntrypoints
| Update_allowed' (BigMap Address ())
| Change_burn_address Address
customGeneric "ChangeBurnAddressSwapEntrypoints" ligoLayout
deriving anyclass instance IsoValue ChangeBurnAddressSwapEntrypoints
deriving anyclass instance HasAnnotation ChangeBurnAddressSwapEntrypoints
instance ParameterHasEntrypoints ChangeBurnAddressSwapEntrypoints where
type ParameterEntrypointsDerivation ChangeBurnAddressSwapEntrypoints = EpdDelegate
| null | https://raw.githubusercontent.com/tqtezos/minter-sdk/6239f6ee8435977085c00c194224d4223386841a/packages/minter-contracts/src-hs/Lorentz/Contracts/Swaps/Burn.hs | haskell | | Lorentz bindings for the allowlisted swaps contract.
Types
--------------------------------------------------------------------------
Used for testing | module Lorentz.Contracts.Swaps.Burn where
import Lorentz
import Lorentz.Contracts.NonPausableSimpleAdmin
import Tezos.Address (unsafeParseAddress)
import Lorentz.Contracts.Swaps.Allowlisted
import Lorentz.Contracts.Swaps.Basic
data BurnSwapStorage = BurnSwapStorage
{ nextSwapId :: SwapId
, swaps :: BigMap SwapId SwapInfo
, burnAddress :: Address
}
customGeneric "BurnSwapStorage" ligoLayout
deriving anyclass instance IsoValue BurnSwapStorage
deriving anyclass instance HasAnnotation BurnSwapStorage
nullAddress :: Address
nullAddress = unsafeParseAddress "tz1Ke2h7sDdakHJQh8WX4Z372du1KChsksyU"
altBurnAddress :: Address
altBurnAddress = unsafeParseAddress "tz1burnburnburnburnburnburnburjAYjjX"
initBurnSwapStorage :: BurnSwapStorage
initBurnSwapStorage = BurnSwapStorage
{ nextSwapId = initSwapId
, swaps = mempty
, burnAddress = nullAddress
}
data AllowlistedBurnSwapStorage = AllowlistedBurnSwapStorage
{ burnSwapStorage :: BurnSwapStorage
, admin :: AdminStorage
, allowlist :: Allowlist
}
customGeneric "AllowlistedBurnSwapStorage" ligoLayout
deriving anyclass instance IsoValue AllowlistedBurnSwapStorage
deriving anyclass instance HasAnnotation AllowlistedBurnSwapStorage
initAllowlistedBurnSwapStorage :: Address -> AllowlistedBurnSwapStorage
initAllowlistedBurnSwapStorage admin = AllowlistedBurnSwapStorage
{ burnSwapStorage = initBurnSwapStorage
, admin = initAdminStorage admin
, allowlist = mempty
}
data AllowlistedBurnSwapEntrypoints
= Swap SwapEntrypoints
| Admin AdminEntrypoints
| Update_allowed (BigMap Address ())
customGeneric "AllowlistedBurnSwapEntrypoints" ligoLayout
deriving anyclass instance IsoValue AllowlistedBurnSwapEntrypoints
deriving anyclass instance HasAnnotation AllowlistedBurnSwapEntrypoints
instance ParameterHasEntrypoints AllowlistedBurnSwapEntrypoints where
type ParameterEntrypointsDerivation AllowlistedBurnSwapEntrypoints = EpdDelegate
data ChangeBurnAddressSwapEntrypoints
= Swap' SwapEntrypoints
| Admin' AdminEntrypoints
| Update_allowed' (BigMap Address ())
| Change_burn_address Address
customGeneric "ChangeBurnAddressSwapEntrypoints" ligoLayout
deriving anyclass instance IsoValue ChangeBurnAddressSwapEntrypoints
deriving anyclass instance HasAnnotation ChangeBurnAddressSwapEntrypoints
instance ParameterHasEntrypoints ChangeBurnAddressSwapEntrypoints where
type ParameterEntrypointsDerivation ChangeBurnAddressSwapEntrypoints = EpdDelegate
|
83d0a3e136439fc34c57b34265eeb57ed26396eb84566f2f7f38f30bc639348f | Frama-C/Frama-C-snapshot | source_manager.ml | (**************************************************************************)
(* *)
This file is part of Frama - C.
(* *)
Copyright ( C ) 2007 - 2019
CEA ( Commissariat à l'énergie atomique et aux énergies
(* alternatives) *)
(* *)
(* you can redistribute it and/or modify it under the terms of the GNU *)
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
(* *)
(* It is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Lesser General Public License for more details. *)
(* *)
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
(* *)
(**************************************************************************)
type tab = {
tab_name : string ;
tab_file : Datatype.Filepath.t ;
tab_page : int ;
tab_select : line:int -> unit ;
tab_source_view : GSourceView.source_view;
}
type t = {
notebook : GPack.notebook;
file_index : (Datatype.Filepath.t,tab) Hashtbl.t;
name_index : (string,tab) Hashtbl.t;
page_index : (int,tab) Hashtbl.t;
mutable pages : int ;
}
let make ?tab_pos ?packing () =
let notebook = GPack.notebook
~scrollable:true ~show_tabs:true ?tab_pos ?packing ()
in
notebook#set_enable_popup true ;
{
notebook = notebook ;
file_index = Hashtbl.create 7;
name_index = Hashtbl.create 7;
page_index = Hashtbl.create 7;
pages = 0 ;
}
let input_channel b ic =
let buf = Bytes.create 1024 and len = ref 0 in
while len := input ic buf 0 1024; !len > 0 do
Buffer.add_subbytes b buf 0 !len
done
(* returns [true] in case of success, [false] otherwise *)
let with_file name ~f =
try
let ic = open_in_gen [Open_rdonly] 0o644 name in
try f ic; close_in ic; true with _exn ->
close_in ic; (*; !flash_info ("Error: "^Printexc.to_string exn)*)
false
with _exn -> false
let clear w =
begin
for _i=1 to w.pages do w.notebook#remove_page 0 done ;
w.pages <- 0 ;
Hashtbl.clear w.file_index ;
Hashtbl.clear w.name_index ;
Hashtbl.clear w.page_index ;
end
let later f = ignore (Glib.Idle.add (fun () -> f () ; false))
let select_file w filename =
try
let tab = Hashtbl.find w.file_index filename in
later (fun () -> w.notebook#goto_page tab.tab_page)
with Not_found -> ()
let select_name w title =
try
let tab = Hashtbl.find w.name_index title in
later (fun () -> w.notebook#goto_page tab.tab_page)
with Not_found -> ()
let selection_locked = ref false
let load_file w ?title ~(filename : Datatype.Filepath.t) ?(line=(-1)) ~click_cb () =
Gui_parameters.debug ~level:2 "Opening file \"%a\" line %d"
Datatype.Filepath.pretty filename line ;
let tab =
begin
try Hashtbl.find w.file_index filename
with Not_found ->
let name = match title with
| None -> Filepath.Normalized.to_pretty_string filename
| Some s -> s
in
let label = GMisc.label ~text:name () in
let sw = GBin.scrolled_window
~vpolicy:`AUTOMATIC
~hpolicy:`AUTOMATIC
~packing:(fun arg ->
ignore
(w.notebook#append_page ~tab_label:label#coerce arg))
() in
let original_source_view = Source_viewer.make ~name:"original_source"
~packing:sw#add ()
in
let window = (original_source_view :> GText.view) in
let page_num = w.notebook#page_num sw#coerce in
let b = Buffer.create 1024 in
let s =
if with_file (filename :> string) ~f:(input_channel b) then
Wutil.to_utf8 (Buffer.contents b)
else
let f = Filepath.Normalized.to_pretty_string filename in
"Error: cannot open file '" ^ f ^ "'"
in
Buffer.reset b;
let (buffer:GText.buffer) = window#buffer in
buffer#set_text s;
let select_line ~line =
if !selection_locked then
ignore a single call and release the lock for the next one
selection_locked := false
else
begin
w.notebook#goto_page page_num;
if line >= 0 then
let it = buffer#get_iter (`LINE (line-1)) in
buffer#place_cursor ~where:it;
let y = if buffer#line_count < 20 then 0.23 else 0.3 in
window#scroll_to_mark ~use_align:true ~yalign:y `INSERT
end
in
(* Ctrl+click opens the external viewer at the current line and file. *)
ignore (window#event#connect#button_press
~callback:
(fun ev ->
(if GdkEvent.Button.button ev = 1 &&
List.mem `CONTROL
(Gdk.Convert.modifier (GdkEvent.Button.state ev))
then
Wutil.later
(fun () ->
try
let cur_page = w.notebook#current_page in
let tab = Hashtbl.find w.page_index cur_page in
let file = tab.tab_file in
let iter = buffer#get_iter_at_mark `INSERT in
let line = iter#line + 1 in
Gtk_helper.open_in_external_viewer ~line file
with Not_found ->
failwith (Printf.sprintf "ctrl+click cb: invalid page %d"
w.notebook#current_page)
);
if GdkEvent.Button.button ev = 1 then
Wutil.later
(fun () ->
try
let iter = buffer#get_iter_at_mark `INSERT in
let line = iter#line + 1 in
let col = iter#line_index in
let offset = iter#offset in
let pos = {Filepath.pos_path = filename;
Filepath.pos_lnum = line;
Filepath.pos_bol = offset - col;
Filepath.pos_cnum = offset;} in
let localz =
Pretty_source.loc_to_localizable ~precise_col:true pos
in
click_cb localz
with Not_found ->
failwith (Printf.sprintf "click cb: invalid page %d"
w.notebook#current_page)
);
);
false (* other events are processed as usual *)
));
let tab = {
tab_file = filename ;
tab_name = name ;
tab_select = select_line ;
tab_page = page_num ;
tab_source_view = original_source_view;
} in
w.pages <- succ page_num ;
Hashtbl.add w.file_index filename tab ;
Hashtbl.add w.name_index name tab ;
Hashtbl.add w.page_index page_num tab ;
tab
end
in
(* Runs this at idle priority to let the text be displayed before. *)
later (fun () -> tab.tab_select ~line)
let get_current_source_view w =
try
let tab = Hashtbl.find w.page_index w.notebook#current_page in
tab.tab_source_view
with Not_found ->
failwith (Printf.sprintf "get_source_view: invalid page %d"
w.notebook#current_page)
(*
Local Variables:
compile-command: "make -C ../../.."
End:
*)
| null | https://raw.githubusercontent.com/Frama-C/Frama-C-snapshot/639a3647736bf8ac127d00ebe4c4c259f75f9b87/src/plugins/gui/source_manager.ml | ocaml | ************************************************************************
alternatives)
you can redistribute it and/or modify it under the terms of the GNU
It is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
************************************************************************
returns [true] in case of success, [false] otherwise
; !flash_info ("Error: "^Printexc.to_string exn)
Ctrl+click opens the external viewer at the current line and file.
other events are processed as usual
Runs this at idle priority to let the text be displayed before.
Local Variables:
compile-command: "make -C ../../.."
End:
| This file is part of Frama - C.
Copyright ( C ) 2007 - 2019
CEA ( Commissariat à l'énergie atomique et aux énergies
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
type tab = {
tab_name : string ;
tab_file : Datatype.Filepath.t ;
tab_page : int ;
tab_select : line:int -> unit ;
tab_source_view : GSourceView.source_view;
}
type t = {
notebook : GPack.notebook;
file_index : (Datatype.Filepath.t,tab) Hashtbl.t;
name_index : (string,tab) Hashtbl.t;
page_index : (int,tab) Hashtbl.t;
mutable pages : int ;
}
let make ?tab_pos ?packing () =
let notebook = GPack.notebook
~scrollable:true ~show_tabs:true ?tab_pos ?packing ()
in
notebook#set_enable_popup true ;
{
notebook = notebook ;
file_index = Hashtbl.create 7;
name_index = Hashtbl.create 7;
page_index = Hashtbl.create 7;
pages = 0 ;
}
let input_channel b ic =
let buf = Bytes.create 1024 and len = ref 0 in
while len := input ic buf 0 1024; !len > 0 do
Buffer.add_subbytes b buf 0 !len
done
let with_file name ~f =
try
let ic = open_in_gen [Open_rdonly] 0o644 name in
try f ic; close_in ic; true with _exn ->
false
with _exn -> false
let clear w =
begin
for _i=1 to w.pages do w.notebook#remove_page 0 done ;
w.pages <- 0 ;
Hashtbl.clear w.file_index ;
Hashtbl.clear w.name_index ;
Hashtbl.clear w.page_index ;
end
let later f = ignore (Glib.Idle.add (fun () -> f () ; false))
let select_file w filename =
try
let tab = Hashtbl.find w.file_index filename in
later (fun () -> w.notebook#goto_page tab.tab_page)
with Not_found -> ()
let select_name w title =
try
let tab = Hashtbl.find w.name_index title in
later (fun () -> w.notebook#goto_page tab.tab_page)
with Not_found -> ()
let selection_locked = ref false
let load_file w ?title ~(filename : Datatype.Filepath.t) ?(line=(-1)) ~click_cb () =
Gui_parameters.debug ~level:2 "Opening file \"%a\" line %d"
Datatype.Filepath.pretty filename line ;
let tab =
begin
try Hashtbl.find w.file_index filename
with Not_found ->
let name = match title with
| None -> Filepath.Normalized.to_pretty_string filename
| Some s -> s
in
let label = GMisc.label ~text:name () in
let sw = GBin.scrolled_window
~vpolicy:`AUTOMATIC
~hpolicy:`AUTOMATIC
~packing:(fun arg ->
ignore
(w.notebook#append_page ~tab_label:label#coerce arg))
() in
let original_source_view = Source_viewer.make ~name:"original_source"
~packing:sw#add ()
in
let window = (original_source_view :> GText.view) in
let page_num = w.notebook#page_num sw#coerce in
let b = Buffer.create 1024 in
let s =
if with_file (filename :> string) ~f:(input_channel b) then
Wutil.to_utf8 (Buffer.contents b)
else
let f = Filepath.Normalized.to_pretty_string filename in
"Error: cannot open file '" ^ f ^ "'"
in
Buffer.reset b;
let (buffer:GText.buffer) = window#buffer in
buffer#set_text s;
let select_line ~line =
if !selection_locked then
ignore a single call and release the lock for the next one
selection_locked := false
else
begin
w.notebook#goto_page page_num;
if line >= 0 then
let it = buffer#get_iter (`LINE (line-1)) in
buffer#place_cursor ~where:it;
let y = if buffer#line_count < 20 then 0.23 else 0.3 in
window#scroll_to_mark ~use_align:true ~yalign:y `INSERT
end
in
ignore (window#event#connect#button_press
~callback:
(fun ev ->
(if GdkEvent.Button.button ev = 1 &&
List.mem `CONTROL
(Gdk.Convert.modifier (GdkEvent.Button.state ev))
then
Wutil.later
(fun () ->
try
let cur_page = w.notebook#current_page in
let tab = Hashtbl.find w.page_index cur_page in
let file = tab.tab_file in
let iter = buffer#get_iter_at_mark `INSERT in
let line = iter#line + 1 in
Gtk_helper.open_in_external_viewer ~line file
with Not_found ->
failwith (Printf.sprintf "ctrl+click cb: invalid page %d"
w.notebook#current_page)
);
if GdkEvent.Button.button ev = 1 then
Wutil.later
(fun () ->
try
let iter = buffer#get_iter_at_mark `INSERT in
let line = iter#line + 1 in
let col = iter#line_index in
let offset = iter#offset in
let pos = {Filepath.pos_path = filename;
Filepath.pos_lnum = line;
Filepath.pos_bol = offset - col;
Filepath.pos_cnum = offset;} in
let localz =
Pretty_source.loc_to_localizable ~precise_col:true pos
in
click_cb localz
with Not_found ->
failwith (Printf.sprintf "click cb: invalid page %d"
w.notebook#current_page)
);
);
));
let tab = {
tab_file = filename ;
tab_name = name ;
tab_select = select_line ;
tab_page = page_num ;
tab_source_view = original_source_view;
} in
w.pages <- succ page_num ;
Hashtbl.add w.file_index filename tab ;
Hashtbl.add w.name_index name tab ;
Hashtbl.add w.page_index page_num tab ;
tab
end
in
later (fun () -> tab.tab_select ~line)
let get_current_source_view w =
try
let tab = Hashtbl.find w.page_index w.notebook#current_page in
tab.tab_source_view
with Not_found ->
failwith (Printf.sprintf "get_source_view: invalid page %d"
w.notebook#current_page)
|
9de5a77cf30a49dfa09071250789854300ef0fed4740459079df60caaed4d959 | jwiegley/notes | Buttons840.hs | module Main(main) where
import Control.Concurrent (threadDelay)
import Control.Concurrent.Async
import Control.Concurrent.STM
import Control.Monad
import Pipes
import Pipes.Concurrent
import System.Random (randomRIO)
a :: Producer Int IO ()
a = each [1..10]
b :: Pipe Int Int IO ()
b = do
chan <- liftIO newTChanIO
out <- liftIO newTChanIO
_ <- liftIO $ async $ do
threadDelay =<< randomRIO (1000, 10000000)
x <- atomically $ readTChan chan
atomically $ writeTChan out (x*2)
forever $ do
x <- await
liftIO $ atomically $ writeTChan chan x
z <- liftIO $ atomically $ readTChan out
yield z
c :: Consumer Int IO ()
c = do
x <- await
lift $ print x
c
main :: IO ()
main = do
(output1, input1, seal1) <- spawn' unbounded
(output2, input2, seal2) <- spawn' unbounded
runEffect $ fromInput input1 >-> b >-> toOutput output2 | null | https://raw.githubusercontent.com/jwiegley/notes/24574b02bfd869845faa1521854f90e4e8bf5e9a/gists/67964c2c90918e5940daf3e4a0212a46/Buttons840.hs | haskell | module Main(main) where
import Control.Concurrent (threadDelay)
import Control.Concurrent.Async
import Control.Concurrent.STM
import Control.Monad
import Pipes
import Pipes.Concurrent
import System.Random (randomRIO)
a :: Producer Int IO ()
a = each [1..10]
b :: Pipe Int Int IO ()
b = do
chan <- liftIO newTChanIO
out <- liftIO newTChanIO
_ <- liftIO $ async $ do
threadDelay =<< randomRIO (1000, 10000000)
x <- atomically $ readTChan chan
atomically $ writeTChan out (x*2)
forever $ do
x <- await
liftIO $ atomically $ writeTChan chan x
z <- liftIO $ atomically $ readTChan out
yield z
c :: Consumer Int IO ()
c = do
x <- await
lift $ print x
c
main :: IO ()
main = do
(output1, input1, seal1) <- spawn' unbounded
(output2, input2, seal2) <- spawn' unbounded
runEffect $ fromInput input1 >-> b >-> toOutput output2 |
|
5e5c39c06eecf00a03ac0b0c7de9e7b81bbbb0c4e5f83838245f62451c025b5c | diffusionkinetics/open | GoG.hs | # LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeFamilies #-}
# LANGUAGE TypeApplications #
|
A limited Grammar of Graphics - like interface .
@
myPts : : [ ( Double , Double ) ]
myPts = [ ( 1,2 ) , ( 1.2 , 3 ) , ( 1.4,3.5 ) ]
myTrace : : Trace
myTrace = points ( aes & x .~ fst
& y .~ snd )
myPts
@
A limited Grammar of Graphics-like interface.
@
myPts :: [(Double, Double)]
myPts = [(1,2), (1.2, 3), (1.4,3.5)]
myTrace :: Trace
myTrace = points (aes & x .~ fst
& y .~ snd)
myPts
@
-}
module Graphics.Plotly.GoG where
import Data.Aeson
import Data.Text (Text)
import Data.Time
import Lens.Micro
import qualified Graphics.Plotly.Base as Plot
class ToJSON a => AxisValue a
instance AxisValue Double
instance AxisValue Float
instance AxisValue Text
instance AxisValue String
instance AxisValue Int
instance AxisValue Day
instance AxisValue a => AxisValue [a]
data RGB a = RGB a a a
data RGBA a = RGBA a a a a
instance ToJSON (RGB Int) where
toJSON (RGB r g b) = toJSON $ concat ["rgb(",show r,",",show g, ",", show b,")"]
instance ToJSON (RGB Double) where
toJSON (RGB r g b) = toJSON $ concat ["rgb(",showd r,",",showd g, ",", showd b,")"]
where showd = show @Int. floor . (*256)
instance ToJSON (RGBA Int) where
toJSON (RGBA r g b a) = toJSON $ concat ["rgba(",show r,",",show g, ",", show b,",", show a, ")"]
instance ToJSON (RGBA Double) where
toJSON (RGBA r g b a) = toJSON $ concat ["rgb(",showd r,",",showd g, ",", showd b,",", showd a,")"]
where showd = show @Int. floor . (*256)
class ToJSON a => IsColor a
instance IsColor Int
instance IsColor (RGB Int)
instance IsColor (RGB Double)
instance IsColor (RGBA Int)
instance IsColor (RGBA Double)
type family XVal a
type family YVal a
type family ZVal a
type family CVal a
type family SVal a
type instance XVal (x,y,z,c,s) = x
type instance YVal (x,y,z,c,s) = y
type instance ZVal (x,y,z,c,s) = z
type instance CVal (x,y,z,c,s) = c
type instance SVal (x,y,z,c,s) = s
data Aes t a = Aes
{ _x :: a -> XVal t
, _y :: a -> YVal t
, _z :: a -> ZVal t
, _color :: Maybe (a -> CVal t)
, _size :: Maybe (a -> SVal t)
}
aes :: Aes ((), (), (), (), ()) a
aes = Aes (const ()) (const ()) (const ()) Nothing Nothing
setx :: (AxisValue v)
=> Aes (vx,vy,vz,vc,vs) a -> (a -> v) -> Aes (v, vy, vz, vc, vs) a
setx (Aes _ fy fz fc fs) f = Aes f fy fz fc fs
x :: (AxisValue v)
=> Lens (Aes (vx,vy, vz, vc, vs) a) (Aes (v,vy, vz, vc, vs) a) (a -> vx) (a -> v)
x = lens _x setx
sety :: (AxisValue v)
=> Aes (vx,vy, vz, vc, vs) a -> (a -> v) -> Aes (vx, v, vz, vc, vs) a
sety (Aes fx _ fz fc fs) f = Aes fx f fz fc fs
y :: (AxisValue v)
=> Lens (Aes (vx,vy, vz, vc, vs) a) (Aes (vx,v, vz, vc, vs) a) (a -> vy) (a -> v)
y = lens _y sety
setz :: (AxisValue v)
=> Aes (vx,vy, vz, vc, vs) a -> (a -> v) -> Aes (vx, vy, v, vc, vs) a
setz (Aes fx fy _ fc fs) f = Aes fx fy f fc fs
z :: (AxisValue v)
=> Lens (Aes (vx,vy, vz, vc, vs) a) (Aes (vx,vy, v, vc, vs) a) (a -> vz) (a -> v)
z = lens _z setz
setcol :: (IsColor v)
=> Aes (vx,vy, vz, vc, vs) a -> Maybe (a -> v) -> Aes (vx, vy, vz, v, vs) a
setcol (Aes fx fy fz _ fs) f = Aes fx fy fz f fs
color :: (IsColor v)
=> Lens (Aes (vx,vy, vz, vc, vs) a) (Aes (vx,vy,vz, v,vs) a) (Maybe (a -> vc)) (Maybe (a -> v))
color = lens _color setcol
setsize :: (AxisValue v, Num v)
=> Aes (vx,vy, vz, vc, vs) a -> Maybe (a -> v) -> Aes (vx, vy, vz, vc, v) a
setsize (Aes fx fy fz fc _) = Aes fx fy fz fc
size :: (AxisValue v, Num v)
=> Lens (Aes (vx,vy, vz, vc, vs) a) (Aes (vx,vy,vz, vc,v) a) (Maybe (a -> vs)) (Maybe (a -> v))
size = lens _size setsize
points :: (AxisValue (XVal t), AxisValue (YVal t), ToJSON (CVal t), ToJSON (SVal t))
=> Aes t a -> [a] -> Plot.Trace
points a xs = setSize (_size a) $ setColors (_color a) $ Plot.scatter
& Plot.x ?~ map (toJSON . _x a) xs
& Plot.y ?~ map (toJSON . _y a) xs
& Plot.mode ?~ [Plot.Markers]
where setColors Nothing p = p
setColors (Just setC) p
= p & Plot.marker . non Plot.defMarker . Plot.markercolor ?~ Plot.List (map (toJSON . setC) xs)
setSize Nothing p = p
setSize (Just setS) p
= p & Plot.marker . non Plot.defMarker . Plot.size ?~ Plot.List (map (toJSON . setS) xs)
line :: (AxisValue (XVal t), AxisValue (YVal t))
=> Aes t a -> [a] -> Plot.Trace
line a xs = Plot.scatter & Plot.x ?~ map (toJSON . _x a) xs
& Plot.y ?~ map (toJSON . _y a) xs
& Plot.mode ?~ [Plot.Lines]
-- | Render an Aes (styling header) <a> and data <xs> into a `Plot.Trace box`
-- e.g. `hbox myAes [1,1,4,5,6,9,9]`
--
hbox :: (AxisValue (XVal t), Num (XVal t))
=> Aes t a
-> [a]
-> Plot.Trace
hbox a xs = Plot.box
& Plot.x ?~ map (toJSON . _x a) xs
& Plot.mode ?~ [Plot.Lines]
-- | Render an Aes (styling header) <a> and data <ys> into a `Plot.Trace box`
-- e.g. `vbox myAes [1,1,4,5,6,9,9]`
--
vbox :: (AxisValue (YVal t), Num (YVal t))
=> Aes t a
-> [a]
-> Plot.Trace
vbox a ys = Plot.box
& Plot.y ?~ map (toJSON . _y a) ys
& Plot.mode ?~ [Plot.Lines]
hbars :: (AxisValue (XVal t), AxisValue (YVal t))
=> Aes t a -> [a] -> Plot.Trace
hbars a xs = Plot.bars & Plot.x ?~ map (toJSON . _x a) xs
& Plot.y ?~ map (toJSON . _y a) xs
& Plot.orientation ?~ Plot.Horizontal
hcontour :: (AxisValue (XVal t), AxisValue (YVal t), AxisValue (ZVal t))
=> Aes t a -> [a] -> Plot.Trace
hcontour a xs = Plot.contour
& Plot.x ?~ map (toJSON . _x a) xs
& Plot.y ?~ map (toJSON . _y a) xs
& Plot.z ?~ map (toJSON . _z a) xs
& Plot.orientation ?~ Plot.Horizontal
| null | https://raw.githubusercontent.com/diffusionkinetics/open/673d9a4a099abd9035ccc21e37d8e614a45a1901/plotlyhs/src/Graphics/Plotly/GoG.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE TypeFamilies #
| Render an Aes (styling header) <a> and data <xs> into a `Plot.Trace box`
e.g. `hbox myAes [1,1,4,5,6,9,9]`
| Render an Aes (styling header) <a> and data <ys> into a `Plot.Trace box`
e.g. `vbox myAes [1,1,4,5,6,9,9]`
| # LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE TypeApplications #
|
A limited Grammar of Graphics - like interface .
@
myPts : : [ ( Double , Double ) ]
myPts = [ ( 1,2 ) , ( 1.2 , 3 ) , ( 1.4,3.5 ) ]
myTrace : : Trace
myTrace = points ( aes & x .~ fst
& y .~ snd )
myPts
@
A limited Grammar of Graphics-like interface.
@
myPts :: [(Double, Double)]
myPts = [(1,2), (1.2, 3), (1.4,3.5)]
myTrace :: Trace
myTrace = points (aes & x .~ fst
& y .~ snd)
myPts
@
-}
module Graphics.Plotly.GoG where
import Data.Aeson
import Data.Text (Text)
import Data.Time
import Lens.Micro
import qualified Graphics.Plotly.Base as Plot
class ToJSON a => AxisValue a
instance AxisValue Double
instance AxisValue Float
instance AxisValue Text
instance AxisValue String
instance AxisValue Int
instance AxisValue Day
instance AxisValue a => AxisValue [a]
data RGB a = RGB a a a
data RGBA a = RGBA a a a a
instance ToJSON (RGB Int) where
toJSON (RGB r g b) = toJSON $ concat ["rgb(",show r,",",show g, ",", show b,")"]
instance ToJSON (RGB Double) where
toJSON (RGB r g b) = toJSON $ concat ["rgb(",showd r,",",showd g, ",", showd b,")"]
where showd = show @Int. floor . (*256)
instance ToJSON (RGBA Int) where
toJSON (RGBA r g b a) = toJSON $ concat ["rgba(",show r,",",show g, ",", show b,",", show a, ")"]
instance ToJSON (RGBA Double) where
toJSON (RGBA r g b a) = toJSON $ concat ["rgb(",showd r,",",showd g, ",", showd b,",", showd a,")"]
where showd = show @Int. floor . (*256)
class ToJSON a => IsColor a
instance IsColor Int
instance IsColor (RGB Int)
instance IsColor (RGB Double)
instance IsColor (RGBA Int)
instance IsColor (RGBA Double)
type family XVal a
type family YVal a
type family ZVal a
type family CVal a
type family SVal a
type instance XVal (x,y,z,c,s) = x
type instance YVal (x,y,z,c,s) = y
type instance ZVal (x,y,z,c,s) = z
type instance CVal (x,y,z,c,s) = c
type instance SVal (x,y,z,c,s) = s
data Aes t a = Aes
{ _x :: a -> XVal t
, _y :: a -> YVal t
, _z :: a -> ZVal t
, _color :: Maybe (a -> CVal t)
, _size :: Maybe (a -> SVal t)
}
aes :: Aes ((), (), (), (), ()) a
aes = Aes (const ()) (const ()) (const ()) Nothing Nothing
setx :: (AxisValue v)
=> Aes (vx,vy,vz,vc,vs) a -> (a -> v) -> Aes (v, vy, vz, vc, vs) a
setx (Aes _ fy fz fc fs) f = Aes f fy fz fc fs
x :: (AxisValue v)
=> Lens (Aes (vx,vy, vz, vc, vs) a) (Aes (v,vy, vz, vc, vs) a) (a -> vx) (a -> v)
x = lens _x setx
sety :: (AxisValue v)
=> Aes (vx,vy, vz, vc, vs) a -> (a -> v) -> Aes (vx, v, vz, vc, vs) a
sety (Aes fx _ fz fc fs) f = Aes fx f fz fc fs
y :: (AxisValue v)
=> Lens (Aes (vx,vy, vz, vc, vs) a) (Aes (vx,v, vz, vc, vs) a) (a -> vy) (a -> v)
y = lens _y sety
setz :: (AxisValue v)
=> Aes (vx,vy, vz, vc, vs) a -> (a -> v) -> Aes (vx, vy, v, vc, vs) a
setz (Aes fx fy _ fc fs) f = Aes fx fy f fc fs
z :: (AxisValue v)
=> Lens (Aes (vx,vy, vz, vc, vs) a) (Aes (vx,vy, v, vc, vs) a) (a -> vz) (a -> v)
z = lens _z setz
setcol :: (IsColor v)
=> Aes (vx,vy, vz, vc, vs) a -> Maybe (a -> v) -> Aes (vx, vy, vz, v, vs) a
setcol (Aes fx fy fz _ fs) f = Aes fx fy fz f fs
color :: (IsColor v)
=> Lens (Aes (vx,vy, vz, vc, vs) a) (Aes (vx,vy,vz, v,vs) a) (Maybe (a -> vc)) (Maybe (a -> v))
color = lens _color setcol
setsize :: (AxisValue v, Num v)
=> Aes (vx,vy, vz, vc, vs) a -> Maybe (a -> v) -> Aes (vx, vy, vz, vc, v) a
setsize (Aes fx fy fz fc _) = Aes fx fy fz fc
size :: (AxisValue v, Num v)
=> Lens (Aes (vx,vy, vz, vc, vs) a) (Aes (vx,vy,vz, vc,v) a) (Maybe (a -> vs)) (Maybe (a -> v))
size = lens _size setsize
points :: (AxisValue (XVal t), AxisValue (YVal t), ToJSON (CVal t), ToJSON (SVal t))
=> Aes t a -> [a] -> Plot.Trace
points a xs = setSize (_size a) $ setColors (_color a) $ Plot.scatter
& Plot.x ?~ map (toJSON . _x a) xs
& Plot.y ?~ map (toJSON . _y a) xs
& Plot.mode ?~ [Plot.Markers]
where setColors Nothing p = p
setColors (Just setC) p
= p & Plot.marker . non Plot.defMarker . Plot.markercolor ?~ Plot.List (map (toJSON . setC) xs)
setSize Nothing p = p
setSize (Just setS) p
= p & Plot.marker . non Plot.defMarker . Plot.size ?~ Plot.List (map (toJSON . setS) xs)
line :: (AxisValue (XVal t), AxisValue (YVal t))
=> Aes t a -> [a] -> Plot.Trace
line a xs = Plot.scatter & Plot.x ?~ map (toJSON . _x a) xs
& Plot.y ?~ map (toJSON . _y a) xs
& Plot.mode ?~ [Plot.Lines]
hbox :: (AxisValue (XVal t), Num (XVal t))
=> Aes t a
-> [a]
-> Plot.Trace
hbox a xs = Plot.box
& Plot.x ?~ map (toJSON . _x a) xs
& Plot.mode ?~ [Plot.Lines]
vbox :: (AxisValue (YVal t), Num (YVal t))
=> Aes t a
-> [a]
-> Plot.Trace
vbox a ys = Plot.box
& Plot.y ?~ map (toJSON . _y a) ys
& Plot.mode ?~ [Plot.Lines]
hbars :: (AxisValue (XVal t), AxisValue (YVal t))
=> Aes t a -> [a] -> Plot.Trace
hbars a xs = Plot.bars & Plot.x ?~ map (toJSON . _x a) xs
& Plot.y ?~ map (toJSON . _y a) xs
& Plot.orientation ?~ Plot.Horizontal
hcontour :: (AxisValue (XVal t), AxisValue (YVal t), AxisValue (ZVal t))
=> Aes t a -> [a] -> Plot.Trace
hcontour a xs = Plot.contour
& Plot.x ?~ map (toJSON . _x a) xs
& Plot.y ?~ map (toJSON . _y a) xs
& Plot.z ?~ map (toJSON . _z a) xs
& Plot.orientation ?~ Plot.Horizontal
|
366b72e6e1bdafc6b491b60e35640b70ede85838b0f955f0037012e3fbc7bf27 | ronxin/stolzen | ex2.84.scm | #lang scheme
(define (container)
(define hash (make-hash))
(define (put op type item)
(hash-set! hash (cons op type) item)
)
(define (get op type)
(let
((key (cons op type)))
(if (hash-has-key? hash key)
(hash-ref hash key)
false
)
)
)
(lambda (name)
(cond
((eq? name 'get) get)
((eq? name 'put) put)
((eq? name 'hash) hash)
)
)
)
(define storage (container))
(define put (storage 'put))
(define get (storage 'get))
(define (attach-tag type-tag contents)
(cons type-tag contents)
)
(define (type-tag datum)
(if (pair? datum)
(car datum)
(error "Bad tagged datum -- type-tag" datum)
)
)
(define (contents datum)
(if (pair? datum)
(cdr datum)
(error "Bad tagged datum -- contents" datum)
)
)
(define (make-number x)
(attach-tag 'number x)
)
(define (make-rational x)
(attach-tag 'rational x)
)
(define (make-complex x)
(attach-tag 'complex x)
)
(define (raise-number x)
(attach-tag 'rational (contents x))
)
(define (raise-rational x)
(attach-tag 'complex (contents x))
)
(put 'raise 'number raise-number)
(put 'raise 'rational raise-rational)
(define (raise x)
((get 'raise (type-tag x)) x)
)
; 2.84
; representing inheritance tower as list
; the superer type - the nearer it to the head
(define inheritance-tower '(complex rational number))
(define (index-tower tower)
(define (index-inner seq current-index)
(if (null? seq)
null
(cons (cons (car seq) current-index)
(index-inner (cdr seq) (+ 1 current-index) ))
)
)
(index-inner tower 0)
)
; making pairs of {type: ordinal number in inheritance list}
i.e { complex : 0 , rational : 1 , etc }
(define inheritance-indexes (index-tower inheritance-tower))
(define (get-index type indexes)
(cond
((null? indexes) -1)
((eq? type (car (car indexes))) (cdr (car indexes)))
(else
(get-index type (cdr indexes)))
)
)
(equal? 0 (get-index 'complex inheritance-indexes))
(equal? 1 (get-index 'rational inheritance-indexes))
(equal? 2 (get-index 'number inheritance-indexes))
(define (get-type ord indexes)
(cond
((null? indexes) false)
((= ord (cdr (car indexes))) (car (car indexes)))
(else
(get-type ord (cdr indexes)))
)
)
(equal? 'complex (get-type 0 inheritance-indexes))
(equal? 'rational (get-type 1 inheritance-indexes))
(equal? 'number (get-type 2 inheritance-indexes))
(define (find-superest-parent seq inheritance-indexes)
(define (index type)
(get-index type inheritance-indexes)
)
(get-type
(apply min (map index seq))
inheritance-indexes
)
)
(equal? 'rational (find-superest-parent '(number number rational) inheritance-indexes))
(equal? 'number (find-superest-parent '(number number number) inheritance-indexes))
(equal? 'complex (find-superest-parent '(number complex rational) inheritance-indexes))
(define (calc-distance type1 type2 inheritance-indexes)
(define (index type)
(get-index type inheritance-indexes)
)
(abs (- (index type1) (index type2)))
)
(equal? 0 (calc-distance 'complex 'complex inheritance-indexes))
(equal? 1 (calc-distance 'complex 'rational inheritance-indexes))
(equal? 2 (calc-distance 'complex 'number inheritance-indexes))
(define (distances type-tags inheritance-indexes)
(define (distance supertype)
(lambda (type) (calc-distance supertype type inheritance-indexes))
)
(let
((super-parent (find-superest-parent type-tags inheritance-indexes)))
(map (distance super-parent) type-tags)
)
)
(equal? '(0 1 2) (distances '(complex rational number) inheritance-indexes))
(equal? '(0 0 0) (distances '(complex complex complex) inheritance-indexes))
(equal? '(0 0 0) (distances '(number number number) inheritance-indexes))
(define (raise-times arg times)
(if (= times 0)
arg
(raise-times (raise arg) (- times 1))
)
)
(equal? (make-number 1) (raise-times (make-number 1) 0))
(equal? (make-rational 1) (raise-times (make-number 1) 1))
(equal? (make-complex 1) (raise-times (make-number 1) 2))
(define (raise-seq args distances)
(if (null? args)
null
(cons (raise-times (car args) (car distances))
(raise-seq (cdr args) (cdr distances)))
)
)
(equal? (list (make-complex 1) (make-complex 2))
(raise-seq (list (make-number 1) (make-complex 2)) '(2 0)))
(define (coerce args inheritance-indexes)
(let
((dis (distances (map type-tag args) inheritance-indexes)))
(raise-seq args dis)
)
)
(equal? (list (make-complex 1) (make-complex 2))
(coerce (list (make-complex 1) (make-complex 2)) inheritance-indexes))
(define (apply-generic op . args)
(apply-generic2 op args)
)
(define (apply-generic2 op args)
(let
((type-tags (map type-tag args)))
(let
((proc (get op type-tags)))
(if proc
(apply proc (map contents args))
(apply-generic2 op (coerce args inheritance-indexes))
)
)
)
)
2.85
(define (project-complex x)
(attach-tag 'rational (contents x))
)
(define (project-rational x)
(attach-tag 'number (contents x))
)
(put 'project 'complex project-complex)
(put 'project 'rational project-rational)
(define (project x)
(let
((proc (get 'project (type-tag x))))
(if proc
(proc x)
false
)
)
)
; let equal? be the generic equality predicate
(define (can-drop x)
(let
((projection (project x)))
(if projection
(equal? x (raise projection))
false
)
)
)
(define (drop x)
(if (can-drop x)
(drop (project x))
x
)
)
(equal? (make-number 1) (drop (make-complex 1)))
(equal? (make-number 1) (drop (make-rational 1)))
(equal? (make-number 1) (drop (make-number 1)))
(define (apply-generic-improved op . args)
(drop (apply-generic2 op args))
)
2.86
; for this change, it's needed to replace * and + in the complex package onto
generic add and . Also generic sin and cos need to be added . And in the
; apply-generic procedure it's not needed anymore to cut the tag, let packeges
; decide on themselves whether to cut or not
| null | https://raw.githubusercontent.com/ronxin/stolzen/bb13d0a7deea53b65253bb4b61aaf2abe4467f0d/sicp/chapter2/2.5/ex2.84.scm | scheme | 2.84
representing inheritance tower as list
the superer type - the nearer it to the head
making pairs of {type: ordinal number in inheritance list}
let equal? be the generic equality predicate
for this change, it's needed to replace * and + in the complex package onto
apply-generic procedure it's not needed anymore to cut the tag, let packeges
decide on themselves whether to cut or not
| #lang scheme
(define (container)
(define hash (make-hash))
(define (put op type item)
(hash-set! hash (cons op type) item)
)
(define (get op type)
(let
((key (cons op type)))
(if (hash-has-key? hash key)
(hash-ref hash key)
false
)
)
)
(lambda (name)
(cond
((eq? name 'get) get)
((eq? name 'put) put)
((eq? name 'hash) hash)
)
)
)
(define storage (container))
(define put (storage 'put))
(define get (storage 'get))
(define (attach-tag type-tag contents)
(cons type-tag contents)
)
(define (type-tag datum)
(if (pair? datum)
(car datum)
(error "Bad tagged datum -- type-tag" datum)
)
)
(define (contents datum)
(if (pair? datum)
(cdr datum)
(error "Bad tagged datum -- contents" datum)
)
)
(define (make-number x)
(attach-tag 'number x)
)
(define (make-rational x)
(attach-tag 'rational x)
)
(define (make-complex x)
(attach-tag 'complex x)
)
(define (raise-number x)
(attach-tag 'rational (contents x))
)
(define (raise-rational x)
(attach-tag 'complex (contents x))
)
(put 'raise 'number raise-number)
(put 'raise 'rational raise-rational)
(define (raise x)
((get 'raise (type-tag x)) x)
)
(define inheritance-tower '(complex rational number))
(define (index-tower tower)
(define (index-inner seq current-index)
(if (null? seq)
null
(cons (cons (car seq) current-index)
(index-inner (cdr seq) (+ 1 current-index) ))
)
)
(index-inner tower 0)
)
i.e { complex : 0 , rational : 1 , etc }
(define inheritance-indexes (index-tower inheritance-tower))
(define (get-index type indexes)
(cond
((null? indexes) -1)
((eq? type (car (car indexes))) (cdr (car indexes)))
(else
(get-index type (cdr indexes)))
)
)
(equal? 0 (get-index 'complex inheritance-indexes))
(equal? 1 (get-index 'rational inheritance-indexes))
(equal? 2 (get-index 'number inheritance-indexes))
(define (get-type ord indexes)
(cond
((null? indexes) false)
((= ord (cdr (car indexes))) (car (car indexes)))
(else
(get-type ord (cdr indexes)))
)
)
(equal? 'complex (get-type 0 inheritance-indexes))
(equal? 'rational (get-type 1 inheritance-indexes))
(equal? 'number (get-type 2 inheritance-indexes))
(define (find-superest-parent seq inheritance-indexes)
(define (index type)
(get-index type inheritance-indexes)
)
(get-type
(apply min (map index seq))
inheritance-indexes
)
)
(equal? 'rational (find-superest-parent '(number number rational) inheritance-indexes))
(equal? 'number (find-superest-parent '(number number number) inheritance-indexes))
(equal? 'complex (find-superest-parent '(number complex rational) inheritance-indexes))
(define (calc-distance type1 type2 inheritance-indexes)
(define (index type)
(get-index type inheritance-indexes)
)
(abs (- (index type1) (index type2)))
)
(equal? 0 (calc-distance 'complex 'complex inheritance-indexes))
(equal? 1 (calc-distance 'complex 'rational inheritance-indexes))
(equal? 2 (calc-distance 'complex 'number inheritance-indexes))
(define (distances type-tags inheritance-indexes)
(define (distance supertype)
(lambda (type) (calc-distance supertype type inheritance-indexes))
)
(let
((super-parent (find-superest-parent type-tags inheritance-indexes)))
(map (distance super-parent) type-tags)
)
)
(equal? '(0 1 2) (distances '(complex rational number) inheritance-indexes))
(equal? '(0 0 0) (distances '(complex complex complex) inheritance-indexes))
(equal? '(0 0 0) (distances '(number number number) inheritance-indexes))
(define (raise-times arg times)
(if (= times 0)
arg
(raise-times (raise arg) (- times 1))
)
)
(equal? (make-number 1) (raise-times (make-number 1) 0))
(equal? (make-rational 1) (raise-times (make-number 1) 1))
(equal? (make-complex 1) (raise-times (make-number 1) 2))
(define (raise-seq args distances)
(if (null? args)
null
(cons (raise-times (car args) (car distances))
(raise-seq (cdr args) (cdr distances)))
)
)
(equal? (list (make-complex 1) (make-complex 2))
(raise-seq (list (make-number 1) (make-complex 2)) '(2 0)))
(define (coerce args inheritance-indexes)
(let
((dis (distances (map type-tag args) inheritance-indexes)))
(raise-seq args dis)
)
)
(equal? (list (make-complex 1) (make-complex 2))
(coerce (list (make-complex 1) (make-complex 2)) inheritance-indexes))
(define (apply-generic op . args)
(apply-generic2 op args)
)
(define (apply-generic2 op args)
(let
((type-tags (map type-tag args)))
(let
((proc (get op type-tags)))
(if proc
(apply proc (map contents args))
(apply-generic2 op (coerce args inheritance-indexes))
)
)
)
)
2.85
(define (project-complex x)
(attach-tag 'rational (contents x))
)
(define (project-rational x)
(attach-tag 'number (contents x))
)
(put 'project 'complex project-complex)
(put 'project 'rational project-rational)
(define (project x)
(let
((proc (get 'project (type-tag x))))
(if proc
(proc x)
false
)
)
)
(define (can-drop x)
(let
((projection (project x)))
(if projection
(equal? x (raise projection))
false
)
)
)
(define (drop x)
(if (can-drop x)
(drop (project x))
x
)
)
(equal? (make-number 1) (drop (make-complex 1)))
(equal? (make-number 1) (drop (make-rational 1)))
(equal? (make-number 1) (drop (make-number 1)))
(define (apply-generic-improved op . args)
(drop (apply-generic2 op args))
)
2.86
generic add and . Also generic sin and cos need to be added . And in the
|
a35a21b3e61a20f1d380df90bc02e767d7b8732df566fdd58bc3af2241a4addd | thoughtpolice/claap | Types.hs | {-# LANGUAGE DataKinds #-}
# LANGUAGE KindSignatures #
# LANGUAGE GeneralizedNewtypeDeriving #
-- |
-- Module : AAP.Types
Copyright : ( c ) 2016
License : BSD3 - style ( see LICENSE.txt )
--
-- Maintainer :
-- Stability : experimental
Portability : non - portable ( GHC extensions )
--
-- Basic type definitions used throughout the project.
--
module AAP.Types
( -- * Type-level values.
RegFileSize
-- * Types
, W
-- ** Types for the register file and memory
, MemAddr, MemValue
, MemReadAddr, MemWriteAddr
, RegAddr, RegValue
, RegReadAddr, RegWriteAddr
, InstrAddr
, Reg, RegKind(..), toReg, regBV, regUnsigned
, Imm, ImmKind(..), toImm, toSImm, immSigned, immUnsigned
, immUBV, immSBV
) where
import CLaSH.Prelude
--------------------------------------------------------------------------------
-- Basic types
-- | Size of the register file.
type RegFileSize = 64
| 16 - bit word value for the AAP architecture .
type W = 16
-- | Memory address: used to specify where to read from and write to, when
-- addressing values inside the data memory unit.
type MemAddr = BitVector 8
-- | Memory value, read out from the data memory at a specified @'MemAddr'@.
type MemValue = BitVector W
-- | Register value, read out from the register file at a specified @'RegAddr'@.
type RegValue = BitVector W
-- | Fetch address: used to specify where to read from in the instruction
memory . Because the program counter is 24 - bits , this means there is a total
of 2 ^ 24 = 16 MB of instruction memory that is addressable .
type InstrAddr = BitVector 24
| Address to write to in the register file . Constrained to 6 bits , which
allows a maximum address value of 63 ( @0b111111@ ) , allowing 64 registers ( as
defined by the AAP standard ) .
type RegAddr = Unsigned 6
-- | Simple alias to make some types read better. Used to distinguish read ports
-- from the write port.
type RegWriteAddr = RegAddr
-- | Simple alias to make the types read better. Used to distinguish read ports
-- from the write port.
type RegReadAddr = RegAddr
-- | Simple alias to make some types read better. Used to distinguish read ports
-- from the write port.
type MemWriteAddr = RegAddr
-- | Simple alias to make the types read better. Used to distinguish read ports
-- from the write port.
type MemReadAddr = RegAddr
--------------------------------------------------------------------------------
-- Decoder/instruction types
| The \"kind\ " of a register reference in an instruction . AAP instructions
use a simple 3 - operand code , so there are generally up - to three register
references : the /destination/ register , and two source registers /A/ and /B/.
--
-- Note: This is used as a /kind/, not a type - it is promoted to the kind level
-- with @DataKinds@.
data RegKind
= D -- ^ The destination register.
| A -- ^ Source register A.
| B -- ^ Source register B.
-- | The \"kind\" of an immediate value in an instruction. Used to classify
-- whether the immediate is signed or unsigned.
--
-- Note: This is used as a /kind/, not a type - it is promoted to the kind level
-- with @DataKinds@.
data ImmKind
= S -- ^ Signed immediate value
| I -- ^ Unsigned immediate value
-- | Simple type alias, used to give mnemonics to the instruction encoding.
newtype Reg (k :: RegKind) s = Reg (BitVector s)
deriving (Eq, Show, Integral, Enum, Real, Num, Ord)
| Convert a @'BitVector'@ to a @'Reg'@ value . Can be of any @'RegKind'@ , as
-- long as it's the appropriate size.
toReg :: KnownNat s => BitVector s -> Reg k s
toReg = Reg
| Convert a @'Reg'@ to a @'BitVector'@.
regBV :: Reg k s -> BitVector s
regBV (Reg s) = s
-- | Convert a @'Reg'@ to a @'Unsigned'@ value.
regUnsigned :: Reg k s -> Unsigned s
regUnsigned = unpack . regBV
-- | Simple type alias, used to give mnemonics to the instruction encoding.
newtype Imm (k :: ImmKind) s = Imm (BitVector s)
deriving (Eq, Show, Integral, Enum, Real, Num, Ord)
| Convert a @'BitVector'@ to a signed @'Imm'@ value .
toImm :: BitVector s -> Imm 'I s
toImm = Imm
| Convert a @'BitVector'@ to a signed @'Imm'@ value .
toSImm :: BitVector s -> Imm 'S s
toSImm = Imm
-- | Convert an @'Imm'@ to an @'Unsigned'@ Number.
immUnsigned :: KnownNat s => Imm 'I s -> Unsigned s
immUnsigned (Imm s) = unpack s
-- | Convert an @'Imm'@ to a @'Signed'@ number.
immSigned :: KnownNat s => Imm 'S s -> Signed s
immSigned (Imm s) = unpack s
immUBV :: KnownNat s => Imm 'I s -> BitVector s
immUBV = pack . immUnsigned
immSBV :: KnownNat s => Imm 'S s -> BitVector s
immSBV = pack . immSigned
| null | https://raw.githubusercontent.com/thoughtpolice/claap/4944b6c4ad6aff4097f8ef66231ce7d7a59f5ee7/src/aap/AAP/Types.hs | haskell | # LANGUAGE DataKinds #
|
Module : AAP.Types
Maintainer :
Stability : experimental
Basic type definitions used throughout the project.
* Type-level values.
* Types
** Types for the register file and memory
------------------------------------------------------------------------------
Basic types
| Size of the register file.
| Memory address: used to specify where to read from and write to, when
addressing values inside the data memory unit.
| Memory value, read out from the data memory at a specified @'MemAddr'@.
| Register value, read out from the register file at a specified @'RegAddr'@.
| Fetch address: used to specify where to read from in the instruction
| Simple alias to make some types read better. Used to distinguish read ports
from the write port.
| Simple alias to make the types read better. Used to distinguish read ports
from the write port.
| Simple alias to make some types read better. Used to distinguish read ports
from the write port.
| Simple alias to make the types read better. Used to distinguish read ports
from the write port.
------------------------------------------------------------------------------
Decoder/instruction types
Note: This is used as a /kind/, not a type - it is promoted to the kind level
with @DataKinds@.
^ The destination register.
^ Source register A.
^ Source register B.
| The \"kind\" of an immediate value in an instruction. Used to classify
whether the immediate is signed or unsigned.
Note: This is used as a /kind/, not a type - it is promoted to the kind level
with @DataKinds@.
^ Signed immediate value
^ Unsigned immediate value
| Simple type alias, used to give mnemonics to the instruction encoding.
long as it's the appropriate size.
| Convert a @'Reg'@ to a @'Unsigned'@ value.
| Simple type alias, used to give mnemonics to the instruction encoding.
| Convert an @'Imm'@ to an @'Unsigned'@ Number.
| Convert an @'Imm'@ to a @'Signed'@ number. | # LANGUAGE KindSignatures #
# LANGUAGE GeneralizedNewtypeDeriving #
Copyright : ( c ) 2016
License : BSD3 - style ( see LICENSE.txt )
Portability : non - portable ( GHC extensions )
module AAP.Types
RegFileSize
, W
, MemAddr, MemValue
, MemReadAddr, MemWriteAddr
, RegAddr, RegValue
, RegReadAddr, RegWriteAddr
, InstrAddr
, Reg, RegKind(..), toReg, regBV, regUnsigned
, Imm, ImmKind(..), toImm, toSImm, immSigned, immUnsigned
, immUBV, immSBV
) where
import CLaSH.Prelude
type RegFileSize = 64
| 16 - bit word value for the AAP architecture .
type W = 16
type MemAddr = BitVector 8
type MemValue = BitVector W
type RegValue = BitVector W
memory . Because the program counter is 24 - bits , this means there is a total
of 2 ^ 24 = 16 MB of instruction memory that is addressable .
type InstrAddr = BitVector 24
| Address to write to in the register file . Constrained to 6 bits , which
allows a maximum address value of 63 ( @0b111111@ ) , allowing 64 registers ( as
defined by the AAP standard ) .
type RegAddr = Unsigned 6
type RegWriteAddr = RegAddr
type RegReadAddr = RegAddr
type MemWriteAddr = RegAddr
type MemReadAddr = RegAddr
| The \"kind\ " of a register reference in an instruction . AAP instructions
use a simple 3 - operand code , so there are generally up - to three register
references : the /destination/ register , and two source registers /A/ and /B/.
data RegKind
data ImmKind
newtype Reg (k :: RegKind) s = Reg (BitVector s)
deriving (Eq, Show, Integral, Enum, Real, Num, Ord)
| Convert a @'BitVector'@ to a @'Reg'@ value . Can be of any @'RegKind'@ , as
toReg :: KnownNat s => BitVector s -> Reg k s
toReg = Reg
| Convert a @'Reg'@ to a @'BitVector'@.
regBV :: Reg k s -> BitVector s
regBV (Reg s) = s
regUnsigned :: Reg k s -> Unsigned s
regUnsigned = unpack . regBV
newtype Imm (k :: ImmKind) s = Imm (BitVector s)
deriving (Eq, Show, Integral, Enum, Real, Num, Ord)
| Convert a @'BitVector'@ to a signed @'Imm'@ value .
toImm :: BitVector s -> Imm 'I s
toImm = Imm
| Convert a @'BitVector'@ to a signed @'Imm'@ value .
toSImm :: BitVector s -> Imm 'S s
toSImm = Imm
immUnsigned :: KnownNat s => Imm 'I s -> Unsigned s
immUnsigned (Imm s) = unpack s
immSigned :: KnownNat s => Imm 'S s -> Signed s
immSigned (Imm s) = unpack s
immUBV :: KnownNat s => Imm 'I s -> BitVector s
immUBV = pack . immUnsigned
immSBV :: KnownNat s => Imm 'S s -> BitVector s
immSBV = pack . immSigned
|
2d6527ec3639c12c14b204bbe7442fe18d491266066dd4c460432c165b8dc115 | input-output-hk/cardano-wallet-legacy | Fixture.hs | {-- | Helper module which tries to get rid of a bit of the boilerplate
needed to initialise a kernel & an active/passive wallet.
--}
module Test.Spec.Fixture (
withLayer
, withPassiveWalletFixture
, withActiveWalletFixture
, GenActiveWalletFixture
, GenPassiveWalletFixture
-- * Useful generators
, genSpendingPassword
) where
import Universum
import Pos.Util.Wlog (Severity)
import Pos.Crypto (ProtocolMagic)
import Pos.Infra.InjectFail (mkFInjects)
import Test.QuickCheck (arbitrary, frequency)
import Test.QuickCheck.Monadic (PropertyM, pick)
import qualified Cardano.Wallet.API.V1.Types as V1
import qualified Cardano.Wallet.Kernel as Kernel
import qualified Cardano.Wallet.Kernel.Diffusion as Kernel
import Cardano.Wallet.Kernel.Internal (ActiveWallet, PassiveWallet)
import qualified Cardano.Wallet.Kernel.Keystore as Keystore
import Cardano.Wallet.Kernel.NodeStateAdaptor (mockNodeStateDef)
import Cardano.Wallet.WalletLayer (ActiveWalletLayer,
PassiveWalletLayer)
import qualified Cardano.Wallet.WalletLayer.Kernel as WalletLayer.Kernel
-- | Do not pollute the test runner output with logs.
devNull :: Severity -> Text -> IO ()
devNull _ _ = return ()
genSpendingPassword :: PropertyM IO (Maybe V1.SpendingPassword)
genSpendingPassword =
pick (frequency [(20, pure Nothing), (80, Just <$> arbitrary)])
withLayer :: MonadIO m
=> ProtocolMagic
-> (PassiveWalletLayer m -> PassiveWallet -> IO a)
-> PropertyM IO a
withLayer pm cc = do
liftIO $ Keystore.bracketTestKeystore $ \keystore -> do
mockFInjects <- mkFInjects mempty
WalletLayer.Kernel.bracketPassiveWallet
pm
Kernel.UseInMemory
devNull
keystore
mockNodeStateDef
mockFInjects
$ \layer wallet -> cc layer wallet
type GenPassiveWalletFixture x = PropertyM IO (PassiveWallet -> IO x)
type GenActiveWalletFixture x = PropertyM IO (Keystore.Keystore -> ActiveWallet -> IO x)
withPassiveWalletFixture :: MonadIO m
=> ProtocolMagic
-> GenPassiveWalletFixture x
-> (Keystore.Keystore -> PassiveWalletLayer m -> PassiveWallet -> x -> IO a)
-> PropertyM IO a
withPassiveWalletFixture pm prepareFixtures cc = do
generateFixtures <- prepareFixtures
liftIO $ Keystore.bracketTestKeystore $ \keystore -> do
mockFInjects <- mkFInjects mempty
WalletLayer.Kernel.bracketPassiveWallet
pm
Kernel.UseInMemory
devNull
keystore
mockNodeStateDef
mockFInjects
$ \layer wallet -> do
fixtures <- generateFixtures wallet
cc keystore layer wallet fixtures
withActiveWalletFixture :: MonadIO m
=> ProtocolMagic
-> GenActiveWalletFixture x
-> (Keystore.Keystore -> ActiveWalletLayer m -> ActiveWallet -> x -> IO a)
-> PropertyM IO a
withActiveWalletFixture pm prepareFixtures cc = do
generateFixtures <- prepareFixtures
liftIO $ Keystore.bracketTestKeystore $ \keystore -> do
mockFInjects <- mkFInjects mempty
WalletLayer.Kernel.bracketPassiveWallet pm Kernel.UseInMemory devNull keystore mockNodeStateDef mockFInjects $ \passiveLayer passiveWallet -> do
WalletLayer.Kernel.bracketActiveWallet
passiveLayer
passiveWallet
diffusion
$ \activeLayer activeWallet -> do
fixtures <- generateFixtures keystore activeWallet
cc keystore activeLayer activeWallet fixtures
where
diffusion :: Kernel.WalletDiffusion
diffusion = Kernel.WalletDiffusion {
walletSendTx = \_tx -> return False
, walletGetSubscriptionStatus = return mempty
}
| null | https://raw.githubusercontent.com/input-output-hk/cardano-wallet-legacy/143e6d0dac0b28b3274600c6c49ec87e42ec9f37/test/unit/Test/Spec/Fixture.hs | haskell | - | Helper module which tries to get rid of a bit of the boilerplate
needed to initialise a kernel & an active/passive wallet.
-
* Useful generators
| Do not pollute the test runner output with logs. |
module Test.Spec.Fixture (
withLayer
, withPassiveWalletFixture
, withActiveWalletFixture
, GenActiveWalletFixture
, GenPassiveWalletFixture
, genSpendingPassword
) where
import Universum
import Pos.Util.Wlog (Severity)
import Pos.Crypto (ProtocolMagic)
import Pos.Infra.InjectFail (mkFInjects)
import Test.QuickCheck (arbitrary, frequency)
import Test.QuickCheck.Monadic (PropertyM, pick)
import qualified Cardano.Wallet.API.V1.Types as V1
import qualified Cardano.Wallet.Kernel as Kernel
import qualified Cardano.Wallet.Kernel.Diffusion as Kernel
import Cardano.Wallet.Kernel.Internal (ActiveWallet, PassiveWallet)
import qualified Cardano.Wallet.Kernel.Keystore as Keystore
import Cardano.Wallet.Kernel.NodeStateAdaptor (mockNodeStateDef)
import Cardano.Wallet.WalletLayer (ActiveWalletLayer,
PassiveWalletLayer)
import qualified Cardano.Wallet.WalletLayer.Kernel as WalletLayer.Kernel
devNull :: Severity -> Text -> IO ()
devNull _ _ = return ()
genSpendingPassword :: PropertyM IO (Maybe V1.SpendingPassword)
genSpendingPassword =
pick (frequency [(20, pure Nothing), (80, Just <$> arbitrary)])
withLayer :: MonadIO m
=> ProtocolMagic
-> (PassiveWalletLayer m -> PassiveWallet -> IO a)
-> PropertyM IO a
withLayer pm cc = do
liftIO $ Keystore.bracketTestKeystore $ \keystore -> do
mockFInjects <- mkFInjects mempty
WalletLayer.Kernel.bracketPassiveWallet
pm
Kernel.UseInMemory
devNull
keystore
mockNodeStateDef
mockFInjects
$ \layer wallet -> cc layer wallet
type GenPassiveWalletFixture x = PropertyM IO (PassiveWallet -> IO x)
type GenActiveWalletFixture x = PropertyM IO (Keystore.Keystore -> ActiveWallet -> IO x)
withPassiveWalletFixture :: MonadIO m
=> ProtocolMagic
-> GenPassiveWalletFixture x
-> (Keystore.Keystore -> PassiveWalletLayer m -> PassiveWallet -> x -> IO a)
-> PropertyM IO a
withPassiveWalletFixture pm prepareFixtures cc = do
generateFixtures <- prepareFixtures
liftIO $ Keystore.bracketTestKeystore $ \keystore -> do
mockFInjects <- mkFInjects mempty
WalletLayer.Kernel.bracketPassiveWallet
pm
Kernel.UseInMemory
devNull
keystore
mockNodeStateDef
mockFInjects
$ \layer wallet -> do
fixtures <- generateFixtures wallet
cc keystore layer wallet fixtures
withActiveWalletFixture :: MonadIO m
=> ProtocolMagic
-> GenActiveWalletFixture x
-> (Keystore.Keystore -> ActiveWalletLayer m -> ActiveWallet -> x -> IO a)
-> PropertyM IO a
withActiveWalletFixture pm prepareFixtures cc = do
generateFixtures <- prepareFixtures
liftIO $ Keystore.bracketTestKeystore $ \keystore -> do
mockFInjects <- mkFInjects mempty
WalletLayer.Kernel.bracketPassiveWallet pm Kernel.UseInMemory devNull keystore mockNodeStateDef mockFInjects $ \passiveLayer passiveWallet -> do
WalletLayer.Kernel.bracketActiveWallet
passiveLayer
passiveWallet
diffusion
$ \activeLayer activeWallet -> do
fixtures <- generateFixtures keystore activeWallet
cc keystore activeLayer activeWallet fixtures
where
diffusion :: Kernel.WalletDiffusion
diffusion = Kernel.WalletDiffusion {
walletSendTx = \_tx -> return False
, walletGetSubscriptionStatus = return mempty
}
|
b71f83ee08ec3f7ab078c1fb0be2b692e15798c6b7b5a46b6c6bb4dd5ddc03ad | facebook/duckling | Tests.hs | Copyright ( c ) 2016 - present , Facebook , Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
module Duckling.Ordinal.ZH.Tests
( tests ) where
import Prelude
import Data.String
import Test.Tasty
import Duckling.Dimensions.Types
import Duckling.Ordinal.ZH.Corpus
import Duckling.Testing.Asserts
tests :: TestTree
tests = testGroup "ZH Tests"
[ makeCorpusTest [Seal Ordinal] corpus
]
| null | https://raw.githubusercontent.com/facebook/duckling/72f45e8e2c7385f41f2f8b1f063e7b5daa6dca94/tests/Duckling/Ordinal/ZH/Tests.hs | haskell | All rights reserved.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree. | Copyright ( c ) 2016 - present , Facebook , Inc.
module Duckling.Ordinal.ZH.Tests
( tests ) where
import Prelude
import Data.String
import Test.Tasty
import Duckling.Dimensions.Types
import Duckling.Ordinal.ZH.Corpus
import Duckling.Testing.Asserts
tests :: TestTree
tests = testGroup "ZH Tests"
[ makeCorpusTest [Seal Ordinal] corpus
]
|
38755f5cc5c4ad72d9c9061fd6104f53345b65bd716173d8cd926fd2d1ec70c7 | triffon/fp-2022-23 | 04.dec-to-bin.rkt | #lang racket
(require rackunit)
(require rackunit/text-ui)
# # # зад 4
число от десетична в двоична бройна система .
(define (dec-to-bin n)
'тук)
(run-tests
(test-suite
"dec-to-bin tests"
(check = (dec-to-bin 0) 0)
(check = (dec-to-bin 1) 1)
(check = (dec-to-bin 4) 100)
(check = (dec-to-bin 31) 11111)
(check = (dec-to-bin 64) 1000000)
(check = (dec-to-bin 55) 110111)
(check = (dec-to-bin 3767) 111010110111))
'verbose)
| null | https://raw.githubusercontent.com/triffon/fp-2022-23/caf334dba494954cc7bd62b9f4fc5a1ff8315f2b/exercises/cs2/02.scheme.rec-iter/04.dec-to-bin.rkt | racket | #lang racket
(require rackunit)
(require rackunit/text-ui)
# # # зад 4
число от десетична в двоична бройна система .
(define (dec-to-bin n)
'тук)
(run-tests
(test-suite
"dec-to-bin tests"
(check = (dec-to-bin 0) 0)
(check = (dec-to-bin 1) 1)
(check = (dec-to-bin 4) 100)
(check = (dec-to-bin 31) 11111)
(check = (dec-to-bin 64) 1000000)
(check = (dec-to-bin 55) 110111)
(check = (dec-to-bin 3767) 111010110111))
'verbose)
|
|
849bd2abdcd78eb598eb3c4e12df0bd2a1e408f62ab7c9ca7f7c3f03a4af1ac9 | active-group/reacl-c | dom.cljc | (ns reacl-c.dom
"This namespace contains functions for all HTML and SVG tags, which
all return corresponding dom items. Additionally it contains the
function [[h]], a generic function that creates dom items."
(:require [reacl-c.base :as base]
[reacl-c.dom-base :as dom-base]
#?(:clj [reacl-c.core :as core]
:cljs [reacl-c.core :as core :include-macros true])
#?(:clj [clojure.core :as clj]
:cljs [cljs.core :as clj])
[clojure.string :as str]
[active.clojure.functions :as f]
#?(:clj [reacl-c.impl.macros :as m]))
#?(:cljs (:require-macros [reacl-c.impl.macros :as m]))
(:refer-clojure :exclude (meta map time use set symbol)))
;; TODO: add some way to lift new primitives (with React getSnapshot.... and/or webcomponents)
(defn element? "Returns if `v` is a dom element." [v]
(dom-base/element? v))
(defn dom-attributes? "Returns if v is a map, and not an item." [v]
(and (map? v)
(not (satisfies? base/E v))))
(defn- analyze-dom-args [args]
(if (empty? args)
(cons {} args)
(let [x (first args)]
(if (dom-attributes? x)
args
(cons {} args)))))
(defn- event? [k]
(str/starts-with? (name k) "on"))
(let [none [{} {}]]
(defn- split-events [attrs]
;; optimized on having no or few events; which is the usual case.
(cond
(not (some event? (keys attrs))) [attrs {}]
:else
(let [[attrs events]
(reduce-kv (fn [[attrs events] k v]
(if (event? k)
[(dissoc! attrs k) (assoc! events k v)]
[attrs events]))
[(transient attrs) (transient {})]
attrs)]
[(persistent! attrs) (persistent! events)]))))
(defn- dom-element* [type attrs events & children]
{:pre [(string? type)
(map? attrs)
(map? events)
(every? #(or (ifn? %) (nil? %)) (vals events))]}
(base/assert-item-list type children)
(let [[attrs ref] (if (contains? attrs :ref)
[(dissoc attrs :ref) (:ref attrs)]
[attrs nil])]
(dom-base/make-element type attrs events ref children)))
(defn ^:no-doc dom-element** [type & args]
{:pre [(string? type)]}
(let [[attrs_ & children] (analyze-dom-args args)
[attrs events] (split-events attrs_)]
(apply dom-element* type attrs events children)))
(defn ^:no-doc dom-element [type & args]
(apply dom-element** type args))
(defn- dom-function [type]
{:pre [(string? type)]}
;; Note: could also use (with-async-actions (fn [deliver! ])) and
;; event handlers that call deliver! - but then they aren't pure
;; anymore (at least after a translation)
Note : DOM uses upper - case for the type ( .nodeName ) , but React
;; enforces lower-case, in general; but for something like
;; 'clipPath' is complains only when used outside of an svg; inside
;; camelCase is ok.
(fn [& args]
(apply dom-element type args)))
(let [k (fn [f attrs events children bind]
(apply f
(reduce-kv (fn [attrs ev h]
(assoc attrs ev
(if (some? h)
(bind h)
nil)))
attrs
events)
children))]
(defn ^:no-doc fn-dom-wrapper [f]
;; possible optimization: if all event handlers are already 'bound', we don't need with-bind.
(fn [& args]
(let [[attrs & children] (analyze-dom-args args)
[attrs-ne events] (split-events attrs)]
;; Note: not checking for nil events handlers here, to give the
;; user the chance to have a stable tree if needed (important
;; for focus etc), if he uses something like :onclick (when ok ...)
(if (or (empty? events)
;; check if we actually need the extra classes (not
;; sure if worth it - if we could inline
local - state+handle - action into defn - item , then not )
(every? core/bound-handler? (clj/map second events)))
(apply f attrs children)
(core/with-bind (f/partial k f attrs-ne events children)))))))
(defmacro ^{:arglists '([name [attrs & children] & body])} defn-dom
"Defines a function that works like the dom element functions in this
namespace (e.g. [[div]]), in that the first argument is an optional
attributes map, followed by arbitrarily many child items.
If the defined function is called without an attribute map, then
`{}` will be passed implicitly as the first argument.
Additionally, all attributes starting with `:on` must be event
handlers and are automatically bound to the state of the returned
item, via [[reacl-c.core/with-bind]]. That means you can assign
these event handlers to any item in the body, or
use [[reacl-c.core/call]], irrespective of the state that those
items get.
Tip: Pass the attributes to the toplevel dom element that is
returned, and use [[merge-attributes]] to add some default
attributes in your function body."
[name params & body]
(let [[name static? state-schema? docstring? params & body] (apply core/parse-defn-item-args name params body)]
`(def ~(vary-meta name #(merge {:arglists `'(~params ~(vec (rest params)))
:doc docstring?} %))
(fn-dom-wrapper (core/fn-item* ~name ~static? ~state-schema? ~params ~@body)))))
(defn- join-classes
"Joins multiple class strings (or nils) into one."
[& cs]
(let [cs (remove empty? cs)]
(cond
(empty? cs) ""
(empty? (rest cs)) (first cs)
:else
(str/join " " cs))))
(defn- unify-class-attr [m]
(if (contains? m :className)
(-> m
(dissoc :className)
(assoc :class (get m :className)))
m))
(letfn [(merge-a2! [res a2]
(reduce-kv (fn [res k v]
(case k
;; merge class names (preferring :class over :className)
;; Note, allegedly: "the styles are applied in the order they are declared in the document, the order they are listed in the element has no effect."
(:class :className)
(-> res
(assoc! :class (apply join-classes [(get res :class) v])))
;; Merging styles absolutely correct is very hard (like merging :border and :border-with)
;; This will only cover simple cases.
:style
(assoc! res :style (merge (get res :style) v))
;; for any other attribute, overwrite
(assoc! res k v)))
res
a2))]
(defn merge-attributes
"Merge two or more attribute maps into one. This handles merging
multiple `:style` maps into one, and concatenates `:class` and
`:className` strings."
[& attrs]
(assert (every? #(or (nil? %) (dom-attributes? %)) attrs) (vec (remove #(or (nil? %) (dom-attributes? %)) attrs)))
(let [attrs (remove nil? attrs)]
(cond
(empty? attrs)
{}
(empty? (rest attrs))
(first attrs)
:else
(persistent! (reduce merge-a2!
(transient (unify-class-attr (first attrs)))
(rest attrs)))
))))
(defmacro def-dom
"Defines an alias for a dom function, for example:
```
(def-dom page div)
```
The var `page` will be bound to function that is essentially
identical to `div`, but additionally, the name of the var is attached
to the returned items, which can be helpful in testing and debugging
utilities (see [[reacl-c.core/named]]).
Also, some default attributes can be specified, for example:
```
(def-dom page div
{:style {:margin \"10px\")})
```
These attributes will be merged with attributes passed by the caller
of `page` using [[merge-attributes]].
"
[name base & [attrs]]
`(let [base# ~base
attrs# ~attrs]
(core/defn-item ~(vary-meta name #(merge {:arglists '([attrs & children] [& children])} %))
[& args#]
(let [[attrs2# & children#] (analyze-dom-args args#)]
(apply base# (merge-attributes attrs# attrs2#) children#)))))
(defn ^{:arglists '([type attrs & children]
[type & children])}
h
"Returns a DOM item of the specified `type`, like \"div\" for
example. Arguments are the same as the specific DOM functions,
like [[div]]."
[type & args]
(apply dom-element type args))
;; The following HTML elements are supported by react (-and-attributes.html)
(m/defdom a)
(m/defdom abbr)
(m/defdom address)
(m/defdom area)
(m/defdom article)
(m/defdom aside)
(m/defdom audio)
(m/defdom b)
(m/defdom base)
(m/defdom bdi)
(m/defdom bdo)
(m/defdom big)
(m/defdom blockquote)
(m/defdom body)
(m/defdom br)
(m/defdom button)
(m/defdom canvas)
(m/defdom caption)
(m/defdom cite)
(m/defdom code)
(m/defdom col)
(m/defdom colgroup)
(m/defdom data)
(m/defdom datalist)
(m/defdom dd)
(m/defdom del)
(m/defdom details)
(m/defdom dfn)
(m/defdom div)
(m/defdom dl)
(m/defdom dt)
(m/defdom em)
(m/defdom embed)
(m/defdom fieldset)
(m/defdom figcaption)
(m/defdom figure)
(m/defdom footer)
(m/defdom form)
(m/defdom h1)
(m/defdom h2)
(m/defdom h3)
(m/defdom h4)
(m/defdom h5)
(m/defdom h6)
(m/defdom head)
(m/defdom header)
(m/defdom hr)
(m/defdom html)
(m/defdom i)
(m/defdom iframe)
(m/defdom img)
(m/defdom input)
(m/defdom ins)
(m/defdom kbd)
(m/defdom keygen)
(m/defdom label)
(m/defdom legend)
(m/defdom li)
(m/defdom link)
(m/defdom main)
(m/defdom map)
(m/defdom mark)
(m/defdom menu)
(m/defdom menuitem)
(m/defdom meta)
(m/defdom meter)
(m/defdom nav)
(m/defdom noscript)
(m/defdom object)
(m/defdom ol)
(m/defdom optgroup)
(m/defdom option)
(m/defdom output)
(m/defdom p)
(m/defdom param)
(m/defdom pre)
(m/defdom progress)
(m/defdom q)
(m/defdom rp)
(m/defdom rt)
(m/defdom ruby)
(m/defdom s)
(m/defdom samp)
(m/defdom script)
(m/defdom section)
(m/defdom select)
(m/defdom small)
(m/defdom source)
(m/defdom span)
(m/defdom strong)
(m/defdom style)
(m/defdom sub)
(m/defdom summary)
(m/defdom sup)
(m/defdom table)
(m/defdom tbody)
(m/defdom td)
(m/defdom textarea)
(m/defdom tfoot)
(m/defdom th)
(m/defdom thead)
(m/defdom time)
(m/defdom title)
(m/defdom tr)
(m/defdom track)
(m/defdom u)
(m/defdom ul)
(m/defdom var)
(m/defdom video)
(m/defdom wbr)
(m/defdom svg)
(m/defdom polygon)
(m/defdom line)
(m/defdom rect)
(m/defdom circle)
(m/defdom ellipse)
(m/defdom polyline)
(m/defdom text)
(m/defdom path)
(m/defdom defs)
(m/defdom clipPath)
(m/defdom g)
(m/defdom linearGradient)
(m/defdom radialGradient)
(m/defdom stop)
(m/defdom image)
(m/defdom animate)
(m/defdom animateColor)
(m/defdom animateMotion)
(m/defdom animateTransform)
(m/defdom set)
(m/defdom cursor)
(m/defdom desc)
(m/defdom feBlend)
(m/defdom feColorMatrix)
(m/defdom feComponentTransfer)
(m/defdom feComposite)
(m/defdom feConvolveMatrix)
(m/defdom feDiffuseLighting)
(m/defdom feDisplacementMap)
(m/defdom feDistantLight)
(m/defdom feFlood)
(m/defdom feFuncA)
(m/defdom feFuncB)
(m/defdom feFuncG)
(m/defdom feFuncR)
(m/defdom feGaussianBlur)
(m/defdom feImage)
(m/defdom feMerge)
(m/defdom feMergeNode)
(m/defdom feMorphology)
(m/defdom feOffset)
(m/defdom fePointLight)
(m/defdom feSpecularLighting)
(m/defdom feSpotLight)
(m/defdom feTile)
(m/defdom feTurbulence)
(m/defdom font)
(m/defdom marker)
(m/defdom mask)
(m/defdom metadata)
(m/defdom mpath)
(m/defdom pattern)
(m/defdom switch)
(m/defdom symbol)
(m/defdom textPath)
(m/defdom tspan)
(m/defdom use)
(m/defdom view)
| null | https://raw.githubusercontent.com/active-group/reacl-c/a8113b37573616604ebd00e0cfdd29ec6b663512/src/reacl_c/dom.cljc | clojure | TODO: add some way to lift new primitives (with React getSnapshot.... and/or webcomponents)
optimized on having no or few events; which is the usual case.
Note: could also use (with-async-actions (fn [deliver! ])) and
event handlers that call deliver! - but then they aren't pure
anymore (at least after a translation)
enforces lower-case, in general; but for something like
'clipPath' is complains only when used outside of an svg; inside
camelCase is ok.
possible optimization: if all event handlers are already 'bound', we don't need with-bind.
Note: not checking for nil events handlers here, to give the
user the chance to have a stable tree if needed (important
for focus etc), if he uses something like :onclick (when ok ...)
check if we actually need the extra classes (not
sure if worth it - if we could inline
merge class names (preferring :class over :className)
Note, allegedly: "the styles are applied in the order they are declared in the document, the order they are listed in the element has no effect."
Merging styles absolutely correct is very hard (like merging :border and :border-with)
This will only cover simple cases.
for any other attribute, overwrite
The following HTML elements are supported by react (-and-attributes.html) | (ns reacl-c.dom
"This namespace contains functions for all HTML and SVG tags, which
all return corresponding dom items. Additionally it contains the
function [[h]], a generic function that creates dom items."
(:require [reacl-c.base :as base]
[reacl-c.dom-base :as dom-base]
#?(:clj [reacl-c.core :as core]
:cljs [reacl-c.core :as core :include-macros true])
#?(:clj [clojure.core :as clj]
:cljs [cljs.core :as clj])
[clojure.string :as str]
[active.clojure.functions :as f]
#?(:clj [reacl-c.impl.macros :as m]))
#?(:cljs (:require-macros [reacl-c.impl.macros :as m]))
(:refer-clojure :exclude (meta map time use set symbol)))
(defn element? "Returns if `v` is a dom element." [v]
(dom-base/element? v))
(defn dom-attributes? "Returns if v is a map, and not an item." [v]
(and (map? v)
(not (satisfies? base/E v))))
(defn- analyze-dom-args [args]
(if (empty? args)
(cons {} args)
(let [x (first args)]
(if (dom-attributes? x)
args
(cons {} args)))))
(defn- event? [k]
(str/starts-with? (name k) "on"))
(let [none [{} {}]]
(defn- split-events [attrs]
(cond
(not (some event? (keys attrs))) [attrs {}]
:else
(let [[attrs events]
(reduce-kv (fn [[attrs events] k v]
(if (event? k)
[(dissoc! attrs k) (assoc! events k v)]
[attrs events]))
[(transient attrs) (transient {})]
attrs)]
[(persistent! attrs) (persistent! events)]))))
(defn- dom-element* [type attrs events & children]
{:pre [(string? type)
(map? attrs)
(map? events)
(every? #(or (ifn? %) (nil? %)) (vals events))]}
(base/assert-item-list type children)
(let [[attrs ref] (if (contains? attrs :ref)
[(dissoc attrs :ref) (:ref attrs)]
[attrs nil])]
(dom-base/make-element type attrs events ref children)))
(defn ^:no-doc dom-element** [type & args]
{:pre [(string? type)]}
(let [[attrs_ & children] (analyze-dom-args args)
[attrs events] (split-events attrs_)]
(apply dom-element* type attrs events children)))
(defn ^:no-doc dom-element [type & args]
(apply dom-element** type args))
(defn- dom-function [type]
{:pre [(string? type)]}
Note : DOM uses upper - case for the type ( .nodeName ) , but React
(fn [& args]
(apply dom-element type args)))
(let [k (fn [f attrs events children bind]
(apply f
(reduce-kv (fn [attrs ev h]
(assoc attrs ev
(if (some? h)
(bind h)
nil)))
attrs
events)
children))]
(defn ^:no-doc fn-dom-wrapper [f]
(fn [& args]
(let [[attrs & children] (analyze-dom-args args)
[attrs-ne events] (split-events attrs)]
(if (or (empty? events)
local - state+handle - action into defn - item , then not )
(every? core/bound-handler? (clj/map second events)))
(apply f attrs children)
(core/with-bind (f/partial k f attrs-ne events children)))))))
(defmacro ^{:arglists '([name [attrs & children] & body])} defn-dom
"Defines a function that works like the dom element functions in this
namespace (e.g. [[div]]), in that the first argument is an optional
attributes map, followed by arbitrarily many child items.
If the defined function is called without an attribute map, then
`{}` will be passed implicitly as the first argument.
Additionally, all attributes starting with `:on` must be event
handlers and are automatically bound to the state of the returned
item, via [[reacl-c.core/with-bind]]. That means you can assign
these event handlers to any item in the body, or
use [[reacl-c.core/call]], irrespective of the state that those
items get.
Tip: Pass the attributes to the toplevel dom element that is
returned, and use [[merge-attributes]] to add some default
attributes in your function body."
[name params & body]
(let [[name static? state-schema? docstring? params & body] (apply core/parse-defn-item-args name params body)]
`(def ~(vary-meta name #(merge {:arglists `'(~params ~(vec (rest params)))
:doc docstring?} %))
(fn-dom-wrapper (core/fn-item* ~name ~static? ~state-schema? ~params ~@body)))))
(defn- join-classes
"Joins multiple class strings (or nils) into one."
[& cs]
(let [cs (remove empty? cs)]
(cond
(empty? cs) ""
(empty? (rest cs)) (first cs)
:else
(str/join " " cs))))
(defn- unify-class-attr [m]
(if (contains? m :className)
(-> m
(dissoc :className)
(assoc :class (get m :className)))
m))
(letfn [(merge-a2! [res a2]
(reduce-kv (fn [res k v]
(case k
(:class :className)
(-> res
(assoc! :class (apply join-classes [(get res :class) v])))
:style
(assoc! res :style (merge (get res :style) v))
(assoc! res k v)))
res
a2))]
(defn merge-attributes
"Merge two or more attribute maps into one. This handles merging
multiple `:style` maps into one, and concatenates `:class` and
`:className` strings."
[& attrs]
(assert (every? #(or (nil? %) (dom-attributes? %)) attrs) (vec (remove #(or (nil? %) (dom-attributes? %)) attrs)))
(let [attrs (remove nil? attrs)]
(cond
(empty? attrs)
{}
(empty? (rest attrs))
(first attrs)
:else
(persistent! (reduce merge-a2!
(transient (unify-class-attr (first attrs)))
(rest attrs)))
))))
(defmacro def-dom
"Defines an alias for a dom function, for example:
```
(def-dom page div)
```
The var `page` will be bound to function that is essentially
identical to `div`, but additionally, the name of the var is attached
to the returned items, which can be helpful in testing and debugging
utilities (see [[reacl-c.core/named]]).
Also, some default attributes can be specified, for example:
```
(def-dom page div
{:style {:margin \"10px\")})
```
These attributes will be merged with attributes passed by the caller
of `page` using [[merge-attributes]].
"
[name base & [attrs]]
`(let [base# ~base
attrs# ~attrs]
(core/defn-item ~(vary-meta name #(merge {:arglists '([attrs & children] [& children])} %))
[& args#]
(let [[attrs2# & children#] (analyze-dom-args args#)]
(apply base# (merge-attributes attrs# attrs2#) children#)))))
(defn ^{:arglists '([type attrs & children]
[type & children])}
h
"Returns a DOM item of the specified `type`, like \"div\" for
example. Arguments are the same as the specific DOM functions,
like [[div]]."
[type & args]
(apply dom-element type args))
(m/defdom a)
(m/defdom abbr)
(m/defdom address)
(m/defdom area)
(m/defdom article)
(m/defdom aside)
(m/defdom audio)
(m/defdom b)
(m/defdom base)
(m/defdom bdi)
(m/defdom bdo)
(m/defdom big)
(m/defdom blockquote)
(m/defdom body)
(m/defdom br)
(m/defdom button)
(m/defdom canvas)
(m/defdom caption)
(m/defdom cite)
(m/defdom code)
(m/defdom col)
(m/defdom colgroup)
(m/defdom data)
(m/defdom datalist)
(m/defdom dd)
(m/defdom del)
(m/defdom details)
(m/defdom dfn)
(m/defdom div)
(m/defdom dl)
(m/defdom dt)
(m/defdom em)
(m/defdom embed)
(m/defdom fieldset)
(m/defdom figcaption)
(m/defdom figure)
(m/defdom footer)
(m/defdom form)
(m/defdom h1)
(m/defdom h2)
(m/defdom h3)
(m/defdom h4)
(m/defdom h5)
(m/defdom h6)
(m/defdom head)
(m/defdom header)
(m/defdom hr)
(m/defdom html)
(m/defdom i)
(m/defdom iframe)
(m/defdom img)
(m/defdom input)
(m/defdom ins)
(m/defdom kbd)
(m/defdom keygen)
(m/defdom label)
(m/defdom legend)
(m/defdom li)
(m/defdom link)
(m/defdom main)
(m/defdom map)
(m/defdom mark)
(m/defdom menu)
(m/defdom menuitem)
(m/defdom meta)
(m/defdom meter)
(m/defdom nav)
(m/defdom noscript)
(m/defdom object)
(m/defdom ol)
(m/defdom optgroup)
(m/defdom option)
(m/defdom output)
(m/defdom p)
(m/defdom param)
(m/defdom pre)
(m/defdom progress)
(m/defdom q)
(m/defdom rp)
(m/defdom rt)
(m/defdom ruby)
(m/defdom s)
(m/defdom samp)
(m/defdom script)
(m/defdom section)
(m/defdom select)
(m/defdom small)
(m/defdom source)
(m/defdom span)
(m/defdom strong)
(m/defdom style)
(m/defdom sub)
(m/defdom summary)
(m/defdom sup)
(m/defdom table)
(m/defdom tbody)
(m/defdom td)
(m/defdom textarea)
(m/defdom tfoot)
(m/defdom th)
(m/defdom thead)
(m/defdom time)
(m/defdom title)
(m/defdom tr)
(m/defdom track)
(m/defdom u)
(m/defdom ul)
(m/defdom var)
(m/defdom video)
(m/defdom wbr)
(m/defdom svg)
(m/defdom polygon)
(m/defdom line)
(m/defdom rect)
(m/defdom circle)
(m/defdom ellipse)
(m/defdom polyline)
(m/defdom text)
(m/defdom path)
(m/defdom defs)
(m/defdom clipPath)
(m/defdom g)
(m/defdom linearGradient)
(m/defdom radialGradient)
(m/defdom stop)
(m/defdom image)
(m/defdom animate)
(m/defdom animateColor)
(m/defdom animateMotion)
(m/defdom animateTransform)
(m/defdom set)
(m/defdom cursor)
(m/defdom desc)
(m/defdom feBlend)
(m/defdom feColorMatrix)
(m/defdom feComponentTransfer)
(m/defdom feComposite)
(m/defdom feConvolveMatrix)
(m/defdom feDiffuseLighting)
(m/defdom feDisplacementMap)
(m/defdom feDistantLight)
(m/defdom feFlood)
(m/defdom feFuncA)
(m/defdom feFuncB)
(m/defdom feFuncG)
(m/defdom feFuncR)
(m/defdom feGaussianBlur)
(m/defdom feImage)
(m/defdom feMerge)
(m/defdom feMergeNode)
(m/defdom feMorphology)
(m/defdom feOffset)
(m/defdom fePointLight)
(m/defdom feSpecularLighting)
(m/defdom feSpotLight)
(m/defdom feTile)
(m/defdom feTurbulence)
(m/defdom font)
(m/defdom marker)
(m/defdom mask)
(m/defdom metadata)
(m/defdom mpath)
(m/defdom pattern)
(m/defdom switch)
(m/defdom symbol)
(m/defdom textPath)
(m/defdom tspan)
(m/defdom use)
(m/defdom view)
|
870f97568bff25193d1888ca41b436c1056a8067d80675cec4a49870108a105e | well-typed-lightbulbs/ocaml-esp32 | backtrace_or_exception.ml | (* TEST
flags = "-g"
ocamlrunparam += ",b=1"
* bytecode
reference="${test_source_directory}/backtrace_or_exception.byte.reference"
* native
reference = "${test_source_directory}/backtrace_or_exception.opt.reference"
compare_programs = "false"
*)
exception Exn
let return_exn ?(raise_it_instead=false) () =
if raise_it_instead then
raise Exn
else
Exn
[@@inline never]
let without_reraise () =
match return_exn () with
| Exn as exn
| exception (Exn as exn) ->
raise exn
| _ -> assert false
let with_reraise () =
match return_exn ~raise_it_instead:true () with
| Exn as exn
| exception (Exn as exn) ->
raise exn
| _ -> assert false
let trickier () =
try raise Not_found
with e ->
match return_exn () with
| Exn as exn
| exception (Exn as exn) ->
raise exn
| _ -> assert false
let run f =
try f ()
with exn ->
Printf.printf "exception %s\n" (Printexc.to_string exn);
Printexc.print_backtrace stdout;
Printf.printf "---------------------------\n%!"
let _ =
Printexc.record_backtrace true;
run without_reraise;
run with_reraise;
run trickier
| null | https://raw.githubusercontent.com/well-typed-lightbulbs/ocaml-esp32/c24fcbfbee0e3aa6bb71c9b467c60c6bac326cc7/testsuite/tests/backtrace/backtrace_or_exception.ml | ocaml | TEST
flags = "-g"
ocamlrunparam += ",b=1"
* bytecode
reference="${test_source_directory}/backtrace_or_exception.byte.reference"
* native
reference = "${test_source_directory}/backtrace_or_exception.opt.reference"
compare_programs = "false"
|
exception Exn
let return_exn ?(raise_it_instead=false) () =
if raise_it_instead then
raise Exn
else
Exn
[@@inline never]
let without_reraise () =
match return_exn () with
| Exn as exn
| exception (Exn as exn) ->
raise exn
| _ -> assert false
let with_reraise () =
match return_exn ~raise_it_instead:true () with
| Exn as exn
| exception (Exn as exn) ->
raise exn
| _ -> assert false
let trickier () =
try raise Not_found
with e ->
match return_exn () with
| Exn as exn
| exception (Exn as exn) ->
raise exn
| _ -> assert false
let run f =
try f ()
with exn ->
Printf.printf "exception %s\n" (Printexc.to_string exn);
Printexc.print_backtrace stdout;
Printf.printf "---------------------------\n%!"
let _ =
Printexc.record_backtrace true;
run without_reraise;
run with_reraise;
run trickier
|
10220a15a6a1dcb8f6923bbba1f6878abd4b250251d2bc8964c2673122339781 | tek/proteome | Project.hs | module Proteome.Project where
import Lens.Micro.Extras (preview)
import Path (Abs, Dir, Path, dirname, parent)
import Proteome.Data.Env (Env)
import qualified Proteome.Data.Env as Env (currentProjectIndex, mainProject, projects)
import Proteome.Data.Project (Project)
import Proteome.Data.ProjectName (ProjectName (ProjectName))
import Proteome.Data.ProjectRoot (ProjectRoot (ProjectRoot))
import Proteome.Data.ProjectType (ProjectType (ProjectType))
import Proteome.Path (dropSlash)
allProjects ::
Member (AtomicState Env) r =>
Sem r [Project]
allProjects = do
main <- atomicGets Env.mainProject
extra <- atomicGets Env.projects
pure $ main : extra
currentProject ::
Member (AtomicState Env) r =>
Sem r (Maybe Project)
currentProject = do
index <- atomicGets Env.currentProjectIndex
preview (ix index) <$> allProjects
pathData :: Path Abs Dir -> (ProjectRoot, ProjectName, ProjectType)
pathData root =
(
ProjectRoot root,
ProjectName . dropSlash . dirname $ root,
ProjectType . dropSlash . dirname . parent $ root
)
| null | https://raw.githubusercontent.com/tek/proteome/5090ec3b8134a720bcc98980b57bce039fb71c02/packages/proteome/lib/Proteome/Project.hs | haskell | module Proteome.Project where
import Lens.Micro.Extras (preview)
import Path (Abs, Dir, Path, dirname, parent)
import Proteome.Data.Env (Env)
import qualified Proteome.Data.Env as Env (currentProjectIndex, mainProject, projects)
import Proteome.Data.Project (Project)
import Proteome.Data.ProjectName (ProjectName (ProjectName))
import Proteome.Data.ProjectRoot (ProjectRoot (ProjectRoot))
import Proteome.Data.ProjectType (ProjectType (ProjectType))
import Proteome.Path (dropSlash)
allProjects ::
Member (AtomicState Env) r =>
Sem r [Project]
allProjects = do
main <- atomicGets Env.mainProject
extra <- atomicGets Env.projects
pure $ main : extra
currentProject ::
Member (AtomicState Env) r =>
Sem r (Maybe Project)
currentProject = do
index <- atomicGets Env.currentProjectIndex
preview (ix index) <$> allProjects
pathData :: Path Abs Dir -> (ProjectRoot, ProjectName, ProjectType)
pathData root =
(
ProjectRoot root,
ProjectName . dropSlash . dirname $ root,
ProjectType . dropSlash . dirname . parent $ root
)
|
|
8900834b314cedb82edab9a0e2e9f5c83883bcee84a0ccf1adbea92fdc6fa871 | SanderSpies/ocaml-gist | saved_parts.ml | let attribute = Location.mknoloc "merlin.saved-parts"
let table = Hashtbl.create 7
let gensym =
let counter = ref 0 in
fun () -> incr counter; !counter
let finalize = function
| Asttypes.Const_int id ->
Hashtbl.remove table id;
| _ -> assert false
let store parts =
let id = gensym () in
let key = Asttypes.Const_int id in
Gc.finalise finalize key;
Hashtbl.add table id parts;
key
let find = function
| Asttypes.Const_int id ->
begin
try Hashtbl.find table id
with Not_found -> []
end
| _ -> assert false
| null | https://raw.githubusercontent.com/SanderSpies/ocaml-gist/7dc229aebdf51310e8c7dae12df0cb55b5de5a32/ocaml_webworker/merlin_lite/src/ocaml/typer_4.02.3/saved_parts.ml | ocaml | let attribute = Location.mknoloc "merlin.saved-parts"
let table = Hashtbl.create 7
let gensym =
let counter = ref 0 in
fun () -> incr counter; !counter
let finalize = function
| Asttypes.Const_int id ->
Hashtbl.remove table id;
| _ -> assert false
let store parts =
let id = gensym () in
let key = Asttypes.Const_int id in
Gc.finalise finalize key;
Hashtbl.add table id parts;
key
let find = function
| Asttypes.Const_int id ->
begin
try Hashtbl.find table id
with Not_found -> []
end
| _ -> assert false
|
|
f2fe7e332d2290e3b4bf4a116b3a2fcbb8a052cabd4e6f3f1a4f83060a6fedae | potatosalad/erlang-jose | jose_jws_alg_hmac.erl | -*- mode : erlang ; tab - width : 4 ; indent - tabs - mode : 1 ; st - rulers : [ 70 ] -*-
%% vim: ts=4 sw=4 ft=erlang noet
%%%-------------------------------------------------------------------
@author < >
2014 - 2022 ,
%%% @doc
%%%
%%% @end
Created : 23 Jul 2015 by < >
%%%-------------------------------------------------------------------
-module(jose_jws_alg_hmac).
-behaviour(jose_jws).
-behaviour(jose_jws_alg).
-include("jose_jwk.hrl").
%% jose_jws callbacks
-export([from_map/1]).
-export([to_map/2]).
%% jose_jws_alg callbacks
-export([generate_key/2]).
-export([sign/3]).
-export([verify/4]).
%% API
%% Types
-type alg() :: 'HS256' | 'HS384' | 'HS512'.
-export_type([alg/0]).
%%====================================================================
%% jose_jws callbacks
%%====================================================================
from_map(F = #{ <<"alg">> := <<"HS256">> }) ->
{'HS256', maps:remove(<<"alg">>, F)};
from_map(F = #{ <<"alg">> := <<"HS384">> }) ->
{'HS384', maps:remove(<<"alg">>, F)};
from_map(F = #{ <<"alg">> := <<"HS512">> }) ->
{'HS512', maps:remove(<<"alg">>, F)}.
to_map('HS256', F) ->
F#{ <<"alg">> => <<"HS256">> };
to_map('HS384', F) ->
F#{ <<"alg">> => <<"HS384">> };
to_map('HS512', F) ->
F#{ <<"alg">> => <<"HS512">> }.
%%====================================================================
%% jose_jws_alg callbacks
%%====================================================================
generate_key('HS256', _Fields) ->
jose_jws_alg:generate_key({oct, 32}, <<"HS256">>);
generate_key('HS384', _Fields) ->
jose_jws_alg:generate_key({oct, 48}, <<"HS384">>);
generate_key('HS512', _Fields) ->
jose_jws_alg:generate_key({oct, 64}, <<"HS512">>).
sign(#jose_jwk{kty={KTYModule, KTY}}, Message, ALG) ->
KTYModule:sign(Message, ALG, KTY).
verify(#jose_jwk{kty={KTYModule, KTY}}, Message, Signature, ALG) ->
KTYModule:verify(Message, ALG, Signature, KTY).
%%====================================================================
%% API functions
%%====================================================================
%%%-------------------------------------------------------------------
Internal functions
%%%-------------------------------------------------------------------
| null | https://raw.githubusercontent.com/potatosalad/erlang-jose/dbc4074066080692246afe613345ef6becc2a3fe/src/jws/jose_jws_alg_hmac.erl | erlang | vim: ts=4 sw=4 ft=erlang noet
-------------------------------------------------------------------
@doc
@end
-------------------------------------------------------------------
jose_jws callbacks
jose_jws_alg callbacks
API
Types
====================================================================
jose_jws callbacks
====================================================================
====================================================================
jose_jws_alg callbacks
====================================================================
====================================================================
API functions
====================================================================
-------------------------------------------------------------------
------------------------------------------------------------------- | -*- mode : erlang ; tab - width : 4 ; indent - tabs - mode : 1 ; st - rulers : [ 70 ] -*-
@author < >
2014 - 2022 ,
Created : 23 Jul 2015 by < >
-module(jose_jws_alg_hmac).
-behaviour(jose_jws).
-behaviour(jose_jws_alg).
-include("jose_jwk.hrl").
-export([from_map/1]).
-export([to_map/2]).
-export([generate_key/2]).
-export([sign/3]).
-export([verify/4]).
-type alg() :: 'HS256' | 'HS384' | 'HS512'.
-export_type([alg/0]).
from_map(F = #{ <<"alg">> := <<"HS256">> }) ->
{'HS256', maps:remove(<<"alg">>, F)};
from_map(F = #{ <<"alg">> := <<"HS384">> }) ->
{'HS384', maps:remove(<<"alg">>, F)};
from_map(F = #{ <<"alg">> := <<"HS512">> }) ->
{'HS512', maps:remove(<<"alg">>, F)}.
to_map('HS256', F) ->
F#{ <<"alg">> => <<"HS256">> };
to_map('HS384', F) ->
F#{ <<"alg">> => <<"HS384">> };
to_map('HS512', F) ->
F#{ <<"alg">> => <<"HS512">> }.
generate_key('HS256', _Fields) ->
jose_jws_alg:generate_key({oct, 32}, <<"HS256">>);
generate_key('HS384', _Fields) ->
jose_jws_alg:generate_key({oct, 48}, <<"HS384">>);
generate_key('HS512', _Fields) ->
jose_jws_alg:generate_key({oct, 64}, <<"HS512">>).
sign(#jose_jwk{kty={KTYModule, KTY}}, Message, ALG) ->
KTYModule:sign(Message, ALG, KTY).
verify(#jose_jwk{kty={KTYModule, KTY}}, Message, Signature, ALG) ->
KTYModule:verify(Message, ALG, Signature, KTY).
Internal functions
|
9628585de8aef0c3065996407b73a7d1945852664574d1a9344cca10d1ccbe61 | BillHallahan/G2 | TyApps.hs | module TyApps where
data C a = C a deriving Eq
class Container t where
contains :: t a -> a
instance Container C where
contains (C x) = x
@ getLt10Int : : Container t = > t { x : Int | x < = 10 } - > { y : Int | y < 10 } @
getLt10Int :: Container t => t Int -> Int
getLt10Int = contains
@ getLt10 : : ( a , Container t ) = > t { x : a | x < = 10 } - > { y : a | y < 10 } @
getLt10 :: (Num a, Container t) => t a -> a
getLt10 = contains
@ goodGet : : ( a , Container t ) = > t { x : a | x < 5 } - > { y : a | y < 10 } @
goodGet :: (Num a, Container t) => t a -> a
goodGet = contains
| null | https://raw.githubusercontent.com/BillHallahan/G2/21c648d38c380041a9036d0e375ec1d54120f6b4/tests_lh/Liquid/TyApps.hs | haskell | module TyApps where
data C a = C a deriving Eq
class Container t where
contains :: t a -> a
instance Container C where
contains (C x) = x
@ getLt10Int : : Container t = > t { x : Int | x < = 10 } - > { y : Int | y < 10 } @
getLt10Int :: Container t => t Int -> Int
getLt10Int = contains
@ getLt10 : : ( a , Container t ) = > t { x : a | x < = 10 } - > { y : a | y < 10 } @
getLt10 :: (Num a, Container t) => t a -> a
getLt10 = contains
@ goodGet : : ( a , Container t ) = > t { x : a | x < 5 } - > { y : a | y < 10 } @
goodGet :: (Num a, Container t) => t a -> a
goodGet = contains
|
|
eb7a47d67a8b30daf7e6512f7f62e5e29b100cfb2a52e5d4cb8f0fcdf6ac6ee1 | andrewzhurov/brawl-haus | gravity.cljs | (ns brawl-haus.fit.gravity
(:require [brawl-haus.fit.time :as time]
[brawl-haus.utils :refer [l]]))
m / s^2
(def system
(fn [{:keys [entities time-passed]}]
{:entities
(->> entities
(filter (comp :phys val))
(map (fn [[id {{[a-x-old a-y-old] :a :keys [m]} :phys
{:keys [grounded?]} :collision :as subj}]]
(let [rt (time/relative-time subj time-passed)]
#_(if grounded?
{id subj})
{id (update-in subj [:phys :v] (fn [[old-x old-y]] [(+ old-x
(* a-x-old (/ rt 1000)))
(+ old-y
(* a-y-old (/ rt 1000))
(* (* (/ gravity-strength 10) m) (/ rt 1000)))]))})))
(apply merge))}))
| null | https://raw.githubusercontent.com/andrewzhurov/brawl-haus/7f560c3dcee7b242fda545d87c102471fdb21888/src/cljs/brawl_haus/fit/gravity.cljs | clojure | (ns brawl-haus.fit.gravity
(:require [brawl-haus.fit.time :as time]
[brawl-haus.utils :refer [l]]))
m / s^2
(def system
(fn [{:keys [entities time-passed]}]
{:entities
(->> entities
(filter (comp :phys val))
(map (fn [[id {{[a-x-old a-y-old] :a :keys [m]} :phys
{:keys [grounded?]} :collision :as subj}]]
(let [rt (time/relative-time subj time-passed)]
#_(if grounded?
{id subj})
{id (update-in subj [:phys :v] (fn [[old-x old-y]] [(+ old-x
(* a-x-old (/ rt 1000)))
(+ old-y
(* a-y-old (/ rt 1000))
(* (* (/ gravity-strength 10) m) (/ rt 1000)))]))})))
(apply merge))}))
|
|
64daa353cc77ba1af9fb65038c8e0fec3057d8f42ab8a88f250d9ebd89dba1ac | mirage/qubes-mirage-firewall | uplink.ml | Copyright ( C ) 2015 , < >
See the README file for details .
See the README file for details. *)
open Lwt.Infix
open Fw_utils
module Eth = Ethernet.Make(Netif)
let src = Logs.Src.create "uplink" ~doc:"Network connection to NetVM"
module Log = (val Logs.src_log src : Logs.LOG)
module Make (R:Mirage_random.S) (Clock : Mirage_clock.MCLOCK) (Time : Mirage_time.S) = struct
module Arp = Arp.Make(Eth)(Time)
module I = Static_ipv4.Make(R)(Clock)(Eth)(Arp)
module U = Udp.Make(I)(R)
type t = {
net : Netif.t;
eth : Eth.t;
arp : Arp.t;
interface : interface;
mutable fragments : Fragments.Cache.t;
ip : I.t;
udp: U.t;
}
class netvm_iface eth mac ~my_ip ~other_ip : interface = object
method my_mac = Eth.mac eth
method my_ip = my_ip
method other_ip = other_ip
method writev ethertype fillfn =
mac >>= fun dst ->
Eth.write eth dst ethertype fillfn >|= or_raise "Write to uplink" Eth.pp_error
end
let send_dns_client_query t ~src_port ~dst ~dst_port buf =
U.write ~src_port ~dst ~dst_port t.udp buf >|= function
| Error s -> Log.err (fun f -> f "error sending udp packet: %a" U.pp_error s); Error (`Msg "failure")
| Ok () -> Ok ()
let listen t get_ts dns_responses router =
let handle_packet ip_header ip_packet =
let open Udp_packet in
Log.debug (fun f -> f "received ipv4 packet from %a on uplink" Ipaddr.V4.pp ip_header.Ipv4_packet.src);
match ip_packet with
| `UDP (header, packet) when My_nat.dns_port router.Router.nat header.dst_port ->
Log.debug (fun f -> f "found a DNS packet whose dst_port (%d) was in the list of dns_client ports" header.dst_port);
Lwt_mvar.put dns_responses (header, packet)
| _ ->
Firewall.ipv4_from_netvm router (`IPv4 (ip_header, ip_packet))
in
Netif.listen t.net ~header_size:Ethernet.Packet.sizeof_ethernet (fun frame ->
Handle one Ethernet frame from NetVM
Eth.input t.eth
~arpv4:(Arp.input t.arp)
~ipv4:(fun ip ->
let cache, r =
Nat_packet.of_ipv4_packet t.fragments ~now:(get_ts ()) ip
in
t.fragments <- cache;
match r with
| Error e ->
Log.warn (fun f -> f "Ignored unknown IPv4 message from uplink: %a" Nat_packet.pp_error e);
Lwt.return ()
| Ok None -> Lwt.return_unit
| Ok (Some (`IPv4 (header, packet))) -> handle_packet header packet
)
~ipv6:(fun _ip -> Lwt.return_unit)
frame
) >|= or_raise "Uplink listen loop" Netif.pp_error
let interface t = t.interface
let connect config =
let my_ip = config.Dao.uplink_our_ip in
let gateway = config.Dao.uplink_netvm_ip in
Netif.connect "0" >>= fun net ->
Eth.connect net >>= fun eth ->
Arp.connect eth >>= fun arp ->
Arp.add_ip arp my_ip >>= fun () ->
let cidr = Ipaddr.V4.Prefix.make 0 my_ip in
I.connect ~cidr ~gateway eth arp >>= fun ip ->
U.connect ip >>= fun udp ->
let netvm_mac =
Arp.query arp gateway
>|= or_raise "Getting MAC of our NetVM" Arp.pp_error in
let interface = new netvm_iface eth netvm_mac
~my_ip
~other_ip:config.Dao.uplink_netvm_ip in
let fragments = Fragments.Cache.empty (256 * 1024) in
Lwt.return { net; eth; arp; interface ; fragments ; ip ; udp }
end
| null | https://raw.githubusercontent.com/mirage/qubes-mirage-firewall/065c8bb69a0bc1d77c07a99ab276782fc4264bae/uplink.ml | ocaml | Copyright ( C ) 2015 , < >
See the README file for details .
See the README file for details. *)
open Lwt.Infix
open Fw_utils
module Eth = Ethernet.Make(Netif)
let src = Logs.Src.create "uplink" ~doc:"Network connection to NetVM"
module Log = (val Logs.src_log src : Logs.LOG)
module Make (R:Mirage_random.S) (Clock : Mirage_clock.MCLOCK) (Time : Mirage_time.S) = struct
module Arp = Arp.Make(Eth)(Time)
module I = Static_ipv4.Make(R)(Clock)(Eth)(Arp)
module U = Udp.Make(I)(R)
type t = {
net : Netif.t;
eth : Eth.t;
arp : Arp.t;
interface : interface;
mutable fragments : Fragments.Cache.t;
ip : I.t;
udp: U.t;
}
class netvm_iface eth mac ~my_ip ~other_ip : interface = object
method my_mac = Eth.mac eth
method my_ip = my_ip
method other_ip = other_ip
method writev ethertype fillfn =
mac >>= fun dst ->
Eth.write eth dst ethertype fillfn >|= or_raise "Write to uplink" Eth.pp_error
end
let send_dns_client_query t ~src_port ~dst ~dst_port buf =
U.write ~src_port ~dst ~dst_port t.udp buf >|= function
| Error s -> Log.err (fun f -> f "error sending udp packet: %a" U.pp_error s); Error (`Msg "failure")
| Ok () -> Ok ()
let listen t get_ts dns_responses router =
let handle_packet ip_header ip_packet =
let open Udp_packet in
Log.debug (fun f -> f "received ipv4 packet from %a on uplink" Ipaddr.V4.pp ip_header.Ipv4_packet.src);
match ip_packet with
| `UDP (header, packet) when My_nat.dns_port router.Router.nat header.dst_port ->
Log.debug (fun f -> f "found a DNS packet whose dst_port (%d) was in the list of dns_client ports" header.dst_port);
Lwt_mvar.put dns_responses (header, packet)
| _ ->
Firewall.ipv4_from_netvm router (`IPv4 (ip_header, ip_packet))
in
Netif.listen t.net ~header_size:Ethernet.Packet.sizeof_ethernet (fun frame ->
Handle one Ethernet frame from NetVM
Eth.input t.eth
~arpv4:(Arp.input t.arp)
~ipv4:(fun ip ->
let cache, r =
Nat_packet.of_ipv4_packet t.fragments ~now:(get_ts ()) ip
in
t.fragments <- cache;
match r with
| Error e ->
Log.warn (fun f -> f "Ignored unknown IPv4 message from uplink: %a" Nat_packet.pp_error e);
Lwt.return ()
| Ok None -> Lwt.return_unit
| Ok (Some (`IPv4 (header, packet))) -> handle_packet header packet
)
~ipv6:(fun _ip -> Lwt.return_unit)
frame
) >|= or_raise "Uplink listen loop" Netif.pp_error
let interface t = t.interface
let connect config =
let my_ip = config.Dao.uplink_our_ip in
let gateway = config.Dao.uplink_netvm_ip in
Netif.connect "0" >>= fun net ->
Eth.connect net >>= fun eth ->
Arp.connect eth >>= fun arp ->
Arp.add_ip arp my_ip >>= fun () ->
let cidr = Ipaddr.V4.Prefix.make 0 my_ip in
I.connect ~cidr ~gateway eth arp >>= fun ip ->
U.connect ip >>= fun udp ->
let netvm_mac =
Arp.query arp gateway
>|= or_raise "Getting MAC of our NetVM" Arp.pp_error in
let interface = new netvm_iface eth netvm_mac
~my_ip
~other_ip:config.Dao.uplink_netvm_ip in
let fragments = Fragments.Cache.empty (256 * 1024) in
Lwt.return { net; eth; arp; interface ; fragments ; ip ; udp }
end
|
|
5ceefd57121b903bba7b63cde99b318d357a6cf987e7b88233fa2bbbdcf76983 | life0fun/clojure-idiom | class.clj | ;; A class object system for clojure
( load - file " " )
(: use mixed in all fns , symbols , and mappings from other _ NameSpace _ . )
(: import [ pkg1 ] [ pkg2 fn1 ... ] ) import Java packages
;; (:require [namespace_1 :ref local_namespace_])
;; when defining ns, include only the references that are used.
;; :exclude, :only, :as, :refer-clojure, :import, :use, :load, and :require.
;; ;use naked could corrupt the namespace. (:use :only)
: import working with java deftype defrecord
(ns my-class
(:refer-clojure :exclude [defstruct])
(:use [clojure.test :only (are is)])
(:require (clojure [zip :as z]))
(:import (java.util.Collection)))
; method spec destruct spec expression into name and body
(defn method-spec [sexpr]
(let [name (keyword (second sexpr))
body (next sexpr)]
[name (conj body 'fn)]))
(method-spec '(method age [] (* 2 10)))
; a list of method specs
(defn method-specs [sexprs]
(->> sexprs
(filter #(= 'method (first %)))
(mapcat method-spec)
(apply hash-map)))
; spec quoted as unevaled form.
(method-specs '((method age []
(* 2 10))
(method greet [visitor]
(str "Hello there, " visitor))))
; a new object is just a closure with instance state
(declare this) ; give a dynamic var so that fn can be executed under the bindings of this.
(defn new-object [klass]
(let [state (ref {})]
(fn thiz [command & args] ; give closure an explicit name(thiz), so we can bind it to this.
(condp = command
:class klass
:class-name (klass :name)
:set! (let [[k v] args]
(dosync (alter state assoc k v))
nil)
:get (let [[key] args]
(key @state))
(let [method (klass :method command)]
(if-not method
(throw (RuntimeException.
(str "Unable to respond to " command))))
(binding [this thiz]
(apply method args)))))))
; a new class is a closure on methods
(defn find-method [method-name instance-methods]
(instance-methods method-name))
(defn new-class [class-name methods]
(fn klass [command & args]
(condp = command
:name (name class-name)
:new (new-object klass)
:method (let [[method-name] args]
(find-method method-name methods)))))
use def inside macro to force evaluation of fns fn map { : ( fn [ ] ( * 2 4 ) ) }
;
(defmacro defclass [class-name & specs]
(let [fns (or (method-specs specs) {})]
`(def ~class-name (new-class '~class-name ~fns))))
(defclass Person
(method age []
(* 2 10))
(method greet [visitor]
(str "Hello there, " visitor))
(method about [diff] ; invoke :age method in the same closure by binding to this.
(str "I was born about " (+ diff (this :age)) " years ago")))
;
after parsing method specs , we got fns
{ : age ( fn age [ ] ( * 2 10 ) ) , : greet ( fn greet [ visitor ] ( str " Hello there , " visitor ) ) }
to force evaluate fns , put it through def
( eval fns ) - > produce { : age # < user$age user$age@681e731c >
( def efns { : age ( fn age [ ] ( * 2 8) ) } ) ) - > { : age # < user$age user$age@6165e7a5 > }
; (eval `(def efns ~fns))
;
; (apply (efns :age) [])
;
(Person :method :age)
(def shelly (Person :new))
(shelly :age)
(shelly :greet "Nancy")
;;;;;;;;;;;;;;;;;;;
; macro is asking compiler to generate code for you. s-expr passed to defmacro is not evaled.
;
; normal quote ' yields the unevaluated form.
; Syntax-quote `, resolves the symbol, yielding a fully qualified name in the current context.
; for s-expr, syntax-quote establishes a template of the corresponding data structure.
; Syntax-unquote ~, inside syntax quote, resolve form to data structure and eval the data structure.
;
so in order to convert unevaled s - expr in defmacro to data structure , we use syntax - quote to
; establish a template of corresponding data structure. At the same time, we need to unqote
; the s-expr inside syntax-quote to avoid s-expr symbol being resolved as user/s-expr.
;
; inside defmacro, if you donot unquote your s-expr, error with no such var user/s-expr
; once unquote inside syntax-quote, the s-expr resolved to data structure and it is evaled;
; also, unquote must live inside quote, make sense, right. otherwise, unquote is unbound error.
;
; syntax-quote unquote only resolve variables symbols, no effect on keywords and num, string literals. including ' quote.
;
; ~(unquote) to substitude the value. similar to string intrapolate.
if var is a form , ( * 3 4 ) , unquote it will cause it being evaluated .
; ~() unquote entire () to avoid unquote each one. ~(eval ~x)
` ( ' ~x ) : unquote resolve var data structure . the value is then quoted to unevaled form , so prn ( * 2 3 )
;
; ~@var-name (unquote-splicing): remove the list ().
;
(defmacro dbg [fn-name args & body]
`(defn ~fn-name ~args
(println "dbg ...")
~@body))
when passing ' ( * 2 3 ) to macro , macro wo nt evaluate passed in forms .
(defn gen-map [nm spec] {(keyword nm) spec})
(defmacro fnmacro [name] (let [m (gen-map name '(fn [n] (* 2 n)))] `(prn ~m) `~m))
(fnmacro age)
; arg is task name and spec, not in list data struture, pass to macro and return a
task map where key is fn name and is fn closure
(defmacro fnmacro [name spec] (let [m (gen-map name spec)] `(prn ~m) `~m))
(apply ((fnmacro foo (fn [n] (* 2 n))) :foo) [4])
; if fn body is in quoted list data structure, directly eval the data structure.
(def spec '(age [n] (* 2 n)))
(defn fnw [sexpr]
(eval (conj spec 'fn)))
(apply (fnw spec) [4])
if fn body is code , not list data structure , pass to defmacro to wrap it .
(defmacro fn-wrapper [fname args & body]
`(defn ~fname ~args
(prn "calling " '~fname '~args '~@body) ; when prn, use substituded val, quote to non-evaluated form.
~@body))
(fn-wrapper foo [n] (* 2 n))
(foo 4)
; dbg macro take a form, and prn it and its evaluated result.
; because (eval "x") eval to itself, we can always add eval without side effect, so
the macro can take both ( quote ( * 2 3 ) ) as well as ( * 2 3 )
(defmacro dbg [sexpr]
(prn sexpr)
sexpr when passed into defmacro , is un - evalued . quote unquote restore and eval the form .
(dbg (* 2 4))
(dbg '(* 2 4))
(defmacro dbg-ev [sexpr]
(prn sexpr)
`(eval ~sexpr)) ; eval a data structur from unquoting the formm
(dbg-ev (* 2 4))
(dbg '(* 2 4))
(let [f (dbg-ev '(fn [n] (* 2 n)))] (f 3))
fn composition with
(defmacro fn-wrapper [name arg alg data-list]
(let [bd (conj data-list alg)]
`(defn ~name ~arg ~bd)))
; create a fn that map algebra to a list of data
(fn-wrapper double [n] * (2 n))
; macro examples
(defmacro declare [ & names]
`(do
~@(map #(list 'def %) names)))
(macroexpand-1 '(declare add multiply subtract divide))
; and is just another macro
(defmacro my-and
([] true)
([x] x)
([x & next]
`(if ~x
(my-and ~@next)
~x)))
(defmacro and
([] true)
([x] x)
([x & next]
`(let [and# ~x]
(if and#
(and ~@next)
and#))))
time ( * 1234 12345 )
(defmacro time [expr]
`(let [start# (System/nanotime)
ret# ~expr] ; unquote expr, trigger evaluation of expr.
(prn
(str "Elapsed time :"
(/ (double (- (System/nanotime) start#)) 1000000.0)
" msecs"))
ret#))
| null | https://raw.githubusercontent.com/life0fun/clojure-idiom/481b297eeabea917a68b492b1fb78b8151408507/class.clj | clojure | A class object system for clojure
(:require [namespace_1 :ref local_namespace_])
when defining ns, include only the references that are used.
:exclude, :only, :as, :refer-clojure, :import, :use, :load, and :require.
;use naked could corrupt the namespace. (:use :only)
method spec destruct spec expression into name and body
a list of method specs
spec quoted as unevaled form.
a new object is just a closure with instance state
give a dynamic var so that fn can be executed under the bindings of this.
give closure an explicit name(thiz), so we can bind it to this.
a new class is a closure on methods
invoke :age method in the same closure by binding to this.
(eval `(def efns ~fns))
(apply (efns :age) [])
macro is asking compiler to generate code for you. s-expr passed to defmacro is not evaled.
normal quote ' yields the unevaluated form.
Syntax-quote `, resolves the symbol, yielding a fully qualified name in the current context.
for s-expr, syntax-quote establishes a template of the corresponding data structure.
Syntax-unquote ~, inside syntax quote, resolve form to data structure and eval the data structure.
establish a template of corresponding data structure. At the same time, we need to unqote
the s-expr inside syntax-quote to avoid s-expr symbol being resolved as user/s-expr.
inside defmacro, if you donot unquote your s-expr, error with no such var user/s-expr
once unquote inside syntax-quote, the s-expr resolved to data structure and it is evaled;
also, unquote must live inside quote, make sense, right. otherwise, unquote is unbound error.
syntax-quote unquote only resolve variables symbols, no effect on keywords and num, string literals. including ' quote.
~(unquote) to substitude the value. similar to string intrapolate.
~() unquote entire () to avoid unquote each one. ~(eval ~x)
~@var-name (unquote-splicing): remove the list ().
arg is task name and spec, not in list data struture, pass to macro and return a
if fn body is in quoted list data structure, directly eval the data structure.
when prn, use substituded val, quote to non-evaluated form.
dbg macro take a form, and prn it and its evaluated result.
because (eval "x") eval to itself, we can always add eval without side effect, so
eval a data structur from unquoting the formm
create a fn that map algebra to a list of data
macro examples
and is just another macro
unquote expr, trigger evaluation of expr. | ( load - file " " )
(: use mixed in all fns , symbols , and mappings from other _ NameSpace _ . )
(: import [ pkg1 ] [ pkg2 fn1 ... ] ) import Java packages
: import working with java deftype defrecord
(ns my-class
(:refer-clojure :exclude [defstruct])
(:use [clojure.test :only (are is)])
(:require (clojure [zip :as z]))
(:import (java.util.Collection)))
(defn method-spec [sexpr]
(let [name (keyword (second sexpr))
body (next sexpr)]
[name (conj body 'fn)]))
(method-spec '(method age [] (* 2 10)))
(defn method-specs [sexprs]
(->> sexprs
(filter #(= 'method (first %)))
(mapcat method-spec)
(apply hash-map)))
(method-specs '((method age []
(* 2 10))
(method greet [visitor]
(str "Hello there, " visitor))))
(defn new-object [klass]
(let [state (ref {})]
(condp = command
:class klass
:class-name (klass :name)
:set! (let [[k v] args]
(dosync (alter state assoc k v))
nil)
:get (let [[key] args]
(key @state))
(let [method (klass :method command)]
(if-not method
(throw (RuntimeException.
(str "Unable to respond to " command))))
(binding [this thiz]
(apply method args)))))))
(defn find-method [method-name instance-methods]
(instance-methods method-name))
(defn new-class [class-name methods]
(fn klass [command & args]
(condp = command
:name (name class-name)
:new (new-object klass)
:method (let [[method-name] args]
(find-method method-name methods)))))
use def inside macro to force evaluation of fns fn map { : ( fn [ ] ( * 2 4 ) ) }
(defmacro defclass [class-name & specs]
(let [fns (or (method-specs specs) {})]
`(def ~class-name (new-class '~class-name ~fns))))
(defclass Person
(method age []
(* 2 10))
(method greet [visitor]
(str "Hello there, " visitor))
(str "I was born about " (+ diff (this :age)) " years ago")))
after parsing method specs , we got fns
{ : age ( fn age [ ] ( * 2 10 ) ) , : greet ( fn greet [ visitor ] ( str " Hello there , " visitor ) ) }
to force evaluate fns , put it through def
( eval fns ) - > produce { : age # < user$age user$age@681e731c >
( def efns { : age ( fn age [ ] ( * 2 8) ) } ) ) - > { : age # < user$age user$age@6165e7a5 > }
(Person :method :age)
(def shelly (Person :new))
(shelly :age)
(shelly :greet "Nancy")
so in order to convert unevaled s - expr in defmacro to data structure , we use syntax - quote to
if var is a form , ( * 3 4 ) , unquote it will cause it being evaluated .
` ( ' ~x ) : unquote resolve var data structure . the value is then quoted to unevaled form , so prn ( * 2 3 )
(defmacro dbg [fn-name args & body]
`(defn ~fn-name ~args
(println "dbg ...")
~@body))
when passing ' ( * 2 3 ) to macro , macro wo nt evaluate passed in forms .
(defn gen-map [nm spec] {(keyword nm) spec})
(defmacro fnmacro [name] (let [m (gen-map name '(fn [n] (* 2 n)))] `(prn ~m) `~m))
(fnmacro age)
task map where key is fn name and is fn closure
(defmacro fnmacro [name spec] (let [m (gen-map name spec)] `(prn ~m) `~m))
(apply ((fnmacro foo (fn [n] (* 2 n))) :foo) [4])
(def spec '(age [n] (* 2 n)))
(defn fnw [sexpr]
(eval (conj spec 'fn)))
(apply (fnw spec) [4])
if fn body is code , not list data structure , pass to defmacro to wrap it .
(defmacro fn-wrapper [fname args & body]
`(defn ~fname ~args
~@body))
(fn-wrapper foo [n] (* 2 n))
(foo 4)
the macro can take both ( quote ( * 2 3 ) ) as well as ( * 2 3 )
(defmacro dbg [sexpr]
(prn sexpr)
sexpr when passed into defmacro , is un - evalued . quote unquote restore and eval the form .
(dbg (* 2 4))
(dbg '(* 2 4))
(defmacro dbg-ev [sexpr]
(prn sexpr)
(dbg-ev (* 2 4))
(dbg '(* 2 4))
(let [f (dbg-ev '(fn [n] (* 2 n)))] (f 3))
fn composition with
(defmacro fn-wrapper [name arg alg data-list]
(let [bd (conj data-list alg)]
`(defn ~name ~arg ~bd)))
(fn-wrapper double [n] * (2 n))
(defmacro declare [ & names]
`(do
~@(map #(list 'def %) names)))
(macroexpand-1 '(declare add multiply subtract divide))
(defmacro my-and
([] true)
([x] x)
([x & next]
`(if ~x
(my-and ~@next)
~x)))
(defmacro and
([] true)
([x] x)
([x & next]
`(let [and# ~x]
(if and#
(and ~@next)
and#))))
;; (time (* 1234 12345))
(defmacro time [expr]
`(let [start# (System/nanotime)
(prn
(str "Elapsed time :"
(/ (double (- (System/nanotime) start#)) 1000000.0)
" msecs"))
ret#))
|
c5ede003327c74c7d50a2c323b35b8276528160c863a5b314f2eedb7473dd9c1 | mattmundell/nightshade | read.lisp | ;;; Tests of lisp:read.
;;;
;;; Tests of sharp macros (#|, #x...) are in ../sharpm.lisp/.
(in-package "LISP")
(import '(deftest:deftest))
(deftest read (t read-0)
"Test `read'."
(with-input-from-string (in "t") (read in)))
(deftest read (3 read-1)
"Test `read' at EOF, with error signalling."
(handler-case
(with-input-from-string (in "") (read in))
(error (c) (if (eq (type-of c) 'end-of-file) 3))))
(deftest read (:eof read-2)
"Test `read' at EOF, with EOF returning."
(with-input-from-string (in "") (read in () :eof)))
(deftest read (:eof read-3)
"Test `read' on an open list, with error signalling."
(handler-case
(with-input-from-string (in "(") (read in))
(error (c) (if (eq (type-of c) 'end-of-file) :eof))))
(deftest read (:eof read-4)
"Test `read' on an open list, with EOF returning."
(with-input-from-string (in "(") (read in () :eof)))
;;;; Line comment (;).
;; Bug fix: the test body was copy-pasted from read-3 and used "(" (open
;; list) as input, contradicting the docstring. A lone ";" (comment at
;; EOF) is what this test is meant to exercise.
(deftest read (:eof read-10)
  "Test `read' on a comment at EOF, with error signalling."
  (handler-case
      (with-input-from-string (in ";") (read in))
    (error (c) (if (eq (type-of c) 'end-of-file) :eof))))
(deftest read (:eof read-11)
"Test `read' on a comment at EOF, with EOF returning."
(with-input-from-string (in ";") (read in () :eof)))
(deftest read (3 read-12)
"Test `read' on a form after a comment, with error signalling."
(handler-case
(with-input-from-string (in (format () ";~%3")) (read in))
(error (c) (if (eq (type-of c) 'end-of-file) :eof))))
(deftest read (3 read-13)
"Test `read' on a form after a comment, with EOF returning."
(with-input-from-string (in (format () ";~%3")) (read in () :eof)))
(deftest read (() read-14)
"Test `read' on a comment at EOF, with EOF returning where EOF is ()."
(with-input-from-string (in ";") (read in () ())))
(deftest read (3 read-15)
"Test `read' on a form after a comment, with EOF returning where EOF is ()."
(with-input-from-string (in (format () ";~%3")) (read in () ())))
;;;; (())
(deftest read ('(()) read-20)
"Test `read' on (()), with error signalling."
(with-input-from-string (in "(())") (read in)))
(deftest read ('(()) read-21)
"Test `read' on (())."
(with-input-from-string (in "(())") (read in () :eof)))
(deftest read ('(()) read-22)
"Test `read' on (())."
(with-input-from-string (in "(())") (read in () ())))
| null | https://raw.githubusercontent.com/mattmundell/nightshade/68e960eff95e007462f2613beabc6cac11e0dfa1/src/tests/code/reader.lisp/read.lisp | lisp | Tests of lisp:read.
Tests of sharp macros (#|, #x...) are in ../sharpm.lisp/.
Line comment (;).
(()) |
(in-package "LISP")
(import '(deftest:deftest))
(deftest read (t read-0)
"Test `read'."
(with-input-from-string (in "t") (read in)))
(deftest read (3 read-1)
"Test `read' at EOF, with error signalling."
(handler-case
(with-input-from-string (in "") (read in))
(error (c) (if (eq (type-of c) 'end-of-file) 3))))
(deftest read (:eof read-2)
"Test `read' at EOF, with EOF returning."
(with-input-from-string (in "") (read in () :eof)))
(deftest read (:eof read-3)
"Test `read' on an open list, with error signalling."
(handler-case
(with-input-from-string (in "(") (read in))
(error (c) (if (eq (type-of c) 'end-of-file) :eof))))
(deftest read (:eof read-4)
"Test `read' on an open list, with EOF returning."
(with-input-from-string (in "(") (read in () :eof)))
(deftest read (:eof read-10)
"Test `read' on a comment at EOF, with error signalling."
(handler-case
(with-input-from-string (in "(") (read in))
(error (c) (if (eq (type-of c) 'end-of-file) :eof))))
(deftest read (:eof read-11)
"Test `read' on a comment at EOF, with EOF returning."
(with-input-from-string (in ";") (read in () :eof)))
(deftest read (3 read-12)
"Test `read' on a form after a comment, with error signalling."
(handler-case
(with-input-from-string (in (format () ";~%3")) (read in))
(error (c) (if (eq (type-of c) 'end-of-file) :eof))))
(deftest read (3 read-13)
"Test `read' on a form after a comment, with EOF returning."
(with-input-from-string (in (format () ";~%3")) (read in () :eof)))
(deftest read (() read-14)
"Test `read' on a comment at EOF, with EOF returning where EOF is ()."
(with-input-from-string (in ";") (read in () ())))
(deftest read (3 read-15)
"Test `read' on a form after a comment, with EOF returning where EOF is ()."
(with-input-from-string (in (format () ";~%3")) (read in () ())))
(deftest read ('(()) read-20)
"Test `read' on (()), with error signalling."
(with-input-from-string (in "(())") (read in)))
(deftest read ('(()) read-21)
"Test `read' on (())."
(with-input-from-string (in "(())") (read in () :eof)))
(deftest read ('(()) read-22)
"Test `read' on (())."
(with-input-from-string (in "(())") (read in () ())))
|
27583e2dbab06824cfcd25e0e606f39c00551e8a4daf5d00fd548238b4da29af | bjorng/wings | auv_mapping.erl | %%
%% auv_mapping.erl --
%%
%% The UV parametrisation algorithms.
%%
Copyright ( c ) 2002 - 2011 , ,
%%
%% See the file "license.terms" for information on usage and redistribution
%% of this file, and for a DISCLAIMER OF ALL WARRANTIES.
%%
%% $Id$
%%
%%%%%% Least Square Conformal Maps %%%%%%%%%%%%
%% Algorithms based on the paper,
( now probably totally ruined by me or )
%% 'Least Square Conformal Maps for Automatic Texture Generation Atlas'
by , , ,
Presented on Siggraph 2002
%%
The Conjugate Gradient Method ( trad )
%% Algorithms based on the paper:
%% An Introduction to
the Conjugate Gradient Method
Without the Agonizing Pain
%% by
, March 7 , 1994
%%
The Column Norm Preconditioning was stumbled upon , just briefly
%% mentioned, in the paper:
%% Incomplete Factorization Preconditioning
%% for Linear Least Squares Problems
%% by
, 1994
All credits about the LSQCM implementation goes to , who
%% implemented the lot.
-module(auv_mapping).
-export([stretch_opt/2, fs_area/2, area2d2/3,area3d/3, calc_area/3]).
-export([map_chart/3, projectFromChartNormal/2, chart_normal/2]).
%% Internal exports.
-export([model_l2/5]).
-export([lsq/2, lsq/3, % Debug entry points
find_pinned/2,
split_edges_1/2,
loop_to_circle/1
]).
-include_lib("wings/src/wings.hrl").
-include("auv.hrl").
-include_lib("wings/e3d/e3d.hrl").
-import(lists, [foldl/3,reverse/1]).
%% map_chart(Type, We, Options) -> [{Vertex,{U,V,0.0}}] | {error,Msg}
%%  Entry point: map the visible faces of We (one chart) to UV space
%%  with the algorithm Type. Rejects charts whose border-loop structure
%%  cannot be parametrised (closed surfaces, one-edge cuts) and, for
%%  lsqcm, charts with fewer than two pinned vertices.
map_chart(Type, We, Options) ->
    Faces = wings_we:visible(We),
    case catch auv_placement:group_edge_loops(Faces, We) of
	[] ->
	    {error,?__(1,"A closed surface cannot be mapped. "
	     "(Either divide it into into two or more charts, "
	     "or cut it along some edges.)")};
	[{_,[_,_]}] ->
	    {error,?__(2,"A cut in a closed surface must consist of at least two edges.")};
	_ when Type == lsqcm, is_list(Options), length(Options) < 2 ->
	    {error,?__(3,"At least 2 vertices (per chart) must be selected")};
	[Best|_] ->
	    map_chart_1(Type, Faces, Best, Options, We);
	Err ->
	    ?dbg(?__(4,"Error:")++" ~p~n", [Err]),
	    {error, ?__(5,"Error, try to cleanup objects before uv-mapping")}
    end.
map_chart_1(Type, Chart, Loop, Options, We) ->
try map_chart_2(Type, Chart, Loop, Options, We)
catch error:{badarith,_} ->
{error,?__(1,"Numeric problem, probably a bad face with an empty area.")};
throw:What ->
{error,lists:flatten(What)};
_:Reason:ST ->
Msg = ?__(2,"Error: try to cleanup objects before uv-mapping"),
?dbg("~p:~p "++?__(3,"Error")++" ~p~n ~p ~n",
[?MODULE,?LINE,Reason,ST]),
{error,lists:flatten(Msg)}
end.
%% Dispatch to the chosen parametrisation algorithm. For lsqcm the pure
%% Erlang solver is used when explicitly requested or on 32-bit systems;
%% otherwise the libigl-backed lscm is preferred.
map_chart_2(project, C, _, _, We) -> projectFromChartNormal(C, We);
map_chart_2(camera, C, _, Dir, We) -> projectFromCamera(C, Dir, We);
map_chart_2(lsqcm, C, Loop, Pinned, We) ->
    case get(auv_use_erlang_impl) == true
	orelse erlang:system_info(wordsize) =:= 4 of
	true ->
	    lsqcm(C, Pinned, Loop, We); %% old
	false ->
	    lscm(C, Pinned, Loop, We)
    end;
map_chart_2(harmonic, C, Loop, _Pinned, We) ->
    ?TC(harmonic(C, Loop, We)); %% debug libigl
map_chart_2(slim, C, Loop, _Pinned, We) ->
    slim(C, Loop, We);
map_chart_2(Op, C, Loop, Pinned, We) ->
    volproject(Op, C, Pinned, Loop, We).
volproject(Type,Chart,_Pinned,{_,BEdges},We) ->
{Center,Axes,LoopInfo} = find_axes(Chart,BEdges,We),
%%io:format("Res: ~p ~n",[{Center,Axes}]),
Rot = rot_mat(Axes),
CalcUV = case Type of
cyl -> fun cyl/1;
sphere -> fun sphere/1
end,
Vs0 = wings_face:to_vertices(Chart, We),
Transform = fun(V) ->
Pos = wings_vertex:pos(V, We),
Vec = e3d_vec:sub(Pos,Center),
e3d_mat:mul_vector(Rot,Vec)
end,
Vs1 = lists:sort([{V,Transform(V)} || V <- Vs0]),
Tagged = leftOrRight(LoopInfo, Chart, We#we{vp=array:from_orddict(Vs1)}),
%%io:format("Tagged ~w~n",[gb_sets:to_list(Tagged)]),
[{V,fix_positions(V,Pos,CalcUV(Pos),Tagged)} || {V,Pos} <- Vs1].
%% Spherical projection of a centered, axis-aligned point: S is the
%% azimuth around Y (atan2, pi-normalized, crash-protected by catchy/1),
%% T the polar angle (acos on the clamped Y), shifted to be 0 at the
%% equator.
sphere({X,Y,Z}) ->
    S = catchy(catch math:atan2(X,Z)/math:pi()),
    T = math:acos(clamp(-Y))/math:pi()-0.5,
    {S,T,0.0}.
%% Cylindrical projection: azimuth around Y as U (crash-protected,
%% pi-normalized), the Y coordinate itself as V.
cyl({X,Y,Z}) ->
    S = catchy(catch math:atan2(X,Z)/math:pi()),
    T = Y,
    {S,T,0.0}.
%% Planar projection: rotate the chart so its averaged normal points
%% along +Z and use the resulting X/Y as UV.
projectFromChartNormal(Chart, We) ->
    Normal = chart_normal(Chart,We),
    Vs0 = wings_face:to_vertices(Chart, We),
    rotate_to_z(Vs0, Normal, We).
%% Project chart vertices through the current camera (modelview,
%% projection and viewport matrices) onto screen space, dropping depth.
projectFromCamera(Chart,{matrices,{MM,PM,VP}},We) ->
    Vs = wings_face:to_vertices(Chart, We),
    Proj = fun(V) ->
		   Pos = wings_vertex:pos(V, We),
		   {S,T, _} = e3d_transform:project(Pos,MM,PM,VP),
		   {V,{S,T,0.0}}
	   end,
    lists:map(Proj, Vs).
%% Mostly a test for slim initialization
harmonic(Chart, Loop, We0) ->
{BorderVs0,BorderUVs} = loop_to_circle(Loop),
{_TriWe,_TriFs,Vs,Fs,WeVs2Vs,Vs2WeVs} = init_mappings(Chart,We0),
BorderVs = [maps:get(V, WeVs2Vs) || V <- BorderVs0],
UVs0 = libigl:harmonic(Vs, Fs, BorderVs, BorderUVs),
UVs = remap_uvs(UVs0, Vs2WeVs),
UVs.
slim(Chart, Loop, We0) ->
{BorderVs0,BorderUVs} = loop_to_circle(Loop),
{_TriWe,_TriFs,Vs,Fs,WeVs2Vs,Vs2WeVs} = init_mappings(Chart,We0),
BorderVs = [maps:get(V, WeVs2Vs) || V <- BorderVs0],
UVInit = libigl:harmonic(Vs, Fs, BorderVs, BorderUVs),
UVs0 = libigl:slim(Vs,Fs,UVInit, symmetric_dirichlet, 0.00001),
UVs = remap_uvs(UVs0, Vs2WeVs),
UVs.
lscm(Fs, none, Loop, We) ->
lscm(Fs,find_pinned(Loop,We),Loop,We);
lscm(Fs0, Pinned, _Loop, We0) ->
{TriWe,TriFs,Vs,Fs,WeVs2Vs,Vs2WeVs} = init_mappings(Fs0,We0),
{BIndx,BPos} = split_pinned(Pinned, WeVs2Vs, [], []),
case libigl:lscm(Vs, Fs, BIndx, BPos) of
false ->
?dbg("Fs: ~p~n",[Fs0]),
?dbg("Pinned: ~p~n",[Pinned]),
?dbg("Loop: ~p~n",[_Loop]),
throw(?__(1, "Couldn't calculate uv-coords for chart"));
{error, Reason} ->
?dbg("Error: ~p", [Reason]),
throw(?__(2, "Math error"));
UVs0 ->
UVs = remap_uvs(UVs0, Vs2WeVs),
OrigArea = fs_area(TriFs, TriWe, 0.0),
MappedArea = fs_area(TriFs, TriWe#we{vp=array:from_orddict(UVs)}, 0.0),
scaleVs(UVs, math:sqrt(OrigArea/MappedArea))
end.
lsqcm(Fs, none, Loop, We) ->
lsqcm(Fs,find_pinned(Loop,We),Loop,We);
lsqcm(Fs, Pinned, _Loop, We) ->
?DBG("Project and tri ~n", []),
LSQState = lsq_setup(Fs,We,Pinned),
{ok,Vs2} = lsq(LSQState, Pinned),
%%?DBG("LSQ res ~p~n", [Vs2]),
Patch = fun({Idt, {Ut,Vt}}) -> {Idt,{Ut,Vt,0.0}} end,
Vs3 = lists:sort(lists:map(Patch, Vs2)),
TempVs = array:from_orddict(Vs3),
Area = fs_area(Fs, We, 0.0),
MappedArea = fs_area(Fs, We#we{vp=TempVs}, 0.0),
Scale = Area/MappedArea,
scaleVs(Vs3,math:sqrt(Scale)).
%% Map border edges to circle positions
%% Map border edges to circle positions: distribute the loop's vertices
%% around the unit circle proportionally to accumulated edge length.
%% Used to seed the harmonic/slim parametrizations.
-spec loop_to_circle({TotDist::float(), [BEs::#be{}]}) -> {[integer()], [{float(),float()}]}.
loop_to_circle({TotDist, BEs}) ->
    loop_to_circle(BEs, 0.0, TotDist, [], []).

loop_to_circle([#be{vs=V, dist=D}|BEs], Curr, Tot, Vs, UVs) ->
    Pi2 = 2.0*math:pi(),
    %% Walk the circle clockwise: angle decreases as distance accumulates.
    Frac = Pi2*(1-Curr/Tot),
    loop_to_circle(BEs, Curr+D, Tot, [V|Vs], [{math:cos(Frac),math:sin(Frac)}|UVs]);
loop_to_circle([], _, _, Vs, UVs) ->
    {Vs, UVs}.
%% Replace a caught crash from math:atan2/2 (the argument comes from a
%% `catch` expression) with a fallback angle of pi/4; pass a successful
%% result straight through.
catchy(Result) ->
    case Result of
        {'EXIT', _Reason} -> math:pi()/4;
        Angle -> Angle
    end.
%% Clamp X into the closed interval [-1.0, 1.0]; guards math:acos/1
%% against arguments that drift outside its domain.
clamp(X) ->
    if
        X > 1.0 -> 1.0;
        X < -1.0 -> -1.0;
        true -> X
    end.
%% Resolve seam wrap-around for the cylinder/sphere projections.
%% Vertices on the front half (Z > 0.0) are unambiguous; on the back
%% half, vertices tagged as belonging to the "left" side are shifted one
%% full period (2.0) in U when they landed on the wrong side of the
%% seam, and untagged ones the other way.
fix_positions(_V, {_,_,Z}, Proj, _Tagged) when Z > 0.0 ->
    Proj;
fix_positions(V, _Pos, {U,T,W}=Proj, Tagged) ->
    case gb_sets:is_member(V, Tagged) of
        true when U > 0.0 -> {U-2.0, T, W};
        false when U < 0.0 -> {U+2.0, T, W};
        _Other -> Proj
    end.
leftOrRight({LL,LR}, Free0, We) ->
Del = fun(#be{face=F},{Fs,Ch}) -> {[F|Fs],gb_sets:delete_any(F,Ch)} end,
{F1,Free1} = foldl(Del,{[],gb_sets:from_list(Free0)},LL),
{F2,Free} = foldl(Del,{[],Free1},LR),
[Fs1,Fs2] = expand_faces([F1,F2],Free,[],[F1,F2],[],We),
Set1 = wings_vertex:from_faces(Fs1,We),
Set2 = wings_vertex:from_faces(Fs2,We),
case wings_vertex:center(Set1,We) > wings_vertex:center(Set2,We) of
true -> gb_sets:from_ordset(Set2);
false -> gb_sets:from_ordset(Set1)
end.
expand_faces([Fs0|Rest],Free0,New,[Set|Acc1],Tot,We) ->
{NewFs,Free} = foldl(fun(Face, A) ->
do_face_more(Face, We, A)
end, {[],Free0}, Fs0),
expand_faces(Rest,Free,[NewFs|New],Acc1,[NewFs++Set|Tot],We);
expand_faces([],Free,New,[],Tot,We) ->
case gb_sets:is_empty(Free) of
true -> Tot;
false -> expand_faces(reverse(New),Free,[],reverse(Tot),[],We)
end.
do_face_more(Face, We, Acc) ->
wings_face:fold(fun(_,_,#edge{lf=LF,rf=RF},P={A1,Free}) ->
AFace = if LF == Face -> RF; true -> LF end,
case gb_sets:is_member(AFace,Free) of
true ->
{[AFace|A1],
gb_sets:delete(AFace,Free)};
false ->
P
end
end, Acc, Face,We).
%% Build a compact e3d_mat transformation (rotation part only, zero
%% translation) whose rows are the orthonormal basis vectors {U,V,W},
%% i.e. the rotation taking world coordinates into that frame.
rot_mat({{Ux,Uy,Uz},{Vx,Vy,Vz},{Wx,Wy,Wz}}) ->
    {Ux,Vx,Wx,
     Uy,Vy,Wy,
     Uz,Vz,Wz,
     0.0,0.0,0.0}.
%% find_axes(Fs, BEdges, We) -> {Center,{X,Y,Z},LoopInfo}
%%  Choose projection axes for the cylinder/sphere mappings. Only
%%  charts whose border forms a recognizable figure-8 cut of a closed
%%  object are accepted; otherwise a user-readable error is thrown.
%%  Fix: the alternative (disabled) code path had lost its comment
%%  marker, which made the clause a syntax error; restored as a comment.
find_axes(Fs,BEdges,We) ->
    ChartNormal = chart_normal(Fs,We),
    case forms_closed_object(BEdges,ChartNormal,We) of
	undefined ->
	    throw(
	      ?__(1,"I currently can't sphere/cylinder map this type of chart/cuts,\n"
		  "I can't figure out which axes you want as X,Y, and Z,\n"
		  "please use unfolding or one of the projection mappings."));
	    %% find_axes_from_eigenv(Fs, ChartNormal, BEdges, We);
	Nice ->
	    Nice
    end.
%% Detect whether the border describes a closed object cut open along a
%% figure-8 loop (an original edge appearing twice on the border, per
%% the chart's edge map). If so, return {Center,Axes,{Link,LinkR}}
%% suitable for cylinder/sphere mapping; otherwise undefined.
%% Fix: restored the comment marker on the disabled debug printout.
forms_closed_object(BEdges0,ChartNormal,We=#we{name=#ch{emap=Emap}}) ->
    BEdges = [{auv_segment:map_edge(Edge,Emap),BE} || BE = #be{edge=Edge} <- BEdges0],
    case is_an_8(BEdges, false) of
	false -> undefined;
	Edge ->
	    {North,South,Link,LinkR} = split_edges(Edge,BEdges,We),
	    NorthSouth = e3d_vec:sub(North,South),
	    Center = e3d_vec:average(North,South),
	    %% io:format("Temp: ~p~n",[{North,South,Center}]),
	    LC = center(Link,We),
	    LCdir0 = e3d_vec:sub(LC,Center),
	    %% Degenerate link center: fall back to the chart normal.
	    LCdir = case e3d_vec:len(LCdir0) > 0.0005 of
			true -> LCdir0;
			false -> e3d_vec:neg(ChartNormal)
		    end,
	    {Center,calc_axis(NorthSouth,LCdir),{Link,LinkR}}
    end.
%% Geometric center (average position) of all vertices touched by the
%% given border edges.
center(Bes,We) ->
    Eds = lists:map(fun(#be{edge=E}) -> E end, Bes),
    Vs = wings_vertex:from_edges(Eds,We),
    wings_vertex:center(Vs,We).
%% Build a right-handed orthonormal basis {X,Y,Z} with Y along Y0 and Z
%% as close to Z0 as orthogonality permits.
calc_axis(Y0,Z0) ->
    Y = e3d_vec:norm(Y0),
    X = e3d_vec:norm(e3d_vec:cross(e3d_vec:norm(Z0),Y)),
    Z = e3d_vec:norm(e3d_vec:cross(X,Y)),
    {X,Y,Z}.
is_an_8([],E) ->
E;
is_an_8([{E,_},{E,_}|R],_) -> %% Skip these
is_an_8(R, E);
is_an_8([{E,_}|R],HaveRemoved) -> %% Hmm we must take them in order
case lists:keysearch(E,1,R) of %% O(N2) I know..
false -> is_an_8(R,HaveRemoved);
_ when HaveRemoved =/= false ->
E;
_ ->
case reverse(R) of
[{E,_}|R2] ->
is_an_8(reverse(R2), E);
_ -> E
end
end.
%% Split edges splits into three parts two loops
%% and a link between them.
%%  _ defg
%% \_/--\_| => 2 loops: mnoabc fghijk
%% onmdekji => link: def
%%
%% d(L) -> %% DBG
%%    lists:map(fun({E,_BE}) -> E end,L).
%% Strip the mapped-edge tag from a [{MappedEdge,#be{}}] list, keeping
%% only the border-edge records in order.
getEs(TaggedBes) ->
    [BE || {_MappedEdge, BE} <- TaggedBes].
split_edges(Edge,Bes,We) ->
{L1,L2,Link} = split_edges_1(Edge,Bes),
%% Reorder so that the pinned vertices are longest from each other
North = case L1 of
[] -> wings_vertex:pos((hd(Link))#be.vs,We);
_ -> center(L1,We)
end,
South = case L2 of
[] -> wings_vertex:pos((lists:last(Link))#be.ve,We);
_ -> center(L2,We)
end,
LinkR = (((getEs(Bes) -- L1) -- L2) -- Link),
{North,South,Link,LinkR}.
%% Split the tagged border-edge list at the twice-occurring edge Edge
%% into the two loops and the connecting link (see the figure above).
%% Fix: two disabled debug printouts had lost their comment markers
%% (a syntax error); restored as comments.
split_edges_1(Edge,Bes) ->
    %% io:format("Split: ~p ~w~n", [Edge, d(Bes)]),
    {Before,BE1,After} = find_split(Edge,Bes,[]),
    {LeftLoop0,BE2,RightLoop0} = find_split(Edge,After,[BE1]),
    LeftLoop = LeftLoop0 ++ [BE2],
    %% NOTE: Order is important below
    RightLoop = reverse(RightLoop0 ++ Before),
    {Loop1,Link1} = find_link(LeftLoop, reverse(LeftLoop), []),
    {Loop2,Link2} = find_link(RightLoop, reverse(RightLoop), []),
    %% io:format("L1:~w~nL2:~w~nLink1:~w~nLink2:~w~n~n",
    %%           [Loop1,(Loop2),(Link1),(Link2)]),
    Link = reorder_link(Link2++reverse(Link1)),
    %% io:format("Link:~w~n",[d(Link)]),
    {getEs(Loop1),getEs(Loop2),getEs(Link)}.
%% Scan a tagged border-edge list for the first entry whose mapped edge
%% is Edge. Returns {Before, Match, After} with Before in the original
%% order.
find_split(Edge, [{Edge,_}=Found|Rest], Skipped) ->
    {lists:reverse(Skipped), Found, Rest};
find_split(Edge, [Other|Rest], Skipped) ->
    find_split(Edge, Rest, [Other|Skipped]).
find_link([{E,_}|_],[{E,_}|_],Link = [{E,_}|_]) ->
{[],Link};
find_link([G={E,_}|C1],[{E,_}|C2],Link) ->
find_link(C1,C2,[G|Link]);
find_link(C1,_,Link) ->
find_loop(C1,Link,[]).
find_loop([{E,_}|_],[{E,_}|_]=Link, Loop) ->
{Loop,Link};
find_loop([G|C1],Link,Loop) ->
find_loop(C1,Link,[G|Loop]);
find_loop([],[],Loop) -> {Loop,[]};
find_loop([],Link,[]) -> {[],Link}.
reorder_link([]) -> [];
reorder_link(A=[_]) -> A;
reorder_link(Ok = [{_,#be{ve=V}},{_,#be{vs=V}}|_]) -> Ok;
reorder_link(Rev = [{_,#be{vs=V}},{_,#be{ve=V}}|_]) ->
%% reverse(Rev); %% Correctness asserted below
reorder_link(reverse(Rev));
reorder_link(Other) ->
io:format("Other: ~w~n",[Other]),
exit(internal_error).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
init_mappings(Fs0,We0) ->
%wings_util:profile_start(fprof),
#we{vp=Vtab} = TriWe = wings_tesselation:triangulate(Fs0, We0),
%wings_util:profile_stop(fprof),
Fs1 = Fs0 ++ wings_we:new_items_as_ordset(face,We0,TriWe),
Add = fun(V,_,_,{N, Vs,Face, ToNew, ToOld}) ->
case maps:get(V, ToNew, undefined) of
undefined ->
Pos = array:get(V, Vtab),
{N+1, [Pos|Vs], [N|Face], maps:put(V, N, ToNew), [{N,V}|ToOld]};
New ->
{N, Vs, [New|Face], ToNew, ToOld}
end
end,
{Vs,NewFs,ToNew,ToOld} = setup_maps(Fs1, TriWe, Add, 0, [], [], maps:new(), []),
{TriWe, Fs1, Vs, NewFs, ToNew, ToOld}.
setup_maps([F|Fs], We, Add, N0, Vs0, AFs, ToNew0, ToOld0) ->
{N, Vs,RFace,ToNew,ToOld} = wings_face:fold(Add, {N0, Vs0, [], ToNew0, ToOld0}, F, We),
setup_maps(Fs, We, Add, N, Vs, [RFace|AFs], ToNew, ToOld);
setup_maps([], _We, _, _, Vs0, AFs, ToNew, ToOld) ->
{lists:reverse(Vs0), AFs, ToNew, maps:from_list(ToOld)}.
%% Translate solver output (a 0-indexed list of {U,V} pairs) back to
%% wings vertex ids via the Vs2WeVs map; the result is a sorted
%% [{WingsVertex,{U,V,0.0}}] orddict (ready for array:from_orddict/1).
remap_uvs(SolverUVs, Vs2WeVs) ->
    remap_uvs_1(SolverUVs, Vs2WeVs, 0, []).

%% Helper: walk the UV list carrying the running solver index.
remap_uvs_1([{U,V}|Rest], Vs2WeVs, Idx, Acc) ->
    Mapped = {maps:get(Idx, Vs2WeVs), {U, V, 0.0}},
    remap_uvs_1(Rest, Vs2WeVs, Idx+1, [Mapped|Acc]);
remap_uvs_1([], _Vs2WeVs, _Idx, Acc) ->
    lists:sort(Acc).
%% Split the pinned-vertex list [{WingsVertex,UV}] into two parallel
%% lists in the original order: solver vertex indices (looked up in
%% WeVs2Vs) and the corresponding pinned UV positions.
split_pinned(Pinned, WeVs2Vs, IdxAcc, PosAcc) ->
    case Pinned of
        [{WeV, Pos}|Rest] ->
            split_pinned(Rest, WeVs2Vs,
                         [maps:get(WeV, WeVs2Vs)|IdxAcc], [Pos|PosAcc]);
        [] ->
            {lists:reverse(IdxAcc), lists:reverse(PosAcc)}
    end.
%% Uniformly scale every UV coordinate in a [{Id,{U,V,0.0}}] list.
%% Entries whose third coordinate is not exactly 0.0 are silently
%% dropped, matching the original comprehension's filter semantics.
scaleVs(VUVs, Scale) ->
    lists:filtermap(
      fun({Id, {U, V, 0.0}}) -> {true, {Id, {U*Scale, V*Scale, 0.0}}};
         (_) -> false
      end, VUVs).
find_pinned({Circumference, BorderEdges}, We) ->
Vs = [array:get(V1, We#we.vp) || #be{vs=V1} <- BorderEdges],
Center = e3d_vec:average(Vs),
AllC = lists:map(fun(#be{vs=Id}) ->
Pos = array:get(Id, We#we.vp),
Dist = e3d_vec:dist(Pos, Center),
{Dist, Id, Pos}
end, BorderEdges),
[{_,V0,_V1Pos}|_] = lists:reverse(lists:sort(AllC)),
BE1 = reorder_edge_loop(V0, BorderEdges, []),
HalfCC = Circumference/2, %% - Circumference/100,
{V1, V2} = find_pinned(BE1, BE1, 0.0, HalfCC, HalfCC, undefined),
[{V1,{0.0,0.0}},{V2,{1.0,1.0}}].
%% Walk the border loop simultaneously from both ends to find the pair
%% of vertices whose along-loop distance is closest to half the
%% circumference (HCC); these become the two pinned UV corners.
%% Fix: a disabled ?DBG printout had lost its comment marker (a syntax
%% error); restored as a comment.
find_pinned(Curr=[#be{vs=C1,dist=Clen}|CR],Start=[#be{ve=S2,dist=Slen}|SR],Len,HCC,Best,BVs) ->
    Dlen = HCC-(Clen+Len),
    ADlen = abs(Dlen),
    % ?DBG("Testing ~p ~p ~p ~p ~p~n", [{S2,C1},Dlen,{Len+Clen,HCC}, Best, BVs]),
    if
	Dlen >= 0.0 ->
	    if ADlen < Best ->
		    find_pinned(CR,Start,Clen+Len,HCC,ADlen,{S2,C1});
	       true ->
		    find_pinned(CR,Start,Clen+Len,HCC,Best,BVs)
	    end;
	Dlen < 0.0 ->
	    if ADlen < Best ->
		    find_pinned(Curr,SR,Len-Slen,HCC, ADlen,{S2,C1});
	       true ->
		    find_pinned(Curr,SR,Len-Slen,HCC,Best,BVs)
	    end
    end;
find_pinned([], _, _, _, _Best, Bvs) ->
    %% ?DBG("Found ~p ~p~n", [_Best, Bvs]),
    Bvs.
%% Rotate the border-edge loop so it starts at the edge whose start
%% vertex is V1; the cyclic order itself is preserved.
reorder_edge_loop(V1, [Rec=#be{vs=V1}|Ordered], Acc) ->
    Ordered ++ lists:reverse([Rec|Acc]);
reorder_edge_loop(V1, [H|Tail], Acc) ->
    reorder_edge_loop(V1, Tail, [H|Acc]).
%% Utils
%% chart_normal(Fs, We) -> Normal
%%  Area-weighted average normal of the chart faces. If it cancels to
%%  the zero vector, fall back to the faces adjacent to the border
%%  edges; if that is also degenerate, retry on a chart shrunk by one
%%  border ring.
%%  Fix: both `{0.0,0.0,0.0} ->` case patterns had been lost (only the
%%  trailing "Bad normal" comments remained), leaving invalid syntax;
%%  restored so the zero-vector fallbacks work again.
chart_normal([],_We) -> throw(?__(1,"Can not calculate normal for chart."));
chart_normal(Fs,We = #we{es=Etab}) ->
    CalcNormal = fun(Face,Area) -> face_normal(Face,Area,We) end,
    N0 = foldl(CalcNormal, e3d_vec:zero(), Fs),
    case e3d_vec:norm(N0) of
	{0.0,0.0,0.0} -> %% Bad normal, fallback 1
	    %% BE = auv_util:outer_edges(Fs,We,false),
	    [{_,BE}|_] = auv_placement:group_edge_loops(Fs,We),
	    EdgeNormals =
		fun(#be{edge=Edge}, Sum0) ->
			#edge{lf=LF,rf=RF} = array:get(Edge, Etab),
			Sum1 = CalcNormal(LF,Sum0),
			CalcNormal(RF,Sum1)
		end,
	    N1 = foldl(EdgeNormals, e3d_vec:zero(), BE),
	    case e3d_vec:norm(N1) of
		{0.0,0.0,0.0} -> %% Bad normal, fallback 2
		    NewFs = decrease_chart(Fs,BE),
		    chart_normal(NewFs, We);
		N -> e3d_vec:neg(N)
	    end;
	N -> N
    end.
%% Accumulate Face's area-weighted normal onto Sum.
face_normal(Face,Sum,We) ->
    Normal = wings_face:normal(Face, We),
    Vs0 = wpa:face_vertices(Face, We),
    Area = calc_area(Vs0,Normal, We),
    e3d_vec:add(Sum, e3d_vec:mul(Normal, Area)).
%% Remove the faces bordering the outer edge loop from the chart; used
%% to retry the normal calculation on a smaller face set.
decrease_chart(Fs0,BE) ->
    Fs1 = gb_sets:from_list(Fs0),
    Del = fun(#be{face=Face},FSin) ->
		  gb_sets:del_element(Face,FSin)
	  end,
    Fs = foldl(Del, Fs1, BE),
    gb_sets:to_list(Fs).
%% Rotate the vertices so that Normal maps onto +Z, i.e. lay the chart
%% flat in the XY plane for planar projection.
rotate_to_z(Vs, Normal, We) ->
    Rot = e3d_mat:rotate_s_to_t(Normal,{0.0,0.0,1.0}),
    [{V,e3d_mat:mul_point(Rot, wings_vertex:pos(V, We))} || V <- Vs].
%% Alg. found in comp.graphics.algorithms faq
%% To be correct it needs the polygons to flat but we
%% don't need to be 100% correct.
%% fs_area(Fs, We [, Acc]) -> Area
%%  Sum of the 3D areas of the faces Fs; degenerate faces whose normal
%%  or area cannot be computed contribute 0.0 instead of crashing.
fs_area(Fs,We) ->
    fs_area(Fs,We,0.0).
fs_area([Face|Rest],We,Area) ->
    Vs0 = wpa:face_vertices(Face, We),
    NewArea = try
		  Normal = wings_face:normal(Face, We),
		  calc_area(Vs0, Normal, We)
	      catch _:_ ->
		      0.0
	      end,
    fs_area(Rest,We,NewArea+Area);
fs_area([],_,Area) ->
    Area.
%% Area of the polygon Vs0 projected along Normal, using the summed
%% cross-product formula referenced above (polygon assumed near-planar).
calc_area(Vs0, Normal, We) ->
    [V|Vs] = [wings_vertex:pos(V, We) || V <- Vs0],
    Sum = sum_crossp([V|Vs] ++ [V], e3d_vec:zero()),
    0.5 * abs(e3d_vec:dot(Normal, Sum)).
%% Sum of cross products of consecutive vertex pairs around the polygon;
%% its dot product with the polygon normal is twice the signed area.
sum_crossp([V1,V2|Vs], Acc) ->
    Cross = e3d_vec:cross(V1,V2),
    sum_crossp([V2|Vs], e3d_vec:add(Acc, Cross));
sum_crossp([_Last], Acc) ->
    Acc.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%% Least Square Conformal Maps %%%%%%%%%%%%
-record(lsq,{a,x0,ap,temp1,temp2,dr}).
%% Build the LSQCM least-squares system from the chart faces and the
%% pinned UV points: the free-point matrix Af, the pinned-point matrix
%% Ap, a start vector X0 (the pinned points' average UV), and the
%% bookkeeping needed to translate the solution back.
%% Fix: restored the stripped comment marker on the "M1 = Re(M)..." note.
lsq_setup(Fs,We,Pinned) ->
    {M,N,D,DR,L1,L2} = lsq_init(Fs,We,Pinned),
    {Lquv0,{Np,Usum,Vsum}} =
	lists:mapfoldl(
	  fun({P,{U,V} = UV}, {I,X,Y}) ->
		  {ok,Q} = dict:find(P, D),
		  {{Q,UV}, {I+1,X+U,Y+V}}
	  end,{0,0.0,0.0},Pinned),
    Lquv = lists:sort(Lquv0),	% Must be sorted for pick() and insert() to work.
    ?DBG("lsq_int - Lquv = ~p~n",[Lquv]),
    %% Build the basic submatrixes
    %% M1 = Re(M), M2 = Im(M), M2n = -M2
    {M1,M2,M2n} = build_basic(M,L1,L2),
    %% Compile the basic submatrixes into the ones related to
    %% free points (Mf*) i.e unknown,
    %% and pinned points (Mp*).
    {Mfp1c,Mfp2c,Mfp2nc,LuLv} = build_cols(M1,M2,M2n,Lquv),
    ?DBG("lsq_int - LuLv = ~p~n", [LuLv]),
    %% Compose the matrix and vector to solve
    %% for a Least Squares solution.
    {Af,Ap} = build_matrixes(N,Mfp1c,Mfp2c,Mfp2nc),
    ?DBG("Solving matrices~n", []),
    X0Fix = auv_matrix:vector(lists:duplicate(M-Np, Usum/Np)++
			      lists:duplicate(M-Np, Vsum/Np)),
    #lsq{a=Af,x0=X0Fix,ap=Ap,temp1=LuLv,temp2=Lquv,dr=DR}.
%% Triangulate the chart and build the per-triangle sparse matrix rows
%% plus the vertex id mappings (see lsq_init_fs/7).
lsq_init(Fs0,We0,Pinned0) ->
    %% Do a real triangulation, might be optimized later.
    We = wings_tesselation:triangulate(Fs0, We0),
    Fs = Fs0 ++ wings_we:new_items_as_ordset(face,We0,We),
    Pinned = gb_trees:from_orddict(lists:sort(Pinned0)),
    lsq_init_fs(Fs,Pinned,We,{0,dict:new(),dict:new()},0,[],[]).
%% Walk the triangulated faces building the sparse real (Re) and
%% imaginary (Im) rows of the conformality matrix, one row per triangle,
%% while assigning dense 1-based ids to the vertices (update_dicts/2).
%% Returns {VertexCount, RowCount, V->Id, Id->V, ReVectors, ImVectors}.
%% Fix: several commented-out reference snippets (the alternative
%% variable naming and Levy's original C code) had lost their comment
%% markers, leaving invalid syntax; restored as comments.
lsq_init_fs([F|Fs],P,We = #we{vp=Vtab},Ds0,N,Re0,Im0) ->
    Vs = [[A0|_],[B0|_],[C0|_]] = wings_va:face_attr([vertex|uv], F, We),
    {[A,B,C],Ds} = update_dicts(Vs,Ds0),
    %% {X1=Z0x,Y1=Z0y,X2=Z1x,Y2=Z1y,X3=Z2x,Y3=Z2y} =
    {X1,Y1,X2,Y2,X3,Y3} =
	project_tri(array:get(A0,Vtab),array:get(B0,Vtab),
		    array:get(C0,Vtab)),
    %% Raimo's old solution.
    SqrtDT0 = try math:sqrt(abs((X2-X1)*(Y3-Y1)-(Y2-Y1)*(X3-X1)))
	      catch _:_ -> 0.000001
	      end,
    SqrtDT = if SqrtDT0 =:= 0.0 -> 1.0; % this can happen e.g. in a bevel/extrude without offset
		true -> SqrtDT0
	     end,
    W1re = X3-X2, W1im = Y3-Y2,
    W2re = X1-X3, W2im = Y1-Y3,
    W3re = X2-X1, W3im = Y2-Y1,
    Re=[[{A,W1re/SqrtDT},{B,W2re/SqrtDT},{C,W3re/SqrtDT}]|Re0],
    Im=[[{A,W1im/SqrtDT},{B,W2im/SqrtDT},{C,W3im/SqrtDT}]|Im0],
    %% Levy's c-code
    %% Vector2 z01 = z1 - z0 ;
    %% Vector2 z02 = z2 - z0 ;
    %% double a = z01.x ;
    %% double b = z01.y ;
    %% double c = z02.x ;
    %% double d = z02.y ;
    %% assert(b == 0.0) ;
    %% // Note : 2*id + 0 --> u
    %% //        2*id + 1 --> v
    %% int u0_id = 2*id0 ;
    %% int v0_id = 2*id0 + 1 ;
    %% int u1_id = 2*id1 ;
    %% int v1_id = 2*id1 + 1 ;
    %% int u2_id = 2*id2 ;
    %% int v2_id = 2*id2 + 1 ;
    %% // Note : b = 0
    %% // Real part
    %% nlBegin(NL_ROW) ;
    %% nlCoefficient(u0_id, -a+c) ;
    %% nlCoefficient(v0_id, b-d) ;
    %% nlCoefficient(u1_id, -c) ;
    %% nlCoefficient(v1_id, d) ;
    %% nlCoefficient(u2_id, a) ;
    %% nlEnd(NL_ROW) ;
    %% // Imaginary part
    %% nlBegin(NL_ROW) ;
    %% nlCoefficient(u0_id, -b+d) ;
    %% nlCoefficient(v0_id, -a+c) ;
    %% nlCoefficient(u1_id, -d) ;
    %% nlCoefficient(v1_id, -c) ;
    %% nlCoefficient(v2_id, a) ;
    %% nlEnd(NL_ROW) ;
    %% }
    lsq_init_fs(Fs,P,We,Ds,N+1,Re,Im);
lsq_init_fs([],_,_We,{M,D,DR},N,Re0,Im0) ->
    {M,N,D,DR,vecs(M,Re0,[]),vecs(M,Im0,[])}.
vecs(M,[R|Rs],Acc) ->
vecs(M,Rs,[auv_matrix:vector(M,R)|Acc]);
vecs(_,[],Acc) -> Acc.
%% Assign dense 1-based solver ids to wings vertices. Ids is a list of
%% [Vertex|_] face-attribute lists; returns {[SolverId], {Count, V->Id
%% dict, Id->V dict}} with already-seen vertices reusing their id.
update_dicts(Ids, {N, D, DR}) ->
    Assign =
        fun([P|_], {Count, Fwd, Rev, Acc}) ->
                case dict:find(P, Fwd) of
                    {ok, Id} ->
                        {Count, Fwd, Rev, [Id|Acc]};
                    error ->
                        Id = Count + 1,
                        {Id, dict:store(P, Id, Fwd),
                         dict:store(Id, P, Rev), [Id|Acc]}
                end
        end,
    {N1, D1, DR1, Acc} = lists:foldl(Assign, {N, D, DR, []}, Ids),
    {lists:reverse(Acc), {N1, D1, DR1}}.
%% Project the 3D triangle P0-P1-P2 isometrically onto its own plane:
%% P0 goes to the origin, P1 onto the positive X axis; returns the six
%% resulting 2D coordinates.
project_tri(P0,P1,P2) ->
    L = e3d_vec:sub(P1,P0),
    X = e3d_vec:norm(L),
    T = e3d_vec:sub(P2,P0),
    Z = e3d_vec:norm(e3d_vec:cross(X,T)),
    Y = e3d_vec:cross(Z,X),
    {0.0,0.0,
     e3d_vec:len(L),0.0,
     e3d_vec:dot(T,X), e3d_vec:dot(T,Y)}.
lsq(L, Lpuv) when is_list(Lpuv) ->
lsq(L, Lpuv, env);
lsq(Name, Method) when is_atom(Method) ->
{ok, [{L, Lpuv}]} = file:consult(Name),
lsq(L, Lpuv, Method).
lsq(L, Lpuv, Method0) when is_record(L,lsq), is_list(Lpuv), is_atom(Method0) ->
Method = case Method0 of
env ->
case os:getenv("WINGS_AUTOUV_SOLVER") of
"ge" -> ge;
"cg" -> cg;
"cg_jacobian" -> cg_jacobian;
"cg_colnorm" -> cg_colnorm;
_ -> cg_colnorm
end;
M -> M
end,
try lsq_int(L, Lpuv, Method)
catch
error:badarg:ST ->
error(badarg, {[L,Lpuv,Method],ST})
end;
lsq(L, Lpuv, Method) ->
error(badarg, [L, Lpuv, Method]).
%% Solve the prepared least-squares system with the selected method
%% (ge = Gaussian elimination, otherwise conjugate gradient) and map
%% the solution back to the original wings vertex ids.
%% Fix: a disabled ?DBG printout had lost its comment marker (a syntax
%% error); restored as a comment.
lsq_int(#lsq{a=Af,x0=X0,ap=Ap,temp1=LuLv,temp2=Lquv,dr=Rdict},_Pinned,Method) ->
    %% Clean this mess up
    {Np,K_LuLv} = keyseq_neg(LuLv),
    U = auv_matrix:vector(Np, K_LuLv),
    ?DBG("build_matrixes - U = ~p~n", [U]),
    B = auv_matrix:mult(Ap, U),
    X = case Method of
	    ge -> minimize_ge(Af,B);
	    _ ->
		{_,X1} = minimize_cg(Af,X0,B),
		X1
	end,
    %% ?DBG("X = ~p~n", [X]),
    %% Extract the vector of previously unknown points,
    %% and insert the pinned points. Re-translate the
    %% original point identities.
    lsq_result(X, Lquv, Rdict).
build_basic(M,L1,L2) ->
M1 = auv_matrix:rows(M, L1),
M2 = auv_matrix:rows(M, L2),
M2n = auv_matrix:rows(M, [auv_matrix:mult(-1, X) || X <- L2]),
{M1,M2,M2n}.
%% Split the conformality-matrix columns into free (Mf*) and pinned
%% (Mp*) column sets and collect the pinned points' U and V coordinates.
%% Fix: the binding `{Lq,Lu,Lv} = split_quv(Lquv)` had been lost
%% (only the tail of its trailing comment survived), leaving Lq, Lu and
%% Lv unbound; restored.
build_cols(M1,M2,M2n,Lquv) ->
    %% Build column lists of the M matrixes
    M1c = auv_matrix:cols(M1),
    M2c = auv_matrix:cols(M2),
    M2nc = auv_matrix:cols(M2n),
    %% Split the column lists into free (Mf) and pinned (Mp)
    {Lq,Lu,Lv} = split_quv(Lquv), % Lquv is sorted
    {pick(M1c, Lq),pick(M2c, Lq),pick(M2nc, Lq), Lu++Lv}.
%% Unzip the sorted pinned-point list [{Q,{U,V}}] into three parallel
%% lists: indices, U components and V components.
split_quv(Lquv) ->
    Lq = [Q || {Q, _} <- Lquv],
    Lu = [U || {_, {U, _}} <- Lquv],
    Lv = [V || {_, {_, V}} <- Lquv],
    {Lq, Lu, Lv}.
%% Assemble the 2x2 block matrices for the complex least-squares system
%% from the free/pinned column sets.
%% Fix: restored the stripped comment markers on the block-layout note.
build_matrixes(N,{Mf1c,Mp1c},{Mf2c,Mp2c},{Mf2nc,Mp2nc}) ->
    %% Build the matrixes Af and Ap, and vector B
    %% A = [M1 -M2], B = Ap U, U is vector of pinned points
    %%     [M2  M1]
    Afu = auv_matrix:cols(N, Mf1c++Mf2nc),
    Afl = auv_matrix:cols(N, Mf2c++Mf1c),
    Af = auv_matrix:cat_rows(Afu, Afl),
    Apu = auv_matrix:cols(N, Mp1c++Mp2nc),
    Apl = auv_matrix:cols(N, Mp2c++Mp1c),
    Ap = auv_matrix:cat_rows(Apu, Apl),
    {Af, Ap}.
%% Number the elements of L from 1 and negate each, producing
%% {Length, [{Index,-Value}]} -- the sparse-vector form of -L used for
%% the pinned-point right-hand side.
keyseq_neg(L) ->
    Len = length(L),
    Indexed = lists:zip(lists:seq(1, Len), L),
    {Len, [{N, -X} || {N, X} <- Indexed]}.
%% _ _ 2
%% Minimize || A x - b ||
%%
%% t _ t _
%% by solving A A x = A b
%%
%% using Gaussian Elimination and back substitution.
%%
%% Least-squares solve via the normal equations and Gaussian elimination
%% (see the comment block above).
%% Fix: a disabled ?DBG printout had lost its comment marker (a syntax
%% error); restored as a comment.
minimize_ge(A, B) ->
    AA = mk_solve_matrix(A, B),
    AAA = auv_matrix:reduce(AA),
    %% ?DBG("Reduced: ~p~n", [AAA]),
    X = auv_matrix:backsubst(AAA),
    ?DBG("Solved~n",[]),
    X.
mk_solve_matrix(Af,B) ->
AfT = auv_matrix:trans(Af),
AfTAf = auv_matrix:mult_trans(AfT, AfT),
AfTB = auv_matrix:mult(-1, auv_matrix:mult(AfT, B)),
auv_matrix:cat_cols(AfTAf, AfTB).
%% _ _ 2
%% Minimize || A x - b ||
%%
%% -1 t _ -1 t _
%% by solving M A A x = M A b
%% __
%% using the Preconditioned Conjugate Gradient method with X0 as
%% iteration start vector.
%%
%% Set up the preconditioned conjugate-gradient iteration (see the
%% comment block above): computes At, the initial residual, search
%% direction and convergence threshold, then enters minimize_cg/10.
%% Fix: one line of the multi-line preconditioning comment had lost its
%% comment marker (a syntax error); restored.
minimize_cg(A, X0, B) ->
    ?DBG("minimize_cg - dim A=~p X0=~p B=~p~n",
	 [auv_matrix:dim(A), auv_matrix:dim(X0), auv_matrix:dim(B)]),
    {N,M} = auv_matrix:dim(A),
    {M,1} = auv_matrix:dim(X0),
    {N,1} = auv_matrix:dim(B),
    I = M,
    Epsilon = 1.0e-3,
    At = auv_matrix:trans(A),
    AtB = auv_matrix:mult(At, B),
    %% A very cheap preconditioning. The column norm
    %% takes no time to calculate compared to
    %% AtA above. The iteration time impact is also
    %% very low since it is a matrix multiplication
    %% with a diagonal (i.e. very sparse) matrix.
    %% The preconditioning effect (on the required
    %% number of iterations) is modest, but
    %% cost effective.
    Diag = auv_matrix:row_norm(At),
    M_inv = try [1/V || V <- Diag] of
		Diag_inv ->
		    M_i = auv_matrix:diag(Diag_inv),
		    fun (R_new) ->
			    auv_matrix:mult(M_i, R_new)
		    end
	    catch
		error:badarith ->
		    %% A zero column norm: fall back to no preconditioning.
		    fun (R_new) ->
			    auv_matrix:mult(1, R_new)
		    end
	    end,
    R = auv_matrix:sub(AtB, auv_matrix:mult(At, auv_matrix:mult(A, X0))),
    D = M_inv(R),
    Delta = auv_matrix:mult(auv_matrix:trans(R), D),
    Delta_max = Epsilon*Epsilon*Delta,
    minimize_cg(M_inv, At, A, AtB, Delta_max,
		Delta, I, D, R, X0).
minimize_cg(_, _At, _A, _, _,
_, 0, _D, _, X) ->
?DBG("minimize_cg() sizes were ~p ~p ~p~n",
[auv_matrix:dim(_At), auv_matrix:dim(_A), auv_matrix:dim(_D)]),
{stopped, X};
minimize_cg(_, _At, _A, _, Delta_max,
Delta, _, _D, _, X) when Delta < Delta_max ->
?DBG("minimize_cg() sizes were ~p ~p ~p~n",
[auv_matrix:dim(_At), auv_matrix:dim(_A), auv_matrix:dim(_D)]),
{ok, X};
minimize_cg(M_inv, At, A, AtB, Delta_max,
Delta, I, D, R, X) ->
%% ?DBG("minimize_cg() step ~p Delta=~p~n", [I, Delta]),
P = auv_matrix:mult(A, D),
Alpha = Delta / auv_matrix:mult(auv_matrix:trans(P), P),
X_new = auv_matrix:add(X, auv_matrix:mult(Alpha, D)),
if (I + 5) rem 10 == 0 ->
minimize_cg_3(M_inv, At, A, AtB, Delta_max,
Delta, I, D, X_new);
true ->
minimize_cg_2(M_inv, At, A, AtB, Delta_max,
Delta, I, D, R, X_new, Alpha, P)
end.
minimize_cg_2(M_inv, At, A, AtB, Delta_max,
Delta, I, D, R, X_new, Alpha, P) ->
R_new = auv_matrix:sub(R, auv_matrix:mult(Alpha, auv_matrix:mult(At, P))),
S = M_inv(R_new),
Delta_new = auv_matrix:mult(auv_matrix:trans(R_new), S),
if Delta_new < Delta_max ->
minimize_cg_3(M_inv, At, A, AtB, Delta_max,
Delta, I, D, X_new);
true ->
D_new = auv_matrix:add(S, auv_matrix:mult(Delta_new/Delta, D)),
minimize_cg(M_inv, At, A, AtB, Delta_max,
Delta_new, I-1, D_new, R_new, X_new)
end.
minimize_cg_3(M_inv, At, A, AtB, Delta_max,
Delta, I, D, X_new) ->
?DBG("minimize_cg() recalculating residual ~p~n", [Delta]),
R_new = auv_matrix:sub
(AtB, auv_matrix:mult(At, auv_matrix:mult(A, X_new))),
S = M_inv(R_new),
Delta_new = auv_matrix:mult(auv_matrix:trans(R_new), S),
D_new = auv_matrix:add(S, auv_matrix:mult(Delta_new/Delta, D)),
minimize_cg(M_inv, At, A, AtB, Delta_max,
Delta_new, I-1, D_new, R_new, X_new).
%% Extract the result from vector X and combine it with the
%% pinned points. Re-translate the point identities.
%%
%% Turn the stacked solution vector [U1..Um,V1..Vm] into {ok,[{Vertex,
%% {U,V}}]}: zip the halves, splice the pinned UVs back in at their
%% sorted positions, and translate dense ids back to wings vertex ids.
%% Fix: a disabled ?DBG printout had lost its comment marker (a syntax
%% error); restored as a comment.
lsq_result(X, Lquv, Rdict) ->
    {MM,1} = auv_matrix:dim(X),
    {Ulist, Vlist} = split(auv_matrix:vector(X), MM div 2),
    {[],UVlistR} =
	foldl(
	  fun (U, {[], R}) ->
		  {[], [{U,0.0} | R]};
	      (U, {[V | L], R}) ->
		  {L, [{U,V} | R]};
	      (Other, State) ->
		  throw({error, {?FILE, ?LINE, [Other, State, X]}})
	  end, {Vlist, []}, Ulist),
    UVlist = insert(lists:reverse(UVlistR), Lquv),
    {_, TxMapR} =
	foldl(
	  fun (UV, {Q,R}) ->
		  {Q+1,[{dict:fetch(Q, Rdict),UV} | R]}
	  end, {1,[]}, UVlist),
    TxMap = lists:reverse(TxMapR),
    %% ?DBG("lsq_result - TxMap = ~p~n", [TxMap]),
    {ok, TxMap}.
%% Picks terms with specified indexes from a list.
%%
%% L: list of terms
%% P: list of indexes in ascending order
%%
%% Return: {L_remaining, L_picked}
%%
pick(L, P) when is_list(L), is_list(P) ->
case pick(1, L, P, [], []) of
{_, _} = Ok ->
Ok;
Fault ->
error(Fault, [L, P])
end;
pick(L, P) ->
error(badarg, [L, P]).
pick(_, L, [], R, Q) ->
{lists:reverse(R, L), lists:reverse(Q)};
pick(_, [], _, _, _) ->
badarg;
pick(_, _, [I, J | _], _, _) when I >= J ->
badarg;
pick(I, [V | L], [I | P], R, Q) ->
pick(I+1, L, P, R, [V | Q]);
pick(I, [V | L], P, R, Q) ->
pick(I+1, L, P, [V | R], Q);
pick(_, _, _, _, _) ->
badarg.
%% Insert terms with specified indexes in a list
%%
%% L: List of terms
%% S: List of {Pos,Term} tuples with Term to be
%% inserted at position Pos in L
%%
insert(L, S) when is_list(L), is_list(S) ->
case insert(1, L, S, []) of
R when is_list(R) ->
R;
Fault ->
error(Fault, [L, S])
end;
insert(L, S) ->
error(badarg, [L, S]).
insert(_, L, [], R) ->
lists:reverse(R, L);
insert(_, _, [{I,_}, {J,_} | _], _) when I >= J ->
badarg;
insert(I, L, [{I,E} | S], R) ->
insert(I+1, L, S, [E | R]);
insert(_, [], _, _) ->
badarg;
insert(I, [E | L], S, R) ->
insert(I+1, L, S, [E | R]).
%% Split a list into two after N terms
%%
%% Split L into {FirstN, Rest}; if L has fewer than N elements, Rest
%% is empty. (Clause order preserved from the original, including its
%% behavior for negative N: the whole list is consumed.)
split(L, N) ->
    split_at(L, N, []).

split_at([], _N, Acc) ->
    {lists:reverse(Acc), []};
split_at(L, 0, Acc) ->
    {lists:reverse(Acc), L};
split_at([E|L], N, Acc) ->
    split_at(L, N-1, [E|Acc]).
%% Twice the signed area of the 2D triangle {S1,T1}-{S2,T2}-{S3,T3}
%% (positive for counter-clockwise winding). The is_float/1 guards keep
%% the compiler on the unboxed float arithmetic path.
area2d2({S1,T1},{S2,T2},{S3,T3})
  when is_float(S1),is_float(S2),is_float(S3),
       is_float(T1),is_float(T2),is_float(T3) ->
    DX1 = S2 - S1, DY1 = T2 - T1,
    DX2 = S3 - S1, DY2 = T3 - T1,
    DX1*DY2 - DX2*DY1.
%% 3D triangle area, delegated to e3d_vec:area/3.
area3d(V1, V2, V3) ->
    e3d_vec:area(V1, V2, V3).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Texture metric stretch
%% From 'Texture Mapping Progressive Meshes' by
%% Sander, Snyder, Gortler, Hoppe
%% Per-chart state for the stretch optimizer.
%% Fix: the `v2f` field had been lost from the record declaration (only
%% its trailing comment remained), while #s.v2f is read in stretch_opt/2
%% and stretch_iter2/5; restored.
-record(s,{f2v,   % Face -> vertex ids
	   v2f,   % Vertex -> faces
	   f2a,   % Face(s) 3d area
	   f2ov,  % Face original vertex 3d position
	   bv     % Border vertices
	  }).
-define(MIN_STRETCH, 1.01).
-define(MAX_ITER, 100).
-define(MAX_LEVELS, 6).
-define(VERTEX_STEP, 0.001).
%% stretch_opt(We, OVs) -> We'
%%  Iteratively nudge chart UVs along random search lines to reduce the
%%  L2 texture-stretch metric; OVs are the original 3D vertex positions.
%%  Fix: restored the stripped comment marker on the tuple-layout note.
stretch_opt(We0, OVs) ->
    Fs = wings_we:visible(We0),
    wings_pb:start(?__(1,"optimizing")),
    wings_pb:update(0.01, ?__(2,"initializing")),
    %% {FaceToStretchMean, FaceToStretchWorst, FaceToVerts, VertToFaces, VertToUvs}
    {F2S2,_F2S8,Uvs,State,Scale} = stretch_setup(Fs,We0,OVs),
    V2S = stretch_per_vertex(gb_trees:to_list(State#s.v2f),F2S2,State,gb_trees:empty()),
    S2V = lists:reverse(lists:keysort(2,gb_trees:to_list(V2S))),
    {SUvs0,_F2S2} = wings_pb:done(stretch_iter(S2V,1,V2S,F2S2,Uvs,State)),
    %% Verify
    _Mean2 = model_l2(gb_trees:keys(_F2S2), _F2S2, State#s.f2a,0.0, 0.0),
    io:format(?__(3,"After Stretch sum (mean) ~p")++" ~n", [_Mean2]),
    SUvs1 = gb_trees:to_list(SUvs0),
    %% Undo the normalization scale applied in stretch_setup/3.
    Suvs = [{Id,{S0/Scale,T0/Scale,0.0}} || {Id,{S0,T0}} <- SUvs1],
    We0#we{vp=array:from_orddict(Suvs)}.
%% stretch_setup(Fs, We, OVs) -> {F2S2,F2S8,Uvs,State,Scale}
%% Triangulates the chart, scales the UV coordinates so that total UV
%% area matches total 3D area, and computes the initial per-face L2
%% (mean) and L8 (worst) stretch.  The chart-border vertices are
%% collected into State#s.bv; stretch_per_vertex/4 keeps them pinned.
stretch_setup(Fs, We0, OVs) ->
    Be = wings_face:outer_edges(Fs, We0),
    Bv0 = foldl(fun(Edge, Acc) ->
                        #edge{vs=Vs,ve=Ve} = array:get(Edge, We0#we.es),
                        [Vs,Ve|Acc]
                end, [], Be),
    Bv = gb_sets:from_list(Bv0),
    Tris0 = triangulate(Fs,We0),
    %% Scale factor that equalizes total UV area with total 3D area.
    {S,F2A,F2OV} = calc_scale(Tris0, OVs, 0.0, 0.0, [], []),
    Tris = [{Face,[{Id1,{S1*S,T1*S}},{Id2,{S2*S,T2*S}},{Id3,{S3*S,T3*S}}]} ||
               {Face,[{Id1,{S1,T1}},{Id2,{S2,T2}},{Id3,{S3,T3}}]} <- Tris0],
    {F2S2,F2S8,Uvs,State0} = init_stretch(Tris,F2OV, [], [], [], [], []),
    Worst = model_l8(gb_trees:keys(F2S8), F2S8, 0.0),
    Mean = model_l2(gb_trees:keys(F2S2), F2S2, F2A,0.0, 0.0),
    io:format(?__(1,"Stretch sum (worst) ~p")++" ~n", [Worst]),
    io:format(?__(2,"Stretch sum (mean) ~p")++" ~n", [Mean]),
    {F2S2,F2S8,Uvs,State0#s{f2a=F2A,f2ov=F2OV,bv=Bv},S}.
%% stretch_iter(S2V, Iter, V2S, F2S2, Uvs, State) -> {Uvs',F2S2'}
%% Outer optimization loop.  S2V is the vertex list sorted worst
%% stretch first; iteration continues while the worst value exceeds
%% ?MIN_STRETCH and fewer than ?MAX_ITER passes have run.
stretch_iter(S2V0=[{_,First}|_],I,V2S0,F2S20,Uvs0,State)
  when First > ?MIN_STRETCH, I < ?MAX_ITER ->
    if
        I rem 4 =:= 0 ->
            %% Only refresh the progress bar every fourth pass.
            wings_pb:update(I/?MAX_ITER, ?__(1,"iteration")++" "++integer_to_list(I));
        true ->
            ok
    end,
    {V2S,F2S2,Uvs} = stretch_iter2(S2V0,V2S0,F2S20,Uvs0,State),
    %% Re-sort so the next pass again starts with the worst vertex.
    S2V = lists:reverse(lists:keysort(2, gb_trees:to_list(V2S))),
    stretch_iter(S2V,I+1,V2S,F2S2,Uvs,State);
stretch_iter(_,_,_,F2S2,Uvs,_) ->
    {Uvs,F2S2}.
%% stretch_iter2(S2V, V2S, F2S2, Uvs, State) -> {V2S',F2S2',Uvs'}
%% One pass over all vertices whose stretch exceeds ?MIN_STRETCH.
%% Each vertex is moved along a random line (opt_v/9); when the move
%% helped, the stretch of every vertex sharing a face with it is
%% recomputed.
stretch_iter2([{V,OldVal}|R],V2S0,F2S20,Uvs0,State)
  when OldVal > ?MIN_STRETCH ->
    Line = random_line(),
    #s{f2v=F2Vs,v2f=V2Fs} = State,
    Fs = gb_trees:get(V,V2Fs),
    Val = gb_trees:get(V,V2S0),
    %% ?DBG("~p ~.4f:",[V,Val]),
    {PVal,Uvs,F2S2} = opt_v(Val,0,?VERTEX_STEP,V,Line,Fs,F2S20,Uvs0,State),
    case PVal == Val of
        true ->
            %% No improvement; keep everything unchanged.
            stretch_iter2(R,V2S0,F2S20,Uvs0,State);
        false ->
            %% V moved: refresh the stretch of all vertices belonging
            %% to the faces around V.
            Vs0 = lists:usort(lists:append([gb_trees:get(F,F2Vs)|| F<-Fs])),
            Upd0 = foldl(fun(Vtx, New) ->
                                 [{Vtx,gb_trees:get(Vtx, V2Fs)}|New]
                         end, [], Vs0),
            V2S = stretch_per_vertex(Upd0,F2S2,State,V2S0),
            stretch_iter2(R,V2S,F2S2,Uvs,State)
    end;
stretch_iter2(_,V2S,F2S2,Uvs,_) ->
    {V2S, F2S2, Uvs}.
%% random_line() -> {X,Y}
%% A unit-length random 2D direction, used as the search direction
%% when optimizing a vertex position.
random_line() ->
    Dx = rand:uniform() - 0.5,
    Dy = rand:uniform() - 0.5,
    Norm = math:sqrt(Dx*Dx + Dy*Dy),
    {Dx/Norm, Dy/Norm}.
%% opt_v(CurStretch, Iter, Step, V, Line, Fs, F2S, Uvs, State)
%% Try to move vertex V along the direction Line to reduce the mean
%% stretch of its surrounding faces Fs.  Returns the (possibly
%% unchanged) stretch, the updated UV tree and the updated per-face
%% stretch tree.
opt_v(PVal,I,Step,V,L,Fs,F2S0,Uvs0,_State=#s{f2v=F2Vs,f2ov=F2OV,f2a=F2A}) ->
    UV = gb_trees:get(V, Uvs0),
    %% Gather, for every face around V, its vertices, their current
    %% UVs, the original 3D corner positions, and its 3D area;
    %% F2S1 keeps the faces' current stretch for rollback.
    {Data,F2S1} =
        foldl(fun(Face, {Acc,Fs0}) ->
                      Vs = [V1,V2,V3] = gb_trees:get(Face, F2Vs),
                      {[{Vs,
                         {gb_trees:get(V1,Uvs0),
                          gb_trees:get(V2,Uvs0),
                          gb_trees:get(V3,Uvs0)},
                         gb_trees:get(Face, F2OV),
                         Face,
                         gb_trees:get(Face, F2A)}|Acc],
                       [{Face,gb_trees:get(Face,F2S0)}|Fs0]}
              end, {[],[]}, Fs),
    {Stretch,St,F2S2} = opt_v2(PVal,I,Step,V,UV,L,Data,F2S1),
    case Stretch < PVal of
        true ->
            %% Improvement found: commit the new UV and face stretches.
            F2S = update_fs(F2S2,F2S0),
            {Stretch,gb_trees:update(V,St,Uvs0),F2S};
        false ->
            {PVal,Uvs0,F2S0}
    end.
%% Write the recomputed per-face stretch values into the F2S tree.
%% Every face in the update list must already be present in the tree.
update_fs(Updates, Tree) ->
    lists:foldl(fun({Face,Stretch}, Acc) ->
                        gb_trees:update(Face, Stretch, Acc)
                end, Tree, Updates).
%% opt_v2(PVal, Level, Step, V, UV, Line, Data, FS) -> {Stretch,UV',FS'}
%% Line search for vertex V along direction Line.  While a step
%% improves the stretch, keep stepping from the new position;
%% otherwise alternate between reversing the step (with 0.9 damping)
%% and shrinking it by 10, for at most ?MAX_LEVELS refinement levels.
opt_v2(PVal,I,Step,V,UV={S0,T0},L={X,Y},Data,FS0)
  when I < ?MAX_LEVELS ->
    St = {S0+X*Step,T0+Y*Step},
    {Stretch,FS} = calc_stretch(V,Data,St,0.0,0.0,[]),
    if
        Stretch < PVal ->
            %% io:format(">"),
            %% Better: accept St and continue in the same direction.
            opt_v2(Stretch,I,Step,V,St,L,Data,FS);
        (I rem 2) == 0 ->
            %% io:format("S"),
            %% Worse: try the opposite direction, slightly damped.
            opt_v2(PVal,I+1,-Step*0.9,V,UV,L,Data,FS0);
        true ->
            %% io:format("<"),
            %% Still worse: refine with a much smaller step.
            NewStep = Step/10,
            opt_v2(PVal,I+1,NewStep,V,UV,L,Data,FS0)
    end;
opt_v2(PVal,_I,_Step,_V,St,_L,_,FS) ->
    %% io:format("~n"),
    {PVal,St,FS}.
%% calc_stretch(V, Data, UV, MeanAcc, AreaAcc, FS) ->
%%     {MeanStretch, [{Face,Stretch}]}
%% Area-weighted mean L2 stretch of the faces around vertex V when V
%% is placed at UV.  The three head clauses select which corner of
%% each triangle V occupies, substituting UV for that corner's
%% texture coordinate.
calc_stretch(V,[{[V,_,_],{_,UV2,UV3},{Q1,Q2,Q3},Face,FA}|R],UV1,Mean,Area,FS) ->
    S = l2(UV1,UV2,UV3,Q1,Q2,Q3),
    calc_stretch(V,R,UV1,S*S*FA+Mean,FA+Area,[{Face,S}|FS]);
calc_stretch(V,[{[_,V,_],{UV1,_,UV3},{Q1,Q2,Q3},Face,FA}|R],UV2,Mean,Area,FS) ->
    S = l2(UV1,UV2,UV3,Q1,Q2,Q3),
    calc_stretch(V,R,UV2,S*S*FA+Mean,FA+Area,[{Face,S}|FS]);
calc_stretch(V,[{[_,_,V],{UV1,UV2,_},{Q1,Q2,Q3},Face,FA}|R],UV3,Mean,Area,FS) ->
    S = l2(UV1,UV2,UV3,Q1,Q2,Q3),
    calc_stretch(V,R,UV3,S*S*FA+Mean,FA+Area,[{Face,S}|FS]);
calc_stretch(_,[],_,Mean,Area,FS) ->
    {math:sqrt(Mean/Area),reverse(FS)}.
%% stretch_per_vertex(VertFaces, F2S, State, Acc) -> gb_tree()
%% Compute the area-weighted mean stretch for each {Vertex,Faces}
%% entry, skipping chart-border vertices (State#s.bv) so they stay
%% pinned during optimization.
stretch_per_vertex([{V,Fs}|Rest], F2S, #s{bv=Bv,f2a=F2A}=State, Acc0) ->
    Acc = case gb_sets:is_member(V, Bv) of
              true ->
                  Acc0;
              false ->
                  Stretch = model_l2(Fs, F2S, F2A, 0.0, 0.0),
                  gb_trees:enter(V, Stretch, Acc0)
          end,
    stretch_per_vertex(Rest, F2S, State, Acc);
stretch_per_vertex([], _, _, Acc) ->
    Acc.
%% init_stretch(Tris, F2OV, F2S2, F2S8, F2Vs, V2Fs, UVs) ->
%%     {F2S2tree, F2S8tree, UvTree, #s{}}
%% Build the initial lookup structures from the triangulated chart:
%% per-face L2 and L8 stretch, face -> [vertices], vertex -> [faces]
%% (via a sofs relation-to-family conversion) and the UV tree.
init_stretch([{Face,FUvs=[{Id1,P1},{Id2,P2},{Id3,P3}]}|R],
             Ovs,F2S2,F2S8,F2Vs,V2Fs,UVs) ->
    {Q1,Q2,Q3} = gb_trees:get(Face,Ovs),
    S2 = l2(P1,P2,P3,Q1,Q2,Q3),
    S8 = l8(P1,P2,P3,Q1,Q2,Q3),
    init_stretch(R,Ovs, [{Face,S2}|F2S2],[{Face,S8}|F2S8],
                 [{Face, [Id1,Id2,Id3]}|F2Vs],
                 [{Id1,Face},{Id2,Face},{Id3,Face}|V2Fs],
                 FUvs ++ UVs);
init_stretch([],_,F2S2,F2S8,F2Vs,V2Fs0,Uvs) ->
    %% Group the {Vertex,Face} pairs into Vertex -> [Faces].
    V2Fs1 = sofs:relation(V2Fs0),
    V2Fs2 = sofs:relation_to_family(V2Fs1),
    V2Fs = sofs:to_external(V2Fs2),
    {gb_trees:from_orddict(lists:sort(F2S2)),
     gb_trees:from_orddict(lists:sort(F2S8)),
     gb_trees:from_orddict(lists:usort(Uvs)),
     #s{f2v = gb_trees:from_orddict(lists:sort(F2Vs)),
        v2f = gb_trees:from_orddict(V2Fs)}}.
%% calc_scale(Tris, OVs, Area2D, Area3D, F2A, F2OVs) ->
%%     {Scale, F2Atree, F2OVtree}
%% Accumulate total 2D (UV) and 3D triangle areas and return the
%% scale factor sqrt(Area3D/Area2D) together with face -> 3D-area and
%% face -> original-corner-positions trees.
calc_scale([{Face,[{Id1,P1},{Id2,P2},{Id3,P3}]}|R], Ovs, A2D, A3D,F2A,F2OVs) ->
    A2 = abs(area2d2(P1,P2,P3)/2),
    Q1 = array:get(Id1,Ovs),
    Q2 = array:get(Id2,Ovs),
    Q3 = array:get(Id3,Ovs),
    A3 = area3d(Q1,Q2,Q3),
    calc_scale(R,Ovs,A2+A2D,A3+A3D,[{Face,A3}|F2A],[{Face,{Q1,Q2,Q3}}|F2OVs]);
calc_scale([],_Ovs,A2D,A3D,F2A,F2OVs) ->
    {math:sqrt(A3D/A2D),
     gb_trees:from_orddict(lists:sort(F2A)),
     gb_trees:from_orddict(lists:sort(F2OVs))}.
%% model_l8(Faces, F2S8, Worst) -> Worst'
%% Maximum ("worst case", L-infinity) stretch over the given faces.
model_l8([Face|R], F2S8, Worst) ->
    FVal = gb_trees:get(Face,F2S8),
    New = if FVal > Worst ->
                  %% ?DBG("Face ~p has worst ~p~n", [Face,FVal]),
                  FVal;
             true ->
                  Worst
          end,
    model_l8(R,F2S8,New);
model_l8([], _, Worst) -> Worst.
%% model_l2(Faces, F2S2, F2A, MeanAcc, AreaAcc) -> float()
%% Area-weighted L2 stretch norm over Faces:
%%   sqrt(sum(S_f^2 * A_f) / sum(A_f))
%% with per-face stretch S_f taken from F2S2 and 3D area A_f from F2A.
model_l2([Face|Faces], F2S2, F2A, MeanAcc, AreaAcc) ->
    Stretch = gb_trees:get(Face, F2S2),
    case gb_trees:get(Face, F2A) of
        FaceArea when is_float(Stretch), is_float(FaceArea) ->
            model_l2(Faces, F2S2, F2A,
                     Stretch*Stretch*FaceArea + MeanAcc,
                     AreaAcc + FaceArea)
    end;
model_l2([], _, _, MeanAcc, AreaAcc) ->
    math:sqrt(MeanAcc/AreaAcc).
%% l2(UV1, UV2, UV3, Q1, Q2, Q3) -> float()
%% L2 (RMS) stretch of one triangle.  {Si,Ti} are the 2D texture
%% coordinates, {Qix,Qiy,Qiz} the corresponding 3D positions.
%% A and C are the squared norms of the (area-scaled) partial
%% derivatives of the 3D position w.r.t. s and t; the result is
%% sqrt((A+C)/(2*DoubleArea^2)).  Degenerate UV triangles
%% (DoubleArea =< 1.0e-8) get a huge penalty value so the optimizer
%% steers away from them.
l2({S1,T1}, {S2,T2}, {S3,T3},
   {Q1x,Q1y,Q1z}, {Q2x,Q2y,Q2z}, {Q3x,Q3y,Q3z})
  when is_float(S1), is_float(S2), is_float(S3),
       is_float(T1), is_float(T2), is_float(T3),
       is_float(Q1x), is_float(Q1y), is_float(Q1z),
       is_float(Q2x), is_float(Q2y), is_float(Q2z),
       is_float(Q3x), is_float(Q3y), is_float(Q3z) ->
    T23 = T2-T3, T31 = T3-T1, T12 = T1-T2,
    S32 = S3-S2, S13 = S1-S3, S21 = S2-S1,
    case S21*T31-S13*T12 of
        DoubleArea when DoubleArea > 0.00000001 ->
            SX = Q1x*T23+Q2x*T31+Q3x*T12,
            SY = Q1y*T23+Q2y*T31+Q3y*T12,
            SZ = Q1z*T23+Q2z*T31+Q3z*T12,
            A = SX*SX+SY*SY+SZ*SZ,
            TX = Q1x*S32+Q2x*S13+Q3x*S21,
            TY = Q1y*S32+Q2y*S13+Q3y*S21,
            TZ = Q1z*S32+Q2z*S13+Q3z*S21,
            C = TX*TX+TY*TY+TZ*TZ,
            math:sqrt((A+C)/(2.0*DoubleArea*DoubleArea));
        _ ->
            9999999999.9
    end.
%% l8(UV1, UV2, UV3, Q1, Q2, Q3) -> float()
%% Worst-case (L-infinity) stretch of one triangle: the largest
%% singular value of the mapping's Jacobian, computed from
%% A = Ss.Ss, B = Ss.St and C = St.St.  Degenerate UV triangles get a
%% huge penalty value, as in l2/6.
l8(P1,P2,P3,Q1,Q2,Q3) -> %% Worst stretch value
    A2 = area2d2(P1,P2,P3),
    if A2 > 0.00000001 ->
            SS = ss(P1,P2,P3,Q1,Q2,Q3,A2),
            ST = st(P1,P2,P3,Q1,Q2,Q3,A2),
            A = e3d_vec:dot(SS,SS),
            B = e3d_vec:dot(SS,ST),
            C = e3d_vec:dot(ST,ST),
            math:sqrt(0.5*((A+C)+math:sqrt((A-C)*(A-C)+4*B*B)));
       true ->
            9999999999.9
    end.
%% ss(UV1, UV2, UV3, Q1, Q2, Q3, A) -> {X,Y,Z}
%% Partial derivative dQ/ds of the affine map from UV space to 3D for
%% one triangle, where A is the doubled (signed) UV area.  Only the T
%% coordinates of the UVs are needed.
ss({_,T1},{_,T2},{_,T3},{Q1x,Q1y,Q1z},{Q2x,Q2y,Q2z},{Q3x,Q3y,Q3z},A)
  when is_float(T1),is_float(T2),is_float(T3),
       is_float(Q1x),is_float(Q1y),is_float(Q1z),
       is_float(Q2x),is_float(Q2y),is_float(Q2z),
       is_float(Q3x),is_float(Q3y),is_float(Q3z) ->
    T23 = T2-T3, T31 = T3-T1, T12 = T1-T2,
    {(Q1x*T23+Q2x*T31+Q3x*T12)/A,
     (Q1y*T23+Q2y*T31+Q3y*T12)/A,
     (Q1z*T23+Q2z*T31+Q3z*T12)/A}.
%% st(UV1, UV2, UV3, Q1, Q2, Q3, A) -> {X,Y,Z}
%% Partial derivative dQ/dt of the affine map from UV space to 3D for
%% one triangle, where A is the doubled (signed) UV area.  Only the S
%% coordinates of the UVs are needed.
st({S1,_},{S2,_},{S3,_},{Q1x,Q1y,Q1z},{Q2x,Q2y,Q2z},{Q3x,Q3y,Q3z},A)
  when is_float(S1),is_float(S2),is_float(S3),
       is_float(Q1x),is_float(Q1y),is_float(Q1z),
       is_float(Q2x),is_float(Q2y),is_float(Q2z),
       is_float(Q3x),is_float(Q3y),is_float(Q3z) ->
    S32 = S3-S2, S13 = S1-S3, S21 = S2-S1,
    {(Q1x*S32+Q2x*S13+Q3x*S21)/A,
     (Q1y*S32+Q2y*S13+Q3y*S21)/A,
     (Q1z*S32+Q2z*S13+Q3z*S21)/A}.
%% triangulate(Fs, We) -> [{Face,[{V,{X,Y}}]}]
%% Triangulate the chart faces and return, for every resulting
%% triangle, its three vertices with their positions (z dropped by
%% get_face_vspos/3).
triangulate(Fs,We) ->
    TriWe = wings_tesselation:triangulate(Fs, We),
    %% Original faces plus the faces created by the triangulation.
    TriFs = Fs ++ wings_we:new_items_as_ordset(face, We, TriWe),
    get_face_vspos(TriFs,TriWe, []).
%% get_face_vspos(Faces, We, Acc) -> [{Face,[{V,{X,Y}}]}]
%% For each (already triangulated) face collect its three vertices
%% and their positions with the z coordinate dropped.  Fails loudly
%% when a face is not a triangle, since that indicates a
%% triangulation bug.
get_face_vspos([Face|Fs], We, Tris) ->
    Vs0 = wpa:face_vertices(Face, We),
    Vs1 = [{V,wings_vertex:pos(V,We)} || V <- Vs0],
    if length(Vs0) == 3 ->
            Vs2 = [{Vid, {Vx, Vy}} || {Vid,{Vx,Vy,_}} <- Vs1],
            get_face_vspos(Fs,We,[{Face, Vs2}|Tris]);
       true ->
            io:format(?__(1,"Error: Face isn't triangulated ~p with ~p vertices")++"~n",
                      [Face, Vs1]),
            error({triangulation_bug, [Face, Vs1]})
    end;
get_face_vspos([], _, Tris) ->
    Tris.
| null | https://raw.githubusercontent.com/bjorng/wings/0ebe43abfcb094344c015589d8cd072b643d80ed/plugins_src/autouv/auv_mapping.erl | erlang |
%% auv_mapping.erl --
%% The UV parametrisation algorithms.
%% See the file "license.terms" for information on usage and redistribution
%% of this file, and for a DISCLAIMER OF ALL WARRANTIES.
%% $Id$
Least Square Conformal Maps %%%%%%%%%%%%
Algorithms based on the paper,
'Least Square Conformal Maps for Automatic Texture Generation Atlas'
Algorithms based on the paper:
An Introduction to
by
mentioned, in the paper:
Incomplete Factorization Preconditioning
for Linear Least Squares Problems
by
implemented the lot.
Internal exports.
Debug entry points
old
debug libigl
io:format("Res: ~p ~n",[{Center,Axes}]),
io:format("Tagged ~w~n",[gb_sets:to_list(Tagged)]),
Mostly a test for slim initialization
?DBG("LSQ res ~p~n", [Vs2]),
Map border edges to circle positions
Skip these
Hmm we must take them in order
O(N2) I know..
and a link between them.
_ defg
onmdekji => link: def
% DBG
lists:map(fun({E,_BE}) -> E end,L).
Reorder so that the pinned vertices are longest from each other
NOTE: Order is important below
[Loop1,(Loop2),(Link1),(Link2)]),
io:format("Link:~w~n",[d(Link)]),
reverse(Rev); %% Correctness asserted below
wings_util:profile_start(fprof),
wings_util:profile_stop(fprof),
- Circumference/100,
?DBG("Testing ~p ~p ~p ~p ~p~n", [{S2,C1},Dlen,{Len+Clen,HCC}, Best, BVs]),
BE = auv_util:outer_edges(Fs,We,false),
Alg. found in comp.graphics.algorithms faq
To be correct it needs the polygons to flat but we
correct .
Least Square Conformal Maps %%%%%%%%%%%%
Must be sorted for pick() and insert() to work.
Build the basic submatrixes
Compile the basic submatrixes into the ones related to
free points (Mf*) i.e unknown,
and pinned points (Mp*).
Compose the matrix and vector to solve
for a Least Squares solution.
Do a real triangulation, might be optimized later.
this can happen e.g. in a bevel/extrude without offset
Levy's c-code
Vector2 z01 = z1 - z0 ;
Vector2 z02 = z2 - z0 ;
double a = z01.x ;
double b = z01.y ;
double d = z02.y ;
// 2*id + 1 --> v
// Real part
nlBegin(NL_ROW) ;
nlCoefficient(v0_id, b-d) ;
nlCoefficient(u1_id, -c) ;
nlCoefficient(v1_id, d) ;
nlCoefficient(u2_id, a) ;
nlEnd(NL_ROW) ;
// Imaginary part
nlBegin(NL_ROW) ;
nlCoefficient(u0_id, -b+d) ;
nlCoefficient(v0_id, -a+c) ;
nlCoefficient(u1_id, -d) ;
nlCoefficient(v1_id, -c) ;
nlCoefficient(v2_id, a) ;
nlEnd(NL_ROW) ;
}
Clean this mess up
Extract the vector of previously unknown points,
and insert the pinned points. Re-translate the
original point identities.
Build column lists of the M matrixes
[ M2 M1 ]
_ _ 2
Minimize || A x - b ||
t _ t _
by solving A A x = A b
_ _ 2
Minimize || A x - b ||
-1 t _ -1 t _
by solving M A A x = M A b
__
iteration start vector.
A very cheap preconditioning. The column norm
takes no time to calculate compared to
very low since it is a matrix multiplication
with a diagonal (i.e. very sparse) matrix.
The preconditioning effect (on the required
number of iterations) is modest, but
cost effective.
?DBG("minimize_cg() step ~p Delta=~p~n", [I, Delta]),
pinned points. Re-translate the point identities.
Picks terms with specified indexes from a list.
L: list of terms
P: list of indexes in ascending order
Return: {L_remaining, L_picked}
Insert terms with specified indexes in a list
L: List of terms
S: List of {Pos,Term} tuples with Term to be
inserted at position Pos in L
Texture metric stretch
Face 2 vertex id
Face(s) 3d area
Face original vertex 3d position
Border vertices
Verify
?DBG("~p ~.4f:",[V,Val]),
io:format(">"),
io:format("S"),
io:format("<"),
io:format("~n"),
Worst stretch value | Copyright ( c ) 2002 - 2011 , ,
( now probably totally ruined by me or )
by , , ,
Presented on Siggraph 2002
The Conjugate Gradient Method ( trad )
the Conjugate Gradient Method
Without the Agonizing Pain
, March 7 , 1994
The Column Norm Preconditioning was stumbled upon , just briefly
, 1994
All credits about the LSQCM implementation goes to , who
-module(auv_mapping).
-export([stretch_opt/2, fs_area/2, area2d2/3,area3d/3, calc_area/3]).
-export([map_chart/3, projectFromChartNormal/2, chart_normal/2]).
-export([model_l2/5]).
find_pinned/2,
split_edges_1/2,
loop_to_circle/1
]).
-include_lib("wings/src/wings.hrl").
-include("auv.hrl").
-include_lib("wings/e3d/e3d.hrl").
-import(lists, [foldl/3,reverse/1]).
%% map_chart(Type, We, Options) -> [{V,{U,V,0.0}}] | {error,Msg}
%% Entry point for UV-mapping one chart.  Rejects charts whose border
%% is unusable (a closed surface has no border loop; a cut of a
%% single edge gives a two-entry loop) before dispatching to the
%% selected mapping algorithm via map_chart_1/5.
%% Fix: removed the duplicated word in the "closed surface" error
%% message ("into into" -> "into").
map_chart(Type, We, Options) ->
    Faces = wings_we:visible(We),
    case catch auv_placement:group_edge_loops(Faces, We) of
        [] ->
            {error,?__(1,"A closed surface cannot be mapped. "
                       "(Either divide it into two or more charts, "
                       "or cut it along some edges.)")};
        [{_,[_,_]}] ->
            {error,?__(2,"A cut in a closed surface must consist of at least two edges.")};
        _ when Type == lsqcm, is_list(Options), length(Options) < 2 ->
            %% LSQCM needs at least two pinned vertices.
            {error,?__(3,"At least 2 vertices (per chart) must be selected")};
        [Best|_] ->
            map_chart_1(Type, Faces, Best, Options, We);
        Err ->
            ?dbg(?__(4,"Error:")++" ~p~n", [Err]),
            {error, ?__(5,"Error, try to cleanup objects before uv-mapping")}
    end.
map_chart_1(Type, Chart, Loop, Options, We) ->
try map_chart_2(Type, Chart, Loop, Options, We)
catch error:{badarith,_} ->
{error,?__(1,"Numeric problem, probably a bad face with an empty area.")};
throw:What ->
{error,lists:flatten(What)};
_:Reason:ST ->
Msg = ?__(2,"Error: try to cleanup objects before uv-mapping"),
?dbg("~p:~p "++?__(3,"Error")++" ~p~n ~p ~n",
[?MODULE,?LINE,Reason,ST]),
{error,lists:flatten(Msg)}
end.
map_chart_2(project, C, _, _, We) -> projectFromChartNormal(C, We);
map_chart_2(camera, C, _, Dir, We) -> projectFromCamera(C, Dir, We);
map_chart_2(lsqcm, C, Loop, Pinned, We) ->
case get(auv_use_erlang_impl) == true
orelse erlang:system_info(wordsize) =:= 4 of
true ->
false ->
lscm(C, Pinned, Loop, We)
end;
map_chart_2(harmonic, C, Loop, _Pinned, We) ->
map_chart_2(slim, C, Loop, _Pinned, We) ->
slim(C, Loop, We);
map_chart_2(Op, C, Loop, Pinned, We) ->
volproject(Op, C, Pinned, Loop, We).
volproject(Type,Chart,_Pinned,{_,BEdges},We) ->
{Center,Axes,LoopInfo} = find_axes(Chart,BEdges,We),
Rot = rot_mat(Axes),
CalcUV = case Type of
cyl -> fun cyl/1;
sphere -> fun sphere/1
end,
Vs0 = wings_face:to_vertices(Chart, We),
Transform = fun(V) ->
Pos = wings_vertex:pos(V, We),
Vec = e3d_vec:sub(Pos,Center),
e3d_mat:mul_vector(Rot,Vec)
end,
Vs1 = lists:sort([{V,Transform(V)} || V <- Vs0]),
Tagged = leftOrRight(LoopInfo, Chart, We#we{vp=array:from_orddict(Vs1)}),
[{V,fix_positions(V,Pos,CalcUV(Pos),Tagged)} || {V,Pos} <- Vs1].
sphere({X,Y,Z}) ->
S = catchy(catch math:atan2(X,Z)/math:pi()),
T = math:acos(clamp(-Y))/math:pi()-0.5,
{S,T,0.0}.
cyl({X,Y,Z}) ->
S = catchy(catch math:atan2(X,Z)/math:pi()),
T = Y,
{S,T,0.0}.
projectFromChartNormal(Chart, We) ->
Normal = chart_normal(Chart,We),
Vs0 = wings_face:to_vertices(Chart, We),
rotate_to_z(Vs0, Normal, We).
projectFromCamera(Chart,{matrices,{MM,PM,VP}},We) ->
Vs = wings_face:to_vertices(Chart, We),
Proj = fun(V) ->
Pos = wings_vertex:pos(V, We),
{S,T, _} = e3d_transform:project(Pos,MM,PM,VP),
{V,{S,T,0.0}}
end,
lists:map(Proj, Vs).
harmonic(Chart, Loop, We0) ->
{BorderVs0,BorderUVs} = loop_to_circle(Loop),
{_TriWe,_TriFs,Vs,Fs,WeVs2Vs,Vs2WeVs} = init_mappings(Chart,We0),
BorderVs = [maps:get(V, WeVs2Vs) || V <- BorderVs0],
UVs0 = libigl:harmonic(Vs, Fs, BorderVs, BorderUVs),
UVs = remap_uvs(UVs0, Vs2WeVs),
UVs.
slim(Chart, Loop, We0) ->
{BorderVs0,BorderUVs} = loop_to_circle(Loop),
{_TriWe,_TriFs,Vs,Fs,WeVs2Vs,Vs2WeVs} = init_mappings(Chart,We0),
BorderVs = [maps:get(V, WeVs2Vs) || V <- BorderVs0],
UVInit = libigl:harmonic(Vs, Fs, BorderVs, BorderUVs),
UVs0 = libigl:slim(Vs,Fs,UVInit, symmetric_dirichlet, 0.00001),
UVs = remap_uvs(UVs0, Vs2WeVs),
UVs.
lscm(Fs, none, Loop, We) ->
lscm(Fs,find_pinned(Loop,We),Loop,We);
lscm(Fs0, Pinned, _Loop, We0) ->
{TriWe,TriFs,Vs,Fs,WeVs2Vs,Vs2WeVs} = init_mappings(Fs0,We0),
{BIndx,BPos} = split_pinned(Pinned, WeVs2Vs, [], []),
case libigl:lscm(Vs, Fs, BIndx, BPos) of
false ->
?dbg("Fs: ~p~n",[Fs0]),
?dbg("Pinned: ~p~n",[Pinned]),
?dbg("Loop: ~p~n",[_Loop]),
throw(?__(1, "Couldn't calculate uv-coords for chart"));
{error, Reason} ->
?dbg("Error: ~p", [Reason]),
throw(?__(2, "Math error"));
UVs0 ->
UVs = remap_uvs(UVs0, Vs2WeVs),
OrigArea = fs_area(TriFs, TriWe, 0.0),
MappedArea = fs_area(TriFs, TriWe#we{vp=array:from_orddict(UVs)}, 0.0),
scaleVs(UVs, math:sqrt(OrigArea/MappedArea))
end.
lsqcm(Fs, none, Loop, We) ->
lsqcm(Fs,find_pinned(Loop,We),Loop,We);
lsqcm(Fs, Pinned, _Loop, We) ->
?DBG("Project and tri ~n", []),
LSQState = lsq_setup(Fs,We,Pinned),
{ok,Vs2} = lsq(LSQState, Pinned),
Patch = fun({Idt, {Ut,Vt}}) -> {Idt,{Ut,Vt,0.0}} end,
Vs3 = lists:sort(lists:map(Patch, Vs2)),
TempVs = array:from_orddict(Vs3),
Area = fs_area(Fs, We, 0.0),
MappedArea = fs_area(Fs, We#we{vp=TempVs}, 0.0),
Scale = Area/MappedArea,
scaleVs(Vs3,math:sqrt(Scale)).
-spec loop_to_circle({TotDist::float(), [BEs::#be{}]}) -> {[integer()], [{float(),float()}]}.
loop_to_circle({TotDist, BEs}) ->
loop_to_circle(BEs, 0.0, TotDist, [], []).
loop_to_circle([#be{vs=V, dist=D}|BEs], Curr, Tot, Vs, UVs) ->
Pi2 = 2.0*math:pi(),
Frac = Pi2*(1-Curr/Tot),
loop_to_circle(BEs, Curr+D, Tot, [V|Vs], [{math:cos(Frac),math:sin(Frac)}|UVs]);
loop_to_circle([], _, _, Vs, UVs) ->
{Vs, UVs}.
%% Turn a caught atan2 failure ({'EXIT',_} from a catch) into a safe
%% fallback angle of pi/4; pass any other value through unchanged.
catchy({'EXIT', _}) -> math:pi()/4;
catchy(Ok) -> Ok.
%% Clamp a number to the closed interval [-1.0, 1.0].
clamp(V) when V > 1.0 -> 1.0;
clamp(V) when V < -1.0 -> -1.0;
clamp(V) -> V.
fix_positions(_V,{_,_,Z},Proj,_) when Z > 0.0 -> Proj;
fix_positions(V,_,Proj = {X,Y,Z},Tags) ->
case gb_sets:is_member(V,Tags) of
true when X > 0.0 ->
{X-2.0,Y,Z};
false when X < 0.0 ->
{X+2.0,Y,Z};
_ ->
Proj
end.
leftOrRight({LL,LR}, Free0, We) ->
Del = fun(#be{face=F},{Fs,Ch}) -> {[F|Fs],gb_sets:delete_any(F,Ch)} end,
{F1,Free1} = foldl(Del,{[],gb_sets:from_list(Free0)},LL),
{F2,Free} = foldl(Del,{[],Free1},LR),
[Fs1,Fs2] = expand_faces([F1,F2],Free,[],[F1,F2],[],We),
Set1 = wings_vertex:from_faces(Fs1,We),
Set2 = wings_vertex:from_faces(Fs2,We),
case wings_vertex:center(Set1,We) > wings_vertex:center(Set2,We) of
true -> gb_sets:from_ordset(Set2);
false -> gb_sets:from_ordset(Set1)
end.
expand_faces([Fs0|Rest],Free0,New,[Set|Acc1],Tot,We) ->
{NewFs,Free} = foldl(fun(Face, A) ->
do_face_more(Face, We, A)
end, {[],Free0}, Fs0),
expand_faces(Rest,Free,[NewFs|New],Acc1,[NewFs++Set|Tot],We);
expand_faces([],Free,New,[],Tot,We) ->
case gb_sets:is_empty(Free) of
true -> Tot;
false -> expand_faces(reverse(New),Free,[],reverse(Tot),[],We)
end.
do_face_more(Face, We, Acc) ->
wings_face:fold(fun(_,_,#edge{lf=LF,rf=RF},P={A1,Free}) ->
AFace = if LF == Face -> RF; true -> LF end,
case gb_sets:is_member(AFace,Free) of
true ->
{[AFace|A1],
gb_sets:delete(AFace,Free)};
false ->
P
end
end, Acc, Face,We).
rot_mat({{Ux,Uy,Uz},{Vx,Vy,Vz},{Wx,Wy,Wz}}) ->
{Ux,Vx,Wx,
Uy,Vy,Wy,
Uz,Vz,Wz,
0.0,0.0,0.0}.
find_axes(Fs,BEdges,We) ->
ChartNormal = chart_normal(Fs,We),
case forms_closed_object(BEdges,ChartNormal,We) of
undefined ->
throw(
?__(1,"I currently can't sphere/cylinder map this type of chart/cuts,\n"
"I can't figure out which axes you want as X,Y, and Z,\n"
"please use unfolding or one of the projection mappings."));
find_axes_from_eigenv(Fs , , BEdges , We ) ;
Nice ->
Nice
end.
forms_closed_object(BEdges0,ChartNormal,We=#we{name=#ch{emap=Emap}}) ->
BEdges = [{auv_segment:map_edge(Edge,Emap),BE} || BE = #be{edge=Edge} <- BEdges0],
case is_an_8(BEdges, false) of
false -> undefined;
Edge ->
{North,South,Link,LinkR} = split_edges(Edge,BEdges,We),
NorthSouth = e3d_vec:sub(North,South),
Center = e3d_vec:average(North,South),
io : format("Temp : ~p ~n",[{North , South , Center } ] ) ,
LC = center(Link,We),
LCdir0 = e3d_vec:sub(LC,Center),
LCdir = case e3d_vec:len(LCdir0) > 0.0005 of
true -> LCdir0;
false -> e3d_vec:neg(ChartNormal)
end,
{Center,calc_axis(NorthSouth,LCdir),{Link,LinkR}}
end.
center(Bes,We) ->
Eds = lists:map(fun(#be{edge=E}) -> E end, Bes),
Vs = wings_vertex:from_edges(Eds,We),
wings_vertex:center(Vs,We).
calc_axis(Y0,Z0) ->
Y = e3d_vec:norm(Y0),
X = e3d_vec:norm(e3d_vec:cross(e3d_vec:norm(Z0),Y)),
Z = e3d_vec:norm(e3d_vec:cross(X,Y)),
{X,Y,Z}.
is_an_8([],E) ->
E;
is_an_8(R, E);
false -> is_an_8(R,HaveRemoved);
_ when HaveRemoved =/= false ->
E;
_ ->
case reverse(R) of
[{E,_}|R2] ->
is_an_8(reverse(R2), E);
_ -> E
end
end.
Split edges splits into three parts two loops
\_/--\_| = > 2 loops : mnoabc fghijk
getEs(L) ->
lists:map(fun({_E,BE}) -> BE end,L).
split_edges(Edge,Bes,We) ->
{L1,L2,Link} = split_edges_1(Edge,Bes),
North = case L1 of
[] -> wings_vertex:pos((hd(Link))#be.vs,We);
_ -> center(L1,We)
end,
South = case L2 of
[] -> wings_vertex:pos((lists:last(Link))#be.ve,We);
_ -> center(L2,We)
end,
LinkR = (((getEs(Bes) -- L1) -- L2) -- Link),
{North,South,Link,LinkR}.
split_edges_1(Edge,Bes) ->
io : format("Split : , d(Bes ) ] ) ,
{Before,BE1,After} = find_split(Edge,Bes,[]),
{LeftLoop0,BE2,RightLoop0} = find_split(Edge,After,[BE1]),
LeftLoop = LeftLoop0 ++ [BE2],
RightLoop = reverse(RightLoop0 ++ Before),
{Loop1,Link1} = find_link(LeftLoop, reverse(LeftLoop), []),
{Loop2,Link2} = find_link(RightLoop, reverse(RightLoop), []),
io : format("L1:~w ~ nL2:~w ~ nLink1:~w ~ nLink2:~w ~ n ~ n " ,
Link = reorder_link(Link2++reverse(Link1)),
{getEs(Loop1),getEs(Loop2),getEs(Link)}.
%% find_split(Edge, BorderEdges, Acc) -> {Before, Match, After}
%% Scan the {Edge,BE} list for the entry keyed by Edge and return the
%% elements before it (in original order), the matching entry, and
%% the elements after it.
find_split(Edge, [{Edge,_}=Found | After], Before) ->
    {lists:reverse(Before), Found, After};
find_split(Edge, [Other | Rest], Before) ->
    find_split(Edge, Rest, [Other | Before]).
find_link([{E,_}|_],[{E,_}|_],Link = [{E,_}|_]) ->
{[],Link};
find_link([G={E,_}|C1],[{E,_}|C2],Link) ->
find_link(C1,C2,[G|Link]);
find_link(C1,_,Link) ->
find_loop(C1,Link,[]).
find_loop([{E,_}|_],[{E,_}|_]=Link, Loop) ->
{Loop,Link};
find_loop([G|C1],Link,Loop) ->
find_loop(C1,Link,[G|Loop]);
find_loop([],[],Loop) -> {Loop,[]};
find_loop([],Link,[]) -> {[],Link}.
reorder_link([]) -> [];
reorder_link(A=[_]) -> A;
reorder_link(Ok = [{_,#be{ve=V}},{_,#be{vs=V}}|_]) -> Ok;
reorder_link(Rev = [{_,#be{vs=V}},{_,#be{ve=V}}|_]) ->
reorder_link(reverse(Rev));
reorder_link(Other) ->
io:format("Other: ~w~n",[Other]),
exit(internal_error).
init_mappings(Fs0,We0) ->
#we{vp=Vtab} = TriWe = wings_tesselation:triangulate(Fs0, We0),
Fs1 = Fs0 ++ wings_we:new_items_as_ordset(face,We0,TriWe),
Add = fun(V,_,_,{N, Vs,Face, ToNew, ToOld}) ->
case maps:get(V, ToNew, undefined) of
undefined ->
Pos = array:get(V, Vtab),
{N+1, [Pos|Vs], [N|Face], maps:put(V, N, ToNew), [{N,V}|ToOld]};
New ->
{N, Vs, [New|Face], ToNew, ToOld}
end
end,
{Vs,NewFs,ToNew,ToOld} = setup_maps(Fs1, TriWe, Add, 0, [], [], maps:new(), []),
{TriWe, Fs1, Vs, NewFs, ToNew, ToOld}.
setup_maps([F|Fs], We, Add, N0, Vs0, AFs, ToNew0, ToOld0) ->
{N, Vs,RFace,ToNew,ToOld} = wings_face:fold(Add, {N0, Vs0, [], ToNew0, ToOld0}, F, We),
setup_maps(Fs, We, Add, N, Vs, [RFace|AFs], ToNew, ToOld);
setup_maps([], _We, _, _, Vs0, AFs, ToNew, ToOld) ->
{lists:reverse(Vs0), AFs, ToNew, maps:from_list(ToOld)}.
remap_uvs(UVs0, Vs2WeVs) ->
Remap = fun({U,V}, N) -> {{maps:get(N, Vs2WeVs),{U,V,0.0}},N+1} end,
{UVs1,_} = lists:mapfoldl(Remap, 0, UVs0),
lists:sort(UVs1).
%% split_pinned(Pinned, WeVs2Vs, Ids, Positions) -> {Ids', Positions'}
%% Split the pinned-vertex list into two parallel lists: the vertex
%% ids remapped through the WeVs2Vs map, and their pinned UV
%% positions.  Both results keep the input order.
split_pinned([{WeV,Pos}|Pinned], WeVs2Vs, Ids, Positions) ->
    split_pinned(Pinned, WeVs2Vs,
                 [maps:get(WeV, WeVs2Vs)|Ids], [Pos|Positions]);
split_pinned([], _, Ids, Positions) ->
    {lists:reverse(Ids), lists:reverse(Positions)}.
%% Scale the U and V coordinates of every {Id,{U,V,0.0}} entry by the
%% given factor; the zero z coordinate is preserved.
scaleVs(VUVs, Scale) ->
    [{Id, {U*Scale, V*Scale, 0.0}} || {Id,{U,V,0.0}} <- VUVs].
find_pinned({Circumference, BorderEdges}, We) ->
Vs = [array:get(V1, We#we.vp) || #be{vs=V1} <- BorderEdges],
Center = e3d_vec:average(Vs),
AllC = lists:map(fun(#be{vs=Id}) ->
Pos = array:get(Id, We#we.vp),
Dist = e3d_vec:dist(Pos, Center),
{Dist, Id, Pos}
end, BorderEdges),
[{_,V0,_V1Pos}|_] = lists:reverse(lists:sort(AllC)),
BE1 = reorder_edge_loop(V0, BorderEdges, []),
{V1, V2} = find_pinned(BE1, BE1, 0.0, HalfCC, HalfCC, undefined),
[{V1,{0.0,0.0}},{V2,{1.0,1.0}}].
find_pinned(Curr=[#be{vs=C1,dist=Clen}|CR],Start=[#be{ve=S2,dist=Slen}|SR],Len,HCC,Best,BVs) ->
Dlen = HCC-(Clen+Len),
ADlen = abs(Dlen),
if
Dlen >= 0.0 ->
if ADlen < Best ->
find_pinned(CR,Start,Clen+Len,HCC,ADlen,{S2,C1});
true ->
find_pinned(CR,Start,Clen+Len,HCC,Best,BVs)
end;
Dlen < 0.0 ->
if ADlen < Best ->
find_pinned(Curr,SR,Len-Slen,HCC, ADlen,{S2,C1});
true ->
find_pinned(Curr,SR,Len-Slen,HCC,Best,BVs)
end
end;
find_pinned([], _, _, _, _Best, Bvs) ->
? ~p ~p ~ n " , [ _ Best , Bvs ] ) ,
Bvs.
reorder_edge_loop(V1, [Rec=#be{vs=V1}|Ordered], Acc) ->
Ordered ++ lists:reverse([Rec|Acc]);
reorder_edge_loop(V1, [H|Tail], Acc) ->
reorder_edge_loop(V1, Tail, [H|Acc]).
Utils
chart_normal([],_We) -> throw(?__(1,"Can not calculate normal for chart."));
chart_normal(Fs,We = #we{es=Etab}) ->
CalcNormal = fun(Face,Area) -> face_normal(Face,Area,We) end,
N0 = foldl(CalcNormal, e3d_vec:zero(), Fs),
case e3d_vec:norm(N0) of
Bad normal
[{_,BE}|_] = auv_placement:group_edge_loops(Fs,We),
EdgeNormals =
fun(#be{edge=Edge}, Sum0) ->
#edge{lf=LF,rf=RF} = array:get(Edge, Etab),
Sum1 = CalcNormal(LF,Sum0),
CalcNormal(RF,Sum1)
end,
N1 = foldl(EdgeNormals, e3d_vec:zero(), BE),
case e3d_vec:norm(N1) of
Bad normal Fallback2
NewFs = decrease_chart(Fs,BE),
chart_normal(NewFs, We);
N -> e3d_vec:neg(N)
end;
N -> N
end.
face_normal(Face,Sum,We) ->
Normal = wings_face:normal(Face, We),
Vs0 = wpa:face_vertices(Face, We),
Area = calc_area(Vs0,Normal, We),
e3d_vec:add(Sum, e3d_vec:mul(Normal, Area)).
decrease_chart(Fs0,BE) ->
Fs1 = gb_sets:from_list(Fs0),
Del = fun(#be{face=Face},FSin) ->
gb_sets:del_element(Face,FSin)
end,
Fs = foldl(Del, Fs1, BE),
gb_sets:to_list(Fs).
rotate_to_z(Vs, Normal, We) ->
Rot = e3d_mat:rotate_s_to_t(Normal,{0.0,0.0,1.0}),
[{V,e3d_mat:mul_point(Rot, wings_vertex:pos(V, We))} || V <- Vs].
fs_area(Fs,We) ->
fs_area(Fs,We,0.0).
fs_area([Face|Rest],We,Area) ->
Vs0 = wpa:face_vertices(Face, We),
NewArea = try
Normal = wings_face:normal(Face, We),
calc_area(Vs0, Normal, We)
catch _:_ ->
0.0
end,
fs_area(Rest,We,NewArea+Area);
fs_area([],_,Area) ->
Area.
calc_area(Vs0, Normal, We) ->
[V|Vs] = [wings_vertex:pos(V, We) || V <- Vs0],
Sum = sum_crossp([V|Vs] ++ [V], e3d_vec:zero()),
0.5 * abs(e3d_vec:dot(Normal, Sum)).
sum_crossp([V1,V2|Vs], Acc) ->
Cross = e3d_vec:cross(V1,V2),
sum_crossp([V2|Vs], e3d_vec:add(Acc, Cross));
sum_crossp([_Last], Acc) ->
Acc.
-record(lsq,{a,x0,ap,temp1,temp2,dr}).
lsq_setup(Fs,We,Pinned) ->
{M,N,D,DR,L1,L2} = lsq_init(Fs,We,Pinned),
{Lquv0,{Np,Usum,Vsum}} =
lists:mapfoldl(
fun({P,{U,V} = UV}, {I,X,Y}) ->
{ok,Q} = dict:find(P, D),
{{Q,UV}, {I+1,X+U,Y+V}}
end,{0,0.0,0.0},Pinned),
?DBG("lsq_int - Lquv = ~p~n",[Lquv]),
M1 = Re(M ) , M2 = Im(M ) , M2n = -M2
{M1,M2,M2n} = build_basic(M,L1,L2),
{Mfp1c,Mfp2c,Mfp2nc,LuLv} = build_cols(M1,M2,M2n,Lquv),
?DBG("lsq_int - LuLv = ~p~n", [LuLv]),
{Af,Ap} = build_matrixes(N,Mfp1c,Mfp2c,Mfp2nc),
?DBG("Solving matrices~n", []),
X0Fix = auv_matrix:vector(lists:duplicate(M-Np, Usum/Np)++
lists:duplicate(M-Np, Vsum/Np)),
#lsq{a=Af,x0=X0Fix,ap=Ap,temp1=LuLv,temp2=Lquv,dr=DR}.
lsq_init(Fs0,We0,Pinned0) ->
We = wings_tesselation:triangulate(Fs0, We0),
Fs = Fs0 ++ wings_we:new_items_as_ordset(face,We0,We),
Pinned = gb_trees:from_orddict(lists:sort(Pinned0)),
lsq_init_fs(Fs,Pinned,We,{0,dict:new(),dict:new()},0,[],[]).
lsq_init_fs([F|Fs],P,We = #we{vp=Vtab},Ds0,N,Re0,Im0) ->
Vs = [[A0|_],[B0|_],[C0|_]] = wings_va:face_attr([vertex|uv], F, We),
{[A,B,C],Ds} = update_dicts(Vs,Ds0),
{ X1 = Z0x , Y1 = Z0y , X2 = Z1x , Y2 = Z1y , X3 = Z2x , Y3 = Z2y } =
{X1,Y1,X2,Y2,X3,Y3} =
project_tri(array:get(A0,Vtab),array:get(B0,Vtab),
array:get(C0,Vtab)),
Raimos old solution .
SqrtDT0 = try math:sqrt(abs((X2-X1)*(Y3-Y1)-(Y2-Y1)*(X3-X1)))
catch _:_ -> 0.000001
end,
true -> SqrtDT0
end,
W1re = X3-X2, W1im = Y3-Y2,
W2re = X1-X3, W2im = Y1-Y3,
W3re = X2-X1, W3im = Y2-Y1,
Re=[[{A,W1re/SqrtDT},{B,W2re/SqrtDT},{C,W3re/SqrtDT}]|Re0],
Im=[[{A,W1im/SqrtDT},{B,W2im/SqrtDT},{C,W3im/SqrtDT}]|Im0],
double c = z02.x ;
assert(b = = 0.0 ) ;
// Note : 2*id + 0 -- > u
int u0_id = 2*id0 ;
int v0_id = 2*id0 + 1 ;
int u1_id = 2*id1 ;
int v1_id = 2*id1 + 1 ;
int u2_id = 2*id2 ;
int v2_id = 2*id2 + 1 ;
// Note : b = 0
nlCoefficient(u0_id , -a+c ) ;
lsq_init_fs(Fs,P,We,Ds,N+1,Re,Im);
lsq_init_fs([],_,_We,{M,D,DR},N,Re0,Im0) ->
{M,N,D,DR,vecs(M,Re0,[]),vecs(M,Im0,[])}.
vecs(M,[R|Rs],Acc) ->
vecs(M,Rs,[auv_matrix:vector(M,R)|Acc]);
vecs(_,[],Acc) -> Acc.
update_dicts(Ids,{N,D,DR}) ->
update_dicts(Ids,N,D,DR,[]).
update_dicts([[P|_]|Rest],N,D,DR,Acc) ->
case dict:find(P,D) of
error ->
N1 = N+1,
update_dicts(Rest,N1,dict:store(P,N1,D),dict:store(N1,P,DR),[N1|Acc]);
{ok,Id} ->
update_dicts(Rest,N,D,DR,[Id|Acc])
end;
update_dicts([],N,D,DR,Acc) ->
{lists:reverse(Acc),{N,D,DR}}.
project_tri(P0,P1,P2) ->
L = e3d_vec:sub(P1,P0),
X = e3d_vec:norm(L),
T = e3d_vec:sub(P2,P0),
Z = e3d_vec:norm(e3d_vec:cross(X,T)),
Y = e3d_vec:cross(Z,X),
{0.0,0.0,
e3d_vec:len(L),0.0,
e3d_vec:dot(T,X), e3d_vec:dot(T,Y)}.
lsq(L, Lpuv) when is_list(Lpuv) ->
lsq(L, Lpuv, env);
lsq(Name, Method) when is_atom(Method) ->
{ok, [{L, Lpuv}]} = file:consult(Name),
lsq(L, Lpuv, Method).
lsq(L, Lpuv, Method0) when is_record(L,lsq), is_list(Lpuv), is_atom(Method0) ->
Method = case Method0 of
env ->
case os:getenv("WINGS_AUTOUV_SOLVER") of
"ge" -> ge;
"cg" -> cg;
"cg_jacobian" -> cg_jacobian;
"cg_colnorm" -> cg_colnorm;
_ -> cg_colnorm
end;
M -> M
end,
try lsq_int(L, Lpuv, Method)
catch
error:badarg:ST ->
error(badarg, {[L,Lpuv,Method],ST})
end;
lsq(L, Lpuv, Method) ->
error(badarg, [L, Lpuv, Method]).
lsq_int(#lsq{a=Af,x0=X0,ap=Ap,temp1=LuLv,temp2=Lquv,dr=Rdict},_Pinned,Method) ->
{Np,K_LuLv} = keyseq_neg(LuLv),
U = auv_matrix:vector(Np, K_LuLv),
?DBG("build_matrixes - U = ~p~n", [U]),
B = auv_matrix:mult(Ap, U),
X = case Method of
ge -> minimize_ge(Af,B);
_ ->
{_,X1} = minimize_cg(Af,X0,B),
X1
end,
? ~ n " , [ X ] ) ,
lsq_result(X, Lquv, Rdict).
build_basic(M,L1,L2) ->
M1 = auv_matrix:rows(M, L1),
M2 = auv_matrix:rows(M, L2),
M2n = auv_matrix:rows(M, [auv_matrix:mult(-1, X) || X <- L2]),
{M1,M2,M2n}.
build_cols(M1,M2,M2n,Lquv) ->
M1c = auv_matrix:cols(M1),
M2c = auv_matrix:cols(M2),
M2nc = auv_matrix:cols(M2n),
Split the column lists into free ( ) and pinned ( Mp )
is sorted
{pick(M1c, Lq),pick(M2c, Lq),pick(M2nc, Lq), Lu++Lv}.
%% split_quv([{Q,{U,V}}]) -> {Qs, Us, Vs}
%% Unzip the pinned-point list into point ids, U coordinates and V
%% coordinates, preserving the input order.
split_quv(Lquv) ->
    split_quv(Lquv, [], [], []).

split_quv([], Qs, Us, Vs) ->
    {lists:reverse(Qs), lists:reverse(Us), lists:reverse(Vs)};
split_quv([{Q,{U,V}} | Rest], Qs, Us, Vs) ->
    split_quv(Rest, [Q | Qs], [U | Us], [V | Vs]).
build_matrixes(N,{Mf1c,Mp1c},{Mf2c,Mp2c},{Mf2nc,Mp2nc}) ->
Build the matrixes Af and , and vector B
A = [ M1 -M2 ] , B = Ap U , U is vector of pinned points
Afu = auv_matrix:cols(N, Mf1c++Mf2nc),
Afl = auv_matrix:cols(N, Mf2c++Mf1c),
Af = auv_matrix:cat_rows(Afu, Afl),
Apu = auv_matrix:cols(N, Mp1c++Mp2nc),
Apl = auv_matrix:cols(N, Mp2c++Mp1c),
Ap = auv_matrix:cat_rows(Apu, Apl),
{Af, Ap}.
%% keyseq_neg(Values) -> {N, [{Index,-Value}]}
%% Number the values 1..N, negating each; also returns the count N.
keyseq_neg(Values) ->
    keyseq(1, Values, []).

keyseq(Next, [], Acc) ->
    {Next-1, lists:reverse(Acc)};
keyseq(Next, [Value | Values], Acc) ->
    keyseq(Next+1, Values, [{Next,-Value} | Acc]).
using Gaussian Elimination and back substitution .
minimize_ge(A, B) ->
AA = mk_solve_matrix(A, B),
AAA = auv_matrix:reduce(AA),
? DBG("Reduced : ~p ~ n " , [ AAA ] ) ,
X = auv_matrix:backsubst(AAA),
?DBG("Solved~n",[]),
X.
mk_solve_matrix(Af,B) ->
AfT = auv_matrix:trans(Af),
AfTAf = auv_matrix:mult_trans(AfT, AfT),
AfTB = auv_matrix:mult(-1, auv_matrix:mult(AfT, B)),
auv_matrix:cat_cols(AfTAf, AfTB).
%% Minimize using the Preconditioned Conjugate Gradient method with a
%% Jacobi (diagonal) preconditioner. (Header comment markers were
%% lost in the dump; restored, and "Coujugate" typo fixed.)
minimize_cg(A, X0, B) ->
    ?DBG("minimize_cg - dim A=~p X0=~p B=~p~n",
         [auv_matrix:dim(A), auv_matrix:dim(X0), auv_matrix:dim(B)]),
    {N,M} = auv_matrix:dim(A),
    {M,1} = auv_matrix:dim(X0),
    {N,1} = auv_matrix:dim(B),
    I = M,
    Epsilon = 1.0e-3,
    At = auv_matrix:trans(A),
    AtB = auv_matrix:mult(At, B),
    %% NOTE(review): preconditioner built from the row norms of At
    %% rather than forming AtA above; the iteration time impact is
    %% also small (original comment garbled — wording reconstructed).
    Diag = auv_matrix:row_norm(At),
    M_inv = try [1/V || V <- Diag] of
                Diag_inv ->
                    M_i = auv_matrix:diag(Diag_inv),
                    fun (R_new) ->
                            auv_matrix:mult(M_i, R_new)
                    end
            catch
                error:badarith ->
                    %% A zero row norm: fall back to the identity
                    %% preconditioner.
                    fun (R_new) ->
                            auv_matrix:mult(1, R_new)
                    end
            end,
    R = auv_matrix:sub(AtB, auv_matrix:mult(At, auv_matrix:mult(A, X0))),
    D = M_inv(R),
    Delta = auv_matrix:mult(auv_matrix:trans(R), D),
    Delta_max = Epsilon*Epsilon*Delta,
    minimize_cg(M_inv, At, A, AtB, Delta_max,
                Delta, I, D, R, X0).
%% Iteration driver for the preconditioned conjugate gradient loop.
%% Clause 1: iteration budget exhausted -> {stopped, X}.
%% Clause 2: residual norm Delta below threshold -> {ok, X}.
%% Clause 3: one CG step; every ~10th iteration the residual is
%% recomputed from scratch (minimize_cg_3) to limit drift.
minimize_cg(_, _At, _A, _, _,
            _, 0, _D, _, X) ->
    ?DBG("minimize_cg() sizes were ~p ~p ~p~n",
         [auv_matrix:dim(_At), auv_matrix:dim(_A), auv_matrix:dim(_D)]),
    {stopped, X};
minimize_cg(_, _At, _A, _, Delta_max,
            Delta, _, _D, _, X) when Delta < Delta_max ->
    ?DBG("minimize_cg() sizes were ~p ~p ~p~n",
         [auv_matrix:dim(_At), auv_matrix:dim(_A), auv_matrix:dim(_D)]),
    {ok, X};
minimize_cg(M_inv, At, A, AtB, Delta_max,
            Delta, I, D, R, X) ->
    P = auv_matrix:mult(A, D),
    %% Step length along search direction D.
    Alpha = Delta / auv_matrix:mult(auv_matrix:trans(P), P),
    X_new = auv_matrix:add(X, auv_matrix:mult(Alpha, D)),
    if (I + 5) rem 10 == 0 ->
            minimize_cg_3(M_inv, At, A, AtB, Delta_max,
                          Delta, I, D, X_new);
       true ->
            minimize_cg_2(M_inv, At, A, AtB, Delta_max,
                          Delta, I, D, R, X_new, Alpha, P)
    end.
%% Cheap CG step: update the residual incrementally
%% (R_new = R - Alpha*At*P). Falls back to a full residual
%% recomputation (minimize_cg_3) when Delta_new dips below the
%% convergence threshold, to confirm convergence is genuine.
minimize_cg_2(M_inv, At, A, AtB, Delta_max,
              Delta, I, D, R, X_new, Alpha, P) ->
    R_new = auv_matrix:sub(R, auv_matrix:mult(Alpha, auv_matrix:mult(At, P))),
    S = M_inv(R_new),
    Delta_new = auv_matrix:mult(auv_matrix:trans(R_new), S),
    if Delta_new < Delta_max ->
            minimize_cg_3(M_inv, At, A, AtB, Delta_max,
                          Delta, I, D, X_new);
       true ->
            %% Polak-style direction update with ratio Delta_new/Delta.
            D_new = auv_matrix:add(S, auv_matrix:mult(Delta_new/Delta, D)),
            minimize_cg(M_inv, At, A, AtB, Delta_max,
                        Delta_new, I-1, D_new, R_new, X_new)
    end.
%% Expensive CG step: recompute the residual from scratch
%% (R_new = AtB - At*A*X_new) to cancel accumulated floating point
%% drift, then continue iterating with an updated direction.
minimize_cg_3(M_inv, At, A, AtB, Delta_max,
              Delta, I, D, X_new) ->
    ?DBG("minimize_cg() recalculating residual ~p~n", [Delta]),
    R_new = auv_matrix:sub
              (AtB, auv_matrix:mult(At, auv_matrix:mult(A, X_new))),
    S = M_inv(R_new),
    Delta_new = auv_matrix:mult(auv_matrix:trans(R_new), S),
    D_new = auv_matrix:add(S, auv_matrix:mult(Delta_new/Delta, D)),
    minimize_cg(M_inv, At, A, AtB, Delta_max,
                Delta_new, I-1, D_new, R_new, X_new).
%% Extract the result from vector X and combine it with the pinned
%% points (Lquv), then map the solution back to the original vertex
%% ids via Rdict. Returns {ok, [{VertexId,{U,V}}]}.
lsq_result(X, Lquv, Rdict) ->
    {MM,1} = auv_matrix:dim(X),
    %% First half of X are U coordinates, second half V coordinates.
    {Ulist, Vlist} = split(auv_matrix:vector(X), MM div 2),
    {[],UVlistR} =
        foldl(
          fun (U, {[], R}) ->
                  %% Sparse V ran out: pad with 0.0.
                  {[], [{U,0.0} | R]};
              (U, {[V | L], R}) ->
                  {L, [{U,V} | R]};
              (Other, State) ->
                  throw({error, {?FILE, ?LINE, [Other, State, X]}})
          end, {Vlist, []}, Ulist),
    %% Re-insert the pinned UVs at their original positions.
    UVlist = insert(lists:reverse(UVlistR), Lquv),
    {_, TxMapR} =
        foldl(
          fun (UV, {Q,R}) ->
                  {Q+1,[{dict:fetch(Q, Rdict),UV} | R]}
          end, {1,[]}, UVlist),
    TxMap = lists:reverse(TxMapR),
    %% Restored: this ?DBG line was garbled in the source dump.
    ?DBG("lsq_result - TxMap = ~p~n", [TxMap]),
    {ok, TxMap}.
%% pick(L, P) -> {Rest, Picked}
%% Pick the elements of L at the 1-based positions in P (which must
%% be strictly increasing). Returns the remaining elements and the
%% picked ones, both in original order. Faults with badarg otherwise.
pick(L, P) when is_list(L), is_list(P) ->
    case pick(1, L, P, [], []) of
        {_, _} = Ok ->
            Ok;
        Fault ->
            error(Fault, [L, P])
    end;
pick(L, P) ->
    error(badarg, [L, P]).

%% Pos is the current 1-based position; Rest/Picked are reversed
%% accumulators. Returns badarg instead of raising so the wrapper
%% can attach the original arguments to the error.
pick(_, L, [], Rest, Picked) ->
    {lists:reverse(Rest, L), lists:reverse(Picked)};
pick(_, [], _, _, _) ->
    badarg;
pick(_, _, [I, J | _], _, _) when I >= J ->
    badarg;
pick(Pos, [V | L], [Pos | P], Rest, Picked) ->
    pick(Pos+1, L, P, Rest, [V | Picked]);
pick(Pos, [V | L], P, Rest, Picked) ->
    pick(Pos+1, L, P, [V | Rest], Picked);
pick(_, _, _, _, _) ->
    badarg.
%% insert(L, S) -> list()
%% Insert the elements of S = [{Pos,Elem}] into L so that each Elem
%% ends up at 1-based position Pos of the result. Positions must be
%% strictly increasing. Faults with badarg otherwise.
insert(L, S) when is_list(L), is_list(S) ->
    case insert(1, L, S, []) of
        R when is_list(R) ->
            R;
        Fault ->
            error(Fault, [L, S])
    end;
insert(L, S) ->
    error(badarg, [L, S]).

%% Pos is the output position about to be produced; Acc is the
%% reversed result so far.
insert(_, L, [], Acc) ->
    lists:reverse(Acc, L);
insert(_, _, [{I,_}, {J,_} | _], _) when I >= J ->
    badarg;
insert(Pos, L, [{Pos,E} | S], Acc) ->
    insert(Pos+1, L, S, [E | Acc]);
insert(_, [], _, _) ->
    badarg;
insert(Pos, [E | L], S, Acc) ->
    insert(Pos+1, L, S, [E | Acc]).
%% Split a list into two after N terms. (The description line above
%% was a comment whose %% marker was lost in the dump; restored.)
%% Returns {FirstN, Rest}; if L is shorter than N the second part
%% is [].
split(L, N) ->
    split(L, N, []).

split([], _, R) ->
    {lists:reverse(R), []};
split(L, 0, R) ->
    {lists:reverse(R), L};
split([E | L], N, R) ->
    split(L, N-1, [E | R]).
%% Twice the signed area of the 2D triangle {S1,T1},{S2,T2},{S3,T3}
%% (positive for counter-clockwise vertex order).
area2d2({S1,T1}, {S2,T2}, {S3,T3})
  when is_float(S1), is_float(S2), is_float(S3),
       is_float(T1), is_float(T2), is_float(T3) ->
    Ds2 = S2-S1, Dt3 = T3-T1,
    Ds3 = S3-S1, Dt2 = T2-T1,
    Ds2*Dt3 - Ds3*Dt2.
%% Area of the 3D triangle V1,V2,V3 (delegates to e3d_vec:area/3).
area3d(V1, V2, V3) ->
    e3d_vec:area(V1, V2, V3).
%% From 'Texture Mapping Progressive Meshes' by
%% Sander, Snyder, Gortler and Hoppe.
%% NOTE(review): the -record head was lost in the source dump; the
%% field set below is reconstructed from the #s{...} uses in this
%% file (f2v, v2f, f2a, f2ov, bv) — confirm against upstream.
-record(s,
        {f2v,    % face -> [vertex id]
         v2f,    % vertex -> [face]  ("Vertex 2 faces")
         f2a,    % face -> 3D area
         f2ov,   % face -> original 3D vertex positions
         bv}).   % gb_set of boundary vertices
%% Stop optimizing a vertex once its stretch is below this factor.
-define(MIN_STRETCH, 1.01).
%% Upper bound on global optimization iterations (see stretch_iter).
-define(MAX_ITER, 100).
%% Levels of step refinement in the per-vertex line search (opt_v2).
-define(MAX_LEVELS, 6).
%% Initial step length when moving a vertex along the search line.
-define(VERTEX_STEP, 0.001).
%% Optimize the UV chart of We0 by minimizing the L2 geometric
%% stretch (Sander et al.); OVs are the original 3D vertex
%% positions. Returns We0 with updated UV coordinates.
stretch_opt(We0, OVs) ->
    Fs = wings_we:visible(We0),
    wings_pb:start(?__(1,"optimizing")),
    wings_pb:update(0.01, ?__(2,"initializing")),
    %% {FaceToStretchMean, FaceToStretchWorst, FaceToVerts,
    %%  VertToFaces, VertToUvs}  (comment marker restored)
    {F2S2,_F2S8,Uvs,State,Scale} = stretch_setup(Fs,We0,OVs),
    V2S = stretch_per_vertex(gb_trees:to_list(State#s.v2f),F2S2,State,gb_trees:empty()),
    %% Worst-stretch vertices first.
    S2V = lists:reverse(lists:keysort(2,gb_trees:to_list(V2S))),
    {SUvs0,_F2S2} = wings_pb:done(stretch_iter(S2V,1,V2S,F2S2,Uvs,State)),
    _Mean2 = model_l2(gb_trees:keys(_F2S2), _F2S2, State#s.f2a,0.0, 0.0),
    io:format(?__(3,"After Stretch sum (mean) ~p")++" ~n", [_Mean2]),
    SUvs1 = gb_trees:to_list(SUvs0),
    %% Undo the area scaling applied in stretch_setup.
    Suvs = [{Id,{S0/Scale,T0/Scale,0.0}} || {Id,{S0,T0}} <- SUvs1],
    We0#we{vp=array:from_orddict(Suvs)}.
%% Precompute everything the stretch optimizer needs: boundary
%% vertex set, triangulation, the 2D/3D area scale factor, and the
%% initial per-face stretch tables (L2 and L-inf).
stretch_setup(Fs, We0, OVs) ->
    Be = wings_face:outer_edges(Fs, We0),
    %% Vertices on the chart boundary; these are never moved.
    Bv0 = foldl(fun(Edge, Acc) ->
                        #edge{vs=Vs,ve=Ve} = array:get(Edge, We0#we.es),
                        [Vs,Ve|Acc]
                end, [], Be),
    Bv = gb_sets:from_list(Bv0),
    Tris0 = triangulate(Fs,We0),
    %% Scale UVs so total 2D area matches total 3D area.
    {S,F2A,F2OV} = calc_scale(Tris0, OVs, 0.0, 0.0, [], []),
    Tris = [{Face,[{Id1,{S1*S,T1*S}},{Id2,{S2*S,T2*S}},{Id3,{S3*S,T3*S}}]} ||
               {Face,[{Id1,{S1,T1}},{Id2,{S2,T2}},{Id3,{S3,T3}}]} <- Tris0],
    {F2S2,F2S8,Uvs,State0} = init_stretch(Tris,F2OV, [], [], [], [], []),
    Worst = model_l8(gb_trees:keys(F2S8), F2S8, 0.0),
    Mean = model_l2(gb_trees:keys(F2S2), F2S2, F2A,0.0, 0.0),
    io:format(?__(1,"Stretch sum (worst) ~p")++" ~n", [Worst]),
    io:format(?__(2,"Stretch sum (mean) ~p")++" ~n", [Mean]),
    {F2S2,F2S8,Uvs,State0#s{f2a=F2A,f2ov=F2OV,bv=Bv},S}.
%% Outer optimization loop: keep optimizing while the worst vertex
%% stretch (First) exceeds ?MIN_STRETCH and fewer than ?MAX_ITER
%% iterations have run. Returns {Uvs, F2S2}.
stretch_iter(S2V0=[{_,First}|_],I,V2S0,F2S20,Uvs0,State)
  when First > ?MIN_STRETCH, I < ?MAX_ITER ->
    %% Update the progress bar every 4th iteration only.
    if
        I rem 4 =:= 0 ->
            wings_pb:update(I/?MAX_ITER, ?__(1,"iteration")++" "++integer_to_list(I));
        true ->
            ok
    end,
    {V2S,F2S2,Uvs} = stretch_iter2(S2V0,V2S0,F2S20,Uvs0,State),
    %% Re-sort vertices, worst stretch first.
    S2V = lists:reverse(lists:keysort(2, gb_trees:to_list(V2S))),
    stretch_iter(S2V,I+1,V2S,F2S2,Uvs,State);
stretch_iter(_,_,_,F2S2,Uvs,_) ->
    {Uvs,F2S2}.
%% Optimize every vertex whose stretch exceeds ?MIN_STRETCH along a
%% fresh random search line; after a successful move, refresh the
%% per-vertex stretch of all vertices sharing a face with V.
stretch_iter2([{V,OldVal}|R],V2S0,F2S20,Uvs0,State)
  when OldVal > ?MIN_STRETCH ->
    Line = random_line(),
    #s{f2v=F2Vs,v2f=V2Fs} = State,
    Fs = gb_trees:get(V,V2Fs),
    Val = gb_trees:get(V,V2S0),
    {PVal,Uvs,F2S2} = opt_v(Val,0,?VERTEX_STEP,V,Line,Fs,F2S20,Uvs0,State),
    case PVal == Val of
        true ->
            %% No improvement: tables unchanged.
            stretch_iter2(R,V2S0,F2S20,Uvs0,State);
        false ->
            %% All vertices of the surrounding faces are affected.
            Vs0 = lists:usort(lists:append([gb_trees:get(F,F2Vs)|| F<-Fs])),
            Upd0 = foldl(fun(Vtx, New) ->
                                 [{Vtx,gb_trees:get(Vtx, V2Fs)}|New]
                         end, [], Vs0),
            V2S = stretch_per_vertex(Upd0,F2S2,State,V2S0),
            stretch_iter2(R,V2S,F2S2,Uvs,State)
    end;
stretch_iter2(_,V2S,F2S2,Uvs,_) ->
    {V2S, F2S2, Uvs}.
%% Return a pseudo-random 2D unit vector {X,Y}, used as the search
%% direction for the per-vertex line search.
random_line() ->
    DX = rand:uniform() - 0.5,
    DY = rand:uniform() - 0.5,
    Norm = math:sqrt(DX*DX + DY*DY),
    {DX/Norm, DY/Norm}.
%% Line-search vertex V along direction L, trying to reduce the
%% stretch of its surrounding faces Fs. Returns the (possibly
%% unchanged) stretch value, UV table and per-face stretch table.
opt_v(PVal,I,Step,V,L,Fs,F2S0,Uvs0,_State=#s{f2v=F2Vs,f2ov=F2OV,f2a=F2A}) ->
    UV = gb_trees:get(V, Uvs0),
    %% Per surrounding face: its vertices, their current UVs, the
    %% original 3D positions, the face id and its 3D area; plus the
    %% face's current stretch value.
    {Data,F2S1} =
        foldl(fun(Face, {Acc,Fs0}) ->
                      Vs = [V1,V2,V3] = gb_trees:get(Face, F2Vs),
                      {[{Vs,
                         {gb_trees:get(V1,Uvs0),
                          gb_trees:get(V2,Uvs0),
                          gb_trees:get(V3,Uvs0)},
                         gb_trees:get(Face, F2OV),
                         Face,
                         gb_trees:get(Face, F2A)}|Acc],
                       [{Face,gb_trees:get(Face,F2S0)}|Fs0]}
              end, {[],[]}, Fs),
    {Stretch,St,F2S2} = opt_v2(PVal,I,Step,V,UV,L,Data,F2S1),
    case Stretch < PVal of
        true ->
            %% Improvement: commit the new UV and face stretches.
            F2S = update_fs(F2S2,F2S0),
            {Stretch,gb_trees:update(V,St,Uvs0),F2S};
        false ->
            {PVal,Uvs0,F2S0}
    end.
%% Write the updated per-face stretch values into the F2S gb_tree.
%% Every Face must already be present in the tree.
update_fs(Updates, F2S) ->
    lists:foldl(fun({Face, S}, Acc) ->
                        gb_trees:update(Face, S, Acc)
                end, F2S, Updates).
%% Step vertex V along line L={X,Y}, reversing and shrinking the
%% step up to ?MAX_LEVELS refinement levels. Returns the best
%% stretch found, its UV position, and the per-face stretch list.
opt_v2(PVal,I,Step,V,UV={S0,T0},L={X,Y},Data,FS0)
  when I < ?MAX_LEVELS ->
    St = {S0+X*Step,T0+Y*Step},
    {Stretch,FS} = calc_stretch(V,Data,St,0.0,0.0,[]),
    if
        Stretch < PVal ->
            %% Keep stepping in this direction while it improves.
            opt_v2(Stretch,I,Step,V,St,L,Data,FS);
        (I rem 2) == 0 ->
            %% On even levels, reverse direction (slightly damped).
            opt_v2(PVal,I+1,-Step*0.9,V,UV,L,Data,FS0);
        true ->
            %% Otherwise refine with a 10x smaller step.
            NewStep = Step/10,
            opt_v2(PVal,I+1,NewStep,V,UV,L,Data,FS0)
    end;
opt_v2(PVal,_I,_Step,_V,St,_L,_,FS) ->
    {PVal,St,FS}.
%% Recompute the stretch of every face around V assuming V moves to
%% the given UV. The three clauses select which corner of the face
%% V occupies. Accumulates area-weighted squared stretch (Mean) and
%% total area; returns the RMS stretch and the per-face list.
calc_stretch(V,[{[V,_,_],{_,UV2,UV3},{Q1,Q2,Q3},Face,FA}|R],UV1,Mean,Area,FS) ->
    S = l2(UV1,UV2,UV3,Q1,Q2,Q3),
    calc_stretch(V,R,UV1,S*S*FA+Mean,FA+Area,[{Face,S}|FS]);
calc_stretch(V,[{[_,V,_],{UV1,_,UV3},{Q1,Q2,Q3},Face,FA}|R],UV2,Mean,Area,FS) ->
    S = l2(UV1,UV2,UV3,Q1,Q2,Q3),
    calc_stretch(V,R,UV2,S*S*FA+Mean,FA+Area,[{Face,S}|FS]);
calc_stretch(V,[{[_,_,V],{UV1,UV2,_},{Q1,Q2,Q3},Face,FA}|R],UV3,Mean,Area,FS) ->
    S = l2(UV1,UV2,UV3,Q1,Q2,Q3),
    calc_stretch(V,R,UV3,S*S*FA+Mean,FA+Area,[{Face,S}|FS]);
calc_stretch(_,[],_,Mean,Area,FS) ->
    {math:sqrt(Mean/Area),reverse(FS)}.
%% Compute the area-weighted stretch of each vertex over its faces.
%% Boundary vertices (members of Bv) are skipped — they are pinned
%% and never optimized. Results accumulate into the Tree gb_tree.
stretch_per_vertex([{V,Fs}|R],F2S,State=#s{bv=Bv,f2a=F2A},Tree) ->
    case gb_sets:is_member(V,Bv) of
        false ->
            Res = model_l2(Fs,F2S,F2A,0.0,0.0),
            stretch_per_vertex(R,F2S,State,gb_trees:enter(V,Res,Tree));
        true ->
            stretch_per_vertex(R,F2S,State,Tree)
    end;
stretch_per_vertex([], _, _,Acc) ->
    Acc.
%% Build the initial lookup tables from the triangle list:
%% per-face L2 and L-inf stretch, face->vertices, vertex->faces
%% (as a sofs family), and the vertex->UV table.
init_stretch([{Face,FUvs=[{Id1,P1},{Id2,P2},{Id3,P3}]}|R],
             Ovs,F2S2,F2S8,F2Vs,V2Fs,UVs) ->
    {Q1,Q2,Q3} = gb_trees:get(Face,Ovs),
    S2 = l2(P1,P2,P3,Q1,Q2,Q3),
    S8 = l8(P1,P2,P3,Q1,Q2,Q3),
    init_stretch(R,Ovs, [{Face,S2}|F2S2],[{Face,S8}|F2S8],
                 [{Face, [Id1,Id2,Id3]}|F2Vs],
                 [{Id1,Face},{Id2,Face},{Id3,Face}|V2Fs],
                 FUvs ++ UVs);
init_stretch([],_,F2S2,F2S8,F2Vs,V2Fs0,Uvs) ->
    %% Group the {Vertex,Face} relation into Vertex -> [Face].
    V2Fs1 = sofs:relation(V2Fs0),
    V2Fs2 = sofs:relation_to_family(V2Fs1),
    V2Fs = sofs:to_external(V2Fs2),
    {gb_trees:from_orddict(lists:sort(F2S2)),
     gb_trees:from_orddict(lists:sort(F2S8)),
     gb_trees:from_orddict(lists:usort(Uvs)),
     #s{f2v = gb_trees:from_orddict(lists:sort(F2Vs)),
        v2f = gb_trees:from_orddict(V2Fs)}}.
%% Accumulate total 2D (UV) and 3D area over all triangles and
%% build the face->3D-area and face->3D-positions tables. The
%% returned sqrt(A3D/A2D) scales UVs so the areas match.
calc_scale([{Face,[{Id1,P1},{Id2,P2},{Id3,P3}]}|R], Ovs, A2D, A3D,F2A,F2OVs) ->
    A2 = abs(area2d2(P1,P2,P3)/2),
    Q1 = array:get(Id1,Ovs),
    Q2 = array:get(Id2,Ovs),
    Q3 = array:get(Id3,Ovs),
    A3 = area3d(Q1,Q2,Q3),
    calc_scale(R,Ovs,A2+A2D,A3+A3D,[{Face,A3}|F2A],[{Face,{Q1,Q2,Q3}}|F2OVs]);
calc_scale([],_Ovs,A2D,A3D,F2A,F2OVs) ->
    {math:sqrt(A3D/A2D),
     gb_trees:from_orddict(lists:sort(F2A)),
     gb_trees:from_orddict(lists:sort(F2OVs))}.
%% Maximum (L-infinity) stretch over the given faces. The garbled
%% ?DBG line inside the if-branch is restored as a comment so the
%% function compiles again.
model_l8([Face|R], F2S8, Worst) ->
    FVal = gb_trees:get(Face,F2S8),
    New = if FVal > Worst ->
                  %% ?DBG("Face ~p has worst ~p~n", [Face, FVal]),
                  FVal;
             true ->
                  Worst
          end,
    model_l8(R,F2S8,New);
model_l8([], _, Worst) -> Worst.
%% Area-weighted RMS (L2) stretch over the given faces:
%% sqrt(sum(S_f^2 * A_f) / sum(A_f)). The inner case doubles as a
%% float guard on both the stretch value and the area.
model_l2([Face|R], F2S2, F2A, Mean, Area) ->
    TriM = gb_trees:get(Face,F2S2),
    case gb_trees:get(Face,F2A) of
        A when is_float(TriM), is_float(A) ->
            model_l2(R,F2S2,F2A,TriM*TriM*A+Mean,Area+A)
    end;
model_l2([],_,_,Mean,Area) ->
    math:sqrt(Mean/Area).
%% L2 (RMS) stretch of one triangle with UV corners {Si,Ti} and 3D
%% corners {Qix,Qiy,Qiz} (cf. Sander et al.): sqrt((Ss.Ss+St.St)/2)
%% where Ss/St are the partial derivatives of surface position
%% w.r.t. the s/t parameters, expanded inline for speed.
l2({S1,T1}, {S2,T2}, {S3,T3},
   {Q1x,Q1y,Q1z}, {Q2x,Q2y,Q2z}, {Q3x,Q3y,Q3z})
  when is_float(S1), is_float(S2), is_float(S3),
       is_float(T1), is_float(T2), is_float(T3),
       is_float(Q1x), is_float(Q1y), is_float(Q1z),
       is_float(Q2x), is_float(Q2y), is_float(Q2z),
       is_float(Q3x), is_float(Q3y), is_float(Q3z) ->
    T23 = T2-T3, T31 = T3-T1, T12 = T1-T2,
    S32 = S3-S2, S13 = S1-S3, S21 = S2-S1,
    case S21*T31-S13*T12 of
        DoubleArea when DoubleArea > 0.00000001 ->
            %% A = |Ss|^2 (unnormalized), C = |St|^2 (unnormalized).
            SX = Q1x*T23+Q2x*T31+Q3x*T12,
            SY = Q1y*T23+Q2y*T31+Q3y*T12,
            SZ = Q1z*T23+Q2z*T31+Q3z*T12,
            A = SX*SX+SY*SY+SZ*SZ,
            TX = Q1x*S32+Q2x*S13+Q3x*S21,
            TY = Q1y*S32+Q2y*S13+Q3y*S21,
            TZ = Q1z*S32+Q2z*S13+Q3z*S21,
            C = TX*TX+TY*TY+TZ*TZ,
            math:sqrt((A+C)/(2.0*DoubleArea*DoubleArea));
        _ ->
            %% Degenerate (near zero area) UV triangle: huge stretch.
            9999999999.9
    end.
%% L-infinity (worst-case) stretch of one triangle: the largest
%% singular value of the parameterization Jacobian.
%% NOTE(review): the function head was lost in the source dump;
%% reconstructed from the call site in init_stretch/7.
l8(P1, P2, P3, Q1, Q2, Q3) ->
    A2 = area2d2(P1,P2,P3),
    if A2 > 0.00000001 ->
            SS = ss(P1,P2,P3,Q1,Q2,Q3,A2),
            ST = st(P1,P2,P3,Q1,Q2,Q3,A2),
            A = e3d_vec:dot(SS,SS),
            B = e3d_vec:dot(SS,ST),
            C = e3d_vec:dot(ST,ST),
            math:sqrt(0.5*((A+C)+math:sqrt((A-C)*(A-C)+4*B*B)));
       true ->
            %% Degenerate UV triangle: huge stretch.
            9999999999.9
    end.
%% Partial derivative Ss of the 3D position w.r.t. the s parameter,
%% for a triangle with UV t-coordinates T1..T3, 3D corners Q1..Q3
%% and doubled UV area A.
ss({_,T1},{_,T2},{_,T3},{Q1x,Q1y,Q1z},{Q2x,Q2y,Q2z},{Q3x,Q3y,Q3z},A)
  when is_float(T1),is_float(T2),is_float(T3),
       is_float(Q1x),is_float(Q1y),is_float(Q1z),
       is_float(Q2x),is_float(Q2y),is_float(Q2z),
       is_float(Q3x),is_float(Q3y),is_float(Q3z) ->
    T23 = T2-T3, T31 = T3-T1, T12 = T1-T2,
    {(Q1x*T23+Q2x*T31+Q3x*T12)/A,
     (Q1y*T23+Q2y*T31+Q3y*T12)/A,
     (Q1z*T23+Q2z*T31+Q3z*T12)/A}.
%% Partial derivative St of the 3D position w.r.t. the t parameter,
%% for a triangle with UV s-coordinates S1..S3, 3D corners Q1..Q3
%% and doubled UV area A.
st({S1,_},{S2,_},{S3,_},{Q1x,Q1y,Q1z},{Q2x,Q2y,Q2z},{Q3x,Q3y,Q3z},A)
  when is_float(S1),is_float(S2),is_float(S3),
       is_float(Q1x),is_float(Q1y),is_float(Q1z),
       is_float(Q2x),is_float(Q2y),is_float(Q2z),
       is_float(Q3x),is_float(Q3y),is_float(Q3z) ->
    S32 = S3-S2, S13 = S1-S3, S21 = S2-S1,
    {(Q1x*S32+Q2x*S13+Q3x*S21)/A,
     (Q1y*S32+Q2y*S13+Q3y*S21)/A,
     (Q1z*S32+Q2z*S13+Q3z*S21)/A}.
%% Triangulate the faces Fs of We and return, for each resulting
%% triangle, its projected 2D vertex positions.
triangulate(Fs,We) ->
    TriWe = wings_tesselation:triangulate(Fs, We),
    %% The tesselation may create new faces; include them too.
    TriFs = Fs ++ wings_we:new_items_as_ordset(face, We, TriWe),
    get_face_vspos(TriFs,TriWe, []).
%% For each (triangular) face collect {Face, [{Vid,{X,Y}}]}, taking
%% the x/y components of the vertex positions. Errors out if any
%% face is not a triangle (triangulation invariant violated).
get_face_vspos([Face|Fs], We, Tris) ->
    Vs0 = wpa:face_vertices(Face, We),
    Vs1 = [{V,wings_vertex:pos(V,We)} || V <- Vs0],
    if length(Vs0) == 3 ->
            %% Drop the z coordinate; charts live in the xy plane here.
            Vs2 = [{Vid, {Vx, Vy}} || {Vid,{Vx,Vy,_}} <- Vs1],
            get_face_vspos(Fs,We,[{Face, Vs2}|Tris]);
       true ->
            io:format(?__(1,"Error: Face isn't triangulated ~p with ~p vertices")++"~n",
                      [Face, Vs1]),
            error({triangulation_bug, [Face, Vs1]})
    end;
get_face_vspos([], _, Tris) ->
    Tris.
|
63da507a2ecaaab6f012dd554f6dc58365db36f0ebac42705d82f54b68dad71b | DSiSc/why3 | encoding.ml | (********************************************************************)
(* *)
The Why3 Verification Platform / The Why3 Development Team
Copyright 2010 - 2018 -- Inria - CNRS - Paris - Sud University
(* *)
(* This software is distributed under the terms of the GNU Lesser *)
General Public License version 2.1 , with the special exception
(* on linking described in file LICENSE. *)
(* *)
(********************************************************************)
open Wstdlib
open Ty
open Theory
open Task
open Trans
(* Exclusive metas controlling which types are kept monomorphic
   (select_kept), how kept types are protected (enco_kept) and how
   polymorphism is encoded (enco_poly). Each takes one string flag. *)
let meta_select_kept = register_meta_excl "select_kept" [MTstring]
  ~desc:"Specify@ the@ types@ to@ mark@ with@ 'encoding:kept':@; \
    @[\
      - none: @[don't@ mark@ any@ type@ automatically@]@\n\
      - goal: @[mark@ every@ closed@ type@ in@ the@ goal@]@\n\
      - all:  @[mark@ every@ closed@ type@ in@ the@ task.@]\
    @]"

(* Fallback used when select_kept is not set on the task. *)
let meta_select_kept_default =
  register_meta_excl "select_kept_default" [MTstring]
  ~desc:"Default@ setting@ for@ select_kept"

let meta_enco_kept = register_meta_excl "enco_kept" [MTstring]
  ~desc:"Specify@ the@ type@ protection@ transformation:@; \
    @[\
      - @[<hov 2>twin: use@ conversion@ functions@ between@ the@ kept@ types@ \
            and@ the@ universal@ type@]\
    @]"

let meta_enco_poly = register_meta_excl "enco_poly" [MTstring]
  ~desc:"Specify@ the@ type@ encoding@ transformation:@; \
    @[\
      - @[<hov 2>tags: protect@ variables@ in@ equalities@ \
            with@ type@ annotations@]@\n\
      - @[<hov 2>guards: protect@ variables@ in@ equalities@ \
            with@ type@ conditions@]@\n\
      - @[<hov 2>tags_full: put@ type@ annotations@ on@ top@ \
            of@ every@ term@]@\n\
      - @[<hov 2>guards_full: add@ type@ conditions@ for@ every@ variable.@]\
    @]"
(* Default flag values used when the corresponding meta is absent:
   SMT provers get guards and twin conversions, TPTP provers tags. *)
let def_enco_select_smt = "none"
let def_enco_kept_smt = "twin"
let def_enco_poly_smt = "guards"
let def_enco_poly_tptp = "tags"
(* Registration tables mapping a flag value to its transformation.
   They are populated elsewhere by the concrete encoding plugins. *)
let ft_select_kept = ((Hstr.create 17) : (Env.env,Sty.t) Trans.flag_trans)
let ft_enco_kept   = ((Hstr.create 17) : (Env.env,task) Trans.flag_trans)
let ft_enco_poly   = ((Hstr.create 17) : (Env.env,task) Trans.flag_trans)
(* Run the select_kept transformation (with [def] as fallback flag)
   and add one [meta_kept] declaration per selected type to the
   task. *)
let select_kept def env =
  let def = Trans.on_flag meta_select_kept_default ft_select_kept def in
  let select = Trans.on_flag_t meta_select_kept ft_select_kept def env in
  let trans task =
    let add ty acc = create_meta Libencoding.meta_kept [MAty ty] :: acc in
    let decls = Sty.fold add (Trans.apply select task) [] in
    Trans.apply (Trans.add_tdecls decls) task
  in
  Trans.store trans
(* Rebuild the task, dropping every [meta_kept] declaration; used by
   the TPTP pipeline where no type is kept monomorphic. *)
let forget_kept = Trans.fold (fun hd task ->
  match hd.task_decl.td_node with
  | Meta (m,_) when meta_equal m Libencoding.meta_kept -> task
  | _ -> add_tdecl task hd.task_decl) None
(* SMT pipeline: monomorphise the goal, select kept types, protect
   them, then encode the remaining polymorphism (guards default). *)
let encoding_smt env = Trans.seq [
  Libencoding.monomorphise_goal;
  select_kept def_enco_select_smt env;
  Trans.print_meta Libencoding.debug Libencoding.meta_kept;
  Trans.trace_goal "meta_enco_kept" (Trans.on_flag meta_enco_kept ft_enco_kept def_enco_kept_smt env);
  Trans.on_flag meta_enco_poly ft_enco_poly def_enco_poly_smt env]
(* TPTP pipeline: monomorphise the goal, drop all kept-type metas,
   then encode polymorphism with type tags (tags default). *)
let encoding_tptp env = Trans.seq [
  Libencoding.monomorphise_goal;
  forget_kept;
  Trans.on_flag meta_enco_poly ft_enco_poly def_enco_poly_tptp env]
(* Expose the two pipelines as named environment transformations. *)
let () = register_env_transform "encoding_smt" encoding_smt
  ~desc:"Encode@ polymorphic@ types@ for@ provers@ with@ sorts."
let () = register_env_transform "encoding_tptp" encoding_tptp
  ~desc:"Encode@ polymorphic@ types@ for@ provers@ without@ sorts."
(* encoding only if polymorphism occurs *)
(* Apply encoding_smt only when the task is not already detected as
   monomorphic (the meta is set by the Detect_polymorphism pass). *)
let encoding_smt_if_poly env =
  Trans.on_meta Detect_polymorphism.meta_monomorphic_types_only
    (function
     | [] -> encoding_smt env
     | _ -> Trans.identity)

let () =
  Trans.register_env_transform "encoding_smt_if_poly"
    encoding_smt_if_poly
    ~desc:"Same@ as@ encoding_smt@ but@ only@ if@ polymorphism@ appear."
(* Same guard as encoding_smt_if_poly, for the TPTP pipeline. *)
let encoding_tptp_if_poly env =
  Trans.on_meta Detect_polymorphism.meta_monomorphic_types_only
    (function
     | [] -> encoding_tptp env
     | _ -> Trans.identity)

let () =
  Trans.register_env_transform "encoding_tptp_if_poly"
    encoding_tptp_if_poly
    ~desc:"Same@ as@ encoding_tptp@ but@ only@ if@ polymorphism@ appear."
| null | https://raw.githubusercontent.com/DSiSc/why3/8ba9c2287224b53075adc51544bc377bc8ea5c75/src/transform/encoding.ml | ocaml | ******************************************************************
This software is distributed under the terms of the GNU Lesser
on linking described in file LICENSE.
******************************************************************
encoding only if polymorphism occurs | The Why3 Verification Platform / The Why3 Development Team
Copyright 2010 - 2018 -- Inria - CNRS - Paris - Sud University
General Public License version 2.1 , with the special exception
open Wstdlib
open Ty
open Theory
open Task
open Trans
let meta_select_kept = register_meta_excl "select_kept" [MTstring]
~desc:"Specify@ the@ types@ to@ mark@ with@ 'encoding:kept':@; \
@[\
- none: @[don't@ mark@ any@ type@ automatically@]@\n\
- goal: @[mark@ every@ closed@ type@ in@ the@ goal@]@\n\
- all: @[mark@ every@ closed@ type@ in@ the@ task.@]\
@]"
let meta_select_kept_default =
register_meta_excl "select_kept_default" [MTstring]
~desc:"Default@ setting@ for@ select_kept"
let meta_enco_kept = register_meta_excl "enco_kept" [MTstring]
~desc:"Specify@ the@ type@ protection@ transformation:@; \
@[\
- @[<hov 2>twin: use@ conversion@ functions@ between@ the@ kept@ types@ \
and@ the@ universal@ type@]\
@]"
let meta_enco_poly = register_meta_excl "enco_poly" [MTstring]
~desc:"Specify@ the@ type@ encoding@ transformation:@; \
@[\
- @[<hov 2>tags: protect@ variables@ in@ equalities@ \
with@ type@ annotations@]@\n\
- @[<hov 2>guards: protect@ variables@ in@ equalities@ \
with@ type@ conditions@]@\n\
- @[<hov 2>tags_full: put@ type@ annotations@ on@ top@ \
of@ every@ term@]@\n\
- @[<hov 2>guards_full: add@ type@ conditions@ for@ every@ variable.@]\
@]"
let def_enco_select_smt = "none"
let def_enco_kept_smt = "twin"
let def_enco_poly_smt = "guards"
let def_enco_poly_tptp = "tags"
let ft_select_kept = ((Hstr.create 17) : (Env.env,Sty.t) Trans.flag_trans)
let ft_enco_kept = ((Hstr.create 17) : (Env.env,task) Trans.flag_trans)
let ft_enco_poly = ((Hstr.create 17) : (Env.env,task) Trans.flag_trans)
let select_kept def env =
let def = Trans.on_flag meta_select_kept_default ft_select_kept def in
let select = Trans.on_flag_t meta_select_kept ft_select_kept def env in
let trans task =
let add ty acc = create_meta Libencoding.meta_kept [MAty ty] :: acc in
let decls = Sty.fold add (Trans.apply select task) [] in
Trans.apply (Trans.add_tdecls decls) task
in
Trans.store trans
let forget_kept = Trans.fold (fun hd task ->
match hd.task_decl.td_node with
| Meta (m,_) when meta_equal m Libencoding.meta_kept -> task
| _ -> add_tdecl task hd.task_decl) None
let encoding_smt env = Trans.seq [
Libencoding.monomorphise_goal;
select_kept def_enco_select_smt env;
Trans.print_meta Libencoding.debug Libencoding.meta_kept;
Trans.trace_goal "meta_enco_kept" (Trans.on_flag meta_enco_kept ft_enco_kept def_enco_kept_smt env);
Trans.on_flag meta_enco_poly ft_enco_poly def_enco_poly_smt env]
let encoding_tptp env = Trans.seq [
Libencoding.monomorphise_goal;
forget_kept;
Trans.on_flag meta_enco_poly ft_enco_poly def_enco_poly_tptp env]
let () = register_env_transform "encoding_smt" encoding_smt
~desc:"Encode@ polymorphic@ types@ for@ provers@ with@ sorts."
let () = register_env_transform "encoding_tptp" encoding_tptp
~desc:"Encode@ polymorphic@ types@ for@ provers@ without@ sorts."
let encoding_smt_if_poly env =
Trans.on_meta Detect_polymorphism.meta_monomorphic_types_only
(function
| [] -> encoding_smt env
| _ -> Trans.identity)
let () =
Trans.register_env_transform "encoding_smt_if_poly"
encoding_smt_if_poly
~desc:"Same@ as@ encoding_smt@ but@ only@ if@ polymorphism@ appear."
let encoding_tptp_if_poly env =
Trans.on_meta Detect_polymorphism.meta_monomorphic_types_only
(function
| [] -> encoding_tptp env
| _ -> Trans.identity)
let () =
Trans.register_env_transform "encoding_tptp_if_poly"
encoding_tptp_if_poly
~desc:"Same@ as@ encoding_tptp@ but@ only@ if@ polymorphism@ appear."
|
bce911e64d3854bb851b4c776546cf33c1a8eb75dc45ffe8a48bdb689c2c8005 | TokTok/hs-toxcore | KeySpec.hs | {-# LANGUAGE FlexibleContexts #-}
# LANGUAGE ScopedTypeVariables #
# LANGUAGE StrictData #
{-# LANGUAGE Trustworthy #-}
module Network.Tox.Crypto.KeySpec where
import Test.Hspec
import Test.QuickCheck
import Control.Monad.Validate (MonadValidate, runValidate)
import qualified Crypto.Saltine.Class as Sodium
import Data.Binary (Binary)
import Data.ByteString (ByteString)
import qualified Data.ByteString as ByteString
import Data.MessagePack (DecodeError, errorMessages)
import Data.Proxy (Proxy (..))
import Data.Typeable (Typeable)
import qualified Network.Tox.Binary as Binary
import Network.Tox.Crypto.Key (Key (..))
import qualified Network.Tox.Crypto.Key as Key
import Network.Tox.EncodingSpec
import qualified Text.Read as Read
-- | 'Read.readMaybe' specialised to 'Key.PublicKey'; pins the type
-- in the read tests below.
readMaybe :: String -> Maybe Key.PublicKey
readMaybe = Read.readMaybe

-- | 'Key.decode' specialised to 'Key.PublicKey' in a validating
-- monad, so decode failures surface as 'DecodeError's.
decodeM :: MonadValidate DecodeError m => ByteString -> m Key.PublicKey
decodeM = Key.decode

-- | Parse a shown public key and convert it to its integer value
-- via 'Key.keyToInteger'.
keyToInteger :: String -> Integer
keyToInteger string =
  Key.keyToInteger (read string :: Key.PublicKey)

-- | Round-trip property: Sodium-encoding then decoding a public key
-- yields the same key.
encodeDecodePublicKey :: Key.PublicKey -> Expectation
encodeDecodePublicKey key =
  Sodium.decode (Sodium.encode key) `shouldBe` Just key
-- | Local-only encoding tests for one key type: Binary round-trip
-- and Read/Show round-trip, grouped under the type's name.
localEncodingSpec
  :: (Typeable a, Read a, Show a, Binary a, Arbitrary a, Eq a)
  => Proxy a -> Spec
localEncodingSpec proxy =
  describe (Binary.typeName proxy) $ do
    binarySpec proxy
    readShowSpec proxy
-- | Test tree for the crypto key types: encoding round-trips plus
-- targeted 'read'/'decode'/'keyToInteger' cases.
spec :: Spec
spec = do
  -- PublicKey for RPC tests.
  rpcSpec (Proxy :: Proxy Key.PublicKey)
  -- All others only local tests.
  localEncodingSpec (Proxy :: Proxy Key.CombinedKey)
  localEncodingSpec (Proxy :: Proxy Key.Nonce)
  localEncodingSpec (Proxy :: Proxy Key.PublicKey)
  localEncodingSpec (Proxy :: Proxy Key.SecretKey)

  describe "IsEncoding" $
    it "decodes encoded public keys correctly" $
      property encodeDecodePublicKey

  describe "read" $ do
    it "decodes valid hex string to PublicKey" $
      let
        actual = readMaybe "\"0100000000000000000000000000000000000000000000000000000000000010\""
        -- Expected raw key bytes: 0x01 first, 0x10 last (big endian hex).
        Just expected = Sodium.decode $ ByteString.pack [1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0x10]
      in
        actual `shouldBe` Just (Key expected)

    it "decodes empty string to Nothing" $ do
      let actual = readMaybe ""
      actual `shouldBe` Nothing
      -- The monadic decoder must report the same failure explicitly.
      case runValidate $ decodeM ByteString.empty of
        Left msg -> errorMessages msg `shouldBe` ["unable to decode ByteString to Key"]
        Right val -> expectationFailure $ "unexpected success: " ++ show val

    it "decodes valid hex string of wrong length to Nothing" $
      let actual = readMaybe "\"0110\"" in
      actual `shouldBe` Nothing

  describe "keyToInteger" $ do
    it "converts keys to Integer in big endian" $ do
      keyToInteger "\"fe00000000000000000000000000000000000000000000000000000000000000\""
        `shouldBe` 0xfe00000000000000000000000000000000000000000000000000000000000000
      keyToInteger "\"00000000000000000000000000000000000000000000000000000000000000fe\""
        `shouldBe` 0x00000000000000000000000000000000000000000000000000000000000000fe

    it "encodes all keys to positive Integers" $
      property $ \key ->
        Key.keyToInteger (key :: Key.PublicKey) `shouldSatisfy` (0 <=)
| null | https://raw.githubusercontent.com/TokTok/hs-toxcore/647c3070cab29aee3d795a456be534d77c167d81/test/Network/Tox/Crypto/KeySpec.hs | haskell | # LANGUAGE FlexibleContexts #
# LANGUAGE Trustworthy #
All others only local tests. | # LANGUAGE ScopedTypeVariables #
# LANGUAGE StrictData #
module Network.Tox.Crypto.KeySpec where
import Test.Hspec
import Test.QuickCheck
import Control.Monad.Validate (MonadValidate, runValidate)
import qualified Crypto.Saltine.Class as Sodium
import Data.Binary (Binary)
import Data.ByteString (ByteString)
import qualified Data.ByteString as ByteString
import Data.MessagePack (DecodeError, errorMessages)
import Data.Proxy (Proxy (..))
import Data.Typeable (Typeable)
import qualified Network.Tox.Binary as Binary
import Network.Tox.Crypto.Key (Key (..))
import qualified Network.Tox.Crypto.Key as Key
import Network.Tox.EncodingSpec
import qualified Text.Read as Read
readMaybe :: String -> Maybe Key.PublicKey
readMaybe = Read.readMaybe
decodeM :: MonadValidate DecodeError m => ByteString -> m Key.PublicKey
decodeM = Key.decode
keyToInteger :: String -> Integer
keyToInteger string =
Key.keyToInteger (read string :: Key.PublicKey)
encodeDecodePublicKey :: Key.PublicKey -> Expectation
encodeDecodePublicKey key =
Sodium.decode (Sodium.encode key) `shouldBe` Just key
localEncodingSpec
:: (Typeable a, Read a, Show a, Binary a, Arbitrary a, Eq a)
=> Proxy a -> Spec
localEncodingSpec proxy =
describe (Binary.typeName proxy) $ do
binarySpec proxy
readShowSpec proxy
spec :: Spec
spec = do
PublicKey for RPC tests .
rpcSpec (Proxy :: Proxy Key.PublicKey)
localEncodingSpec (Proxy :: Proxy Key.CombinedKey)
localEncodingSpec (Proxy :: Proxy Key.Nonce)
localEncodingSpec (Proxy :: Proxy Key.PublicKey)
localEncodingSpec (Proxy :: Proxy Key.SecretKey)
describe "IsEncoding" $
it "decodes encoded public keys correctly" $
property encodeDecodePublicKey
describe "read" $ do
it "decodes valid hex string to PublicKey" $
let
actual = readMaybe "\"0100000000000000000000000000000000000000000000000000000000000010\""
Just expected = Sodium.decode $ ByteString.pack [1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0x10]
in
actual `shouldBe` Just (Key expected)
it "decodes empty string to Nothing" $ do
let actual = readMaybe ""
actual `shouldBe` Nothing
case runValidate $ decodeM ByteString.empty of
Left msg -> errorMessages msg `shouldBe` ["unable to decode ByteString to Key"]
Right val -> expectationFailure $ "unexpected success: " ++ show val
it "decodes valid hex string of wrong length to Nothing" $
let actual = readMaybe "\"0110\"" in
actual `shouldBe` Nothing
describe "keyToInteger" $ do
it "converts keys to Integer in big endian" $ do
keyToInteger "\"fe00000000000000000000000000000000000000000000000000000000000000\""
`shouldBe` 0xfe00000000000000000000000000000000000000000000000000000000000000
keyToInteger "\"00000000000000000000000000000000000000000000000000000000000000fe\""
`shouldBe` 0x00000000000000000000000000000000000000000000000000000000000000fe
it "encodes all keys to positive Integers" $
property $ \key ->
Key.keyToInteger (key :: Key.PublicKey) `shouldSatisfy` (0 <=)
|
c15a6c2a154de88a6b5fb8d99abcd76ce5079064af0dfed5946285b95a283e37 | cj1128/sicp-review | symbolic-algebra.scm | (define (install-polynomial-package)
(define (make-poly var term-list) (cons var term-list))
(define (variable p) (car p))
(define (term-list p) (cdr p))
(define (add-poly p1 p2)
(if (same-variable? (variable p1) (variable p2))
(make-poly (variable p1)
(add-terms (term-list p1) (term-list p2)))
(error "Polys not in same var: ADD-POLY" (list p1 p2))))
(define (mul-poly p1 p2)
(if (same-variable? (variable p1) (variable p2))
(make-poly
(variable p1)
(mul-terms (term-list p1) (term-list p2)))
(error "Polys not in same var: MUL-POLY" (list p1 p2))))
(define (add-terms l1 l2)
(cond
((empty-term-list? l1) l2)
((empty-term-list? l2) l1)
(else
(let
((t1 (first-term l1))
(t2 (first-term l2)))
(cond
((> (order t1) (order t2))
(adjoin-term
t1 (add-terms (rest-terms l1) l2)))
((> (order t2) (order t1))
(adjoin-iterm
t2 (add-terms (rest-terms l2) l1)))
(else
(adjoin-term
(make-term (order t1)
(add (coeff t1) (coeff t2)))
(add-terms (rest-terms l1) (rest-terms l2)))))))))
(define (mul-terms l1 l2)
(if (empty-term-list? l1)
(the-empty-term-list)
(let ((t1 (first-term l1)))
(add-terms (mul-term-by-all-terms t1 l2)
(mul-terms (rest-terms l1) l2)))))
(define (mul-term-by-all-terms t l)
(if (empty-term-list? l)
(the-empty-term-list)
(let ((t1 (first-term l)))
(adjoin-term
(make-term
(add (order t) (order t1))
(mul (coeff t) (coeff t1)))
(mul-term-by-all-terms t (rest-terms l))))))
;; Term List Representation
(define (adjoin-term term term-list)
(if (=zero? (coeff term))
term-list
(cons term term-list)))
(define (the-empty-term-list) '())
(define (first-term term-list) (car term-list))
(define (rest-terms term-list) (cdr term-list))
(define (empty-term-list? term-list) (null? term-list))
(define (make-term order coeff) (cons order coeff))
(define (order term) (car term))
(define (coeff term) (cdr term))
(define (tag p) (attach-tag 'polynomial p))
(put 'add '(polynomial polynomial)
(lambda (p1 p2)
(tag (add-poly p1 p2))))
(put 'mul '(polynomial polynomial)
(lambda (p1 p2)
(tag (mul-poly p1 p2))))
(put 'make 'polynomial
(lambda (var term-list)
(tag (make-poly var term-list))))
'done
)
| null | https://raw.githubusercontent.com/cj1128/sicp-review/efaa2f863b7f03c51641c22d701bac97e398a050/chapter-2/2.5/symbolic-algebra.scm | scheme | Term List Representation | (define (install-polynomial-package)
(define (make-poly var term-list) (cons var term-list))
(define (variable p) (car p))
(define (term-list p) (cdr p))
(define (add-poly p1 p2)
(if (same-variable? (variable p1) (variable p2))
(make-poly (variable p1)
(add-terms (term-list p1) (term-list p2)))
(error "Polys not in same var: ADD-POLY" (list p1 p2))))
(define (mul-poly p1 p2)
(if (same-variable? (variable p1) (variable p2))
(make-poly
(variable p1)
(mul-terms (term-list p1) (term-list p2)))
(error "Polys not in same var: MUL-POLY" (list p1 p2))))
(define (add-terms l1 l2)
(cond
((empty-term-list? l1) l2)
((empty-term-list? l2) l1)
(else
(let
((t1 (first-term l1))
(t2 (first-term l2)))
(cond
((> (order t1) (order t2))
(adjoin-term
t1 (add-terms (rest-terms l1) l2)))
((> (order t2) (order t1))
(adjoin-iterm
t2 (add-terms (rest-terms l2) l1)))
(else
(adjoin-term
(make-term (order t1)
(add (coeff t1) (coeff t2)))
(add-terms (rest-terms l1) (rest-terms l2)))))))))
(define (mul-terms l1 l2)
(if (empty-term-list? l1)
(the-empty-term-list)
(let ((t1 (first-term l1)))
(add-terms (mul-term-by-all-terms t1 l2)
(mul-terms (rest-terms l1) l2)))))
(define (mul-term-by-all-terms t l)
(if (empty-term-list? l)
(the-empty-term-list)
(let ((t1 (first-term l)))
(adjoin-term
(make-term
(add (order t) (order t1))
(mul (coeff t) (coeff t1)))
(mul-term-by-all-terms t (rest-terms l))))))
(define (adjoin-term term term-list)
(if (=zero? (coeff term))
term-list
(cons term term-list)))
(define (the-empty-term-list) '())
(define (first-term term-list) (car term-list))
(define (rest-terms term-list) (cdr term-list))
(define (empty-term-list? term-list) (null? term-list))
(define (make-term order coeff) (cons order coeff))
(define (order term) (car term))
(define (coeff term) (cdr term))
(define (tag p) (attach-tag 'polynomial p))
(put 'add '(polynomial polynomial)
(lambda (p1 p2)
(tag (add-poly p1 p2))))
(put 'mul '(polynomial polynomial)
(lambda (p1 p2)
(tag (mul-poly p1 p2))))
(put 'make 'polynomial
(lambda (var term-list)
(tag (make-poly var term-list))))
'done
)
|
e14d1892b799734e58e285d39ddfc1a3a333f25919892109aeafc2110d930e00 | aws-beam/aws-erlang | aws_elasticsearch.erl | %% WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
See -beam/aws-codegen for more details .
@doc Amazon Elasticsearch Configuration Service
%%
%% Use the Amazon Elasticsearch Configuration API to create, configure, and
manage Elasticsearch domains .
%%
For sample code that uses the Configuration API , see the Amazon
Elasticsearch Service Developer Guide . The guide also contains sample code
for sending signed HTTP requests to the Elasticsearch APIs .
%%
%% The endpoint for configuration service requests is region-specific:
%% es.region.amazonaws.com. For example, es.us-east-1.amazonaws.com. For a
%% current list of supported regions and endpoints, see Regions and
%% Endpoints.
-module(aws_elasticsearch).
-export([accept_inbound_cross_cluster_search_connection/3,
accept_inbound_cross_cluster_search_connection/4,
add_tags/2,
add_tags/3,
associate_package/4,
associate_package/5,
authorize_vpc_endpoint_access/3,
authorize_vpc_endpoint_access/4,
cancel_elasticsearch_service_software_update/2,
cancel_elasticsearch_service_software_update/3,
create_elasticsearch_domain/2,
create_elasticsearch_domain/3,
create_outbound_cross_cluster_search_connection/2,
create_outbound_cross_cluster_search_connection/3,
create_package/2,
create_package/3,
create_vpc_endpoint/2,
create_vpc_endpoint/3,
delete_elasticsearch_domain/3,
delete_elasticsearch_domain/4,
delete_elasticsearch_service_role/2,
delete_elasticsearch_service_role/3,
delete_inbound_cross_cluster_search_connection/3,
delete_inbound_cross_cluster_search_connection/4,
delete_outbound_cross_cluster_search_connection/3,
delete_outbound_cross_cluster_search_connection/4,
delete_package/3,
delete_package/4,
delete_vpc_endpoint/3,
delete_vpc_endpoint/4,
describe_domain_auto_tunes/2,
describe_domain_auto_tunes/4,
describe_domain_auto_tunes/5,
describe_domain_change_progress/2,
describe_domain_change_progress/4,
describe_domain_change_progress/5,
describe_elasticsearch_domain/2,
describe_elasticsearch_domain/4,
describe_elasticsearch_domain/5,
describe_elasticsearch_domain_config/2,
describe_elasticsearch_domain_config/4,
describe_elasticsearch_domain_config/5,
describe_elasticsearch_domains/2,
describe_elasticsearch_domains/3,
describe_elasticsearch_instance_type_limits/3,
describe_elasticsearch_instance_type_limits/5,
describe_elasticsearch_instance_type_limits/6,
describe_inbound_cross_cluster_search_connections/2,
describe_inbound_cross_cluster_search_connections/3,
describe_outbound_cross_cluster_search_connections/2,
describe_outbound_cross_cluster_search_connections/3,
describe_packages/2,
describe_packages/3,
describe_reserved_elasticsearch_instance_offerings/1,
describe_reserved_elasticsearch_instance_offerings/3,
describe_reserved_elasticsearch_instance_offerings/4,
describe_reserved_elasticsearch_instances/1,
describe_reserved_elasticsearch_instances/3,
describe_reserved_elasticsearch_instances/4,
describe_vpc_endpoints/2,
describe_vpc_endpoints/3,
dissociate_package/4,
dissociate_package/5,
get_compatible_elasticsearch_versions/1,
get_compatible_elasticsearch_versions/3,
get_compatible_elasticsearch_versions/4,
get_package_version_history/2,
get_package_version_history/4,
get_package_version_history/5,
get_upgrade_history/2,
get_upgrade_history/4,
get_upgrade_history/5,
get_upgrade_status/2,
get_upgrade_status/4,
get_upgrade_status/5,
list_domain_names/1,
list_domain_names/3,
list_domain_names/4,
list_domains_for_package/2,
list_domains_for_package/4,
list_domains_for_package/5,
list_elasticsearch_instance_types/2,
list_elasticsearch_instance_types/4,
list_elasticsearch_instance_types/5,
list_elasticsearch_versions/1,
list_elasticsearch_versions/3,
list_elasticsearch_versions/4,
list_packages_for_domain/2,
list_packages_for_domain/4,
list_packages_for_domain/5,
list_tags/2,
list_tags/4,
list_tags/5,
list_vpc_endpoint_access/2,
list_vpc_endpoint_access/4,
list_vpc_endpoint_access/5,
list_vpc_endpoints/1,
list_vpc_endpoints/3,
list_vpc_endpoints/4,
list_vpc_endpoints_for_domain/2,
list_vpc_endpoints_for_domain/4,
list_vpc_endpoints_for_domain/5,
purchase_reserved_elasticsearch_instance_offering/2,
purchase_reserved_elasticsearch_instance_offering/3,
reject_inbound_cross_cluster_search_connection/3,
reject_inbound_cross_cluster_search_connection/4,
remove_tags/2,
remove_tags/3,
revoke_vpc_endpoint_access/3,
revoke_vpc_endpoint_access/4,
start_elasticsearch_service_software_update/2,
start_elasticsearch_service_software_update/3,
update_elasticsearch_domain_config/3,
update_elasticsearch_domain_config/4,
update_package/2,
update_package/3,
update_vpc_endpoint/2,
update_vpc_endpoint/3,
upgrade_elasticsearch_domain/2,
upgrade_elasticsearch_domain/3]).
-include_lib("hackney/include/hackney_lib.hrl").
%%====================================================================
%% API
%%====================================================================
%% @doc Allows the destination domain owner to accept an inbound
%% cross-cluster search connection request.
accept_inbound_cross_cluster_search_connection(Client, CrossClusterSearchConnectionId, Input) ->
accept_inbound_cross_cluster_search_connection(Client, CrossClusterSearchConnectionId, Input, []).
accept_inbound_cross_cluster_search_connection(Client, CrossClusterSearchConnectionId, Input0, Options0) ->
Method = put,
Path = ["/2015-01-01/es/ccs/inboundConnection/", aws_util:encode_uri(CrossClusterSearchConnectionId), "/accept"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
@doc Attaches tags to an existing Elasticsearch domain .
%%
Tags are a set of case - sensitive key value pairs . An Elasticsearch domain
may have up to 10 tags . See Tagging Amazon Elasticsearch Service Domains
%% for more information.
add_tags(Client, Input) ->
add_tags(Client, Input, []).
add_tags(Client, Input0, Options0) ->
Method = post,
Path = ["/2015-01-01/tags"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
@doc Associates a package with an Amazon ES domain .
associate_package(Client, DomainName, PackageID, Input) ->
associate_package(Client, DomainName, PackageID, Input, []).
associate_package(Client, DomainName, PackageID, Input0, Options0) ->
Method = post,
Path = ["/2015-01-01/packages/associate/", aws_util:encode_uri(PackageID), "/", aws_util:encode_uri(DomainName), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
@doc Provides access to an Amazon OpenSearch Service domain through the
use of an interface VPC endpoint .
authorize_vpc_endpoint_access(Client, DomainName, Input) ->
authorize_vpc_endpoint_access(Client, DomainName, Input, []).
authorize_vpc_endpoint_access(Client, DomainName, Input0, Options0) ->
Method = post,
Path = ["/2015-01-01/es/domain/", aws_util:encode_uri(DomainName), "/authorizeVpcEndpointAccess"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
@doc Cancels a scheduled service software update for an Amazon ES domain .
%%
You can only perform this operation before the ` AutomatedUpdateDate '
and when the ` UpdateStatus ' is in the ` PENDING_UPDATE ' state .
cancel_elasticsearch_service_software_update(Client, Input) ->
cancel_elasticsearch_service_software_update(Client, Input, []).
cancel_elasticsearch_service_software_update(Client, Input0, Options0) ->
Method = post,
Path = ["/2015-01-01/es/serviceSoftwareUpdate/cancel"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
@doc Creates a new Elasticsearch domain .
%%
For more information , see Creating Elasticsearch Domains in the Amazon
Elasticsearch Service Developer Guide .
create_elasticsearch_domain(Client, Input) ->
create_elasticsearch_domain(Client, Input, []).
create_elasticsearch_domain(Client, Input0, Options0) ->
Method = post,
Path = ["/2015-01-01/es/domain"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Creates a new cross-cluster search connection from a source domain to
%% a destination domain.
create_outbound_cross_cluster_search_connection(Client, Input) ->
create_outbound_cross_cluster_search_connection(Client, Input, []).
create_outbound_cross_cluster_search_connection(Client, Input0, Options0) ->
Method = post,
Path = ["/2015-01-01/es/ccs/outboundConnection"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
@doc Create a package for use with Amazon ES domains .
create_package(Client, Input) ->
create_package(Client, Input, []).
create_package(Client, Input0, Options0) ->
Method = post,
Path = ["/2015-01-01/packages"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
@doc Creates an Amazon OpenSearch Service - managed VPC endpoint .
create_vpc_endpoint(Client, Input) ->
create_vpc_endpoint(Client, Input, []).
create_vpc_endpoint(Client, Input0, Options0) ->
Method = post,
Path = ["/2015-01-01/es/vpcEndpoints"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
@doc Permanently deletes the specified Elasticsearch domain and all of its
%% data.
%%
%% Once a domain is deleted, it cannot be recovered.
delete_elasticsearch_domain(Client, DomainName, Input) ->
delete_elasticsearch_domain(Client, DomainName, Input, []).
delete_elasticsearch_domain(Client, DomainName, Input0, Options0) ->
Method = delete,
Path = ["/2015-01-01/es/domain/", aws_util:encode_uri(DomainName), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
@doc Deletes the service - linked role that Elasticsearch Service uses to
manage and maintain VPC domains .
%%
Role deletion will fail if any existing VPC domains use the role . You must
delete any such Elasticsearch domains before deleting the role . See
Deleting Elasticsearch Service Role in VPC Endpoints for Amazon
%% Elasticsearch Service Domains.
delete_elasticsearch_service_role(Client, Input) ->
delete_elasticsearch_service_role(Client, Input, []).
delete_elasticsearch_service_role(Client, Input0, Options0) ->
Method = delete,
Path = ["/2015-01-01/es/role"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Allows the destination domain owner to delete an existing inbound
%% cross-cluster search connection.
delete_inbound_cross_cluster_search_connection(Client, CrossClusterSearchConnectionId, Input) ->
delete_inbound_cross_cluster_search_connection(Client, CrossClusterSearchConnectionId, Input, []).
delete_inbound_cross_cluster_search_connection(Client, CrossClusterSearchConnectionId, Input0, Options0) ->
Method = delete,
Path = ["/2015-01-01/es/ccs/inboundConnection/", aws_util:encode_uri(CrossClusterSearchConnectionId), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Allows the source domain owner to delete an existing outbound
%% cross-cluster search connection.
delete_outbound_cross_cluster_search_connection(Client, CrossClusterSearchConnectionId, Input) ->
delete_outbound_cross_cluster_search_connection(Client, CrossClusterSearchConnectionId, Input, []).
delete_outbound_cross_cluster_search_connection(Client, CrossClusterSearchConnectionId, Input0, Options0) ->
Method = delete,
Path = ["/2015-01-01/es/ccs/outboundConnection/", aws_util:encode_uri(CrossClusterSearchConnectionId), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Delete the package.
delete_package(Client, PackageID, Input) ->
delete_package(Client, PackageID, Input, []).
delete_package(Client, PackageID, Input0, Options0) ->
Method = delete,
Path = ["/2015-01-01/packages/", aws_util:encode_uri(PackageID), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
@doc Deletes an Amazon OpenSearch Service - managed interface VPC endpoint .
delete_vpc_endpoint(Client, VpcEndpointId, Input) ->
delete_vpc_endpoint(Client, VpcEndpointId, Input, []).
delete_vpc_endpoint(Client, VpcEndpointId, Input0, Options0) ->
Method = delete,
Path = ["/2015-01-01/es/vpcEndpoints/", aws_util:encode_uri(VpcEndpointId), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
@doc Provides scheduled Auto - Tune action details for the Elasticsearch
domain , such as Auto - Tune action type , description , severity , and
%% scheduled date.
describe_domain_auto_tunes(Client, DomainName)
when is_map(Client) ->
describe_domain_auto_tunes(Client, DomainName, #{}, #{}).
describe_domain_auto_tunes(Client, DomainName, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
describe_domain_auto_tunes(Client, DomainName, QueryMap, HeadersMap, []).
describe_domain_auto_tunes(Client, DomainName, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/2015-01-01/es/domain/", aws_util:encode_uri(DomainName), "/autoTunes"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query_ = [],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Returns information about the current blue/green deployment happening
%% on a domain, including a change ID, status, and progress stages.
describe_domain_change_progress(Client, DomainName)
when is_map(Client) ->
describe_domain_change_progress(Client, DomainName, #{}, #{}).
describe_domain_change_progress(Client, DomainName, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
describe_domain_change_progress(Client, DomainName, QueryMap, HeadersMap, []).
describe_domain_change_progress(Client, DomainName, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/2015-01-01/es/domain/", aws_util:encode_uri(DomainName), "/progress"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"changeid">>, maps:get(<<"changeid">>, QueryMap, undefined)}
],
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Returns domain configuration information about the specified
%% Elasticsearch domain, including the domain ID, domain endpoint, and domain
ARN .
describe_elasticsearch_domain(Client, DomainName)
when is_map(Client) ->
describe_elasticsearch_domain(Client, DomainName, #{}, #{}).
describe_elasticsearch_domain(Client, DomainName, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
describe_elasticsearch_domain(Client, DomainName, QueryMap, HeadersMap, []).
describe_elasticsearch_domain(Client, DomainName, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/2015-01-01/es/domain/", aws_util:encode_uri(DomainName), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query_ = [],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Provides cluster configuration information about the specified
%% Elasticsearch domain, such as the state, creation date, update version,
%% and update date for cluster options.
describe_elasticsearch_domain_config(Client, DomainName)
when is_map(Client) ->
describe_elasticsearch_domain_config(Client, DomainName, #{}, #{}).
describe_elasticsearch_domain_config(Client, DomainName, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
describe_elasticsearch_domain_config(Client, DomainName, QueryMap, HeadersMap, []).
describe_elasticsearch_domain_config(Client, DomainName, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/2015-01-01/es/domain/", aws_util:encode_uri(DomainName), "/config"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query_ = [],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Returns domain configuration information about the specified
Elasticsearch domains , including the domain ID , domain endpoint , and
domain ARN .
describe_elasticsearch_domains(Client, Input) ->
describe_elasticsearch_domains(Client, Input, []).
describe_elasticsearch_domains(Client, Input0, Options0) ->
Method = post,
Path = ["/2015-01-01/es/domain-info"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
@doc Describe Elasticsearch Limits for a given InstanceType and
ElasticsearchVersion .
%%
%% When modifying existing Domain, specify the ` `DomainName' ' to
%% know what Limits are supported for modifying.
describe_elasticsearch_instance_type_limits(Client, ElasticsearchVersion, InstanceType)
when is_map(Client) ->
describe_elasticsearch_instance_type_limits(Client, ElasticsearchVersion, InstanceType, #{}, #{}).
describe_elasticsearch_instance_type_limits(Client, ElasticsearchVersion, InstanceType, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
describe_elasticsearch_instance_type_limits(Client, ElasticsearchVersion, InstanceType, QueryMap, HeadersMap, []).
describe_elasticsearch_instance_type_limits(Client, ElasticsearchVersion, InstanceType, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/2015-01-01/es/instanceTypeLimits/", aws_util:encode_uri(ElasticsearchVersion), "/", aws_util:encode_uri(InstanceType), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"domainName">>, maps:get(<<"domainName">>, QueryMap, undefined)}
],
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Lists all the inbound cross-cluster search connections for a
%% destination domain.
describe_inbound_cross_cluster_search_connections(Client, Input) ->
describe_inbound_cross_cluster_search_connections(Client, Input, []).
describe_inbound_cross_cluster_search_connections(Client, Input0, Options0) ->
Method = post,
Path = ["/2015-01-01/es/ccs/inboundConnection/search"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Lists all the outbound cross-cluster search connections for a source
%% domain.
describe_outbound_cross_cluster_search_connections(Client, Input) ->
describe_outbound_cross_cluster_search_connections(Client, Input, []).
describe_outbound_cross_cluster_search_connections(Client, Input0, Options0) ->
Method = post,
Path = ["/2015-01-01/es/ccs/outboundConnection/search"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
@doc Describes all packages available to Amazon ES .
%%
%% Includes options for filtering, limiting the number of results, and
%% pagination.
describe_packages(Client, Input) ->
describe_packages(Client, Input, []).
describe_packages(Client, Input0, Options0) ->
Method = post,
Path = ["/2015-01-01/packages/describe"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
@doc Lists available reserved Elasticsearch instance offerings .
describe_reserved_elasticsearch_instance_offerings(Client)
when is_map(Client) ->
describe_reserved_elasticsearch_instance_offerings(Client, #{}, #{}).
describe_reserved_elasticsearch_instance_offerings(Client, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
describe_reserved_elasticsearch_instance_offerings(Client, QueryMap, HeadersMap, []).
describe_reserved_elasticsearch_instance_offerings(Client, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/2015-01-01/es/reservedInstanceOfferings"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
{<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)},
{<<"offeringId">>, maps:get(<<"offeringId">>, QueryMap, undefined)}
],
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
@doc Returns information about reserved Elasticsearch instances for this
%% account.
describe_reserved_elasticsearch_instances(Client)
when is_map(Client) ->
describe_reserved_elasticsearch_instances(Client, #{}, #{}).
describe_reserved_elasticsearch_instances(Client, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
describe_reserved_elasticsearch_instances(Client, QueryMap, HeadersMap, []).
describe_reserved_elasticsearch_instances(Client, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/2015-01-01/es/reservedInstances"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
{<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)},
{<<"reservationId">>, maps:get(<<"reservationId">>, QueryMap, undefined)}
],
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
@doc Describes one or more Amazon OpenSearch Service - managed VPC
%% endpoints.
describe_vpc_endpoints(Client, Input) ->
describe_vpc_endpoints(Client, Input, []).
describe_vpc_endpoints(Client, Input0, Options0) ->
Method = post,
Path = ["/2015-01-01/es/vpcEndpoints/describe"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
@doc Dissociates a package from the Amazon ES domain .
dissociate_package(Client, DomainName, PackageID, Input) ->
dissociate_package(Client, DomainName, PackageID, Input, []).
dissociate_package(Client, DomainName, PackageID, Input0, Options0) ->
Method = post,
Path = ["/2015-01-01/packages/dissociate/", aws_util:encode_uri(PackageID), "/", aws_util:encode_uri(DomainName), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Returns a list of upgrade compatible Elasticsearch versions.
%%
%% You can optionally pass a ` `DomainName' ' to get all upgrade
compatible Elasticsearch versions for that specific domain .
get_compatible_elasticsearch_versions(Client)
when is_map(Client) ->
get_compatible_elasticsearch_versions(Client, #{}, #{}).
get_compatible_elasticsearch_versions(Client, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
get_compatible_elasticsearch_versions(Client, QueryMap, HeadersMap, []).
get_compatible_elasticsearch_versions(Client, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/2015-01-01/es/compatibleVersions"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"domainName">>, maps:get(<<"domainName">>, QueryMap, undefined)}
],
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Returns a list of versions of the package, along with their creation
%% time and commit message.
get_package_version_history(Client, PackageID)
when is_map(Client) ->
get_package_version_history(Client, PackageID, #{}, #{}).
get_package_version_history(Client, PackageID, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
get_package_version_history(Client, PackageID, QueryMap, HeadersMap, []).
get_package_version_history(Client, PackageID, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/2015-01-01/packages/", aws_util:encode_uri(PackageID), "/history"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
{<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)}
],
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Retrieves the complete history of the last 10 upgrades that were
%% performed on the domain.
%%
%% Arity /2 and /4 default the query map, header map and request options;
%% all clauses delegate to the /5 clause, which issues the signed GET.
get_upgrade_history(Client, DomainName)
  when is_map(Client) ->
    get_upgrade_history(Client, DomainName, #{}, #{}).
get_upgrade_history(Client, DomainName, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_upgrade_history(Client, DomainName, QueryMap, HeadersMap, []).
get_upgrade_history(Client, DomainName, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/2015-01-01/es/upgradeDomain/", aws_util:encode_uri(DomainName), "/history"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    Headers = [],
    %% Optional pagination parameters; unset entries are filtered out below.
    Query0_ =
      [
        {<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
        {<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)}
      ],
    Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
    request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Retrieves the latest status of the last upgrade or upgrade
%% eligibility check that was performed on the domain.
get_upgrade_status(Client, DomainName)
  when is_map(Client) ->
    get_upgrade_status(Client, DomainName, #{}, #{}).
get_upgrade_status(Client, DomainName, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_upgrade_status(Client, DomainName, QueryMap, HeadersMap, []).
get_upgrade_status(Client, DomainName, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/2015-01-01/es/upgradeDomain/", aws_util:encode_uri(DomainName), "/status"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    Headers = [],
    Query_ = [],
    request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Returns the name of all Elasticsearch domains owned by the current
%% user's account.
list_domain_names(Client)
  when is_map(Client) ->
    list_domain_names(Client, #{}, #{}).
list_domain_names(Client, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_domain_names(Client, QueryMap, HeadersMap, []).
list_domain_names(Client, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/2015-01-01/domain"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    Headers = [],
    %% Optional engineType filter; dropped when unset.
    Query0_ =
      [
        {<<"engineType">>, maps:get(<<"engineType">>, QueryMap, undefined)}
      ],
    Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
    request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Lists all Amazon ES domains associated with the package.
%%
%% Arity /2 and /4 default the query map, header map and request options.
list_domains_for_package(Client, PackageID)
  when is_map(Client) ->
    list_domains_for_package(Client, PackageID, #{}, #{}).
list_domains_for_package(Client, PackageID, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_domains_for_package(Client, PackageID, QueryMap, HeadersMap, []).
list_domains_for_package(Client, PackageID, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/2015-01-01/packages/", aws_util:encode_uri(PackageID), "/domains"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    Headers = [],
    %% Optional pagination parameters; unset entries are filtered out below.
    Query0_ =
      [
        {<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
        {<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)}
      ],
    Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
    request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc List all Elasticsearch instance types that are supported for the
%% given ElasticsearchVersion.
list_elasticsearch_instance_types(Client, ElasticsearchVersion)
  when is_map(Client) ->
    list_elasticsearch_instance_types(Client, ElasticsearchVersion, #{}, #{}).
list_elasticsearch_instance_types(Client, ElasticsearchVersion, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_elasticsearch_instance_types(Client, ElasticsearchVersion, QueryMap, HeadersMap, []).
list_elasticsearch_instance_types(Client, ElasticsearchVersion, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/2015-01-01/es/instanceTypes/", aws_util:encode_uri(ElasticsearchVersion), ""],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    Headers = [],
    %% Optional domain filter plus pagination; unset entries are dropped.
    Query0_ =
      [
        {<<"domainName">>, maps:get(<<"domainName">>, QueryMap, undefined)},
        {<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
        {<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)}
      ],
    Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
    request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc List all supported Elasticsearch versions.
list_elasticsearch_versions(Client)
  when is_map(Client) ->
    list_elasticsearch_versions(Client, #{}, #{}).
list_elasticsearch_versions(Client, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_elasticsearch_versions(Client, QueryMap, HeadersMap, []).
list_elasticsearch_versions(Client, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/2015-01-01/es/versions"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    Headers = [],
    %% Optional pagination parameters; unset entries are filtered out below.
    Query0_ =
      [
        {<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
        {<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)}
      ],
    Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
    request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Lists all packages associated with the Amazon ES domain.
%%
%% Arity /2 and /4 default the query map, header map and request options.
list_packages_for_domain(Client, DomainName)
  when is_map(Client) ->
    list_packages_for_domain(Client, DomainName, #{}, #{}).
list_packages_for_domain(Client, DomainName, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_packages_for_domain(Client, DomainName, QueryMap, HeadersMap, []).
list_packages_for_domain(Client, DomainName, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/2015-01-01/domain/", aws_util:encode_uri(DomainName), "/packages"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    Headers = [],
    %% Optional pagination parameters; unset entries are filtered out below.
    Query0_ =
      [
        {<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
        {<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)}
      ],
    Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
    request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Returns all tags for the given Elasticsearch domain.
%% ARN is passed as a mandatory query-string parameter, not a path segment.
list_tags(Client, ARN)
  when is_map(Client) ->
    list_tags(Client, ARN, #{}, #{}).
list_tags(Client, ARN, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_tags(Client, ARN, QueryMap, HeadersMap, []).
list_tags(Client, ARN, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/2015-01-01/tags/"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    Headers = [],
    Query0_ =
      [
        {<<"arn">>, ARN}
      ],
    Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
    request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Retrieves information about each principal that is allowed to access
%% a given Amazon OpenSearch Service domain through the use of an interface
%% VPC endpoint.
list_vpc_endpoint_access(Client, DomainName)
  when is_map(Client) ->
    list_vpc_endpoint_access(Client, DomainName, #{}, #{}).
list_vpc_endpoint_access(Client, DomainName, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_vpc_endpoint_access(Client, DomainName, QueryMap, HeadersMap, []).
list_vpc_endpoint_access(Client, DomainName, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/2015-01-01/es/domain/", aws_util:encode_uri(DomainName), "/listVpcEndpointAccess"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    Headers = [],
    %% Optional pagination token; dropped when unset.
    Query0_ =
      [
        {<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)}
      ],
    Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
    request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Retrieves all Amazon OpenSearch Service-managed VPC endpoints in the
%% current account and Region.
list_vpc_endpoints(Client)
  when is_map(Client) ->
    list_vpc_endpoints(Client, #{}, #{}).
list_vpc_endpoints(Client, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_vpc_endpoints(Client, QueryMap, HeadersMap, []).
list_vpc_endpoints(Client, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/2015-01-01/es/vpcEndpoints"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    Headers = [],
    %% Optional pagination token; dropped when unset.
    Query0_ =
      [
        {<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)}
      ],
    Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
    request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Retrieves all Amazon OpenSearch Service-managed VPC endpoints
%% associated with a particular domain.
list_vpc_endpoints_for_domain(Client, DomainName)
  when is_map(Client) ->
    list_vpc_endpoints_for_domain(Client, DomainName, #{}, #{}).
list_vpc_endpoints_for_domain(Client, DomainName, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_vpc_endpoints_for_domain(Client, DomainName, QueryMap, HeadersMap, []).
list_vpc_endpoints_for_domain(Client, DomainName, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Path = ["/2015-01-01/es/domain/", aws_util:encode_uri(DomainName), "/vpcEndpoints"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    Headers = [],
    %% Optional pagination token; dropped when unset.
    Query0_ =
      [
        {<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)}
      ],
    Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
    request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Allows you to purchase reserved Elasticsearch instances.
%% Input is POSTed as the JSON request body (see encode_payload/1).
purchase_reserved_elasticsearch_instance_offering(Client, Input) ->
    purchase_reserved_elasticsearch_instance_offering(Client, Input, []).
purchase_reserved_elasticsearch_instance_offering(Client, Input0, Options0) ->
    Method = post,
    Path = ["/2015-01-01/es/purchaseReservedInstanceOffering"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Allows the destination domain owner to reject an inbound
%% cross-cluster search connection request.
%% Issued as a PUT against the connection's /reject sub-resource.
reject_inbound_cross_cluster_search_connection(Client, CrossClusterSearchConnectionId, Input) ->
    reject_inbound_cross_cluster_search_connection(Client, CrossClusterSearchConnectionId, Input, []).
reject_inbound_cross_cluster_search_connection(Client, CrossClusterSearchConnectionId, Input0, Options0) ->
    Method = put,
    Path = ["/2015-01-01/es/ccs/inboundConnection/", aws_util:encode_uri(CrossClusterSearchConnectionId), "/reject"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Removes the specified set of tags from the specified Elasticsearch
%% domain.
remove_tags(Client, Input) ->
    remove_tags(Client, Input, []).
remove_tags(Client, Input0, Options0) ->
    Method = post,
    Path = ["/2015-01-01/tags-removal"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Revokes access to an Amazon OpenSearch Service domain that was
%% provided through an interface VPC endpoint.
revoke_vpc_endpoint_access(Client, DomainName, Input) ->
    revoke_vpc_endpoint_access(Client, DomainName, Input, []).
revoke_vpc_endpoint_access(Client, DomainName, Input0, Options0) ->
    Method = post,
    Path = ["/2015-01-01/es/domain/", aws_util:encode_uri(DomainName), "/revokeVpcEndpointAccess"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Schedules a service software update for an Amazon ES domain.
start_elasticsearch_service_software_update(Client, Input) ->
    start_elasticsearch_service_software_update(Client, Input, []).
start_elasticsearch_service_software_update(Client, Input0, Options0) ->
    Method = post,
    Path = ["/2015-01-01/es/serviceSoftwareUpdate/start"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Modifies the cluster configuration of the specified Elasticsearch
%% domain, such as setting the instance type and the number of instances.
update_elasticsearch_domain_config(Client, DomainName, Input) ->
    update_elasticsearch_domain_config(Client, DomainName, Input, []).
update_elasticsearch_domain_config(Client, DomainName, Input0, Options0) ->
    Method = post,
    Path = ["/2015-01-01/es/domain/", aws_util:encode_uri(DomainName), "/config"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Updates a package for use with Amazon ES domains.
update_package(Client, Input) ->
    update_package(Client, Input, []).
update_package(Client, Input0, Options0) ->
    Method = post,
    Path = ["/2015-01-01/packages/update"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Modifies an Amazon OpenSearch Service-managed interface VPC endpoint.
update_vpc_endpoint(Client, Input) ->
    update_vpc_endpoint(Client, Input, []).
update_vpc_endpoint(Client, Input0, Options0) ->
    Method = post,
    Path = ["/2015-01-01/es/vpcEndpoints/update"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Allows you to either upgrade your domain or perform an Upgrade
%% eligibility check to a compatible Elasticsearch version.
upgrade_elasticsearch_domain(Client, Input) ->
    upgrade_elasticsearch_domain(Client, Input, []).
upgrade_elasticsearch_domain(Client, Input0, Options0) ->
    Method = post,
    Path = ["/2015-01-01/es/upgradeDomain"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%%====================================================================
%% Internal functions
%%====================================================================
%% Entry point shared by every generated operation above: wraps
%% do_request/8 in a closure so aws_request:request/2 can apply its
%% retry policy (driven by Options) around the actual HTTP call.
-spec request(aws_client:aws_client(), atom(), iolist(), list(),
              list(), map() | undefined, list(), pos_integer() | undefined) ->
  {ok, {integer(), list()}} |
  {ok, Result, {integer(), list(), hackney:client()}} |
  {error, Error, {integer(), list(), hackney:client()}} |
  {error, term()} when
  Result :: map(),
  Error :: map().
request(Client, Method, Path, Query, Headers0, Input, Options, SuccessStatusCode) ->
    RequestFun = fun() -> do_request(Client, Method, Path, Query, Headers0, Input, Options, SuccessStatusCode) end,
    aws_request:request(RequestFun, Options).
%% Build, sign and execute one HTTP request against the "es" service.
%% Statement order matters: the payload and final header set must be
%% fixed before SigV4 signing, and signing must precede the hackney call.
do_request(Client, Method, Path, Query, Headers0, Input, Options, SuccessStatusCode) ->
    %% Pin the service name used for host construction and request signing.
    Client1 = Client#{service => <<"es">>},
    Host = build_host(<<"es">>, Client1),
    URL0 = build_url(Host, Path, Client1),
    URL = aws_request:add_query(URL0, Query),
    AdditionalHeaders1 = [ {<<"Host">>, Host}
                         , {<<"Content-Type">>, <<"application/x-amz-json-1.1">>}
                         ],
    %% Either ship the caller's raw <<"Body">> bytes or JSON-encode Input.
    Payload =
      case proplists:get_value(send_body_as_binary, Options) of
        true ->
          maps:get(<<"Body">>, Input, <<"">>);
        false ->
          encode_payload(Input)
      end,
    %% Optionally add the SHA-256 content checksum header before signing.
    AdditionalHeaders = case proplists:get_value(append_sha256_content_hash, Options, false) of
                          true ->
                            add_checksum_hash_header(AdditionalHeaders1, Payload);
                          false ->
                            AdditionalHeaders1
                        end,
    Headers1 = aws_request:add_headers(AdditionalHeaders, Headers0),
    MethodBin = aws_request:method_to_binary(Method),
    SignedHeaders = aws_request:sign_request(Client1, MethodBin, URL, Headers1, Payload),
    Response = hackney:request(Method, URL, SignedHeaders, Payload, Options),
    DecodeBody = not proplists:get_value(receive_body_as_binary, Options),
    handle_response(Response, SuccessStatusCode, DecodeBody).
%% Prepend an X-Amz-CheckSum-SHA256 header carrying the base64-encoded
%% SHA-256 digest of Body to the given header proplist.
add_checksum_hash_header(Headers, Body) ->
    Digest = crypto:hash(sha256, Body),
    Checksum = base64:encode(Digest),
    [{<<"X-Amz-CheckSum-SHA256">>, Checksum} | Headers].
%% Normalize hackney responses into this module's {ok, ...}/{error, ...}
%% shapes. 200/202/204/206 (or the operation-specific SuccessStatusCode)
%% count as success; clause order is significant, so the code below is
%% kept byte-identical.
%% Bodyless responses arrive as 3-tuples and carry no body to decode.
handle_response({ok, StatusCode, ResponseHeaders}, SuccessStatusCode, _DecodeBody)
  when StatusCode =:= 200;
       StatusCode =:= 202;
       StatusCode =:= 204;
       StatusCode =:= 206;
       StatusCode =:= SuccessStatusCode ->
    {ok, {StatusCode, ResponseHeaders}};
handle_response({ok, StatusCode, ResponseHeaders}, _, _DecodeBody) ->
    {error, {StatusCode, ResponseHeaders}};
handle_response({ok, StatusCode, ResponseHeaders, Client}, SuccessStatusCode, DecodeBody)
  when StatusCode =:= 200;
       StatusCode =:= 202;
       StatusCode =:= 204;
       StatusCode =:= 206;
       StatusCode =:= SuccessStatusCode ->
    case hackney:body(Client) of
      {ok, <<>>} when StatusCode =:= 200;
                      StatusCode =:= SuccessStatusCode ->
        %% Empty body on a plain success: return an empty map.
        {ok, #{}, {StatusCode, ResponseHeaders, Client}};
      {ok, Body} ->
        %% NOTE(review): an empty body on 202/204/206 reaches
        %% jsx:decode(<<>>) below and raises body_decode_failed —
        %% confirm bodyless responses always arrive as 3-tuples.
        Result = case DecodeBody of
                   true ->
                     try
                       jsx:decode(Body)
                     catch
                       Error:Reason:Stack ->
                         erlang:raise(error, {body_decode_failed, Error, Reason, StatusCode, Body}, Stack)
                     end;
                   false -> #{<<"Body">> => Body}
                 end,
        {ok, Result, {StatusCode, ResponseHeaders, Client}}
    end;
handle_response({ok, StatusCode, _ResponseHeaders, _Client}, _, _DecodeBody)
  when StatusCode =:= 503 ->
    %% Retriable error if retries are enabled
    {error, service_unavailable};
handle_response({ok, StatusCode, ResponseHeaders, Client}, _, _DecodeBody) ->
    %% Any other status: decode the error document AWS returned.
    {ok, Body} = hackney:body(Client),
    try
      DecodedError = jsx:decode(Body),
      {error, DecodedError, {StatusCode, ResponseHeaders, Client}}
    catch
      Error:Reason:Stack ->
        erlang:raise(error, {body_decode_failed, Error, Reason, StatusCode, Body}, Stack)
    end;
handle_response({error, Reason}, _, _DecodeBody) ->
    %% Transport-level failure (connect error, timeout, ...).
    {error, Reason}.
%% Resolve the host to contact. A <<"local">> region uses the explicitly
%% configured endpoint (or <<"localhost">> when none is set); otherwise
%% the host is <<"Prefix.Region.Endpoint">>. Clause order is significant:
%% the endpoint-bearing local clause must be tried first.
build_host(_ServicePrefix, #{region := <<"local">>, endpoint := CustomEndpoint}) ->
    CustomEndpoint;
build_host(_ServicePrefix, #{region := <<"local">>}) ->
    <<"localhost">>;
build_host(ServicePrefix, #{region := RegionName, endpoint := DnsSuffix}) ->
    aws_util:binary_join([ServicePrefix, RegionName, DnsSuffix], <<".">>).
%% Assemble the full request URL as Proto://Host:Port/Path.
build_url(Host, RawPath, Client) ->
    Scheme = aws_client:proto(Client),
    PortBin = aws_client:port(Client),
    PathBin = erlang:iolist_to_binary(RawPath),
    aws_util:binary_join([Scheme, <<"://">>, Host, <<":">>, PortBin, PathBin], <<"">>).
-spec encode_payload(undefined | map()) -> binary().
%% JSON-encode the request body; an absent body becomes the empty binary.
encode_payload(Input) ->
    case Input of
        undefined -> <<>>;
        Body -> jsx:encode(Body)
    end.
| null | https://raw.githubusercontent.com/aws-beam/aws-erlang/699287cee7dfc9dc8c08ced5f090dcc192c9cba8/src/aws_elasticsearch.erl | erlang | WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
Use the Amazon Elasticsearch Configuration API to create, configure, and
The endpoint for configuration service requests is region-specific:
es.region.amazonaws.com. For example, es.us-east-1.amazonaws.com. For a
current list of supported regions and endpoints, see Regions and
Endpoints.
====================================================================
API
====================================================================
@doc Allows the destination domain owner to accept an inbound
cross-cluster search connection request.
for more information.
@doc Creates a new cross-cluster search connection from a source domain to
a destination domain.
data.
Once a domain is deleted, it cannot be recovered.
Elasticsearch Service Domains.
@doc Allows the destination domain owner to delete an existing inbound
cross-cluster search connection.
@doc Allows the source domain owner to delete an existing outbound
cross-cluster search connection.
@doc Delete the package.
scheduled date.
@doc Returns information about the current blue/green deployment happening
on a domain, including a change ID, status, and progress stages.
@doc Returns domain configuration information about the specified
Elasticsearch domain, including the domain ID, domain endpoint, and domain
@doc Provides cluster configuration information about the specified
Elasticsearch domain, such as the state, creation date, update version,
and update date for cluster options.
@doc Returns domain configuration information about the specified
When modifying existing Domain, specify the ` `DomainName' ' to
know what Limits are supported for modifying.
@doc Lists all the inbound cross-cluster search connections for a
destination domain.
@doc Lists all the outbound cross-cluster search connections for a source
domain.
Includes options for filtering, limiting the number of results, and
pagination.
account.
endpoints.
You can optionally pass a ` `DomainName' ' to get all upgrade
@doc Returns a list of versions of the package, along with their creation
time and commit message.
performed on the domain.
@doc Retrieves the latest status of the last upgrade or upgrade
eligibility check that was performed on the domain.
user's account.
@doc Returns all tags for the given Elasticsearch domain.
@doc Retrieves information about each principal that is allowed to access
current account and Region.
associated with a particular domain.
@doc Allows the destination domain owner to reject an inbound
cross-cluster search connection request.
domain.
domain, setting as setting the instance type and the number of instances.
@doc Allows you to either upgrade your domain or perform an Upgrade
====================================================================
==================================================================== | See -beam/aws-codegen for more details .
@doc Amazon Elasticsearch Configuration Service
manage Elasticsearch domains .
For sample code that uses the Configuration API , see the Amazon
Elasticsearch Service Developer Guide . The guide also contains sample code
for sending signed HTTP requests to the Elasticsearch APIs .
-module(aws_elasticsearch).
-export([accept_inbound_cross_cluster_search_connection/3,
accept_inbound_cross_cluster_search_connection/4,
add_tags/2,
add_tags/3,
associate_package/4,
associate_package/5,
authorize_vpc_endpoint_access/3,
authorize_vpc_endpoint_access/4,
cancel_elasticsearch_service_software_update/2,
cancel_elasticsearch_service_software_update/3,
create_elasticsearch_domain/2,
create_elasticsearch_domain/3,
create_outbound_cross_cluster_search_connection/2,
create_outbound_cross_cluster_search_connection/3,
create_package/2,
create_package/3,
create_vpc_endpoint/2,
create_vpc_endpoint/3,
delete_elasticsearch_domain/3,
delete_elasticsearch_domain/4,
delete_elasticsearch_service_role/2,
delete_elasticsearch_service_role/3,
delete_inbound_cross_cluster_search_connection/3,
delete_inbound_cross_cluster_search_connection/4,
delete_outbound_cross_cluster_search_connection/3,
delete_outbound_cross_cluster_search_connection/4,
delete_package/3,
delete_package/4,
delete_vpc_endpoint/3,
delete_vpc_endpoint/4,
describe_domain_auto_tunes/2,
describe_domain_auto_tunes/4,
describe_domain_auto_tunes/5,
describe_domain_change_progress/2,
describe_domain_change_progress/4,
describe_domain_change_progress/5,
describe_elasticsearch_domain/2,
describe_elasticsearch_domain/4,
describe_elasticsearch_domain/5,
describe_elasticsearch_domain_config/2,
describe_elasticsearch_domain_config/4,
describe_elasticsearch_domain_config/5,
describe_elasticsearch_domains/2,
describe_elasticsearch_domains/3,
describe_elasticsearch_instance_type_limits/3,
describe_elasticsearch_instance_type_limits/5,
describe_elasticsearch_instance_type_limits/6,
describe_inbound_cross_cluster_search_connections/2,
describe_inbound_cross_cluster_search_connections/3,
describe_outbound_cross_cluster_search_connections/2,
describe_outbound_cross_cluster_search_connections/3,
describe_packages/2,
describe_packages/3,
describe_reserved_elasticsearch_instance_offerings/1,
describe_reserved_elasticsearch_instance_offerings/3,
describe_reserved_elasticsearch_instance_offerings/4,
describe_reserved_elasticsearch_instances/1,
describe_reserved_elasticsearch_instances/3,
describe_reserved_elasticsearch_instances/4,
describe_vpc_endpoints/2,
describe_vpc_endpoints/3,
dissociate_package/4,
dissociate_package/5,
get_compatible_elasticsearch_versions/1,
get_compatible_elasticsearch_versions/3,
get_compatible_elasticsearch_versions/4,
get_package_version_history/2,
get_package_version_history/4,
get_package_version_history/5,
get_upgrade_history/2,
get_upgrade_history/4,
get_upgrade_history/5,
get_upgrade_status/2,
get_upgrade_status/4,
get_upgrade_status/5,
list_domain_names/1,
list_domain_names/3,
list_domain_names/4,
list_domains_for_package/2,
list_domains_for_package/4,
list_domains_for_package/5,
list_elasticsearch_instance_types/2,
list_elasticsearch_instance_types/4,
list_elasticsearch_instance_types/5,
list_elasticsearch_versions/1,
list_elasticsearch_versions/3,
list_elasticsearch_versions/4,
list_packages_for_domain/2,
list_packages_for_domain/4,
list_packages_for_domain/5,
list_tags/2,
list_tags/4,
list_tags/5,
list_vpc_endpoint_access/2,
list_vpc_endpoint_access/4,
list_vpc_endpoint_access/5,
list_vpc_endpoints/1,
list_vpc_endpoints/3,
list_vpc_endpoints/4,
list_vpc_endpoints_for_domain/2,
list_vpc_endpoints_for_domain/4,
list_vpc_endpoints_for_domain/5,
purchase_reserved_elasticsearch_instance_offering/2,
purchase_reserved_elasticsearch_instance_offering/3,
reject_inbound_cross_cluster_search_connection/3,
reject_inbound_cross_cluster_search_connection/4,
remove_tags/2,
remove_tags/3,
revoke_vpc_endpoint_access/3,
revoke_vpc_endpoint_access/4,
start_elasticsearch_service_software_update/2,
start_elasticsearch_service_software_update/3,
update_elasticsearch_domain_config/3,
update_elasticsearch_domain_config/4,
update_package/2,
update_package/3,
update_vpc_endpoint/2,
update_vpc_endpoint/3,
upgrade_elasticsearch_domain/2,
upgrade_elasticsearch_domain/3]).
-include_lib("hackney/include/hackney_lib.hrl").
%% @doc Allows the destination domain owner to accept an inbound
%% cross-cluster search connection request.
%% Issued as a PUT against the connection's /accept sub-resource.
accept_inbound_cross_cluster_search_connection(Client, CrossClusterSearchConnectionId, Input) ->
    accept_inbound_cross_cluster_search_connection(Client, CrossClusterSearchConnectionId, Input, []).
accept_inbound_cross_cluster_search_connection(Client, CrossClusterSearchConnectionId, Input0, Options0) ->
    Method = put,
    Path = ["/2015-01-01/es/ccs/inboundConnection/", aws_util:encode_uri(CrossClusterSearchConnectionId), "/accept"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Attaches tags to an existing Elasticsearch domain.
%% Tags are a set of case-sensitive key value pairs. An Elasticsearch domain
%% may have up to 10 tags. See Tagging Amazon Elasticsearch Service Domains
%% for more information.
add_tags(Client, Input) ->
    add_tags(Client, Input, []).
add_tags(Client, Input0, Options0) ->
    Method = post,
    Path = ["/2015-01-01/tags"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Associates a package with an Amazon ES domain.
associate_package(Client, DomainName, PackageID, Input) ->
    associate_package(Client, DomainName, PackageID, Input, []).
associate_package(Client, DomainName, PackageID, Input0, Options0) ->
    Method = post,
    Path = ["/2015-01-01/packages/associate/", aws_util:encode_uri(PackageID), "/", aws_util:encode_uri(DomainName), ""],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Provides access to an Amazon OpenSearch Service domain through the
%% use of an interface VPC endpoint.
authorize_vpc_endpoint_access(Client, DomainName, Input) ->
    authorize_vpc_endpoint_access(Client, DomainName, Input, []).
authorize_vpc_endpoint_access(Client, DomainName, Input0, Options0) ->
    Method = post,
    Path = ["/2015-01-01/es/domain/", aws_util:encode_uri(DomainName), "/authorizeVpcEndpointAccess"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Cancels a scheduled service software update for an Amazon ES domain.
%% You can only perform this operation before the `AutomatedUpdateDate'
%% and when the `UpdateStatus' is in the `PENDING_UPDATE' state.
cancel_elasticsearch_service_software_update(Client, Input) ->
    cancel_elasticsearch_service_software_update(Client, Input, []).
cancel_elasticsearch_service_software_update(Client, Input0, Options0) ->
    Method = post,
    Path = ["/2015-01-01/es/serviceSoftwareUpdate/cancel"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Creates a new Elasticsearch domain.
%% For more information, see Creating Elasticsearch Domains in the Amazon
%% Elasticsearch Service Developer Guide.
create_elasticsearch_domain(Client, Input) ->
    create_elasticsearch_domain(Client, Input, []).
create_elasticsearch_domain(Client, Input0, Options0) ->
    Method = post,
    Path = ["/2015-01-01/es/domain"],
    SuccessStatusCode = undefined,
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Headers = [],
    Input1 = Input0,
    CustomHeaders = [],
    Input2 = Input1,
    Query_ = [],
    Input = Input2,
    request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
create_outbound_cross_cluster_search_connection(Client, Input) ->
create_outbound_cross_cluster_search_connection(Client, Input, []).
create_outbound_cross_cluster_search_connection(Client, Input0, Options0) ->
Method = post,
Path = ["/2015-01-01/es/ccs/outboundConnection"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Creates a package for use with Amazon ES domains.
create_package(Client, Input) ->
    create_package(Client, Input, []).

create_package(Client, Input, Options0) ->
    %% Static request metadata for this generated operation.
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    request(Client, post, ["/2015-01-01/packages"],
            [], [], Input, Options, undefined).
%% @doc Creates an Amazon OpenSearch Service-managed VPC endpoint.
create_vpc_endpoint(Client, Input) ->
    create_vpc_endpoint(Client, Input, []).

create_vpc_endpoint(Client, Input, Options0) ->
    %% Static request metadata for this generated operation.
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    request(Client, post, ["/2015-01-01/es/vpcEndpoints"],
            [], [], Input, Options, undefined).
%% @doc Permanently deletes the specified Elasticsearch domain and all of
%% its data.
delete_elasticsearch_domain(Client, DomainName, Input) ->
    delete_elasticsearch_domain(Client, DomainName, Input, []).

delete_elasticsearch_domain(Client, DomainName, Input, Options0) ->
    %% Static request metadata for this generated operation.
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Path = ["/2015-01-01/es/domain/", aws_util:encode_uri(DomainName), ""],
    request(Client, delete, Path, [], [], Input, Options, undefined).
%% @doc Deletes the service-linked role that Elasticsearch Service uses to
%% manage and maintain VPC domains.
%%
%% Role deletion will fail if any existing VPC domains use the role. You
%% must delete any such Elasticsearch domains before deleting the role.
%% See Deleting Elasticsearch Service Role in VPC Endpoints for Amazon
%% Elasticsearch Service Domains.
delete_elasticsearch_service_role(Client, Input) ->
    delete_elasticsearch_service_role(Client, Input, []).

delete_elasticsearch_service_role(Client, Input, Options0) ->
    %% Static request metadata for this generated operation.
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    request(Client, delete, ["/2015-01-01/es/role"],
            [], [], Input, Options, undefined).
%% @doc Deletes the specified inbound cross-cluster search connection.
delete_inbound_cross_cluster_search_connection(Client, CrossClusterSearchConnectionId, Input) ->
    delete_inbound_cross_cluster_search_connection(Client, CrossClusterSearchConnectionId, Input, []).

delete_inbound_cross_cluster_search_connection(Client, CrossClusterSearchConnectionId, Input, Options0) ->
    %% Static request metadata for this generated operation.
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Path = ["/2015-01-01/es/ccs/inboundConnection/",
            aws_util:encode_uri(CrossClusterSearchConnectionId), ""],
    request(Client, delete, Path, [], [], Input, Options, undefined).
%% @doc Deletes the specified outbound cross-cluster search connection.
delete_outbound_cross_cluster_search_connection(Client, CrossClusterSearchConnectionId, Input) ->
    delete_outbound_cross_cluster_search_connection(Client, CrossClusterSearchConnectionId, Input, []).

delete_outbound_cross_cluster_search_connection(Client, CrossClusterSearchConnectionId, Input, Options0) ->
    %% Static request metadata for this generated operation.
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Path = ["/2015-01-01/es/ccs/outboundConnection/",
            aws_util:encode_uri(CrossClusterSearchConnectionId), ""],
    request(Client, delete, Path, [], [], Input, Options, undefined).
%% @doc Deletes the specified package.
delete_package(Client, PackageID, Input) ->
    delete_package(Client, PackageID, Input, []).

delete_package(Client, PackageID, Input, Options0) ->
    %% Static request metadata for this generated operation.
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Path = ["/2015-01-01/packages/", aws_util:encode_uri(PackageID), ""],
    request(Client, delete, Path, [], [], Input, Options, undefined).
%% @doc Deletes an Amazon OpenSearch Service-managed interface VPC
%% endpoint.
delete_vpc_endpoint(Client, VpcEndpointId, Input) ->
    delete_vpc_endpoint(Client, VpcEndpointId, Input, []).

delete_vpc_endpoint(Client, VpcEndpointId, Input, Options0) ->
    %% Static request metadata for this generated operation.
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Path = ["/2015-01-01/es/vpcEndpoints/",
            aws_util:encode_uri(VpcEndpointId), ""],
    request(Client, delete, Path, [], [], Input, Options, undefined).
%% @doc Provides scheduled Auto-Tune action details for the Elasticsearch
%% domain, such as Auto-Tune action type, description, and severity.
describe_domain_auto_tunes(Client, DomainName)
  when is_map(Client) ->
    describe_domain_auto_tunes(Client, DomainName, #{}, #{}).

describe_domain_auto_tunes(Client, DomainName, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    describe_domain_auto_tunes(Client, DomainName, QueryMap, HeadersMap, []).

describe_domain_auto_tunes(Client, DomainName, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    Path = ["/2015-01-01/es/domain/", aws_util:encode_uri(DomainName),
            "/autoTunes"],
    request(Client, get, Path, [], [], undefined, Options, undefined).
%% @doc Retrieves progress information for a configuration change on the
%% given domain.
%%
%% The optional `changeid' query parameter selects a specific change.
describe_domain_change_progress(Client, DomainName)
  when is_map(Client) ->
    describe_domain_change_progress(Client, DomainName, #{}, #{}).

describe_domain_change_progress(Client, DomainName, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    describe_domain_change_progress(Client, DomainName, QueryMap, HeadersMap, []).

describe_domain_change_progress(Client, DomainName, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    Path = ["/2015-01-01/es/domain/", aws_util:encode_uri(DomainName),
            "/progress"],
    QueryPairs = [{<<"changeid">>, maps:get(<<"changeid">>, QueryMap, undefined)}],
    %% Drop unset (undefined) query parameters.
    Query = [KV || {_, Value} = KV <- QueryPairs, Value =/= undefined],
    request(Client, get, Path, Query, [], undefined, Options, undefined).
%% @doc Returns domain configuration information about the specified
%% Elasticsearch domain, including the domain ID, domain endpoint, and
%% domain ARN.
describe_elasticsearch_domain(Client, DomainName)
  when is_map(Client) ->
    describe_elasticsearch_domain(Client, DomainName, #{}, #{}).

describe_elasticsearch_domain(Client, DomainName, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    describe_elasticsearch_domain(Client, DomainName, QueryMap, HeadersMap, []).

describe_elasticsearch_domain(Client, DomainName, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    Path = ["/2015-01-01/es/domain/", aws_util:encode_uri(DomainName), ""],
    request(Client, get, Path, [], [], undefined, Options, undefined).
%% @doc Provides the configuration of the specified Elasticsearch domain.
describe_elasticsearch_domain_config(Client, DomainName)
  when is_map(Client) ->
    describe_elasticsearch_domain_config(Client, DomainName, #{}, #{}).

describe_elasticsearch_domain_config(Client, DomainName, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    describe_elasticsearch_domain_config(Client, DomainName, QueryMap, HeadersMap, []).

describe_elasticsearch_domain_config(Client, DomainName, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    Path = ["/2015-01-01/es/domain/", aws_util:encode_uri(DomainName),
            "/config"],
    request(Client, get, Path, [], [], undefined, Options, undefined).
%% @doc Returns domain configuration information about the specified
%% Elasticsearch domains, including the domain ID, domain endpoint, and
%% domain ARN.
describe_elasticsearch_domains(Client, Input) ->
    describe_elasticsearch_domains(Client, Input, []).

describe_elasticsearch_domains(Client, Input, Options0) ->
    %% Static request metadata for this generated operation.
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    request(Client, post, ["/2015-01-01/es/domain-info"],
            [], [], Input, Options, undefined).
%% @doc Describes Elasticsearch limits for a given InstanceType and
%% ElasticsearchVersion.
%%
%% The optional `domainName' query parameter scopes the limits to an
%% existing domain.
describe_elasticsearch_instance_type_limits(Client, ElasticsearchVersion, InstanceType)
  when is_map(Client) ->
    describe_elasticsearch_instance_type_limits(Client, ElasticsearchVersion, InstanceType, #{}, #{}).

describe_elasticsearch_instance_type_limits(Client, ElasticsearchVersion, InstanceType, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    describe_elasticsearch_instance_type_limits(Client, ElasticsearchVersion, InstanceType, QueryMap, HeadersMap, []).

describe_elasticsearch_instance_type_limits(Client, ElasticsearchVersion, InstanceType, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    Path = ["/2015-01-01/es/instanceTypeLimits/",
            aws_util:encode_uri(ElasticsearchVersion), "/",
            aws_util:encode_uri(InstanceType), ""],
    QueryPairs = [{<<"domainName">>, maps:get(<<"domainName">>, QueryMap, undefined)}],
    %% Drop unset (undefined) query parameters.
    Query = [KV || {_, Value} = KV <- QueryPairs, Value =/= undefined],
    request(Client, get, Path, Query, [], undefined, Options, undefined).
%% @doc Lists/searches inbound cross-cluster search connections.
describe_inbound_cross_cluster_search_connections(Client, Input) ->
    describe_inbound_cross_cluster_search_connections(Client, Input, []).

describe_inbound_cross_cluster_search_connections(Client, Input, Options0) ->
    %% Static request metadata for this generated operation.
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    request(Client, post, ["/2015-01-01/es/ccs/inboundConnection/search"],
            [], [], Input, Options, undefined).
%% @doc Lists/searches outbound cross-cluster search connections.
describe_outbound_cross_cluster_search_connections(Client, Input) ->
    describe_outbound_cross_cluster_search_connections(Client, Input, []).

describe_outbound_cross_cluster_search_connections(Client, Input, Options0) ->
    %% Static request metadata for this generated operation.
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    request(Client, post, ["/2015-01-01/es/ccs/outboundConnection/search"],
            [], [], Input, Options, undefined).
%% @doc Describes all packages available to Amazon ES.
describe_packages(Client, Input) ->
    describe_packages(Client, Input, []).

describe_packages(Client, Input, Options0) ->
    %% Static request metadata for this generated operation.
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    request(Client, post, ["/2015-01-01/packages/describe"],
            [], [], Input, Options, undefined).
%% @doc Lists available reserved Elasticsearch instance offerings.
%%
%% Supports the optional `maxResults', `nextToken' and `offeringId' query
%% parameters.
describe_reserved_elasticsearch_instance_offerings(Client)
  when is_map(Client) ->
    describe_reserved_elasticsearch_instance_offerings(Client, #{}, #{}).

describe_reserved_elasticsearch_instance_offerings(Client, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    describe_reserved_elasticsearch_instance_offerings(Client, QueryMap, HeadersMap, []).

describe_reserved_elasticsearch_instance_offerings(Client, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    QueryPairs =
        [{<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
         {<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)},
         {<<"offeringId">>, maps:get(<<"offeringId">>, QueryMap, undefined)}],
    %% Drop unset (undefined) query parameters.
    Query = [KV || {_, Value} = KV <- QueryPairs, Value =/= undefined],
    request(Client, get, ["/2015-01-01/es/reservedInstanceOfferings"],
            Query, [], undefined, Options, undefined).
%% @doc Returns information about reserved Elasticsearch instances for
%% this account.
%%
%% Supports the optional `maxResults', `nextToken' and `reservationId'
%% query parameters.
describe_reserved_elasticsearch_instances(Client)
  when is_map(Client) ->
    describe_reserved_elasticsearch_instances(Client, #{}, #{}).

describe_reserved_elasticsearch_instances(Client, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    describe_reserved_elasticsearch_instances(Client, QueryMap, HeadersMap, []).

describe_reserved_elasticsearch_instances(Client, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    QueryPairs =
        [{<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
         {<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)},
         {<<"reservationId">>, maps:get(<<"reservationId">>, QueryMap, undefined)}],
    %% Drop unset (undefined) query parameters.
    Query = [KV || {_, Value} = KV <- QueryPairs, Value =/= undefined],
    request(Client, get, ["/2015-01-01/es/reservedInstances"],
            Query, [], undefined, Options, undefined).
%% @doc Describes one or more Amazon OpenSearch Service-managed VPC
%% endpoints.
describe_vpc_endpoints(Client, Input) ->
    describe_vpc_endpoints(Client, Input, []).

describe_vpc_endpoints(Client, Input, Options0) ->
    %% Static request metadata for this generated operation.
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    request(Client, post, ["/2015-01-01/es/vpcEndpoints/describe"],
            [], [], Input, Options, undefined).
%% @doc Dissociates a package from the Amazon ES domain.
dissociate_package(Client, DomainName, PackageID, Input) ->
    dissociate_package(Client, DomainName, PackageID, Input, []).

dissociate_package(Client, DomainName, PackageID, Input, Options0) ->
    %% Static request metadata for this generated operation.
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Path = ["/2015-01-01/packages/dissociate/",
            aws_util:encode_uri(PackageID), "/",
            aws_util:encode_uri(DomainName), ""],
    request(Client, post, Path, [], [], Input, Options, undefined).
%% @doc Returns a list of upgrade-compatible Elasticsearch versions.
%%
%% When the optional `domainName' query parameter is supplied, the result
%% is the compatible Elasticsearch versions for that specific domain.
get_compatible_elasticsearch_versions(Client)
  when is_map(Client) ->
    get_compatible_elasticsearch_versions(Client, #{}, #{}).

get_compatible_elasticsearch_versions(Client, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_compatible_elasticsearch_versions(Client, QueryMap, HeadersMap, []).

get_compatible_elasticsearch_versions(Client, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    QueryPairs = [{<<"domainName">>, maps:get(<<"domainName">>, QueryMap, undefined)}],
    %% Drop unset (undefined) query parameters.
    Query = [KV || {_, Value} = KV <- QueryPairs, Value =/= undefined],
    request(Client, get, ["/2015-01-01/es/compatibleVersions"],
            Query, [], undefined, Options, undefined).
%% @doc Retrieves the version history of the given package.
%%
%% Supports the optional `maxResults' and `nextToken' query parameters.
get_package_version_history(Client, PackageID)
  when is_map(Client) ->
    get_package_version_history(Client, PackageID, #{}, #{}).

get_package_version_history(Client, PackageID, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_package_version_history(Client, PackageID, QueryMap, HeadersMap, []).

get_package_version_history(Client, PackageID, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    Path = ["/2015-01-01/packages/", aws_util:encode_uri(PackageID),
            "/history"],
    QueryPairs =
        [{<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
         {<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)}],
    %% Drop unset (undefined) query parameters.
    Query = [KV || {_, Value} = KV <- QueryPairs, Value =/= undefined],
    request(Client, get, Path, Query, [], undefined, Options, undefined).
%% @doc Retrieves the complete history of the last 10 upgrades performed
%% on the domain.
%%
%% Supports the optional `maxResults' and `nextToken' query parameters.
get_upgrade_history(Client, DomainName)
  when is_map(Client) ->
    get_upgrade_history(Client, DomainName, #{}, #{}).

get_upgrade_history(Client, DomainName, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_upgrade_history(Client, DomainName, QueryMap, HeadersMap, []).

get_upgrade_history(Client, DomainName, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    Path = ["/2015-01-01/es/upgradeDomain/",
            aws_util:encode_uri(DomainName), "/history"],
    QueryPairs =
        [{<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
         {<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)}],
    %% Drop unset (undefined) query parameters.
    Query = [KV || {_, Value} = KV <- QueryPairs, Value =/= undefined],
    request(Client, get, Path, Query, [], undefined, Options, undefined).
%% @doc Retrieves the upgrade status of the given Elasticsearch domain.
get_upgrade_status(Client, DomainName)
  when is_map(Client) ->
    get_upgrade_status(Client, DomainName, #{}, #{}).

get_upgrade_status(Client, DomainName, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    get_upgrade_status(Client, DomainName, QueryMap, HeadersMap, []).

get_upgrade_status(Client, DomainName, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    Path = ["/2015-01-01/es/upgradeDomain/",
            aws_util:encode_uri(DomainName), "/status"],
    request(Client, get, Path, [], [], undefined, Options, undefined).
%% @doc Returns the names of all Elasticsearch domains owned by the
%% current account.
%%
%% The optional `engineType' query parameter filters the result.
list_domain_names(Client)
  when is_map(Client) ->
    list_domain_names(Client, #{}, #{}).

list_domain_names(Client, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_domain_names(Client, QueryMap, HeadersMap, []).

list_domain_names(Client, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    QueryPairs = [{<<"engineType">>, maps:get(<<"engineType">>, QueryMap, undefined)}],
    %% Drop unset (undefined) query parameters.
    Query = [KV || {_, Value} = KV <- QueryPairs, Value =/= undefined],
    request(Client, get, ["/2015-01-01/domain"],
            Query, [], undefined, Options, undefined).
%% @doc Lists all Amazon ES domains associated with the package.
%%
%% Supports the optional `maxResults' and `nextToken' query parameters.
list_domains_for_package(Client, PackageID)
  when is_map(Client) ->
    list_domains_for_package(Client, PackageID, #{}, #{}).

list_domains_for_package(Client, PackageID, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_domains_for_package(Client, PackageID, QueryMap, HeadersMap, []).

list_domains_for_package(Client, PackageID, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    Path = ["/2015-01-01/packages/", aws_util:encode_uri(PackageID),
            "/domains"],
    QueryPairs =
        [{<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
         {<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)}],
    %% Drop unset (undefined) query parameters.
    Query = [KV || {_, Value} = KV <- QueryPairs, Value =/= undefined],
    request(Client, get, Path, Query, [], undefined, Options, undefined).
%% @doc Lists all Elasticsearch instance types that are supported for the
%% given ElasticsearchVersion.
%%
%% Supports the optional `domainName', `maxResults' and `nextToken' query
%% parameters.
list_elasticsearch_instance_types(Client, ElasticsearchVersion)
  when is_map(Client) ->
    list_elasticsearch_instance_types(Client, ElasticsearchVersion, #{}, #{}).

list_elasticsearch_instance_types(Client, ElasticsearchVersion, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_elasticsearch_instance_types(Client, ElasticsearchVersion, QueryMap, HeadersMap, []).

list_elasticsearch_instance_types(Client, ElasticsearchVersion, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    Path = ["/2015-01-01/es/instanceTypes/",
            aws_util:encode_uri(ElasticsearchVersion), ""],
    QueryPairs =
        [{<<"domainName">>, maps:get(<<"domainName">>, QueryMap, undefined)},
         {<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
         {<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)}],
    %% Drop unset (undefined) query parameters.
    Query = [KV || {_, Value} = KV <- QueryPairs, Value =/= undefined],
    request(Client, get, Path, Query, [], undefined, Options, undefined).
%% @doc Lists all supported Elasticsearch versions.
%%
%% Supports the optional `maxResults' and `nextToken' query parameters.
list_elasticsearch_versions(Client)
  when is_map(Client) ->
    list_elasticsearch_versions(Client, #{}, #{}).

list_elasticsearch_versions(Client, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_elasticsearch_versions(Client, QueryMap, HeadersMap, []).

list_elasticsearch_versions(Client, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    QueryPairs =
        [{<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
         {<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)}],
    %% Drop unset (undefined) query parameters.
    Query = [KV || {_, Value} = KV <- QueryPairs, Value =/= undefined],
    request(Client, get, ["/2015-01-01/es/versions"],
            Query, [], undefined, Options, undefined).
%% @doc Lists all packages associated with the Amazon ES domain.
%%
%% Supports the optional `maxResults' and `nextToken' query parameters.
list_packages_for_domain(Client, DomainName)
  when is_map(Client) ->
    list_packages_for_domain(Client, DomainName, #{}, #{}).

list_packages_for_domain(Client, DomainName, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_packages_for_domain(Client, DomainName, QueryMap, HeadersMap, []).

list_packages_for_domain(Client, DomainName, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    Path = ["/2015-01-01/domain/", aws_util:encode_uri(DomainName),
            "/packages"],
    QueryPairs =
        [{<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
         {<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)}],
    %% Drop unset (undefined) query parameters.
    Query = [KV || {_, Value} = KV <- QueryPairs, Value =/= undefined],
    request(Client, get, Path, Query, [], undefined, Options, undefined).
%% @doc Lists the tags attached to the resource identified by `ARN',
%% passed as the `arn' query parameter.
list_tags(Client, ARN)
  when is_map(Client) ->
    list_tags(Client, ARN, #{}, #{}).

list_tags(Client, ARN, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_tags(Client, ARN, QueryMap, HeadersMap, []).

list_tags(Client, ARN, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    %% The ARN is sent as a query parameter and omitted when undefined.
    Query = [KV || {_, Value} = KV <- [{<<"arn">>, ARN}],
                   Value =/= undefined],
    request(Client, get, ["/2015-01-01/tags/"],
            Query, [], undefined, Options, undefined).
%% @doc Retrieves VPC endpoint access information for a given Amazon
%% OpenSearch Service domain, where access is provided through an
%% interface VPC endpoint.
%%
%% Supports the optional `nextToken' query parameter.
list_vpc_endpoint_access(Client, DomainName)
  when is_map(Client) ->
    list_vpc_endpoint_access(Client, DomainName, #{}, #{}).

list_vpc_endpoint_access(Client, DomainName, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_vpc_endpoint_access(Client, DomainName, QueryMap, HeadersMap, []).

list_vpc_endpoint_access(Client, DomainName, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    Path = ["/2015-01-01/es/domain/", aws_util:encode_uri(DomainName),
            "/listVpcEndpointAccess"],
    QueryPairs = [{<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)}],
    %% Drop unset (undefined) query parameters.
    Query = [KV || {_, Value} = KV <- QueryPairs, Value =/= undefined],
    request(Client, get, Path, Query, [], undefined, Options, undefined).
%% @doc Retrieves all Amazon OpenSearch Service-managed VPC endpoints.
%%
%% Supports the optional `nextToken' query parameter.
list_vpc_endpoints(Client)
  when is_map(Client) ->
    list_vpc_endpoints(Client, #{}, #{}).

list_vpc_endpoints(Client, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_vpc_endpoints(Client, QueryMap, HeadersMap, []).

list_vpc_endpoints(Client, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    QueryPairs = [{<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)}],
    %% Drop unset (undefined) query parameters.
    Query = [KV || {_, Value} = KV <- QueryPairs, Value =/= undefined],
    request(Client, get, ["/2015-01-01/es/vpcEndpoints"],
            Query, [], undefined, Options, undefined).
%% @doc Retrieves all Amazon OpenSearch Service-managed VPC endpoints
%% associated with the given domain.
%%
%% Supports the optional `nextToken' query parameter.
list_vpc_endpoints_for_domain(Client, DomainName)
  when is_map(Client) ->
    list_vpc_endpoints_for_domain(Client, DomainName, #{}, #{}).

list_vpc_endpoints_for_domain(Client, DomainName, QueryMap, HeadersMap)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
    list_vpc_endpoints_for_domain(Client, DomainName, QueryMap, HeadersMap, []).

list_vpc_endpoints_for_domain(Client, DomainName, QueryMap, HeadersMap, Options0)
  when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false}
               | Options0],
    Path = ["/2015-01-01/es/domain/", aws_util:encode_uri(DomainName),
            "/vpcEndpoints"],
    QueryPairs = [{<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)}],
    %% Drop unset (undefined) query parameters.
    Query = [KV || {_, Value} = KV <- QueryPairs, Value =/= undefined],
    request(Client, get, Path, Query, [], undefined, Options, undefined).
%% @doc Allows you to purchase reserved Elasticsearch instances.
purchase_reserved_elasticsearch_instance_offering(Client, Input) ->
    purchase_reserved_elasticsearch_instance_offering(Client, Input, []).

purchase_reserved_elasticsearch_instance_offering(Client, Input, Options0) ->
    %% Static request metadata for this generated operation.
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    request(Client, post, ["/2015-01-01/es/purchaseReservedInstanceOffering"],
            [], [], Input, Options, undefined).
%% @doc Rejects the specified inbound cross-cluster search connection.
reject_inbound_cross_cluster_search_connection(Client, CrossClusterSearchConnectionId, Input) ->
    reject_inbound_cross_cluster_search_connection(Client, CrossClusterSearchConnectionId, Input, []).

reject_inbound_cross_cluster_search_connection(Client, CrossClusterSearchConnectionId, Input, Options0) ->
    %% Static request metadata for this generated operation.
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Path = ["/2015-01-01/es/ccs/inboundConnection/",
            aws_util:encode_uri(CrossClusterSearchConnectionId), "/reject"],
    request(Client, put, Path, [], [], Input, Options, undefined).
%% @doc Removes the specified set of tags from the specified Elasticsearch
%% domain.
remove_tags(Client, Input) ->
    remove_tags(Client, Input, []).

remove_tags(Client, Input, Options0) ->
    %% Static request metadata for this generated operation.
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    request(Client, post, ["/2015-01-01/tags-removal"],
            [], [], Input, Options, undefined).
%% @doc Revokes access to an Amazon OpenSearch Service domain that was
%% provided through an interface VPC endpoint.
revoke_vpc_endpoint_access(Client, DomainName, Input) ->
    revoke_vpc_endpoint_access(Client, DomainName, Input, []).

revoke_vpc_endpoint_access(Client, DomainName, Input, Options0) ->
    %% Static request metadata for this generated operation.
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    Path = ["/2015-01-01/es/domain/", aws_util:encode_uri(DomainName),
            "/revokeVpcEndpointAccess"],
    request(Client, post, Path, [], [], Input, Options, undefined).
%% @doc Schedules a service software update for an Amazon ES domain.
%% Start a pending service-software update for a domain.
start_elasticsearch_service_software_update(Client, Input) ->
    start_elasticsearch_service_software_update(Client, Input, []).

start_elasticsearch_service_software_update(Client, Input0, Options0) ->
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    request(Client, post, ["/2015-01-01/es/serviceSoftwareUpdate/start"],
            [], [], Input0, Options, undefined).
%% @doc Modifies the cluster configuration of the specified Elasticsearch
%% domain.
%% Update the configuration of an existing domain.
update_elasticsearch_domain_config(Client, DomainName, Input) ->
    update_elasticsearch_domain_config(Client, DomainName, Input, []).

update_elasticsearch_domain_config(Client, DomainName, Input0, Options0) ->
    Path = ["/2015-01-01/es/domain/", aws_util:encode_uri(DomainName),
            "/config"],
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    request(Client, post, Path, [], [], Input0, Options, undefined).
%% @doc Updates a package for use with Amazon ES domains.
%% Update an existing package so it can be associated with domains.
update_package(Client, Input) ->
    update_package(Client, Input, []).

update_package(Client, Input0, Options0) ->
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    request(Client, post, ["/2015-01-01/packages/update"], [], [], Input0,
            Options, undefined).
%% @doc Modifies an Amazon OpenSearch Service-managed interface VPC endpoint.
%% Modify a service-managed interface VPC endpoint.
update_vpc_endpoint(Client, Input) ->
    update_vpc_endpoint(Client, Input, []).

update_vpc_endpoint(Client, Input0, Options0) ->
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    request(Client, post, ["/2015-01-01/es/vpcEndpoints/update"], [], [],
            Input0, Options, undefined).
%% @doc Allows you to either upgrade your domain or perform an upgrade
%% eligibility check to a compatible Elasticsearch version.
%% Upgrade a domain (or run an upgrade-eligibility check).
upgrade_elasticsearch_domain(Client, Input) ->
    upgrade_elasticsearch_domain(Client, Input, []).

upgrade_elasticsearch_domain(Client, Input0, Options0) ->
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    request(Client, post, ["/2015-01-01/es/upgradeDomain"], [], [], Input0,
            Options, undefined).
%% Internal functions
%% Wrap do_request/8 in a closure and hand it to aws_request:request/2,
%% which applies the retry policy found in Options.
-spec request(aws_client:aws_client(), atom(), iolist(), list(),
              list(), map() | undefined, list(), pos_integer() | undefined) ->
          {ok, {integer(), list()}} |
          {ok, Result, {integer(), list(), hackney:client()}} |
          {error, Error, {integer(), list(), hackney:client()}} |
          {error, term()} when
      Result :: map(),
      Error :: map().
request(Client, Method, Path, Query, Headers0, Input, Options, SuccessStatusCode) ->
    RequestFun = fun() -> do_request(Client, Method, Path, Query, Headers0, Input, Options, SuccessStatusCode) end,
    aws_request:request(RequestFun, Options).
%% Build, sign (SigV4) and execute a single HTTP request against the
%% `es' endpoint, then normalize the result via handle_response/3.
do_request(Client, Method, Path, Query, Headers0, Input, Options, SuccessStatusCode) ->
    %% `es' is both the signing service name and the endpoint prefix.
    Client1 = Client#{service => <<"es">>},
    Host = build_host(<<"es">>, Client1),
    URL0 = build_url(Host, Path, Client1),
    URL = aws_request:add_query(URL0, Query),
    AdditionalHeaders1 = [ {<<"Host">>, Host}
                         , {<<"Content-Type">>, <<"application/x-amz-json-1.1">>}
                         ],
    %% Either forward the caller-provided raw binary body or JSON-encode
    %% the input map.
    Payload =
      case proplists:get_value(send_body_as_binary, Options) of
        true ->
          maps:get(<<"Body">>, Input, <<"">>);
        false ->
          encode_payload(Input)
      end,
    %% Optionally prepend an SHA-256 checksum header over the payload.
    AdditionalHeaders = case proplists:get_value(append_sha256_content_hash, Options, false) of
                          true ->
                            add_checksum_hash_header(AdditionalHeaders1, Payload);
                          false ->
                            AdditionalHeaders1
                        end,
    Headers1 = aws_request:add_headers(AdditionalHeaders, Headers0),
    MethodBin = aws_request:method_to_binary(Method),
    %% Signing must happen after all headers and the payload are final.
    SignedHeaders = aws_request:sign_request(Client1, MethodBin, URL, Headers1, Payload),
    Response = hackney:request(Method, URL, SignedHeaders, Payload, Options),
    DecodeBody = not proplists:get_value(receive_body_as_binary, Options),
    handle_response(Response, SuccessStatusCode, DecodeBody).
%% Prepend the base64-encoded SHA-256 digest of Body as a checksum header.
add_checksum_hash_header(Headers, Body) ->
    Digest = base64:encode(crypto:hash(sha256, Body)),
    [{<<"X-Amz-CheckSum-SHA256">>, Digest} | Headers].
%% Normalize hackney responses into ok/error tuples. JSON bodies are
%% decoded unless the caller asked for raw binaries (DecodeBody =:= false).
%% Defect fixed: the comment before the 503 clause had lost its `%%'
%% marker, leaving bare words in the clause body (a syntax error).
handle_response({ok, StatusCode, ResponseHeaders}, SuccessStatusCode, _DecodeBody)
  when StatusCode =:= 200;
       StatusCode =:= 202;
       StatusCode =:= 204;
       StatusCode =:= 206;
       StatusCode =:= SuccessStatusCode ->
    %% Success response without a body stream.
    {ok, {StatusCode, ResponseHeaders}};
handle_response({ok, StatusCode, ResponseHeaders}, _, _DecodeBody) ->
    {error, {StatusCode, ResponseHeaders}};
handle_response({ok, StatusCode, ResponseHeaders, Client}, SuccessStatusCode, DecodeBody)
  when StatusCode =:= 200;
       StatusCode =:= 202;
       StatusCode =:= 204;
       StatusCode =:= 206;
       StatusCode =:= SuccessStatusCode ->
    case hackney:body(Client) of
      {ok, <<>>} when StatusCode =:= 200;
                      StatusCode =:= SuccessStatusCode ->
        %% Empty body on success maps to an empty result map.
        {ok, #{}, {StatusCode, ResponseHeaders, Client}};
      {ok, Body} ->
        Result = case DecodeBody of
                   true ->
                     try
                       jsx:decode(Body)
                     catch
                       Error:Reason:Stack ->
                         erlang:raise(error, {body_decode_failed, Error, Reason, StatusCode, Body}, Stack)
                     end;
                   false -> #{<<"Body">> => Body}
                 end,
        {ok, Result, {StatusCode, ResponseHeaders, Client}}
    end;
handle_response({ok, StatusCode, _ResponseHeaders, _Client}, _, _DecodeBody)
  when StatusCode =:= 503 ->
    %% Retriable error if retries are enabled
    {error, service_unavailable};
handle_response({ok, StatusCode, ResponseHeaders, Client}, _, _DecodeBody) ->
    {ok, Body} = hackney:body(Client),
    try
      DecodedError = jsx:decode(Body),
      {error, DecodedError, {StatusCode, ResponseHeaders, Client}}
    catch
      Error:Reason:Stack ->
        erlang:raise(error, {body_decode_failed, Error, Reason, StatusCode, Body}, Stack)
    end;
handle_response({error, Reason}, _, _DecodeBody) ->
    {error, Reason}.
%% Compute the request host. The pseudo-region <<"local">> maps to a
%% custom endpoint (or "localhost" when none is set); otherwise the host
%% is <prefix>.<region>.<endpoint>.
build_host(_EndpointPrefix, #{region := <<"local">>, endpoint := Endpoint}) ->
    Endpoint;
build_host(_EndpointPrefix, #{region := <<"local">>}) ->
    <<"localhost">>;
build_host(EndpointPrefix, #{region := Region, endpoint := Endpoint}) ->
    aws_util:binary_join([EndpointPrefix, Region, Endpoint], <<".">>).
%% Assemble the full URL <proto>://<host>:<port><path> from the client
%% configuration and the iolist path.
build_url(Host, Path0, Client) ->
    Proto = aws_client:proto(Client),
    Path = erlang:iolist_to_binary(Path0),
    Port = aws_client:port(Client),
    aws_util:binary_join([Proto, <<"://">>, Host, <<":">>, Port, Path], <<"">>).
%% JSON-encode the request payload; an absent input becomes an empty body.
-spec encode_payload(undefined | map()) -> binary().
encode_payload(undefined) ->
    <<>>;
encode_payload(Input) ->
    jsx:encode(Input).
|
468f0d855fc6694f4671cf896596a3dcfe2fa111ba73fdc23edb7bfac7898801 | unison-code/uni-instr-sel | Drivers.hs | |
{-|
Copyright   : Copyright (c) 2012-2017, Gabriel Hjort Blindell <>
License     : BSD3 (see the LICENSE file)
Maintainer  :
-}

{-
Main authors:
  Gabriel Hjort Blindell <>
-}
module UniISLLVM.Drivers
( module UniISLLVM.Drivers.Base )
where
import UniISLLVM.Drivers.Base
| null | https://raw.githubusercontent.com/unison-code/uni-instr-sel/2edb2f3399ea43e75f33706261bd6b93bedc6762/uni-is-llvm/UniISLLVM/Drivers.hs | haskell | |
Copyright : Copyright ( c ) 2012 - 2017 , < >
License : BSD3 ( see the LICENSE file )
Maintainer :
Copyright : Copyright (c) 2012-2017, Gabriel Hjort Blindell <>
License : BSD3 (see the LICENSE file)
Maintainer :
-}
Main authors :
< >
Main authors:
Gabriel Hjort Blindell <>
-}
module UniISLLVM.Drivers
( module UniISLLVM.Drivers.Base )
where
import UniISLLVM.Drivers.Base
|
|
47690802419c34344953079318e4f46e060151bf54ca3ad8ee4f0d12f44512e4 | imandra-ai/ocaml-opentelemetry | opentelemetry.ml | (** Opentelemetry types and instrumentation *)
module Thread_local = Thread_local
(** Thread-local storage. *)

module Lock = Lock
(** Global lock. *)

module Rand_bytes = Rand_bytes
(** Generation of random identifiers. *)
open struct
  (* Local [Result.bind]-style helper, kept private to this file. *)
  let[@inline] result_bind x f =
    match x with
    | Error e -> Error e
    | Ok x -> f x
end
(** {2 Wire format} *)
(** Protobuf types.
This is mostly useful internally. Users should not need to touch it. *)
module Proto = struct
  (* Each submodule below bundles, for one OTLP protobuf package, the
     generated types ([_types]), pretty-printers ([_pp]) and binary
     codecs ([_pb]). *)
  module Common = struct
    include Common_types
    include Common_pp
    include Common_pb
  end

  module Resource = struct
    include Resource_types
    include Resource_pp
    include Resource_pb
  end

  module Trace = struct
    include Trace_types
    include Trace_pp
    include Trace_pb
  end

  module Metrics = struct
    include Metrics_types
    include Metrics_pp
    include Metrics_pb
  end

  module Trace_service = struct
    include Trace_service_types
    include Trace_service_pb
    include Trace_service_pp
  end

  module Metrics_service = struct
    include Metrics_service_types
    include Metrics_service_pp
    include Metrics_service_pb
  end

  module Status = struct
    include Status_types
    include Status_pp
    include Status_pb
  end

  module Logs = struct
    include Logs_types
    include Logs_pb
    include Logs_pp
  end

  module Logs_service = struct
    include Logs_service_types
    include Logs_service_pb
    include Logs_service_pp
  end
end
(** {2 Timestamps} *)
(** Unix timestamp.

    These timestamps measure time since the Unix epoch (jan 1, 1970) UTC
    in nanoseconds. *)
module Timestamp_ns = struct
  type t = int64

  (* number of nanoseconds in 24 hours *)
  let ns_in_a_day = Int64.(mul 1_000_000_000L (of_int (24 * 3600)))

  (** Current unix timestamp in nanoseconds *)
  let[@inline] now_unix_ns () : t =
    let d, ps = Ptime.Span.to_d_ps (Ptime.to_span (Ptime_clock.now ())) in
    (* days scaled to ns, plus the picosecond remainder scaled down to ns *)
    Int64.add (Int64.mul (Int64.of_int d) ns_in_a_day) (Int64.div ps 1_000L)
end
(** {2 Interface to data collector} *)

(** Collector types

    These types are used by backend implementations, to send events to
    collectors such as Jaeger.

    Note: most users will not need to touch this module *)
module Collector = struct
  open Proto

  (* Defect fixed: the doc comment on [signal_emit_gc_metrics] had lost
     its "(**" opener (and its text was duplicated), breaking the syntax
     of the module. Code is otherwise unchanged. *)

  type 'msg sender = { send: 'a. 'msg -> ret:(unit -> 'a) -> 'a }
  (** Sender interface for a message of type [msg].

      Inspired from Logs' reporter (see the doc of [Logs.reporter])
      but without [over] as it doesn't make much sense in presence
      of batching.

      The [ret] callback is used to return the desired type (unit, or
      a Lwt promise, or anything else) once the event has been transferred
      to the backend.

      It doesn't mean the event has been collected yet, it
      could sit in a batch queue for a little while. *)

  (** Collector client interface. *)
  module type BACKEND = sig
    val send_trace : Trace.resource_spans list sender

    val send_metrics : Metrics.resource_metrics list sender

    val send_logs : Logs.resource_logs list sender

    val signal_emit_gc_metrics : unit -> unit
    (** Signal the backend that it should emit GC metrics when it has the
        chance. This should be installed in a GC alarm or another form
        of regular trigger. *)

    val tick : unit -> unit
    (** Should be called regularly for background processing,
        timeout checks, etc. *)

    val set_on_tick_callbacks : (unit -> unit) list ref -> unit
    (** Give the collector the list of callbacks to be executed
        when [tick()] is called. Each such callback should be short and
        reentrant. Depending on the collector's implementation, it might be
        called from a thread that is not the one that called [on_tick]. *)

    val cleanup : unit -> unit
  end

  type backend = (module BACKEND)

  (* hidden *)
  open struct
    let on_tick_cbs_ = ref []

    let backend : backend option ref = ref None
  end

  (** Set collector backend *)
  let set_backend (b : backend) : unit =
    let (module B) = b in
    B.set_on_tick_callbacks on_tick_cbs_;
    backend := Some b

  (** Is there a configured backend? *)
  let[@inline] has_backend () : bool = !backend != None

  (** Current backend, if any *)
  let[@inline] get_backend () : backend option = !backend

  let send_trace (l : Trace.resource_spans list) ~ret =
    match !backend with
    | None -> ret ()
    | Some (module B) -> B.send_trace.send l ~ret

  let send_metrics (l : Metrics.resource_metrics list) ~ret =
    match !backend with
    | None -> ret ()
    | Some (module B) -> B.send_metrics.send l ~ret

  let send_logs (l : Logs.resource_logs list) ~ret =
    match !backend with
    | None -> ret ()
    | Some (module B) -> B.send_logs.send l ~ret

  let[@inline] rand_bytes_16 () = !Rand_bytes.rand_bytes_16 ()

  let[@inline] rand_bytes_8 () = !Rand_bytes.rand_bytes_8 ()

  (* Register a callback to be run on each [tick ()]. *)
  let on_tick f = on_tick_cbs_ := f :: !on_tick_cbs_

  (** Do background work. Call this regularly if the collector doesn't
      already have a ticker thread or internal timer. *)
  let tick () =
    match !backend with
    | None -> ()
    | Some (module B) -> B.tick ()
end
module Util_ = struct
  (** [bytes_to_hex b] is the lowercase hexadecimal rendering of [b],
      two characters per byte. *)
  let bytes_to_hex (b : bytes) : string =
    let i_to_hex (i : int) =
      if i < 10 then
        Char.chr (i + Char.code '0')
      else
        Char.chr (i - 10 + Char.code 'a')
    in
    let res = Bytes.create (2 * Bytes.length b) in
    for i = 0 to Bytes.length b - 1 do
      let n = Char.code (Bytes.get b i) in
      Bytes.set res (2 * i) (i_to_hex ((n land 0xf0) lsr 4));
      Bytes.set res ((2 * i) + 1) (i_to_hex (n land 0x0f))
    done;
    Bytes.unsafe_to_string res

  (** [bytes_of_hex s] decodes the hex string [s].
      Robustness fix: uppercase digits ['A'..'F'] are now accepted too
      (previously only lowercase was), so IDs received from external
      systems that emit uppercase hex can be parsed.
      @raise Invalid_argument on odd length or non-hex characters. *)
  let bytes_of_hex (s : string) : bytes =
    let n_of_c = function
      | '0' .. '9' as c -> Char.code c - Char.code '0'
      | 'a' .. 'f' as c -> 10 + Char.code c - Char.code 'a'
      | 'A' .. 'F' as c -> 10 + Char.code c - Char.code 'A'
      | _ -> raise (Invalid_argument "invalid hex char")
    in
    if String.length s mod 2 <> 0 then
      raise (Invalid_argument "hex sequence must be of even length");
    let res = Bytes.make (String.length s / 2) '\x00' in
    for i = 0 to (String.length s / 2) - 1 do
      let n1 = n_of_c (String.get s (2 * i)) in
      let n2 = n_of_c (String.get s ((2 * i) + 1)) in
      let n = (n1 lsl 4) lor n2 in
      Bytes.set res i (Char.chr n)
    done;
    res
end
(** {2 Identifiers} *)
(** Trace ID.

    This 16 bytes identifier is shared by all spans in one trace. *)
module Trace_id : sig
  type t

  val create : unit -> t
  (** Create a fresh random trace ID (16 bytes). *)

  val pp : Format.formatter -> t -> unit

  val to_bytes : t -> bytes

  val of_bytes : bytes -> t
  (** @raise Invalid_argument if the input is not exactly 16 bytes. *)

  val to_hex : t -> string
  (** Lowercase hex rendering (32 characters). *)

  val of_hex : string -> t
  (** @raise Invalid_argument if the input does not decode to 16 bytes. *)
end = struct
  open Proto.Trace

  type t = bytes

  let to_bytes self = self

  let create () : t =
    let b = Collector.rand_bytes_16 () in
    assert (Bytes.length b = 16);
    (* make sure the identifier is not all 0, which is a dummy identifier. *)
    Bytes.set b 0 (Char.unsafe_chr (Char.code (Bytes.get b 0) lor 1));
    b

  let of_bytes b =
    if Bytes.length b = 16 then
      b
    else
      raise (Invalid_argument "trace IDs must be 16 bytes in length")

  let to_hex self = Util_.bytes_to_hex self

  let of_hex s = of_bytes (Util_.bytes_of_hex s)

  let pp fmt t = Format.fprintf fmt "%s" (to_hex t)
end
(** Unique ID of a span. *)
module Span_id : sig
  type t

  val create : unit -> t

  val pp : Format.formatter -> t -> unit

  val to_bytes : t -> bytes

  val of_bytes : bytes -> t

  val to_hex : t -> string

  val of_hex : string -> t
end = struct
  open Proto.Trace

  type t = bytes

  let to_bytes self = self

  (* Fresh random 8-byte ID; the low bit of byte 0 is forced to 1 so the
     result can never be the all-zero dummy identifier. *)
  let create () : t =
    let b = Collector.rand_bytes_8 () in
    assert (Bytes.length b = 8);
    let c0 = Char.code (Bytes.get b 0) in
    Bytes.set b 0 (Char.chr (c0 lor 1));
    b

  let of_bytes b =
    if Bytes.length b <> 8 then
      raise (Invalid_argument "span IDs must be 8 bytes in length");
    b

  let to_hex = Util_.bytes_to_hex

  let of_hex s = of_bytes (Util_.bytes_of_hex s)

  let pp fmt t = Format.pp_print_string fmt (to_hex t)
end
(** {2 Attributes and conventions} *)
module Conventions = struct
  (* Standard OpenTelemetry semantic-convention names, grouped as nested
     modules of string constants. *)
  module Attributes = struct
    module Process = struct
      module Runtime = struct
        let name = "process.runtime.name"

        let version = "process.runtime.version"

        let description = "process.runtime.description"
      end
    end

    module Service = struct
      let name = "service.name"

      let namespace = "service.namespace"

      let instance_id = "service.instance.id"

      let version = "service.version"
    end
  end

  module Metrics = struct
    module Process = struct
      module Runtime = struct
        module Ocaml = struct
          (* OCaml-runtime GC metric names. *)
          module GC = struct
            let compactions = "process.runtime.ocaml.gc.compactions"

            let major_collections = "process.runtime.ocaml.gc.major_collections"

            let major_heap = "process.runtime.ocaml.gc.major_heap"

            let minor_allocated = "process.runtime.ocaml.gc.minor_allocated"

            let minor_collections = "process.runtime.ocaml.gc.minor_collections"
          end
        end
      end
    end
  end
end
(** Attribute values: a restricted, user-friendly subset of OTLP's
    [AnyValue]. *)
type value =
  [ `Int of int
  | `String of string
  | `Bool of bool
  | `None
  ]

(** A key/value attribute pair. *)
type key_value = string * value

(**/**)

(* Convert a user-facing [value] to its protobuf representation;
   [`None] maps to an absent value. *)
let _conv_value =
  let open Proto.Common in
  function
  | `Int i -> Some (Int_value (Int64.of_int i))
  | `String s -> Some (String_value s)
  | `Bool b -> Some (Bool_value b)
  | `None -> None

(**/**)

(**/**)

(* Convert a user-facing key/value pair to a protobuf [key_value]. *)
let _conv_key_value (k, v) =
  let open Proto.Common in
  let value = _conv_value v in
  default_key_value ~key:k ~value ()

(**/**)
(** {2 Global settings} *)
(** Process-wide metadata, environment variables, etc. *)
module Globals = struct
  open Proto.Common

  (* Defect fixed: the doc comment on [default_span_kind] had lost its
     "(**" opener and its text was duplicated, breaking the syntax of
     this module. Code is otherwise unchanged. *)

  (** Main service name metadata *)
  let service_name = ref "unknown_service"

  (** Namespace for the service *)
  let service_namespace = ref None

  (** Unique identifier for the service *)
  let service_instance_id = ref None

  let instrumentation_library =
    default_instrumentation_scope ~version:"0.2" ~name:"ocaml-opentelemetry" ()

  (** Global attributes, initially set
      via OTEL_RESOURCE_ATTRIBUTES and modifiable
      by the user code. They will be attached to each outgoing metrics/traces. *)
  let global_attributes : key_value list ref =
    let parse_pair s =
      match String.split_on_char '=' s with
      | [ a; b ] -> default_key_value ~key:a ~value:(Some (String_value b)) ()
      | _ -> failwith (Printf.sprintf "invalid attribute: %S" s)
    in
    ref
    @@
    try
      Sys.getenv "OTEL_RESOURCE_ATTRIBUTES"
      |> String.split_on_char ',' |> List.map parse_pair
    with _ -> []

  (** Add a global attribute *)
  let add_global_attribute (key : string) (v : value) : unit =
    global_attributes := _conv_key_value (key, v) :: !global_attributes

  (* add global attributes to this list *)
  let merge_global_attributes_ into : _ list =
    let not_redundant kv = List.for_all (fun kv' -> kv.key <> kv'.key) into in
    List.rev_append (List.filter not_redundant !global_attributes) into

  (** Default span kind in {!Span.create}.

      This will be used in all spans that do not specify [~kind] explicitly.
      It can be convenient to set "client" or "server" uniformly in here.
      @since 0.4 *)
  let default_span_kind = ref Proto.Trace.Span_kind_unspecified

  (* Build the resource attribute list: service name/instance/namespace
     (when set), user attrs, then the merged global attributes. *)
  let mk_attributes ?(service_name = !service_name) ?(attrs = []) () : _ list =
    let l = List.map _conv_key_value attrs in
    let l =
      default_key_value ~key:Conventions.Attributes.Service.name
        ~value:(Some (String_value service_name)) ()
      :: l
    in
    let l =
      match !service_instance_id with
      | None -> l
      | Some v ->
        default_key_value ~key:Conventions.Attributes.Service.instance_id
          ~value:(Some (String_value v)) ()
        :: l
    in
    let l =
      match !service_namespace with
      | None -> l
      | Some v ->
        default_key_value ~key:Conventions.Attributes.Service.namespace
          ~value:(Some (String_value v)) ()
        :: l
    in
    l |> merge_global_attributes_
end
(** {2 Traces and Spans} *)
(** Events.
Events occur at a given time and can carry attributes. They always
belong in a span. *)
module Event : sig
  open Proto.Trace

  type t = span_event

  val make :
    ?time_unix_nano:Timestamp_ns.t -> ?attrs:key_value list -> string -> t
end = struct
  open Proto.Trace

  type t = span_event

  (* Build a span event; the timestamp defaults to "now", evaluated at
     call time when the argument is omitted. *)
  let make ?time_unix_nano ?(attrs = []) (name : string) : t =
    let time_unix_nano =
      match time_unix_nano with
      | Some t -> t
      | None -> Timestamp_ns.now_unix_ns ()
    in
    default_span_event ~time_unix_nano ~name
      ~attributes:(List.map _conv_key_value attrs) ()
end
(** {2 Scopes} *)
(** Scopes.
A scope is a trace ID and the span ID of the currently active span.
*)
module Scope = struct
  (* [trace_id]/[span_id] identify the currently active span; [events]
     and [attrs] accumulate (most recent first) until the span is built. *)
  type t = {
    trace_id: Trace_id.t;
    span_id: Span_id.t;
    mutable events: Event.t list;
    mutable attrs: key_value list;
  }

  (** Add an event to the scope. It will be aggregated into the span.

      Note that this takes a function that produces an event, and will only
      call it if there is an instrumentation backend. *)
  let[@inline] add_event (scope : t) (ev : unit -> Event.t) : unit =
    if Collector.has_backend () then scope.events <- ev () :: scope.events

  (** Add an attr to the scope. It will be aggregated into the span.

      Note that this takes a function that produces attributes, and will only
      call it if there is an instrumentation backend. *)
  let[@inline] add_attrs (scope : t) (attrs : unit -> key_value list) : unit =
    if Collector.has_backend () then
      scope.attrs <- List.rev_append (attrs ()) scope.attrs

  (**/**)

  (* define this locally *)
  let _global_scope : t Thread_local.t = Thread_local.create ()

  (**/**)

  (** Obtain current scope from thread-local storage, if available *)
  let get_surrounding ?scope () : t option =
    match scope with
    | Some _ -> scope
    | None -> Thread_local.get _global_scope
end
open struct
  (* private alias, used by [Trace.with_] below *)
  let get_surrounding_scope = Scope.get_surrounding
end
(** Span Link
A pointer from the current span to another span in the same trace or in a
different trace. For example, this can be used in batching operations,
where a single batch handler processes multiple requests from different
traces or when the handler receives a request from a different project.
*)
module Span_link : sig
  open Proto.Trace

  type t = span_link

  val make :
    trace_id:Trace_id.t ->
    span_id:Span_id.t ->
    ?trace_state:string ->
    ?attrs:key_value list ->
    ?dropped_attributes_count:int ->
    unit ->
    t
end = struct
  open Proto.Trace

  type t = span_link

  (* Build a link to another span, converting user-facing identifiers and
     attributes into their protobuf representations. *)
  let make ~trace_id ~span_id ?trace_state ?(attrs = [])
      ?dropped_attributes_count () : t =
    let attributes = List.map _conv_key_value attrs in
    let dropped_attributes_count =
      match dropped_attributes_count with
      | None -> None
      | Some n -> Some (Int32.of_int n)
    in
    default_span_link
      ~trace_id:(Trace_id.to_bytes trace_id)
      ~span_id:(Span_id.to_bytes span_id) ?trace_state ~attributes
      ?dropped_attributes_count ()
end
(** Spans.

    A Span is the workhorse of traces, it indicates an operation that
    took place over a given span of time (indicated by start_time and end_time)
    as part of a hierarchical trace. All spans in a given trace are bound by
    the use of the same {!Trace_id.t}. *)
module Span : sig
  open Proto.Trace

  type t = span

  type id = Span_id.t

  (** Kind of a span, as defined by the OTLP protobuf. *)
  type nonrec kind = span_span_kind =
    | Span_kind_unspecified
    | Span_kind_internal
    | Span_kind_server
    | Span_kind_client
    | Span_kind_producer
    | Span_kind_consumer

  (** Outcome code carried by a span's status. *)
  type nonrec status_code = status_status_code =
    | Status_code_unset
    | Status_code_ok
    | Status_code_error

  type nonrec status = status = {
    message: string;
    code: status_code;
  }

  val id : t -> Span_id.t
  (** Identifier of this span. *)

  type key_value =
    string * [ `Int of int | `String of string | `Bool of bool | `None ]

  val create :
    ?kind:kind ->
    ?id:id ->
    ?trace_state:string ->
    ?attrs:key_value list ->
    ?events:Event.t list ->
    ?status:status ->
    trace_id:Trace_id.t ->
    ?parent:id ->
    ?links:Span_link.t list ->
    start_time:Timestamp_ns.t ->
    end_time:Timestamp_ns.t ->
    string ->
    t * id
  (** [create ~trace_id name] creates a new span with its unique ID.
      @param trace_id the trace this belongs to
      @param parent parent span, if any
      @param links list of links to other spans, each with their trace state
      (see {{:https://www.w3.org/TR/trace-context/#tracestate-header} w3.org}) *)
end = struct
  open Proto.Trace

  type t = span

  type id = Span_id.t

  type nonrec kind = span_span_kind =
    | Span_kind_unspecified
    | Span_kind_internal
    | Span_kind_server
    | Span_kind_client
    | Span_kind_producer
    | Span_kind_consumer

  type key_value =
    string * [ `Int of int | `String of string | `Bool of bool | `None ]

  type nonrec status_code = status_status_code =
    | Status_code_unset
    | Status_code_ok
    | Status_code_error

  type nonrec status = status = {
    message: string;
    code: status_code;
  }

  let id self = Span_id.of_bytes self.span_id

  (* [kind] defaults to the process-wide [Globals.default_span_kind];
     a fresh span ID is generated unless [~id] is given. *)
  let create ?(kind = !Globals.default_span_kind) ?(id = Span_id.create ())
      ?trace_state ?(attrs = []) ?(events = []) ?status ~trace_id ?parent
      ?(links = []) ~start_time ~end_time name : t * id =
    let trace_id = Trace_id.to_bytes trace_id in
    let parent_span_id = Option.map Span_id.to_bytes parent in
    let attributes = List.map _conv_key_value attrs in
    let span =
      default_span ~trace_id ?parent_span_id ~span_id:(Span_id.to_bytes id)
        ~attributes ~events ?trace_state ~status ~kind ~name ~links
        ~start_time_unix_nano:start_time ~end_time_unix_nano:end_time ()
    in
    span, id
end
(** Traces.
See {{: /#tracing-signal} the spec} *)
module Trace = struct
  open Proto.Trace

  (* Defect fixed: the doc comments of [emit] and [with_], and the inline
     TODO comment inside [with_], had lost their comment openers (text
     duplicated), breaking the syntax of this module. Code unchanged. *)

  type span = Span.t

  (* Wrap spans with the instrumentation-library scope and the global
     resource attributes. *)
  let make_resource_spans ?service_name ?attrs spans =
    let ils =
      default_scope_spans ~scope:(Some Globals.instrumentation_library) ~spans
        ()
    in
    let attributes = Globals.mk_attributes ?service_name ?attrs () in
    let resource = Proto.Resource.default_resource ~attributes () in
    default_resource_spans ~resource:(Some resource) ~scope_spans:[ ils ] ()

  (** Sync emitter.

      This instructs the collector to forward
      the spans to some backend at a later point.

      {b NOTE} be careful not to call this inside a Gc alarm, as it can
      cause deadlocks. *)
  let emit ?service_name ?attrs (spans : span list) : unit =
    let rs = make_resource_spans ?service_name ?attrs spans in
    Collector.send_trace [ rs ] ~ret:(fun () -> ())

  type scope = Scope.t = {
    trace_id: Trace_id.t;
    span_id: Span_id.t;
    mutable events: Event.t list;
    mutable attrs: Span.key_value list;
  }
  [@@deprecated "use Scope.t"]

  let add_event = Scope.add_event [@@deprecated "use Scope.add_event"]

  let add_attrs = Scope.add_attrs [@@deprecated "use Scope.add_attrs"]

  (** Sync span guard.

      @param force_new_trace_id if true (default false), the span will not use a
      surrounding context, or [scope], or [trace_id], but will always
      create a fresh new trace ID.

      {b NOTE} be careful not to call this inside a Gc alarm, as it can
      cause deadlocks. *)
  let with_ ?(force_new_trace_id = false) ?trace_state ?service_name
      ?(attrs : (string * [< value ]) list = []) ?kind ?trace_id ?parent ?scope
      ?links name (f : Scope.t -> 'a) : 'a =
    let scope =
      if force_new_trace_id then
        None
      else
        get_surrounding_scope ?scope ()
    in
    (* explicit [trace_id] wins, then the ambient scope, else fresh *)
    let trace_id =
      match trace_id, scope with
      | _ when force_new_trace_id -> Trace_id.create ()
      | Some trace_id, _ -> trace_id
      | None, Some scope -> scope.trace_id
      | None, None -> Trace_id.create ()
    in
    let parent =
      match parent, scope with
      | _ when force_new_trace_id -> None
      | Some span_id, _ -> Some span_id
      | None, Some scope -> Some scope.span_id
      | None, None -> None
    in
    let start_time = Timestamp_ns.now_unix_ns () in
    let span_id = Span_id.create () in
    let scope = { trace_id; span_id; events = []; attrs } in
    (* set global scope in this thread *)
    Thread_local.with_ Scope._global_scope scope @@ fun _sc ->
    (* called once we're done, to emit a span *)
    let finally res =
      let status =
        match res with
        | Ok () -> default_status ~code:Status_code_ok ()
        | Error e -> default_status ~code:Status_code_error ~message:e ()
      in
      let span, _ =
        (* TODO: should the attrs passed to with_ go on the Span
           (in Span.create) or on the ResourceSpan (in emit)?
           (question also applies to Opentelemetry_lwt.Trace.with) *)
        Span.create ?kind ~trace_id ?parent ?links ~id:span_id ?trace_state
          ~attrs:scope.attrs ~events:scope.events ~start_time
          ~end_time:(Timestamp_ns.now_unix_ns ())
          ~status name
      in
      emit ?service_name [ span ]
    in
    try
      let x = f scope in
      finally (Ok ());
      x
    with e ->
      finally (Error (Printexc.to_string e));
      raise e
end
(** {2 Metrics} *)
(** Metrics.
See {{: /#metric-signal} the spec} *)
module Metrics = struct
open Metrics_types
type t = Metrics_types.metric
* A single metric , measuring some time - varying quantity or statistical
distribution . It is composed of one or more data points that have
precise values and time stamps . Each distinct metric should have a
distinct name .
distribution. It is composed of one or more data points that have
precise values and time stamps. Each distinct metric should have a
distinct name. *)
open struct
let _program_start = Timestamp_ns.now_unix_ns ()
end
(** Number data point, as a float *)
let float ?(start_time_unix_nano = _program_start)
?(now = Timestamp_ns.now_unix_ns ()) ?(attrs = []) (d : float) :
number_data_point =
let attributes = attrs |> List.map _conv_key_value in
default_number_data_point ~start_time_unix_nano ~time_unix_nano:now
~attributes ~value:(As_double d) ()
(** Number data point, as an int *)
let int ?(start_time_unix_nano = _program_start)
?(now = Timestamp_ns.now_unix_ns ()) ?(attrs = []) (i : int) :
number_data_point =
let attributes = attrs |> List.map _conv_key_value in
default_number_data_point ~start_time_unix_nano ~time_unix_nano:now
~attributes
~value:(As_int (Int64.of_int i))
()
(** Aggregation of a scalar metric, always with the current value *)
let gauge ~name ?description ?unit_ (l : number_data_point list) : t =
let data = Gauge (default_gauge ~data_points:l ()) in
default_metric ~name ?description ?unit_ ~data ()
type aggregation_temporality = Metrics_types.aggregation_temporality =
| Aggregation_temporality_unspecified
| Aggregation_temporality_delta
| Aggregation_temporality_cumulative
(** Sum of all reported measurements over a time interval *)
let sum ~name ?description ?unit_
?(aggregation_temporality = Aggregation_temporality_cumulative)
?is_monotonic (l : number_data_point list) : t =
let data =
Sum (default_sum ~data_points:l ?is_monotonic ~aggregation_temporality ())
in
default_metric ~name ?description ?unit_ ~data ()
(** Histogram data
@param count number of values in population (non negative)
@param sum sum of values in population (0 if count is 0)
@param bucket_counts count value of histogram for each bucket. Sum of
the counts must be equal to [count].
length must be [1+length explicit_bounds]
@param explicit_bounds strictly increasing list of bounds for the buckets *)
let histogram_data_point ?(start_time_unix_nano = _program_start)
?(now = Timestamp_ns.now_unix_ns ()) ?(attrs = []) ?(exemplars = [])
?(explicit_bounds = []) ?sum ~bucket_counts ~count () :
histogram_data_point =
let attributes = attrs |> List.map _conv_key_value in
default_histogram_data_point ~start_time_unix_nano ~time_unix_nano:now
~attributes ~exemplars ~bucket_counts ~explicit_bounds ~count ?sum ()
let histogram ~name ?description ?unit_ ?aggregation_temporality
(l : histogram_data_point list) : t =
let data =
Histogram (default_histogram ~data_points:l ?aggregation_temporality ())
in
default_metric ~name ?description ?unit_ ~data ()
(* TODO: exponential history *)
(* TODO: summary *)
(* TODO: exemplar *)
(** Aggregate metrics into a {!Proto.Metrics.resource_metrics} *)
let make_resource_metrics ?service_name ?attrs (l : t list) : resource_metrics
    =
  (* wrap the metrics in a scope carrying this library's identity *)
  let lm =
    default_scope_metrics ~scope:(Some Globals.instrumentation_library)
      ~metrics:l ()
  in
  (* resource attributes: service name/namespace/instance id + globals *)
  let attributes = Globals.mk_attributes ?service_name ?attrs () in
  let resource = Proto.Resource.default_resource ~attributes () in
  default_resource_metrics ~scope_metrics:[ lm ] ~resource:(Some resource) ()
(** Emit some metrics to the collector (sync). This blocks until
    the backend has pushed the metrics into some internal queue, or
    discarded them.

    {b NOTE} be careful not to call this inside a Gc alarm, as it can
    cause deadlocks. *)
let emit ?attrs (l : t list) : unit =
  let rm = make_resource_metrics ?attrs l in
  Collector.send_metrics [ rm ] ~ret:ignore
end
(** Logs.

    See the OpenTelemetry specification ({e log signal}). *)
module Logs = struct
  open Logs_types

  type t = log_record

  (** Severity level of a log event *)
  type severity = Logs_types.severity_number =
    | Severity_number_unspecified
    | Severity_number_trace
    | Severity_number_trace2
    | Severity_number_trace3
    | Severity_number_trace4
    | Severity_number_debug
    | Severity_number_debug2
    | Severity_number_debug3
    | Severity_number_debug4
    | Severity_number_info
    | Severity_number_info2
    | Severity_number_info3
    | Severity_number_info4
    | Severity_number_warn
    | Severity_number_warn2
    | Severity_number_warn3
    | Severity_number_warn4
    | Severity_number_error
    | Severity_number_error2
    | Severity_number_error3
    | Severity_number_error4
    | Severity_number_fatal
    | Severity_number_fatal2
    | Severity_number_fatal3
    | Severity_number_fatal4

  let pp_severity = Logs_pp.pp_severity_number

  (** Flags of a log record (re-export of the protobuf type) *)
  type flags = Logs_types.log_record_flags =
    | Log_record_flag_unspecified
    | Log_record_flag_trace_flags_mask

  let pp_flags = Logs_pp.pp_log_record_flags

  (** Make a single log entry *)
  let make ?time ?(observed_time_unix_nano = Timestamp_ns.now_unix_ns ())
      ?severity ?log_level ?flags ?trace_id ?span_id (body : value) : t =
    (* when no event time is given, fall back to the observation time *)
    let time_unix_nano =
      match time with
      | None -> observed_time_unix_nano
      | Some t -> t
    in
    let trace_id = Option.map Trace_id.to_bytes trace_id in
    let span_id = Option.map Span_id.to_bytes span_id in
    let body = _conv_value body in
    default_log_record ~time_unix_nano ~observed_time_unix_nano
      ?severity_number:severity ?severity_text:log_level ?flags ?trace_id
      ?span_id ~body ()

  (** Make a log entry whose body is a string *)
  let make_str ?time ?observed_time_unix_nano ?severity ?log_level ?flags
      ?trace_id ?span_id (body : string) : t =
    make ?time ?observed_time_unix_nano ?severity ?log_level ?flags ?trace_id
      ?span_id (`String body)

  (** Make a log entry with format *)
  let make_strf ?time ?observed_time_unix_nano ?severity ?log_level ?flags
      ?trace_id ?span_id fmt =
    Format.kasprintf
      (fun bod ->
        make_str ?time ?observed_time_unix_nano ?severity ?log_level ?flags
          ?trace_id ?span_id bod)
      fmt

  (** Emit logs.

      This instructs the collector to send the logs to some backend at
      a later date.

      {b NOTE} be careful not to call this inside a Gc alarm, as it can
      cause deadlocks. *)
  let emit ?service_name ?attrs (l : t list) : unit =
    let attributes = Globals.mk_attributes ?service_name ?attrs () in
    let resource = Proto.Resource.default_resource ~attributes () in
    let ll =
      default_scope_logs ~scope:(Some Globals.instrumentation_library)
        ~log_records:l ()
    in
    let rl =
      default_resource_logs ~resource:(Some resource) ~scope_logs:[ ll ] ()
    in
    Collector.send_logs [ rl ] ~ret:ignore
end
(** A set of callbacks that produce metrics when called.
The metrics are automatically called regularly.
This allows applications to register metrics callbacks from various points
in the program (or even in libraries), and not worry about setting
alarms/intervals to emit them. *)
module Metrics_callbacks = struct
  open struct
    (* registered metrics-producing callbacks, most recently added first *)
    let cbs_ : (unit -> Metrics.t list) list ref = ref []
  end

  (** [register f] adds the callback [f] to the list.
      [f] will be called at unspecified times and is expected to return
      a list of metrics. It might be called regularly by the backend,
      in particular (but not only) when {!Collector.tick} is called. *)
  let register f : unit =
    (* On the first registration only, install a single tick hook that
       drains every registered callback. Pattern matching is used instead
       of [!cbs_ = []]: polymorphic equality on a value containing
       closures is fragile (comparing closures raises at runtime). *)
    (match !cbs_ with
    | [] ->
      (* make sure we call [f] (and others) at each tick *)
      Collector.on_tick (fun () ->
          let m = List.map (fun f -> f ()) !cbs_ |> List.flatten in
          Metrics.emit m)
    | _ :: _ -> ());
    cbs_ := f :: !cbs_
end
(** {2 Utils} *)
(** Implementation of the W3C Trace Context spec

    https://www.w3.org/TR/trace-context/ *)
module Trace_context = struct
  (** The traceparent header

      https://www.w3.org/TR/trace-context/#traceparent-header *)
  module Traceparent = struct
    (* canonical name of the HTTP header *)
    let name = "traceparent"

    (** Parse the value of the traceparent header.

        The values are of the form:

        {[
          {version}-{trace_id}-{parent_id}-{flags}
        ]}

        For example: [00-4bf92f3577b34da6a3ce929d0e0e4736-00f067aa0ba902b7-01].

        [{flags}] are currently ignored. *)
    let of_value str : (Trace_id.t * Span_id.t, string) result =
      let ( let* ) = result_bind in
      (* [blit ~offset ~len ~or_] extracts the [len]-byte substring of [str]
         at [offset] (failing with [or_] if [str] is too short) and returns
         it together with the offset just past it *)
      let blit ~offset ~len ~or_ =
        let buf = Bytes.create len in
        let* str =
          match Bytes.blit_string str offset buf 0 len with
          | () -> Ok (Bytes.unsafe_to_string buf)
          | exception Invalid_argument _ -> Error or_
        in
        Ok (str, offset + len)
      in
      (* [consume expected ~offset ~or_] checks that [expected] occurs
         verbatim at [offset] and returns the offset just past it *)
      let consume expected ~offset ~or_ =
        let len = String.length expected in
        let* str, offset = blit ~offset ~len ~or_ in
        if str = expected then
          Ok offset
        else
          Error or_
      in
      let offset = 0 in
      let* offset = consume "00" ~offset ~or_:"Expected version 00" in
      let* offset = consume "-" ~offset ~or_:"Expected delimiter" in
      (* trace id: 32 hex digits *)
      let* trace_id, offset =
        blit ~offset ~len:32 ~or_:"Expected 32-digit trace-id"
      in
      let* trace_id =
        match Trace_id.of_hex trace_id with
        | trace_id -> Ok trace_id
        | exception Invalid_argument _ -> Error "Expected hex-encoded trace-id"
      in
      let* offset = consume "-" ~offset ~or_:"Expected delimiter" in
      (* parent span id: 16 hex digits *)
      let* parent_id, offset =
        blit ~offset ~len:16 ~or_:"Expected 16-digit parent-id"
      in
      let* parent_id =
        match Span_id.of_hex parent_id with
        | parent_id -> Ok parent_id
        | exception Invalid_argument _ -> Error "Expected hex-encoded parent-id"
      in
      let* offset = consume "-" ~offset ~or_:"Expected delimiter" in
      (* flags are parsed for well-formedness but otherwise ignored *)
      let* _flags, _offset =
        blit ~offset ~len:2 ~or_:"Expected 2-digit flags"
      in
      Ok (trace_id, parent_id)

    (** Render a traceparent value for the given IDs (version [00],
        flags [00]). *)
    let to_value ~(trace_id : Trace_id.t) ~(parent_id : Span_id.t) () : string =
      Printf.sprintf "00-%s-%s-00" (Trace_id.to_hex trace_id)
        (Span_id.to_hex parent_id)
  end
end
(** Export GC metrics.

    These metrics are emitted after each GC collection. *)
module GC_metrics : sig
  val basic_setup : unit -> unit
  (** Setup a hook that will emit GC statistics regularly *)

  val get_runtime_attributes : unit -> Span.key_value list
  (** Get OCaml name and version runtime attributes *)

  val get_metrics : unit -> Metrics.t list
  (** Get a few metrics from the current state of the GC *)
end = struct
  (* see the OpenTelemetry semantic conventions for process runtimes:
     https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/resource/semantic_conventions/process.md#process-runtimes *)
  let runtime_attributes =
    lazy
      Conventions.Attributes.
        [
          Process.Runtime.name, `String "ocaml";
          Process.Runtime.version, `String Sys.ocaml_version;
        ]

  let get_runtime_attributes () = Lazy.force runtime_attributes

  let basic_setup () =
    (* emit metrics when GC is called *)
    let on_gc () =
      match Collector.get_backend () with
      | None -> ()
      | Some (module C) -> C.signal_emit_gc_metrics ()
    in
    ignore (Gc.create_alarm on_gc : Gc.alarm)

  (* GC statistics are in words; convert to bytes for the "B" unit *)
  let bytes_per_word = Sys.word_size / 8

  let word_to_bytes n = n * bytes_per_word

  let word_to_bytes_f n = n *. float bytes_per_word

  let get_metrics () : Metrics.t list =
    let gc = Gc.quick_stat () in
    let now = Timestamp_ns.now_unix_ns () in
    let open Metrics in
    let open Conventions.Metrics in
    [
      gauge ~name:Process.Runtime.Ocaml.GC.major_heap ~unit_:"B"
        [ int ~now (word_to_bytes gc.Gc.heap_words) ];
      sum ~name:Process.Runtime.Ocaml.GC.minor_allocated
        ~aggregation_temporality:Metrics.Aggregation_temporality_cumulative
        ~is_monotonic:true ~unit_:"B"
        [ float ~now (word_to_bytes_f gc.Gc.minor_words) ];
      sum ~name:Process.Runtime.Ocaml.GC.minor_collections
        ~aggregation_temporality:Metrics.Aggregation_temporality_cumulative
        ~is_monotonic:true
        [ int ~now gc.Gc.minor_collections ];
      sum ~name:Process.Runtime.Ocaml.GC.major_collections
        ~aggregation_temporality:Metrics.Aggregation_temporality_cumulative
        ~is_monotonic:true
        [ int ~now gc.Gc.major_collections ];
      sum ~name:Process.Runtime.Ocaml.GC.compactions
        ~aggregation_temporality:Metrics.Aggregation_temporality_cumulative
        ~is_monotonic:true
        [ int ~now gc.Gc.compactions ];
    ]
end
| null | https://raw.githubusercontent.com/imandra-ai/ocaml-opentelemetry/8a52da644691ab29ab86cd0fdb670bccca484bf4/src/opentelemetry.ml | ocaml | * Opentelemetry types and instrumentation
* Global lock.
* Generation of random identifiers.
* Protobuf types.
This is mostly useful internally. Users should not need to touch it.
* {2 Timestamps}
* Current unix timestamp in nanoseconds
* Sender interface for a message of type [msg].
Inspired from Logs' reporter
(see {{:#sync} its doc})
but without [over] as it doesn't make much sense in presence
of batching.
The [ret] callback is used to return the desired type (unit, or
a Lwt promise, or anything else) once the event has been transferred
to the backend.
It doesn't mean the event has been collected yet, it
could sit in a batch queue for a little while.
* Collector client interface.
* Should be called regularly for background processing,
timeout checks, etc.
* Give the collector the list of callbacks to be executed
when [tick()] is called. Each such callback should be short and
reentrant. Depending on the collector's implementation, it might be
called from a thread that is not the one that called [on_tick].
hidden
* Set collector backend
* Is there a configured backend?
* Current backend, if any
* Do background work. Call this regularly if the collector doesn't
already have a ticker thread or internal timer.
* {2 Identifiers}
make sure the identifier is not all 0, which is a dummy identifier.
* Unique ID of a span.
make sure the identifier is not all 0, which is a dummy identifier.
*/*
*/*
*/*
*/*
* Process-wide metadata, environment variables, etc.
* Main service name metadata
* Namespace for the service
* Unique identifier for the service
* Global attributes, initially set
via OTEL_RESOURCE_ATTRIBUTES and modifiable
by the user code. They will be attached to each outgoing metrics/traces.
* Add a global attribute
add global attributes to this list
* Events.
Events occur at a given time and can carry attributes. They always
belong in a span.
* Scopes.
A scope is a trace ID and the span ID of the currently active span.
* Add an event to the scope. It will be aggregated into the span.
Note that this takes a function that produces an event, and will only
call it if there is an instrumentation backend.
* Add an attr to the scope. It will be aggregated into the span.
Note that this takes a function that produces attributes, and will only
call it if there is an instrumentation backend.
*/*
define this locally
*/*
* Obtain current scope from thread-local storage, if available
* Span Link
A pointer from the current span to another span in the same trace or in a
different trace. For example, this can be used in batching operations,
where a single batch handler processes multiple requests from different
traces or when the handler receives a request from a different project.
* [create ~trace_id name] creates a new span with its unique ID.
@param trace_id the trace this belongs to
@param parent parent span, if any
@param links list of links to other spans, each with their trace state
(see {{: -context/#tracestate-header} w3.org})
* Traces.
See {{: /#tracing-signal} the spec}
set global scope in this thread
called once we're done, to emit a span
* {2 Metrics}
* Metrics.
See {{: /#metric-signal} the spec}
* Number data point, as a float
* Number data point, as an int
* Aggregation of a scalar metric, always with the current value
* Sum of all reported measurements over a time interval
* Histogram data
@param count number of values in population (non negative)
@param sum sum of values in population (0 if count is 0)
@param bucket_counts count value of histogram for each bucket. Sum of
the counts must be equal to [count].
length must be [1+length explicit_bounds]
@param explicit_bounds strictly increasing list of bounds for the buckets
TODO: exponential history
TODO: summary
TODO: exemplar
* Logs.
See {{: /#log-signal} the spec}
* Severity level of a log event
* Make a single log entry
* Make a log entry whose body is a string
* Make a log entry with format
* A set of callbacks that produce metrics when called.
The metrics are automatically called regularly.
This allows applications to register metrics callbacks from various points
in the program (or even in libraries), and not worry about setting
alarms/intervals to emit them.
* [register f] adds the callback [f] to the list.
[f] will be called at unspecified times and is expected to return
a list of metrics. It might be called regularly by the backend,
in particular (but not only) when {!Collector.tick} is called.
make sure we call [f] (and others) at each tick
* {2 Utils}
* Implementation of the W3C Trace Context spec
-context/
* The traceparent header
-context/#traceparent-header
* Get OCaml name and version runtime attributes |
module Thread_local = Thread_local
(** Thread-local storage (re-export). *)

module Lock = Lock
(** Global lock. *)

module Rand_bytes = Rand_bytes
(** Generation of random identifiers. *)
open struct
  (* local [Result.bind]-style monadic bind; used as [let*] by the
     traceparent parser below *)
  let[@inline] result_bind x f =
    match x with
    | Ok v -> f v
    | Error _ as err -> err
end
(** {2 Wire format} *)
(** Protobuf types.

    This is mostly useful internally. Users should not need to touch it. *)
module Proto = struct
  (* Each submodule bundles, for one protobuf file of the OTLP protocol,
     the generated types ([_types]), pretty-printers ([_pp]) and wire
     (de)serializers ([_pb]). *)
  module Common = struct
    include Common_types
    include Common_pp
    include Common_pb
  end

  module Resource = struct
    include Resource_types
    include Resource_pp
    include Resource_pb
  end

  module Trace = struct
    include Trace_types
    include Trace_pp
    include Trace_pb
  end

  module Metrics = struct
    include Metrics_types
    include Metrics_pp
    include Metrics_pb
  end

  module Trace_service = struct
    include Trace_service_types
    include Trace_service_pb
    include Trace_service_pp
  end

  module Metrics_service = struct
    include Metrics_service_types
    include Metrics_service_pp
    include Metrics_service_pb
  end

  module Status = struct
    include Status_types
    include Status_pp
    include Status_pb
  end

  module Logs = struct
    include Logs_types
    include Logs_pb
    include Logs_pp
  end

  module Logs_service = struct
    include Logs_service_types
    include Logs_service_pb
    include Logs_service_pp
  end
end
(** Unix timestamp.

    These timestamps measure time since the Unix epoch (jan 1, 1970) UTC
    in nanoseconds. *)
module Timestamp_ns = struct
  type t = int64

  (* number of nanoseconds in 24 hours *)
  let ns_in_a_day = Int64.(mul 1_000_000_000L (of_int (24 * 3600)))

  (** Current unix timestamp in nanoseconds *)
  let[@inline] now_unix_ns () : t =
    (* Ptime represents a span as (days, picoseconds within the day);
       convert days to ns and ps to ns, then add the two parts *)
    let span = Ptime_clock.now () |> Ptime.to_span in
    let d, ps = Ptime.Span.to_d_ps span in
    let d = Int64.(mul (of_int d) ns_in_a_day) in
    let ns = Int64.(div ps 1_000L) in
    Int64.(add d ns)
end
(** {2 Interface to data collector} *)

(** Collector types.

    These types are used by backend implementations, to send events to
    collectors such as Jaeger.

    Note: most users will not need to touch this module *)
module Collector = struct
  open Proto

  (** Sender interface for a message of type ['msg].

      The [ret] callback is used to return the desired type (unit, or a
      promise, or anything else) once the event has been transferred to
      the backend. It doesn't mean the event has been collected yet, it
      could sit in a batch queue for a little while. *)
  type 'msg sender = { send: 'a. 'msg -> ret:(unit -> 'a) -> 'a }

  (** Collector client interface. *)
  module type BACKEND = sig
    val send_trace : Trace.resource_spans list sender

    val send_metrics : Metrics.resource_metrics list sender

    val send_logs : Logs.resource_logs list sender

    val signal_emit_gc_metrics : unit -> unit
    (** Signal the backend that it should emit GC metrics when it has the
        chance. This should be installed in a GC alarm or another form
        of regular trigger. *)

    val tick : unit -> unit
    (** Should be called regularly for background processing,
        timeout checks, etc. *)

    val set_on_tick_callbacks : (unit -> unit) list ref -> unit
    (** Give the collector the list of callbacks to be executed
        when [tick()] is called. *)

    val cleanup : unit -> unit
  end

  type backend = (module BACKEND)

  open struct
    (* callbacks to run on each tick; shared with the backend *)
    let on_tick_cbs_ = ref []

    (* currently installed backend, if any *)
    let backend : backend option ref = ref None
  end

  (** Set collector backend *)
  let set_backend (b : backend) : unit =
    let (module B) = b in
    B.set_on_tick_callbacks on_tick_cbs_;
    backend := Some b

  (** Is there a configured backend? *)
  (* physical [!=] is fine here: [None] is an immediate value *)
  let[@inline] has_backend () : bool = !backend != None

  (** Current backend, if any *)
  let[@inline] get_backend () : backend option = !backend

  (* each [send_*] forwards to the backend, or just calls [ret] if none *)
  let send_trace (l : Trace.resource_spans list) ~ret =
    match !backend with
    | None -> ret ()
    | Some (module B) -> B.send_trace.send l ~ret

  let send_metrics (l : Metrics.resource_metrics list) ~ret =
    match !backend with
    | None -> ret ()
    | Some (module B) -> B.send_metrics.send l ~ret

  let send_logs (l : Logs.resource_logs list) ~ret =
    match !backend with
    | None -> ret ()
    | Some (module B) -> B.send_logs.send l ~ret

  let[@inline] rand_bytes_16 () = !Rand_bytes.rand_bytes_16 ()

  let[@inline] rand_bytes_8 () = !Rand_bytes.rand_bytes_8 ()

  (** Register a callback to be run on each {!tick}. *)
  let on_tick f = on_tick_cbs_ := f :: !on_tick_cbs_

  (** Do background work. Call this regularly if the collector doesn't
      already have a ticker thread or internal timer. *)
  let tick () =
    match !backend with
    | None -> ()
    | Some (module B) -> B.tick ()
end
module Util_ = struct
  (** [bytes_to_hex b] renders [b] as a lowercase hexadecimal string,
      two characters per input byte. *)
  let bytes_to_hex (b : bytes) : string =
    let hex_digit n =
      if n < 10 then
        Char.chr (Char.code '0' + n)
      else
        Char.chr (Char.code 'a' + n - 10)
    in
    let buf = Buffer.create (2 * Bytes.length b) in
    Bytes.iter
      (fun c ->
        let n = Char.code c in
        Buffer.add_char buf (hex_digit (n lsr 4));
        Buffer.add_char buf (hex_digit (n land 0x0f)))
      b;
    Buffer.contents buf

  (** [bytes_of_hex s] parses the lowercase hexadecimal string [s] back
      into raw bytes (inverse of {!bytes_to_hex}).
      @raise Invalid_argument if [s] has odd length or contains a
      character outside [0-9a-f]. *)
  let bytes_of_hex (s : string) : bytes =
    let n_of_c = function
      | '0' .. '9' as c -> Char.code c - Char.code '0'
      | 'a' .. 'f' as c -> 10 + Char.code c - Char.code 'a'
      | _ -> raise (Invalid_argument "invalid hex char")
    in
    if String.length s mod 2 <> 0 then
      raise (Invalid_argument "hex sequence must be of even length");
    Bytes.init (String.length s / 2) (fun i ->
        let hi = n_of_c s.[2 * i] and lo = n_of_c s.[(2 * i) + 1] in
        Char.chr ((hi lsl 4) lor lo))
end
(** Trace ID.

    This 16 bytes identifier is shared by all spans in one trace. *)
module Trace_id : sig
  type t

  val create : unit -> t

  val pp : Format.formatter -> t -> unit

  val to_bytes : t -> bytes

  val of_bytes : bytes -> t

  val to_hex : t -> string

  val of_hex : string -> t
end = struct
  open Proto.Trace

  (* a trace ID is exactly 16 bytes *)
  type t = bytes

  let to_bytes self = self

  (** Generate a fresh random trace ID. *)
  let create () : t =
    let b = Collector.rand_bytes_16 () in
    assert (Bytes.length b = 16);
    (* make sure the identifier is not all 0, which is a dummy identifier *)
    Bytes.set b 0 (Char.unsafe_chr (Char.code (Bytes.get b 0) lor 1));
    b

  (* @raise Invalid_argument if the input is not exactly 16 bytes *)
  let of_bytes b =
    if Bytes.length b = 16 then
      b
    else
      raise (Invalid_argument "trace IDs must be 16 bytes in length")

  let to_hex self = Util_.bytes_to_hex self

  let of_hex s = of_bytes (Util_.bytes_of_hex s)

  let pp fmt t = Format.fprintf fmt "%s" (to_hex t)
end
(** Span ID.

    Unique 8-byte identifier of a span. *)
module Span_id : sig
  type t

  val create : unit -> t

  val pp : Format.formatter -> t -> unit

  val to_bytes : t -> bytes

  val of_bytes : bytes -> t

  val to_hex : t -> string

  val of_hex : string -> t
end = struct
  open Proto.Trace

  (* a span ID is exactly 8 bytes *)
  type t = bytes

  let to_bytes self = self

  (** Generate a fresh random span ID. *)
  let create () : t =
    let b = Collector.rand_bytes_8 () in
    assert (Bytes.length b = 8);
    (* make sure the identifier is not all 0, which is a dummy identifier *)
    Bytes.set b 0 (Char.unsafe_chr (Char.code (Bytes.get b 0) lor 1));
    b

  (* @raise Invalid_argument if the input is not exactly 8 bytes *)
  let of_bytes b =
    if Bytes.length b = 8 then
      b
    else
      raise (Invalid_argument "span IDs must be 8 bytes in length")

  let to_hex self = Util_.bytes_to_hex self

  let of_hex s = of_bytes (Util_.bytes_of_hex s)

  let pp fmt t = Format.fprintf fmt "%s" (to_hex t)
end
(** {2 Attributes and conventions} *)
(** Standard attribute and metric names, following the OpenTelemetry
    semantic conventions. *)
module Conventions = struct
  module Attributes = struct
    module Process = struct
      module Runtime = struct
        let name = "process.runtime.name"

        let version = "process.runtime.version"

        let description = "process.runtime.description"
      end
    end

    module Service = struct
      let name = "service.name"

      let namespace = "service.namespace"

      let instance_id = "service.instance.id"

      let version = "service.version"
    end
  end

  module Metrics = struct
    module Process = struct
      module Runtime = struct
        module Ocaml = struct
          module GC = struct
            let compactions = "process.runtime.ocaml.gc.compactions"

            let major_collections = "process.runtime.ocaml.gc.major_collections"

            let major_heap = "process.runtime.ocaml.gc.major_heap"

            let minor_allocated = "process.runtime.ocaml.gc.minor_allocated"

            let minor_collections = "process.runtime.ocaml.gc.minor_collections"
          end
        end
      end
    end
  end
end
(** Attribute values. [`None] stands for the absence of a value. *)
type value =
  [ `Int of int
  | `String of string
  | `Bool of bool
  | `None
  ]

(** An attribute: a key paired with its value. *)
type key_value = string * value

(* convert a {!value} into the protobuf value representation;
   [`None] maps to no value at all *)
let _conv_value =
  let open Proto.Common in
  function
  | `Int i -> Some (Int_value (Int64.of_int i))
  | `String s -> Some (String_value s)
  | `Bool b -> Some (Bool_value b)
  | `None -> None

(* convert a {!key_value} pair into the protobuf [key_value] record *)
let _conv_key_value (k, v) =
  let open Proto.Common in
  let value = _conv_value v in
  default_key_value ~key:k ~value ()
(** {2 Global settings} *)
(** Process-wide metadata, environment variables, etc. *)
module Globals = struct
  open Proto.Common

  (** Main service name metadata *)
  let service_name = ref "unknown_service"

  (** Namespace for the service *)
  let service_namespace = ref None

  (** Unique identifier for the service *)
  let service_instance_id = ref None

  (* identity of this instrumentation library, attached to all signals *)
  let instrumentation_library =
    default_instrumentation_scope ~version:"0.2" ~name:"ocaml-opentelemetry" ()

  (** Global attributes, initially set
      via OTEL_RESOURCE_ATTRIBUTES and modifiable
      by the user code. They will be attached to each outgoing metrics/traces. *)
  let global_attributes : key_value list ref =
    (* Parse one "key=value" entry. Split on the {e first} '=' only, so
       values containing '=' are preserved instead of aborting the whole
       parse (the previous [split_on_char] rejected them). *)
    let parse_pair s =
      match String.index_opt s '=' with
      | Some i ->
        let key = String.sub s 0 i in
        let v = String.sub s (i + 1) (String.length s - i - 1) in
        default_key_value ~key ~value:(Some (String_value v)) ()
      | None -> failwith (Printf.sprintf "invalid attribute: %S" s)
    in
    ref
    @@
    (* best effort: no attributes when the variable is absent or malformed *)
    try
      Sys.getenv "OTEL_RESOURCE_ATTRIBUTES"
      |> String.split_on_char ',' |> List.map parse_pair
    with _ -> []

  (** Add a global attribute *)
  let add_global_attribute (key : string) (v : value) : unit =
    global_attributes := _conv_key_value (key, v) :: !global_attributes

  (* add global attributes to this list, skipping keys already present *)
  let merge_global_attributes_ into : _ list =
    let not_redundant kv = List.for_all (fun kv' -> kv.key <> kv'.key) into in
    List.rev_append (List.filter not_redundant !global_attributes) into

  (** Default span kind in {!Span.create}.

      This will be used in all spans that do not specify [~kind] explicitly.
      It can be convenient to set "client" or "server" uniformly in here.
      @since 0.4 *)
  let default_span_kind = ref Proto.Trace.Span_kind_unspecified

  (* attribute list for a resource: user [attrs] + service metadata +
     global attributes *)
  let mk_attributes ?(service_name = !service_name) ?(attrs = []) () : _ list =
    let l = List.map _conv_key_value attrs in
    let l =
      default_key_value ~key:Conventions.Attributes.Service.name
        ~value:(Some (String_value service_name)) ()
      :: l
    in
    let l =
      match !service_instance_id with
      | None -> l
      | Some v ->
        default_key_value ~key:Conventions.Attributes.Service.instance_id
          ~value:(Some (String_value v)) ()
        :: l
    in
    let l =
      match !service_namespace with
      | None -> l
      | Some v ->
        default_key_value ~key:Conventions.Attributes.Service.namespace
          ~value:(Some (String_value v)) ()
        :: l
    in
    l |> merge_global_attributes_
end
(** {2 Traces and Spans} *)
(** Events.

    Events occur at a given time and can carry attributes. They always
    belong in a span. *)
module Event : sig
  open Proto.Trace

  type t = span_event

  val make :
    ?time_unix_nano:Timestamp_ns.t -> ?attrs:key_value list -> string -> t
end = struct
  open Proto.Trace

  type t = span_event

  (** [make name] is an event named [name], stamped with the current time
      unless [time_unix_nano] is given explicitly. *)
  let make ?(time_unix_nano = Timestamp_ns.now_unix_ns ()) ?(attrs = [])
      (name : string) : t =
    let attrs = List.map _conv_key_value attrs in
    default_span_event ~time_unix_nano ~name ~attributes:attrs ()
end
(** {2 Scopes} *)
(** Scopes.

    A scope is a trace ID and the span ID of the currently active span. *)
module Scope = struct
  type t = {
    trace_id: Trace_id.t;
    span_id: Span_id.t;
    mutable events: Event.t list;  (* events accumulated into the span *)
    mutable attrs: key_value list;  (* attributes accumulated into the span *)
  }

  (** Add an event to the scope. It will be aggregated into the span.

      Note that this takes a function that produces an event, and will only
      call it if there is an instrumentation backend. *)
  let[@inline] add_event (scope : t) (ev : unit -> Event.t) : unit =
    if Collector.has_backend () then scope.events <- ev () :: scope.events

  (** Add attributes to the scope. They will be aggregated into the span.

      Note that this takes a function that produces attributes, and will only
      call it if there is an instrumentation backend. *)
  let[@inline] add_attrs (scope : t) (attrs : unit -> key_value list) : unit =
    if Collector.has_backend () then
      scope.attrs <- List.rev_append (attrs ()) scope.attrs

  (* ambient scope of the current thread, installed by [Trace.with_] *)
  let _global_scope : t Thread_local.t = Thread_local.create ()

  (** Obtain current scope from thread-local storage, if available;
      an explicit [scope] argument takes precedence. *)
  let get_surrounding ?scope () : t option =
    match scope with
    | Some _ -> scope
    | None -> Thread_local.get _global_scope
end

open struct
  (* local alias, used by [Trace.with_] below *)
  let get_surrounding_scope = Scope.get_surrounding
end
(** Span Link.

    A pointer from the current span to another span in the same trace or in a
    different trace. For example, this can be used in batching operations,
    where a single batch handler processes multiple requests from different
    traces or when the handler receives a request from a different project. *)
module Span_link : sig
  open Proto.Trace

  type t = span_link

  val make :
    trace_id:Trace_id.t ->
    span_id:Span_id.t ->
    ?trace_state:string ->
    ?attrs:key_value list ->
    ?dropped_attributes_count:int ->
    unit ->
    t
end = struct
  open Proto.Trace

  type t = span_link

  let make ~trace_id ~span_id ?trace_state ?(attrs = [])
      ?dropped_attributes_count () : t =
    let attributes = List.map _conv_key_value attrs in
    let dropped_attributes_count =
      Option.map Int32.of_int dropped_attributes_count
    in
    default_span_link
      ~trace_id:(Trace_id.to_bytes trace_id)
      ~span_id:(Span_id.to_bytes span_id) ?trace_state ~attributes
      ?dropped_attributes_count ()
end
(** Spans.

    A Span is the workhorse of traces, it indicates an operation that
    took place over a given span of time (indicated by start_time and end_time)
    as part of a hierarchical trace. All spans in a given trace are bound by
    the use of the same {!Trace_id.t}. *)
module Span : sig
  open Proto.Trace

  type t = span

  type id = Span_id.t

  type nonrec kind = span_span_kind =
    | Span_kind_unspecified
    | Span_kind_internal
    | Span_kind_server
    | Span_kind_client
    | Span_kind_producer
    | Span_kind_consumer

  type nonrec status_code = status_status_code =
    | Status_code_unset
    | Status_code_ok
    | Status_code_error

  type nonrec status = status = {
    message: string;
    code: status_code;
  }

  val id : t -> Span_id.t

  type key_value =
    string * [ `Int of int | `String of string | `Bool of bool | `None ]

  val create :
    ?kind:kind ->
    ?id:id ->
    ?trace_state:string ->
    ?attrs:key_value list ->
    ?events:Event.t list ->
    ?status:status ->
    trace_id:Trace_id.t ->
    ?parent:id ->
    ?links:Span_link.t list ->
    start_time:Timestamp_ns.t ->
    end_time:Timestamp_ns.t ->
    string ->
    t * id
  (** [create ~trace_id name] creates a new span with its unique ID.
      @param trace_id the trace this belongs to
      @param parent parent span, if any
      @param links list of links to other spans, each with their trace state
      (see {{: https://www.w3.org/TR/trace-context/#tracestate-header} w3.org}) *)
end = struct
  open Proto.Trace

  type t = span

  type id = Span_id.t

  type nonrec kind = span_span_kind =
    | Span_kind_unspecified
    | Span_kind_internal
    | Span_kind_server
    | Span_kind_client
    | Span_kind_producer
    | Span_kind_consumer

  type key_value =
    string * [ `Int of int | `String of string | `Bool of bool | `None ]

  type nonrec status_code = status_status_code =
    | Status_code_unset
    | Status_code_ok
    | Status_code_error

  type nonrec status = status = {
    message: string;
    code: status_code;
  }

  let id self = Span_id.of_bytes self.span_id

  let create ?(kind = !Globals.default_span_kind) ?(id = Span_id.create ())
      ?trace_state ?(attrs = []) ?(events = []) ?status ~trace_id ?parent
      ?(links = []) ~start_time ~end_time name : t * id =
    (* convert IDs and attributes to their protobuf representation *)
    let trace_id = Trace_id.to_bytes trace_id in
    let parent_span_id = Option.map Span_id.to_bytes parent in
    let attributes = List.map _conv_key_value attrs in
    let span =
      default_span ~trace_id ?parent_span_id ~span_id:(Span_id.to_bytes id)
        ~attributes ~events ?trace_state ~status ~kind ~name ~links
        ~start_time_unix_nano:start_time ~end_time_unix_nano:end_time ()
    in
    span, id
end
(** Traces.

    See the OpenTelemetry specification ({e tracing signal}). *)
module Trace = struct
  open Proto.Trace

  type span = Span.t

  (** Group [spans] under a single resource, attaching service/global
      attributes and this library's instrumentation scope. *)
  let make_resource_spans ?service_name ?attrs spans =
    let ils =
      default_scope_spans ~scope:(Some Globals.instrumentation_library) ~spans
        ()
    in
    let attributes = Globals.mk_attributes ?service_name ?attrs () in
    let resource = Proto.Resource.default_resource ~attributes () in
    default_resource_spans ~resource:(Some resource) ~scope_spans:[ ils ] ()

  (** Sync emitter.

      This instructs the collector to forward
      the spans to some backend at a later point.

      {b NOTE} be careful not to call this inside a Gc alarm, as it can
      cause deadlocks. *)
  let emit ?service_name ?attrs (spans : span list) : unit =
    let rs = make_resource_spans ?service_name ?attrs spans in
    Collector.send_trace [ rs ] ~ret:(fun () -> ())

  type scope = Scope.t = {
    trace_id: Trace_id.t;
    span_id: Span_id.t;
    mutable events: Event.t list;
    mutable attrs: Span.key_value list;
  }
  [@@deprecated "use Scope.t"]

  let add_event = Scope.add_event [@@deprecated "use Scope.add_event"]

  let add_attrs = Scope.add_attrs [@@deprecated "use Scope.add_attrs"]

  (** Sync span guard.

      Runs [f] inside a fresh scope (also installed as the ambient
      thread-local scope), then emits the corresponding span — even when
      [f] raises.

      @param force_new_trace_id if true (default false), the span will not use a
      surrounding context, or [scope], or [trace_id], but will always
      create a fresh new trace ID.

      {b NOTE} be careful not to call this inside a Gc alarm, as it can
      cause deadlocks. *)
  let with_ ?(force_new_trace_id = false) ?trace_state ?service_name
      ?(attrs : (string * [< value ]) list = []) ?kind ?trace_id ?parent ?scope
      ?links name (f : Scope.t -> 'a) : 'a =
    (* surrounding scope, unless we are forced onto a brand new trace *)
    let scope =
      if force_new_trace_id then
        None
      else
        get_surrounding_scope ?scope ()
    in
    (* explicit [trace_id] wins, then the surrounding scope, else fresh *)
    let trace_id =
      match trace_id, scope with
      | _ when force_new_trace_id -> Trace_id.create ()
      | Some trace_id, _ -> trace_id
      | None, Some scope -> scope.trace_id
      | None, None -> Trace_id.create ()
    in
    let parent =
      match parent, scope with
      | _ when force_new_trace_id -> None
      | Some span_id, _ -> Some span_id
      | None, Some scope -> Some scope.span_id
      | None, None -> None
    in
    let start_time = Timestamp_ns.now_unix_ns () in
    let span_id = Span_id.create () in
    let scope = { trace_id; span_id; events = []; attrs } in
    (* set global scope in this thread while [f] runs *)
    Thread_local.with_ Scope._global_scope scope @@ fun _sc ->
    (* called once we're done, to emit a span *)
    let finally res =
      let status =
        match res with
        | Ok () -> default_status ~code:Status_code_ok ()
        | Error e -> default_status ~code:Status_code_error ~message:e ()
      in
      let span, _ =
        (* TODO: should the attrs passed to with_ go on the Span
           (in Span.create) or on the ResourceSpan (in emit)?
           (question also applies to Opentelemetry_lwt.Trace.with) *)
        Span.create ?kind ~trace_id ?parent ?links ~id:span_id ?trace_state
          ~attrs:scope.attrs ~events:scope.events ~start_time
          ~end_time:(Timestamp_ns.now_unix_ns ())
          ~status name
      in
      emit ?service_name [ span ]
    in
    try
      let x = f scope in
      finally (Ok ());
      x
    with e ->
      (* capture the backtrace before running any other code, and re-raise
         with it, so the original stack trace is not clobbered by [finally] *)
      let bt = Printexc.get_raw_backtrace () in
      finally (Error (Printexc.to_string e));
      Printexc.raise_with_backtrace e bt
end
(** Metrics.

    See the OpenTelemetry specification ({e metric signal}). *)
module Metrics = struct
  open Metrics_types

  type t = Metrics_types.metric
  (** A single metric, measuring some time-varying quantity or statistical
      distribution. It is composed of one or more data points that have
      precise values and time stamps. Each distinct metric should have a
      distinct name. *)

  open struct
    (* default [start_time_unix_nano] for data points *)
    let _program_start = Timestamp_ns.now_unix_ns ()
  end

  (** Number data point, as a float *)
  let float ?(start_time_unix_nano = _program_start)
      ?(now = Timestamp_ns.now_unix_ns ()) ?(attrs = []) (d : float) :
      number_data_point =
    let attributes = attrs |> List.map _conv_key_value in
    default_number_data_point ~start_time_unix_nano ~time_unix_nano:now
      ~attributes ~value:(As_double d) ()

  (** Number data point, as an int *)
  let int ?(start_time_unix_nano = _program_start)
      ?(now = Timestamp_ns.now_unix_ns ()) ?(attrs = []) (i : int) :
      number_data_point =
    let attributes = attrs |> List.map _conv_key_value in
    default_number_data_point ~start_time_unix_nano ~time_unix_nano:now
      ~attributes
      ~value:(As_int (Int64.of_int i))
      ()

  (** Aggregation of a scalar metric, always with the current value *)
  let gauge ~name ?description ?unit_ (l : number_data_point list) : t =
    let data = Gauge (default_gauge ~data_points:l ()) in
    default_metric ~name ?description ?unit_ ~data ()

  (** Temporality of an aggregation (delta vs cumulative). *)
  type aggregation_temporality = Metrics_types.aggregation_temporality =
    | Aggregation_temporality_unspecified
    | Aggregation_temporality_delta
    | Aggregation_temporality_cumulative

  (** Sum of all reported measurements over a time interval *)
  let sum ~name ?description ?unit_
      ?(aggregation_temporality = Aggregation_temporality_cumulative)
      ?is_monotonic (l : number_data_point list) : t =
    let data =
      Sum (default_sum ~data_points:l ?is_monotonic ~aggregation_temporality ())
    in
    default_metric ~name ?description ?unit_ ~data ()

  (** Histogram data
      @param count number of values in population (non negative)
      @param sum sum of values in population (0 if count is 0)
      @param bucket_counts count value of histogram for each bucket. Sum of
      the counts must be equal to [count].
      length must be [1+length explicit_bounds]
      @param explicit_bounds strictly increasing list of bounds for the buckets *)
  let histogram_data_point ?(start_time_unix_nano = _program_start)
      ?(now = Timestamp_ns.now_unix_ns ()) ?(attrs = []) ?(exemplars = [])
      ?(explicit_bounds = []) ?sum ~bucket_counts ~count () :
      histogram_data_point =
    let attributes = attrs |> List.map _conv_key_value in
    default_histogram_data_point ~start_time_unix_nano ~time_unix_nano:now
      ~attributes ~exemplars ~bucket_counts ~explicit_bounds ~count ?sum ()

  (** Histogram metric made of the given data points. *)
  let histogram ~name ?description ?unit_ ?aggregation_temporality
      (l : histogram_data_point list) : t =
    let data =
      Histogram (default_histogram ~data_points:l ?aggregation_temporality ())
    in
    default_metric ~name ?description ?unit_ ~data ()

  (** Aggregate metrics into a {!Proto.Metrics.resource_metrics} *)
  let make_resource_metrics ?service_name ?attrs (l : t list) : resource_metrics
      =
    let lm =
      default_scope_metrics ~scope:(Some Globals.instrumentation_library)
        ~metrics:l ()
    in
    let attributes = Globals.mk_attributes ?service_name ?attrs () in
    let resource = Proto.Resource.default_resource ~attributes () in
    default_resource_metrics ~scope_metrics:[ lm ] ~resource:(Some resource) ()

  (** Emit some metrics to the collector (sync). This blocks until
      the backend has pushed the metrics into some internal queue, or
      discarded them.

      {b NOTE} be careful not to call this inside a Gc alarm, as it can
      cause deadlocks. *)
  let emit ?attrs (l : t list) : unit =
    let rm = make_resource_metrics ?attrs l in
    Collector.send_metrics [ rm ] ~ret:ignore
end
module Logs = struct
  open Logs_types

  (** A single log record, as modeled by the OTLP logs protocol. *)
  type t = log_record

  (* Re-exported severity levels: one "unspecified" plus four sub-levels
     for each of trace/debug/info/warn/error/fatal. *)
  type severity = Logs_types.severity_number =
    | Severity_number_unspecified
    | Severity_number_trace
    | Severity_number_trace2
    | Severity_number_trace3
    | Severity_number_trace4
    | Severity_number_debug
    | Severity_number_debug2
    | Severity_number_debug3
    | Severity_number_debug4
    | Severity_number_info
    | Severity_number_info2
    | Severity_number_info3
    | Severity_number_info4
    | Severity_number_warn
    | Severity_number_warn2
    | Severity_number_warn3
    | Severity_number_warn4
    | Severity_number_error
    | Severity_number_error2
    | Severity_number_error3
    | Severity_number_error4
    | Severity_number_fatal
    | Severity_number_fatal2
    | Severity_number_fatal3
    | Severity_number_fatal4

  let pp_severity = Logs_pp.pp_severity_number

  type flags = Logs_types.log_record_flags =
    | Log_record_flag_unspecified
    | Log_record_flag_trace_flags_mask

  let pp_flags = Logs_pp.pp_log_record_flags

  (** [make body] builds a log record carrying [body]. [time] defaults to
      [observed_time_unix_nano], which itself defaults to the current
      time; [trace_id]/[span_id] correlate the record with a trace. *)
  let make ?time ?(observed_time_unix_nano = Timestamp_ns.now_unix_ns ())
      ?severity ?log_level ?flags ?trace_id ?span_id (body : value) : t =
    let time_unix_nano =
      match time with
      | None -> observed_time_unix_nano
      | Some t -> t
    in
    let trace_id = Option.map Trace_id.to_bytes trace_id in
    let span_id = Option.map Span_id.to_bytes span_id in
    let body = _conv_value body in
    default_log_record ~time_unix_nano ~observed_time_unix_nano
      ?severity_number:severity ?severity_text:log_level ?flags ?trace_id
      ?span_id ~body ()

  (** [make_str body] is {!make} specialized to a string body. *)
  let make_str ?time ?observed_time_unix_nano ?severity ?log_level ?flags
      ?trace_id ?span_id (body : string) : t =
    make ?time ?observed_time_unix_nano ?severity ?log_level ?flags ?trace_id
      ?span_id (`String body)

  (** [make_strf fmt ...] is {!make_str} driven by a printf-style
      format string. *)
  let make_strf ?time ?observed_time_unix_nano ?severity ?log_level ?flags
      ?trace_id ?span_id fmt =
    Format.kasprintf
      (fun bod ->
        make_str ?time ?observed_time_unix_nano ?severity ?log_level ?flags
          ?trace_id ?span_id bod)
      fmt

  (** Emit logs.

      This instructs the collector to send the logs to some backend at
      a later date.

      {b NOTE} be careful not to call this inside a Gc alarm, as it can
      cause deadlocks. *)
  let emit ?service_name ?attrs (l : t list) : unit =
    let attributes = Globals.mk_attributes ?service_name ?attrs () in
    let resource = Proto.Resource.default_resource ~attributes () in
    let ll =
      default_scope_logs ~scope:(Some Globals.instrumentation_library)
        ~log_records:l ()
    in
    let rl =
      default_resource_logs ~resource:(Some resource) ~scope_logs:[ ll ] ()
    in
    Collector.send_logs [ rl ] ~ret:ignore
end
module Metrics_callbacks = struct
  open struct
    (* Registered metric producers, most recently added first. *)
    let callbacks : (unit -> Metrics.t list) list ref = ref []
  end

  (** [register f] records [f] so that, on every collector tick, it is
      invoked and the metrics it returns are emitted. The tick handler
      itself is installed once, upon the first registration. *)
  let register f : unit =
    (match !callbacks with
    | _ :: _ -> ()
    | [] ->
      (* First callback ever: hook the collector's tick signal. *)
      Collector.on_tick (fun () ->
          let batches = List.map (fun cb -> cb ()) !callbacks in
          Metrics.emit (List.flatten batches)));
    callbacks := f :: !callbacks
end
module Trace_context = struct
  (** Parsing and printing of the W3C trace-context [traceparent]
      HTTP header. *)
  module Traceparent = struct
    let name = "traceparent"
    (** the value of the traceparent header.

        The values are of the form:
        {[
          {version}-{trace_id}-{parent_id}-{flags}
        ]}

        For example:
        {[ 00-4bf92f3577b34da6a3ce929d0e0e4736-00f067aa0ba902b7-01 ]}

        [{flags}] are currently ignored.
    *)

    (** Parse a header value into a (trace id, parent span id) pair, or
        an [Error] carrying a human-readable reason. *)
    let of_value str : (Trace_id.t * Span_id.t, string) result =
      let ( let* ) = result_bind in
      (* [blit ~offset ~len ~or_] extracts [len] bytes of [str] at
         [offset]; yields the substring plus the advanced offset, or
         [Error or_] when [str] is too short. *)
      let blit ~offset ~len ~or_ =
        let buf = Bytes.create len in
        let* str =
          match Bytes.blit_string str offset buf 0 len with
          | () -> Ok (Bytes.unsafe_to_string buf)
          | exception Invalid_argument _ -> Error or_
        in
        Ok (str, offset + len)
      in
      (* [consume expected ~offset ~or_] checks that the literal
         [expected] occurs at [offset]; yields the offset past it. *)
      let consume expected ~offset ~or_ =
        let len = String.length expected in
        let* str, offset = blit ~offset ~len ~or_ in
        if str = expected then
          Ok offset
        else
          Error or_
      in
      let offset = 0 in
      (* Only header version 00 is supported. *)
      let* offset = consume "00" ~offset ~or_:"Expected version 00" in
      let* offset = consume "-" ~offset ~or_:"Expected delimiter" in
      let* trace_id, offset =
        blit ~offset ~len:32 ~or_:"Expected 32-digit trace-id"
      in
      let* trace_id =
        match Trace_id.of_hex trace_id with
        | trace_id -> Ok trace_id
        | exception Invalid_argument _ -> Error "Expected hex-encoded trace-id"
      in
      let* offset = consume "-" ~offset ~or_:"Expected delimiter" in
      let* parent_id, offset =
        blit ~offset ~len:16 ~or_:"Expected 16-digit parent-id"
      in
      let* parent_id =
        match Span_id.of_hex parent_id with
        | parent_id -> Ok parent_id
        | exception Invalid_argument _ -> Error "Expected hex-encoded parent-id"
      in
      let* offset = consume "-" ~offset ~or_:"Expected delimiter" in
      (* Flags must be present (two digits) but their value is ignored. *)
      let* _flags, _offset =
        blit ~offset ~len:2 ~or_:"Expected 2-digit flags"
      in
      Ok (trace_id, parent_id)

    (** Render a version-00 header value; flags are always printed
        as "00". *)
    let to_value ~(trace_id : Trace_id.t) ~(parent_id : Span_id.t) () : string =
      Printf.sprintf "00-%s-%s-00" (Trace_id.to_hex trace_id)
        (Span_id.to_hex parent_id)
  end
end
(** Export GC metrics.

    These metrics are emitted after each GC collection. *)
module GC_metrics : sig
  val basic_setup : unit -> unit
  (** Setup a hook that will emit GC statistics regularly *)

  val get_runtime_attributes : unit -> Span.key_value list

  val get_metrics : unit -> Metrics.t list
  (** Get a few metrics from the current state of the GC *)
end = struct
  (* Attribute values follow the OpenTelemetry semantic conventions for
     process runtimes. *)
  let runtime_attributes =
    lazy
      Conventions.Attributes.
        [
          Process.Runtime.name, `String "ocaml";
          Process.Runtime.version, `String Sys.ocaml_version;
        ]

  let get_runtime_attributes () = Lazy.force runtime_attributes

  let basic_setup () =
    (* emit metrics when GC is called *)
    let on_gc () =
      match Collector.get_backend () with
      | None -> ()
      | Some (module C) -> C.signal_emit_gc_metrics ()
    in
    ignore (Gc.create_alarm on_gc : Gc.alarm)

  (* GC counters are expressed in words; metrics are reported in bytes. *)
  let bytes_per_word = Sys.word_size / 8
  let word_to_bytes n = n * bytes_per_word
  let word_to_bytes_f n = n *. float bytes_per_word

  let get_metrics () : Metrics.t list =
    let gc = Gc.quick_stat () in
    let now = Timestamp_ns.now_unix_ns () in
    let open Metrics in
    let open Conventions.Metrics in
    [
      gauge ~name:Process.Runtime.Ocaml.GC.major_heap ~unit_:"B"
        [ int ~now (word_to_bytes gc.Gc.heap_words) ];
      sum ~name:Process.Runtime.Ocaml.GC.minor_allocated
        ~aggregation_temporality:Metrics.Aggregation_temporality_cumulative
        ~is_monotonic:true ~unit_:"B"
        [ float ~now (word_to_bytes_f gc.Gc.minor_words) ];
      sum ~name:Process.Runtime.Ocaml.GC.minor_collections
        ~aggregation_temporality:Metrics.Aggregation_temporality_cumulative
        ~is_monotonic:true
        [ int ~now gc.Gc.minor_collections ];
      sum ~name:Process.Runtime.Ocaml.GC.major_collections
        ~aggregation_temporality:Metrics.Aggregation_temporality_cumulative
        ~is_monotonic:true
        [ int ~now gc.Gc.major_collections ];
      sum ~name:Process.Runtime.Ocaml.GC.compactions
        ~aggregation_temporality:Metrics.Aggregation_temporality_cumulative
        ~is_monotonic:true
        [ int ~now gc.Gc.compactions ];
    ]
end
|
c4c35ac921d97b77faef6e635115760ebba28f33b2c0082bdfc0b5321a0a831e | aantron/markup.ml | markup.ml | This file is part of Markup.ml , released under the MIT license . See
LICENSE.md for details , or visit .
LICENSE.md for details, or visit . *)
(** Signature of the IO monad the interface is parameterized over:
    either {!Synchronous} (plain values), or the promise type of an
    asynchronous library. [of_cps]/[to_cps] convert between monadic
    computations and continuation-passing style. *)
module type IO =
sig
  type 'a t
  val return : 'a -> 'a t
  val of_cps : ((exn -> unit) -> ('a -> unit) -> unit) -> 'a t
  val to_cps : (unit -> 'a t) -> ((exn -> unit) -> ('a -> unit) -> unit)
end
module Synchronous : IO with type 'a t = 'a =
struct
  (* The identity "monad": a synchronous computation is just its value. *)
  type 'a t = 'a

  (* Raised when a CPS computation does not deliver a value immediately. *)
  exception Not_synchronous

  let return v = v

  (* Run a CPS computation, demanding that it complete synchronously:
     errors are re-raised, and an absent result becomes
     [Not_synchronous]. *)
  let of_cps f =
    let cell = ref None in
    f (fun exn -> raise exn) (fun v -> cell := Some v);
    match !cell with
    | Some v -> v
    | None -> raise Not_synchronous

  (* Expose a synchronous thunk under the CPS calling convention. *)
  let to_cps f throw k =
    match f () with
    | exception exn -> throw exn
    | v -> k v
end
(* Phantom parameters distinguishing single-pass (async) streams from
   replayable (sync) ones; both erase to [unit] at runtime. *)
type async = unit
type sync = unit

(* A stream is a continuation-based Kstream under the hood. *)
type ('data, 'sync) stream = 'data Kstream.t

let kstream s = s
let of_kstream s = s
let of_list = Kstream.of_list

type location = Common.location
let compare_locations = Common.compare_locations

module Error = Error

type name = Common.name

(* Signal payload records, re-exported from [Common]. *)
type xml_declaration = Common.xml_declaration =
  {version : string;
   encoding : string option;
   standalone : bool option}

type doctype = Common.doctype =
  {doctype_name : string option;
   public_identifier : string option;
   system_identifier : string option;
   raw_text : string option;
   force_quirks : bool}

type signal = Common.signal
let signal_to_string = Common.signal_to_string

(* A parser couples its signal stream with the location of the last
   signal read, which is updated as the stream is consumed. *)
type 's parser =
  {mutable location : location;
   mutable signals : (signal, 's) stream}

let signals parser = parser.signals
let location parser = parser.location

(* Wrap a located-signal stream: strip the locations from the payload,
   recording each one in [parser.location] as a side effect of
   reading. *)
let stream_to_parser s =
  let parser = {location = (1, 1); signals = Kstream.empty ()} in
  parser.signals <-
    s |> Kstream.map (fun (l, v) _ k -> parser.location <- l; k v);
  parser
(* Core parser/serializer constructors in continuation-passing style.
   The [Asynchronous] functor below wraps these for a given IO monad. *)
module Cps =
struct
  (* Build an XML parser over [source]. When [encoding] is not forced,
     it is detected from the first bytes of the stream. *)
  let parse_xml
      report ?encoding namespace entity context source =
    let with_encoding (encoding : Encoding.t) k =
      source
      |> encoding ~report
      |> Input.preprocess Common.is_valid_xml_char report
      |> Xml_tokenizer.tokenize report entity
      |> Xml_parser.parse context namespace report
      |> k
    in
    let constructor throw k =
      match encoding with
      | Some encoding -> with_encoding encoding k
      | None ->
        Detect.select_xml source throw (fun encoding ->
          with_encoding encoding k)
    in
    Kstream.construct constructor
    |> stream_to_parser

  (* Serialize XML signals into a byte stream. *)
  let write_xml report prefix signals =
    signals
    |> Xml_writer.write report prefix
    |> Utility.strings_to_bytes

  (* As [parse_xml], but using the HTML tokenizer and tree builder. *)
  let parse_html report ?encoding context source =
    let with_encoding (encoding : Encoding.t) k =
      source
      |> encoding ~report
      |> Input.preprocess Common.is_valid_html_char report
      |> Html_tokenizer.tokenize report
      |> Html_parser.parse context report
      |> k
    in
    let constructor throw k =
      match encoding with
      | Some encoding -> with_encoding encoding k
      | None ->
        Detect.select_html source throw (fun encoding ->
          with_encoding encoding k)
    in
    Kstream.construct constructor
    |> stream_to_parser

  (* Serialize HTML signals into a byte stream. *)
  let write_html ?escape_attribute ?escape_text signals =
    signals
    |> Html_writer.write ?escape_attribute ?escape_text
    |> Utility.strings_to_bytes
end
(* Byte-stream sources backed by [Stream_io]. *)
let string = Stream_io.string
let buffer = Stream_io.buffer
let channel = Stream_io.channel
let file = Stream_io.file

(* Synchronous sinks: force the CPS writers to completion immediately. *)
let to_channel c bytes = Stream_io.to_channel c bytes |> Synchronous.of_cps
let to_file f bytes = Stream_io.to_file f bytes |> Synchronous.of_cps

(* Apply the input preprocessor with all characters accepted and
   errors ignored. *)
let preprocess_input_stream source =
  Input.preprocess (fun _ -> true) Error.ignore_errors source

include Utility
(* Well-known namespace URIs, re-exported from [Common]. *)
module Ns =
struct
  let html = Common.html_ns
  let svg = Common.svg_ns
  let mathml = Common.mathml_ns
  let xml = Common.xml_ns
  let xmlns = Common.xmlns_ns
  let xlink = Common.xlink_ns
end
(** Interface of the main module, parameterized over an IO monad: every
    potentially-blocking operation returns an ['a io]. *)
module type ASYNCHRONOUS =
sig
  type 'a io

  module Encoding :
  sig
    type t = Encoding.t
    val decode :
      ?report:(location -> Error.t -> unit io) -> t ->
      (char, _) stream -> (int, async) stream
  end

  (* Parsers and serializers: char streams in, signal streams out, and
     vice versa. *)
  val parse_xml :
    ?report:(location -> Error.t -> unit io) ->
    ?encoding:Encoding.t ->
    ?namespace:(string -> string option) ->
    ?entity:(string -> string option) ->
    ?context:[< `Document | `Fragment ] ->
    (char, _) stream -> async parser
  val write_xml :
    ?report:((signal * int) -> Error.t -> unit io) ->
    ?prefix:(string -> string option) ->
    ([< signal ], _) stream -> (char, async) stream
  val parse_html :
    ?report:(location -> Error.t -> unit io) ->
    ?encoding:Encoding.t ->
    ?context:[< `Document | `Fragment of string ] ->
    (char, _) stream -> async parser
  val write_html :
    ?escape_attribute:(string -> string) ->
    ?escape_text:(string -> string) ->
    ([< signal ], _) stream -> (char, async) stream

  (* Stream construction and consumption. *)
  val fn : (unit -> char option io) -> (char, async) stream
  val to_string : (char, _) stream -> string io
  val to_buffer : (char, _) stream -> Buffer.t io
  val stream : (unit -> 'a option io) -> ('a, async) stream
  val next : ('a, _) stream -> 'a option io
  val peek : ('a, _) stream -> 'a option io

  (* Stream combinators with monadic callbacks. *)
  val transform :
    ('a -> 'b -> ('c list * 'a option) io) -> 'a -> ('b, _) stream ->
    ('c, async) stream
  val fold : ('a -> 'b -> 'a io) -> 'a -> ('b, _) stream -> 'a io
  val map : ('a -> 'b io) -> ('a, _) stream -> ('b, async) stream
  val filter : ('a -> bool io) -> ('a, _) stream -> ('a, async) stream
  val filter_map : ('a -> 'b option io) -> ('a, _) stream -> ('b, async) stream
  val iter : ('a -> unit io) -> ('a, _) stream -> unit io
  val drain : ('a, _) stream -> unit io
  val to_list : ('a, _) stream -> 'a list io
  val load : ('a, _) stream -> ('a, sync) stream io
  val tree :
    ?text:(string list -> 'a) ->
    ?element:(name -> (name * string) list -> 'a list -> 'a) ->
    ?comment:(string -> 'a) ->
    ?pi:(string -> string -> 'a) ->
    ?xml:(xml_declaration -> 'a) ->
    ?doctype:(doctype -> 'a) ->
    ([< signal ], _) stream -> 'a option io
end
(* Implement ASYNCHRONOUS for an arbitrary IO monad by converting each
   monadic callback to CPS (and each CPS result back) at the boundary. *)
module Asynchronous (IO : IO) =
struct
  (* Turn a monadic report callback into the CPS form the core expects. *)
  let wrap_report report = fun l e -> IO.to_cps (fun () -> report l e)

  module Encoding =
  struct
    include Encoding

    let decode ?(report = fun _ _ -> IO.return ()) (f : Encoding.t) s =
      f ~report:(wrap_report report) s
  end

  let parse_xml
      ?(report = fun _ _ -> IO.return ())
      ?encoding
      ?(namespace = fun _ -> None)
      ?(entity = fun _ -> None)
      ?context
      source =
    Cps.parse_xml
      (wrap_report report) ?encoding namespace entity context source

  let write_xml
      ?(report = fun _ _ -> IO.return ())
      ?(prefix = fun _ -> None)
      signals =
    Cps.write_xml (wrap_report report) prefix signals

  let parse_html
      ?(report = fun _ _ -> IO.return ())
      ?encoding
      ?context
      source =
    Cps.parse_html (wrap_report report) ?encoding context source

  let write_html ?escape_attribute ?escape_text signals =
    Cps.write_html ?escape_attribute ?escape_text signals

  let to_string bytes = Stream_io.to_string bytes |> IO.of_cps
  let to_buffer bytes = Stream_io.to_buffer bytes |> IO.of_cps

  (* Build a stream from a monadic pull function; [None] ends the
     stream. *)
  let stream f =
    let f = IO.to_cps f in
    (fun throw e k ->
      f throw (function
        | None -> e ()
        | Some v -> k v))
    |> Kstream.make

  let fn = stream

  let next s = Kstream.next_option s |> IO.of_cps
  let peek s = Kstream.peek_option s |> IO.of_cps

  (* Without Flambda, thunks are repeatedly created and passed on IO.to_cps,
     resulting in a performance penalty. Flambda seems to optimize this away,
     however. *)
  let transform f v s =
    Kstream.transform (fun v s -> IO.to_cps (fun () -> f v s)) v s
  let fold f v s =
    Kstream.fold (fun v v' -> IO.to_cps (fun () -> f v v')) v s |> IO.of_cps
  let map f s = Kstream.map (fun v -> IO.to_cps (fun () -> f v)) s
  let filter f s = Kstream.filter (fun v -> IO.to_cps (fun () -> f v)) s
  let filter_map f s = Kstream.filter_map (fun v -> IO.to_cps (fun () -> f v)) s
  let iter f s =
    Kstream.iter (fun v -> IO.to_cps (fun () -> f v)) s |> IO.of_cps
  let drain s = iter (fun _ -> IO.return ()) s
  let to_list s = Kstream.to_list s |> IO.of_cps

  (* Force the whole stream into memory, yielding a replayable (sync)
     stream. *)
  let load s =
    (fun throw k -> Kstream.to_list s throw (fun l -> k (Kstream.of_list l)))
    |> IO.of_cps

  let tree ?text ?element ?comment ?pi ?xml ?doctype s =
    Utility.tree ?text ?element ?comment ?pi ?xml ?doctype s |> IO.of_cps
end
(* The default, synchronous instantiation of the interface. *)
include Asynchronous (Synchronous)
| null | https://raw.githubusercontent.com/aantron/markup.ml/9afbf57ac7ef10a03bece6cae677a151fa164666/src/markup.ml | ocaml | This file is part of Markup.ml , released under the MIT license . See
LICENSE.md for details , or visit .
LICENSE.md for details, or visit . *)
module type IO =
sig
type 'a t
val return : 'a -> 'a t
val of_cps : ((exn -> unit) -> ('a -> unit) -> unit) -> 'a t
val to_cps : (unit -> 'a t) -> ((exn -> unit) -> ('a -> unit) -> unit)
end
module Synchronous : IO with type 'a t = 'a =
struct
type 'a t = 'a
exception Not_synchronous
let return x = x
let of_cps f =
let result = ref None in
f raise (fun v -> result := Some v);
match !result with
| None -> raise Not_synchronous
| Some v -> v
let to_cps f =
fun throw k ->
match f () with
| v -> k v
| exception exn -> throw exn
end
type async = unit
type sync = unit
type ('data, 'sync) stream = 'data Kstream.t
let kstream s = s
let of_kstream s = s
let of_list = Kstream.of_list
type location = Common.location
let compare_locations = Common.compare_locations
module Error = Error
type name = Common.name
type xml_declaration = Common.xml_declaration =
{version : string;
encoding : string option;
standalone : bool option}
type doctype = Common.doctype =
{doctype_name : string option;
public_identifier : string option;
system_identifier : string option;
raw_text : string option;
force_quirks : bool}
type signal = Common.signal
let signal_to_string = Common.signal_to_string
type 's parser =
{mutable location : location;
mutable signals : (signal, 's) stream}
let signals parser = parser.signals
let location parser = parser.location
let stream_to_parser s =
let parser = {location = (1, 1); signals = Kstream.empty ()} in
parser.signals <-
s |> Kstream.map (fun (l, v) _ k -> parser.location <- l; k v);
parser
module Cps =
struct
let parse_xml
report ?encoding namespace entity context source =
let with_encoding (encoding : Encoding.t) k =
source
|> encoding ~report
|> Input.preprocess Common.is_valid_xml_char report
|> Xml_tokenizer.tokenize report entity
|> Xml_parser.parse context namespace report
|> k
in
let constructor throw k =
match encoding with
| Some encoding -> with_encoding encoding k
| None ->
Detect.select_xml source throw (fun encoding ->
with_encoding encoding k)
in
Kstream.construct constructor
|> stream_to_parser
let write_xml report prefix signals =
signals
|> Xml_writer.write report prefix
|> Utility.strings_to_bytes
let parse_html report ?encoding context source =
let with_encoding (encoding : Encoding.t) k =
source
|> encoding ~report
|> Input.preprocess Common.is_valid_html_char report
|> Html_tokenizer.tokenize report
|> Html_parser.parse context report
|> k
in
let constructor throw k =
match encoding with
| Some encoding -> with_encoding encoding k
| None ->
Detect.select_html source throw (fun encoding ->
with_encoding encoding k)
in
Kstream.construct constructor
|> stream_to_parser
let write_html ?escape_attribute ?escape_text signals =
signals
|> Html_writer.write ?escape_attribute ?escape_text
|> Utility.strings_to_bytes
end
let string = Stream_io.string
let buffer = Stream_io.buffer
let channel = Stream_io.channel
let file = Stream_io.file
let to_channel c bytes = Stream_io.to_channel c bytes |> Synchronous.of_cps
let to_file f bytes = Stream_io.to_file f bytes |> Synchronous.of_cps
let preprocess_input_stream source =
Input.preprocess (fun _ -> true) Error.ignore_errors source
include Utility
module Ns =
struct
let html = Common.html_ns
let svg = Common.svg_ns
let mathml = Common.mathml_ns
let xml = Common.xml_ns
let xmlns = Common.xmlns_ns
let xlink = Common.xlink_ns
end
module type ASYNCHRONOUS =
sig
type 'a io
module Encoding :
sig
type t = Encoding.t
val decode :
?report:(location -> Error.t -> unit io) -> t ->
(char, _) stream -> (int, async) stream
end
val parse_xml :
?report:(location -> Error.t -> unit io) ->
?encoding:Encoding.t ->
?namespace:(string -> string option) ->
?entity:(string -> string option) ->
?context:[< `Document | `Fragment ] ->
(char, _) stream -> async parser
val write_xml :
?report:((signal * int) -> Error.t -> unit io) ->
?prefix:(string -> string option) ->
([< signal ], _) stream -> (char, async) stream
val parse_html :
?report:(location -> Error.t -> unit io) ->
?encoding:Encoding.t ->
?context:[< `Document | `Fragment of string ] ->
(char, _) stream -> async parser
val write_html :
?escape_attribute:(string -> string) ->
?escape_text:(string -> string) ->
([< signal ], _) stream -> (char, async) stream
val fn : (unit -> char option io) -> (char, async) stream
val to_string : (char, _) stream -> string io
val to_buffer : (char, _) stream -> Buffer.t io
val stream : (unit -> 'a option io) -> ('a, async) stream
val next : ('a, _) stream -> 'a option io
val peek : ('a, _) stream -> 'a option io
val transform :
('a -> 'b -> ('c list * 'a option) io) -> 'a -> ('b, _) stream ->
('c, async) stream
val fold : ('a -> 'b -> 'a io) -> 'a -> ('b, _) stream -> 'a io
val map : ('a -> 'b io) -> ('a, _) stream -> ('b, async) stream
val filter : ('a -> bool io) -> ('a, _) stream -> ('a, async) stream
val filter_map : ('a -> 'b option io) -> ('a, _) stream -> ('b, async) stream
val iter : ('a -> unit io) -> ('a, _) stream -> unit io
val drain : ('a, _) stream -> unit io
val to_list : ('a, _) stream -> 'a list io
val load : ('a, _) stream -> ('a, sync) stream io
val tree :
?text:(string list -> 'a) ->
?element:(name -> (name * string) list -> 'a list -> 'a) ->
?comment:(string -> 'a) ->
?pi:(string -> string -> 'a) ->
?xml:(xml_declaration -> 'a) ->
?doctype:(doctype -> 'a) ->
([< signal ], _) stream -> 'a option io
end
module Asynchronous (IO : IO) =
struct
let wrap_report report = fun l e -> IO.to_cps (fun () -> report l e)
module Encoding =
struct
include Encoding
let decode ?(report = fun _ _ -> IO.return ()) (f : Encoding.t) s =
f ~report:(wrap_report report) s
end
let parse_xml
?(report = fun _ _ -> IO.return ())
?encoding
?(namespace = fun _ -> None)
?(entity = fun _ -> None)
?context
source =
Cps.parse_xml
(wrap_report report) ?encoding namespace entity context source
let write_xml
?(report = fun _ _ -> IO.return ())
?(prefix = fun _ -> None)
signals =
Cps.write_xml (wrap_report report) prefix signals
let parse_html
?(report = fun _ _ -> IO.return ())
?encoding
?context
source =
Cps.parse_html (wrap_report report) ?encoding context source
let write_html ?escape_attribute ?escape_text signals =
Cps.write_html ?escape_attribute ?escape_text signals
let to_string bytes = Stream_io.to_string bytes |> IO.of_cps
let to_buffer bytes = Stream_io.to_buffer bytes |> IO.of_cps
let stream f =
let f = IO.to_cps f in
(fun throw e k ->
f throw (function
| None -> e ()
| Some v -> k v))
|> Kstream.make
let fn = stream
let next s = Kstream.next_option s |> IO.of_cps
let peek s = Kstream.peek_option s |> IO.of_cps
Without Flambda , thunks are repeatedly created and passed on IO.to_cps ,
resulting in a performance penalty . Flambda seems to optimize this away ,
however .
resulting in a performance penalty. Flambda seems to optimize this away,
however. *)
let transform f v s =
Kstream.transform (fun v s -> IO.to_cps (fun () -> f v s)) v s
let fold f v s =
Kstream.fold (fun v v' -> IO.to_cps (fun () -> f v v')) v s |> IO.of_cps
let map f s = Kstream.map (fun v -> IO.to_cps (fun () -> f v)) s
let filter f s = Kstream.filter (fun v -> IO.to_cps (fun () -> f v)) s
let filter_map f s = Kstream.filter_map (fun v -> IO.to_cps (fun () -> f v)) s
let iter f s =
Kstream.iter (fun v -> IO.to_cps (fun () -> f v)) s |> IO.of_cps
let drain s = iter (fun _ -> IO.return ()) s
let to_list s = Kstream.to_list s |> IO.of_cps
let load s =
(fun throw k -> Kstream.to_list s throw (fun l -> k (Kstream.of_list l)))
|> IO.of_cps
let tree ?text ?element ?comment ?pi ?xml ?doctype s =
Utility.tree ?text ?element ?comment ?pi ?xml ?doctype s |> IO.of_cps
end
include Asynchronous (Synchronous)
|
|
40df4e1fc432aa499ac464be616d19002bc0f77e334f2916b16c9754bf456937 | nkpart/kit | Project.hs | # LANGUAGE TupleSections #
module Kit.Project (
makeKitProject,
KitProject(..),
kitProjectActions
)
where
import Kit.AbsolutePath (filePath)
import Kit.Spec
import Kit.Contents
import Kit.Util
import Kit.Util.FSAction
import Kit.Xcode.Builder
import Kit.Xcode.XCConfig
import Data.Maybe
import qualified Data.Map as M
import Kit.FlaggedFile
-- Paths
kitDir, projectDir, prefixFile, projectFile, xcodeConfigFile, depsConfigFile, kitUpdateMakeFilePath, kitResourceDir :: FilePath
kitDir = "." </> "Kits"
projectDir = "KitDeps.xcodeproj"
prefixFile = "Prefix.pch"
projectFile = projectDir </> "project.pbxproj"
xcodeConfigFile = "Kit.xcconfig"
depsConfigFile = "DepsOnly.xcconfig"
kitUpdateMakeFilePath = "Makefile"
kitResourceDir = "Resources"
kitFrameworksDir = "Frameworks"
kitUpdateMakeFile :: String
kitUpdateMakeFile = "kit: Kit.xcconfig\n" ++
"Kit.xcconfig: ../KitSpec\n" ++
"\tcd .. && kit update && exit 1\n"
prefixDefault :: String
prefixDefault = "#ifdef __OBJC__\n" ++
" #import <Foundation/Foundation.h>\n" ++
" #if (TARGET_OS_MAC && !TARGET_OS_IPHONE)\n" ++
" #import <Cocoa/Cocoa.h>\n" ++
" #else\n" ++
" #import <UIKit/UIKit.h>\n" ++
" #endif\n" ++
"#endif\n"
data KitProject = KitProject {
kitProjectFile :: String,
kitProjectPrefix :: String,
kitProjectConfig :: String,
kitProjectDepsConfig :: String,
kitProjectResourceDirs :: [(FilePath, FilePath)]
} deriving (Eq, Show)
kitProjectActions :: KitProject -> [FSAction]
kitProjectActions kp = templatedFiles ++ resourceLinks where
resourceLinks = map (within kitDir . uncurry Symlink) $ kitProjectResourceDirs kp
templatedFiles = map (within kitDir) [
FileCreate projectFile (kitProjectFile kp),
FileCreate prefixFile (kitProjectPrefix kp),
FileCreate xcodeConfigFile (kitProjectConfig kp),
FileCreate kitUpdateMakeFilePath kitUpdateMakeFile,
FileCreate depsConfigFile (kitProjectDepsConfig kp)
]
resourceLink :: KitContents -> Maybe (FilePath, FilePath)
resourceLink contents = fmap (,linkName) $ contentResourceDir contents where
linkName = kitResourceDir </> packageName contents
frameworkLink :: KitContents -> [(FilePath, FilePath)]
frameworkLink contents = fmap f $ contentFrameworks contents where
f absFilePath = let fp = filePath absFilePath in (fp,kitFrameworksDir </> takeFileName fp)
makeKitProject :: [KitContents] -> Maybe String -> KitProject
makeKitProject kitsContents depsOnlyConfig =
let pf = createProjectFile kitsContents
header = createHeader kitsContents
config = createConfig kitsContents
TODO : Make this specify an data type
depsConfig = "#include \"" ++ xcodeConfigFile ++ "\"\n\nSKIP_INSTALL=YES\nSDKROOT=iphoneos\n\n" ++ fromMaybe "" depsOnlyConfig
frameworks = concatMap frameworkLink kitsContents
resources = mapMaybe resourceLink kitsContents
in KitProject pf header config depsConfig (frameworks ++ resources)
where createProjectFile cs = let
toSourceGroup kc = SourceGroup (packageFileName kc) (contentHeaders kc) (contentSources kc) (contentLibs kc) (map (flaggedFile "") $ contentFrameworks kc)
in renderXcodeProject (map toSourceGroup cs) "libKitDeps.a"
createHeader cs = let
headers = mapMaybe namedPrefix cs
combinedHeader = stringJoin "\n" headers
in prefixDefault ++ combinedHeader ++ "\n"
createConfig cs = let
configs = mapMaybe contentConfig cs
sourceDirs = map (\kc -> contentBaseDir kc </> specSourceDirectory (contentSpec kc)) cs
parentConfig = XCC "Base" (M.fromList [
("HEADER_SEARCH_PATHS", "$(HEADER_SEARCH_PATHS) " ++ stringJoin " " sourceDirs),
("GCC_PRECOMPILE_PREFIX_HEADER", "YES"),
("GCC_PREFIX_HEADER","$(SRCROOT)/Prefix.pch")
]) []
combinedConfig = multiConfig "KitConfig" (parentConfig:configs)
in configToString combinedConfig ++ "\n"
| null | https://raw.githubusercontent.com/nkpart/kit/ed217ddbc90688350e52156503cca092c9bf8300/Kit/Project.hs | haskell | Paths | # LANGUAGE TupleSections #
module Kit.Project (
makeKitProject,
KitProject(..),
kitProjectActions
)
where
import Kit.AbsolutePath (filePath)
import Kit.Spec
import Kit.Contents
import Kit.Util
import Kit.Util.FSAction
import Kit.Xcode.Builder
import Kit.Xcode.XCConfig
import Data.Maybe
import qualified Data.Map as M
import Kit.FlaggedFile
kitDir, projectDir, prefixFile, projectFile, xcodeConfigFile, depsConfigFile, kitUpdateMakeFilePath, kitResourceDir :: FilePath
kitDir = "." </> "Kits"
projectDir = "KitDeps.xcodeproj"
prefixFile = "Prefix.pch"
projectFile = projectDir </> "project.pbxproj"
xcodeConfigFile = "Kit.xcconfig"
depsConfigFile = "DepsOnly.xcconfig"
kitUpdateMakeFilePath = "Makefile"
kitResourceDir = "Resources"
kitFrameworksDir = "Frameworks"
kitUpdateMakeFile :: String
kitUpdateMakeFile = "kit: Kit.xcconfig\n" ++
"Kit.xcconfig: ../KitSpec\n" ++
"\tcd .. && kit update && exit 1\n"
prefixDefault :: String
prefixDefault = "#ifdef __OBJC__\n" ++
" #import <Foundation/Foundation.h>\n" ++
" #if (TARGET_OS_MAC && !TARGET_OS_IPHONE)\n" ++
" #import <Cocoa/Cocoa.h>\n" ++
" #else\n" ++
" #import <UIKit/UIKit.h>\n" ++
" #endif\n" ++
"#endif\n"
data KitProject = KitProject {
kitProjectFile :: String,
kitProjectPrefix :: String,
kitProjectConfig :: String,
kitProjectDepsConfig :: String,
kitProjectResourceDirs :: [(FilePath, FilePath)]
} deriving (Eq, Show)
kitProjectActions :: KitProject -> [FSAction]
kitProjectActions kp = templatedFiles ++ resourceLinks where
resourceLinks = map (within kitDir . uncurry Symlink) $ kitProjectResourceDirs kp
templatedFiles = map (within kitDir) [
FileCreate projectFile (kitProjectFile kp),
FileCreate prefixFile (kitProjectPrefix kp),
FileCreate xcodeConfigFile (kitProjectConfig kp),
FileCreate kitUpdateMakeFilePath kitUpdateMakeFile,
FileCreate depsConfigFile (kitProjectDepsConfig kp)
]
resourceLink :: KitContents -> Maybe (FilePath, FilePath)
resourceLink contents = fmap (,linkName) $ contentResourceDir contents where
linkName = kitResourceDir </> packageName contents
frameworkLink :: KitContents -> [(FilePath, FilePath)]
frameworkLink contents = fmap f $ contentFrameworks contents where
f absFilePath = let fp = filePath absFilePath in (fp,kitFrameworksDir </> takeFileName fp)
makeKitProject :: [KitContents] -> Maybe String -> KitProject
makeKitProject kitsContents depsOnlyConfig =
let pf = createProjectFile kitsContents
header = createHeader kitsContents
config = createConfig kitsContents
TODO : Make this specify an data type
depsConfig = "#include \"" ++ xcodeConfigFile ++ "\"\n\nSKIP_INSTALL=YES\nSDKROOT=iphoneos\n\n" ++ fromMaybe "" depsOnlyConfig
frameworks = concatMap frameworkLink kitsContents
resources = mapMaybe resourceLink kitsContents
in KitProject pf header config depsConfig (frameworks ++ resources)
where createProjectFile cs = let
toSourceGroup kc = SourceGroup (packageFileName kc) (contentHeaders kc) (contentSources kc) (contentLibs kc) (map (flaggedFile "") $ contentFrameworks kc)
in renderXcodeProject (map toSourceGroup cs) "libKitDeps.a"
createHeader cs = let
headers = mapMaybe namedPrefix cs
combinedHeader = stringJoin "\n" headers
in prefixDefault ++ combinedHeader ++ "\n"
createConfig cs = let
configs = mapMaybe contentConfig cs
sourceDirs = map (\kc -> contentBaseDir kc </> specSourceDirectory (contentSpec kc)) cs
parentConfig = XCC "Base" (M.fromList [
("HEADER_SEARCH_PATHS", "$(HEADER_SEARCH_PATHS) " ++ stringJoin " " sourceDirs),
("GCC_PRECOMPILE_PREFIX_HEADER", "YES"),
("GCC_PREFIX_HEADER","$(SRCROOT)/Prefix.pch")
]) []
combinedConfig = multiConfig "KitConfig" (parentConfig:configs)
in configToString combinedConfig ++ "\n"
|
f47c6a16cf65ecfaf72211dac3d7aabba32b5a2120d9d850afde596166fce8fb | BranchTaken/Hemlock | test_map.ml | open! Basis.Rudiments
open! Basis
open Option
(** Exercise [Option.map] on a present and an absent value, printing
    each application and its result to stdout. *)
let test () =
  let print_case o =
    File.Fmt.stdout
    |> Fmt.fmt "map ("
    |> (pp Uns.pp) o
    |> Fmt.fmt ") ~f:(fun u -> Uns.to_string u) -> "
    |> (pp String.pp) (map o ~f:(fun u -> Uns.to_string u))
    |> Fmt.fmt "\n"
    |> ignore
  in
  List.iter [Some 42L; None] ~f:print_case
let _ = test ()
| null | https://raw.githubusercontent.com/BranchTaken/Hemlock/25a3fce449caa06ad46db351efcfb428f423184d/bootstrap/test/basis/option/test_map.ml | ocaml | open! Basis.Rudiments
open! Basis
open Option
let test () =
List.iter [Some 42L; None] ~f:(fun o ->
File.Fmt.stdout
|> Fmt.fmt "map ("
|> (pp Uns.pp) o
|> Fmt.fmt ") ~f:(fun u -> Uns.to_string u) -> "
|> (pp String.pp) (map o ~f:(fun u -> Uns.to_string u))
|> Fmt.fmt "\n"
|> ignore
)
let _ = test ()
|
|
dbca6250be3f8f7e809443445f2005db6c2f35b66ad9c7fd02627381324a359b | donaldsonjw/bigloo | xml.scm | ;*=====================================================================*/
* serrano / prgm / project / bigloo / api / web / src / Llib / xml.scm * /
;* ------------------------------------------------------------- */
* Author : * /
* Creation : Fri Mar 11 16:23:53 2005 * /
* Last change : Mon Aug 10 16:53:05 2015 ( serrano ) * /
* Copyright : 2005 - 15 * /
;* ------------------------------------------------------------- */
;* XML parsing */
;*=====================================================================*/
;*---------------------------------------------------------------------*/
;* The module */
;*---------------------------------------------------------------------*/
;; Bigloo module declaration for the XML parsing library.
(module __web_xml
   ;; Bigloo compilation option (see the Bigloo manual, *dlopen-init-gc*);
   ;; presumably needed because this library may be loaded dynamically.
   (option (set! *dlopen-init-gc* #t))
   ;; Public entry points: the parser, entity encoders/decoders, the
   ;; convenience reader and the metadata extractor.
   (export (xml-parse::pair-nil port::input-port
	       #!key
	       (content-length 0)
	       (procedure list)
	       (specials '())
	       (strict #t)
	       (encoding 'UTF-8))
	   (xml-string-decode::bstring ::bstring)
	   (xml-string-decode!::bstring ::bstring)
	   (xml-string-encode::bstring ::bstring)
	   (read-xml #!optional (port::input-port (current-input-port)))
	   (xml-metadata xml-tree::pair-nil)))
;*---------------------------------------------------------------------*/
;* xml-parse ... */
;*---------------------------------------------------------------------*/
;; Parse the XML document available from PORT and return the list of
;; parsed top-level nodes.  PROCEDURE builds one node from a tag, an
;; attribute alist and a children list; SPECIALS maps tag symbols to
;; special content handlers (see collect-up-to); STRICT chooses between
;; signaling parse errors and best-effort recovery; ENCODING is the
;; target encoding parsed strings are converted to.
(define (xml-parse::pair-nil port::input-port
	   #!key
	   (content-length 0)
	   (procedure list)
	   (specials '())
	   (strict #t)
	   (encoding 'UTF-8))
   ;; narrow an elong length to a fixnum before arithmetic
   (when (elong? content-length)
      (set! content-length (elong->fixnum content-length)))
   ;; a positive length installs a fill barrier so reads never go past
   ;; the declared body size
   (when (and (fixnum? content-length) (>fx content-length 0))
      (input-port-fill-barrier-set! port content-length))
   ;; turn the relative length into an absolute end position
   (when (>fx content-length 0)
      (set! content-length (+fx content-length (input-port-position port))))
   (let loop ((decoder (lambda (x) x)))
      (let ((obj (read/rp xml-grammar port procedure procedure specials strict decoder encoding)))
	 ;; drop the fill barrier once a token has been read
	 (when (and (fixnum? content-length) (>fx content-length 0))
	    (input-port-fill-barrier-set! port -1))
	 (cond
	    ((eof-object? obj)
	     '())
	    ((and (>fx content-length 0)
		  (>=fx (input-port-position port) content-length))
	     ;; declared content fully consumed: OBJ is the last node
	     (list obj))
	    ((and (pair? obj) (eq? 'xml-decl (car obj)))
	     ;; an <?xml ...?> declaration may switch the text decoder
	     ;; for the rest of the document
	     (let ((enc (assq 'encoding (cdr obj))))
		(if enc
		    (cons obj (loop (get-decoder (cdr enc) encoding)))
		    (cons obj (loop decoder)))))
	    (else
	     (cons obj (loop decoder)))))))
;*---------------------------------------------------------------------*/
;* xml-parse-error ... */
;*---------------------------------------------------------------------*/
(define (xml-parse-error msg obj name pos)
   ;; Build and raise an &io-parse-error describing MSG/OBJ, located at
   ;; position POS inside the input named NAME.
   (let ((err (instantiate::&io-parse-error
		 (proc 'xml-parse)
		 (msg msg)
		 (obj obj)
		 (fname name)
		 (location pos))))
      (raise err)))
;*---------------------------------------------------------------------*/
;* error-line ... */
;*---------------------------------------------------------------------*/
(define (error-line c port)
   ;; Produce a one-line error context: the offending character C in
   ;; braces, followed by the rest of the current input line (empty at
   ;; end of file, where read-line yields the eof object).
   (let* ((rest (read-line port))
	  (tail (if (string? rest) rest "")))
      (string-append "{" (string c) "}" tail)))
;*---------------------------------------------------------------------*/
;* special ... */
;*---------------------------------------------------------------------*/
(define-struct special tag attributes body owner)
;*---------------------------------------------------------------------*/
;* collect-up-to ... */
;*---------------------------------------------------------------------*/
;; Accumulate the children of the element TAG (whose ATTRIBUTES have
;; already been read) until its closing tag, then build the node with
;; MAKE.  IGNORE reads the next item from PORT; SPECIALS may override
;; how TAG's content is collected (no content, a custom reader
;; procedure, or a list of tags that implicitly close it).
(define (collect-up-to ignore tag attributes port make specials strict decoder encoding)
   (define (collect ignore tags)
      (let ((name (input-port-name port))
	    (po (input-port-position port)))
	 (let loop ((acc '())
		    (item (ignore)))
	    (cond
	       ((symbol? item)
		;; a symbol is a closing tag produced by the grammar
		(cond
		   ((eq? item tag)
		    (make tag attributes (reverse! acc)))
		   (strict
		    (xml-parse-error "Illegal closing tag"
		       (format "`~a' expected, `~a' provided"
			  tag item)
		       name po))
		   (else
		    (make tag attributes (reverse! acc)))))
	       ((special? item)
		;; a nested special element: keep collecting when its
		;; tag belongs to TAGS, otherwise TAG is implicitly
		;; closed and both nodes are returned
		(let ((nitem (make (special-tag item)
			        (special-attributes item)
			        (special-body item))))
		   (if (memq (special-tag item) tags)
		       (loop acc nitem)
		       (begin
			  (list (make tag attributes (reverse! acc)) nitem)))))
	       ((eof-object? item)
		(if strict
		    (xml-parse-error
		       (format "Premature end of line, expecting tag `~a'"
			  tag)
		       item name po)
		    (make tag attributes (reverse! acc))))
	       (else
		;; regular child node: record it with its source location
		(let ((po (input-port-last-token-position port)))
		   (loop (econs item acc (list 'at name po)) (ignore))))))))
   (let ((spec (assq tag specials)))
      (cond
	 ((not spec)
	  (collect ignore '()))
	 ((null? (cdr spec))
	  ;; declared empty element: no content to read
	  (make tag attributes '()))
	 ((procedure? (cdr spec))
	  ;; custom reader consumes TAG's content directly from the port
	  (make tag attributes ((cdr spec) port)))
	 ((pair? (cdr spec))
	  ;; a list of sibling tags that implicitly close TAG; nested
	  ;; reads wrap nodes in SPECIAL structs so COLLECT can detect them
	  (let ((ignore (lambda ()
			   (read/rp xml-grammar port
			      (lambda (t a b) (special t a b tag))
			      make
			      specials strict decoder encoding))))
	     (collect ignore (cdr spec))))
	 (else
	  (error "xml-parse" "Illegal special handler" spec)))))
;*---------------------------------------------------------------------*/
;* attribute-value-grammar ... */
;*---------------------------------------------------------------------*/
;; Lexer for the value part of an attribute (the text after "=").
;; STRICT selects between raising an io-parse-error and best-effort
;; acceptance; TAG is only used in error messages.
(define attribute-value-grammar
   (regular-grammar (strict tag)
      ;; skip blanks between "=" and the value
      ((+ (in " \t\n\r"))
       (ignore))
      ;; double- and single-quoted values (surrounding quotes stripped)
      ((: #\" (* (or (out #\\ #\") (: #\\ all))) #\")
       (the-substring 1 (-fx (the-length) 1)))
      ((: #\' (* (or (out #\\ #\') (: #\\ all))) #\')
       (the-substring 1 (-fx (the-length) 1)))
      ;; unquoted numeric values (tolerated in non-strict mode only)
      ((: (+ digit) (? (or "%" "px" "cm" "em" "mm" "inch")))
       (if strict
	   (xml-parse-error (format "Illegal `~a' attribute value" tag)
	      (the-string)
	      (input-port-name (the-port))
	      (input-port-position (the-port)))
	   (the-string)))
      ;; other unquoted values (tolerated in non-strict mode only)
      ((+ (out " \t\n\r<>(){}[]@!\"'"))
       (if strict
	   (xml-parse-error (format "Illegal `~a' attribute character" tag)
	      (the-string)
	      (input-port-name (the-port))
	      (input-port-position (the-port)))
	   (the-string)))
      (else
       (let ((c (the-failure)))
	  (if (not (eof-object? c))
	      (if (or strict
		      (not (or (char=? c #\space)
			       (char=? c #\Newline)
			       (char=? c #\>))))
		  (xml-parse-error
		     (format "Illegal `~a' attribute character" tag)
		     (error-line c (the-port))
		     (input-port-name (the-port))
		     (input-port-position (the-port)))
		  ;; non-strict mode tolerates a missing value
		  " ")
	      (xml-parse-error
		 (format "Premature end of line for tag `~a' attribute" tag)
		 c
		 (input-port-name (the-port))
		 (-fx (input-port-position (the-port)) 1)))))))
;*---------------------------------------------------------------------*/
;* attribute-grammar ... */
;*---------------------------------------------------------------------*/
;; Lexer for one attribute inside a start tag.  Returns a
;; (symbol . value) pair for "name=value" or bare "name" attributes,
;; or the symbol > or /> when the tag ends.  DECODER converts names
;; and values to the requested encoding.
(define attribute-grammar
   (regular-grammar ((id (: (in ("azAZ") "_") (* (in ("azAZ09") ":_-"))))
		     tag
		     strict
		     decoder)
      ((+ (in " \t\n\r"))
       (ignore))
      ;; name immediately followed by "="
      ((: id "=")
       (let* ((key (the-substring 0 (-fx (the-length) 1)))
	      (val (read/rp attribute-value-grammar (the-port) strict tag)))
	  (cons (string->symbol (decoder key)) (decoder val))))
      ;; name, blanks, then "=": trim the trailing blanks off the name
      ((: id (+ blank) "=")
       (let* ((key (the-substring 0 (-fx (the-length) 2)))
	      (val (read/rp attribute-value-grammar (the-port) strict tag)))
	  (let loop ((i (-fx (string-length key) 1)))
	     (case (string-ref key i)
		((#\space #\tab #\Newline)
		 (loop (-fx i 1)))
		(else
		 (set! key (substring key 0 (+ i 1))))))
	  (cons (string->symbol (decoder key)) (decoder val))))
      ;; valueless attribute: bound to its own name
      ((: id)
       (let* ((key (decoder (the-string))))
	  (cons (string->symbol key) key)))
      ;; end of the start tag
      ((or "/>" ">")
       (the-symbol))
      (else
       (let ((c (the-failure)))
	  (if (not (eof-object? c))
	      (xml-parse-error "Illegal attribute character"
		 (error-line c (the-port))
		 (input-port-name (the-port))
		 (input-port-position (the-port)))
	      (xml-parse-error
		 (format "Premature end of line, expecting tag `~a'" tag)
		 c
		 (input-port-name (the-port))
		 (-fx (input-port-position (the-port)) 1)))))))
;*---------------------------------------------------------------------*/
* cdata - grammar ... * /
;*---------------------------------------------------------------------*/
;; Lexer for the body of a <![CDATA[ ... ]]> section: accumulates the
;; decoded text up to (and excluding) the closing "]]>" marker, which
;; may swallow one trailing newline.
(define cdata-grammar
   (regular-grammar (decoder)
      ((* (out "]"))
       (let* ((res (decoder (the-string)))
	      (rest (ignore)))
	  (string-append res rest)))
      ;; a lone "]" is plain content
      ("]"
       (string-append "]" (ignore)))
      ((: "]]>" (? "\n"))
       "")
      (else
       (let* ((c (the-failure))
	      (msg (if (not (eof-object? c))
		       "Illegal <![CDATA[ character"
		       "Premature end of line, expecting tag `]]>'")))
	  (xml-parse-error msg
	     c
	     (input-port-name (the-port))
	     (input-port-position (the-port)))))))
;*---------------------------------------------------------------------*/
;* get-decoder ... */
;*---------------------------------------------------------------------*/
(define (get-decoder::procedure enc::bstring dst-enc)
   ;; Return a one-argument string converter translating from the
   ;; encoding named ENC (case-insensitive) to DST-ENC.  Unknown or
   ;; identical encodings yield the identity converter.
   (define (as-is x) x)
   (let ((src-enc (string->symbol (string-upcase enc))))
      (cond
	 ((or (not src-enc) (eq? src-enc dst-enc))
	  as-is)
	 ((eq? src-enc 'UTF-8)
	  (cond
	     ((memq dst-enc '(ISO-8859-1 ISO-8859-2 ISO-8859-15))
	      utf8->iso-latin)
	     ((eq? dst-enc 'UCS-2)
	      utf8-string->ucs2-string)
	     (else
	      as-is)))
	 ((memq src-enc '(ISO-8859-1 ISO-8859-2 ISO-8859-15))
	  (cond
	     ((eq? dst-enc 'UTF-8)
	      iso-latin->utf8)
	     ((eq? dst-enc 'UCS-2)
	      ;; latin -> UCS-2 goes through UTF-8
	      (lambda (s)
		 (utf8-string->ucs2-string (iso-latin->utf8 s))))
	     (else
	      as-is)))
	 (else
	  as-is))))
;*---------------------------------------------------------------------*/
;* xml-grammar ... */
;*---------------------------------------------------------------------*/
;; The main XML tokenizer.  Returns, per invocation: a whitespace
;; string, a (comment . text) / (declaration . text) / (cdata . text) /
;; (instruction . text) / (xml-decl . attrs) pair, a complete element
;; node built with MAKE (via collect-up-to), the symbol of a closing
;; tag, or a decoded text chunk.  NEXT builds nested special elements,
;; MAKE builds regular elements.
(define xml-grammar
   (regular-grammar ((id (: (in ("azAZ") "!?") (* (in ("azAZ09") ":_-"))))
		     next
		     make
		     specials
		     strict
		     decoder
		     encoding)
      ;; inter-markup whitespace is returned verbatim
      ((+ (in " \t\n\r"))
       (the-string))
      ;; comments
      ((: "<!--"
	  (* (or (out "-") (: "-" (out "-")) (: "--" (out ">"))))
	  "-->")
       (cons 'comment (the-string)))
      ;; <!DOCTYPE ...> and other declarations (with optional [...] part)
      ((: "<!" (: (or (out "[-") (: "-" (out "-")))
		  (* (out ">]"))
		  (? (: "[" (* (out "]")) "]"))
		  (* (out ">"))) ">")
       (cons 'declaration (the-string)))
      ("<![CDATA["
       (cons 'cdata (read/rp cdata-grammar (the-port) decoder)))
      ;; <?xml ...?> declaration: re-lex its attributes from a string port
      ((: "<?xml " (* (out "?>")) "?>")
       (let ((s (the-substring 6 (the-length))))
	  ;; overwrite the trailing "?" so only ">" terminates the scan
	  (string-set! s (-fx (string-length s) 2) #\space)
	  (let ((p (open-input-string s)))
	     (let loop ((attr '()))
		(let ((obj (read/rp attribute-grammar p 'xml #t decoder)))
		   (cond
		      ((pair? obj)
		       (loop (cons obj attr)))
		      ((eq? obj '>)
		       (cons 'xml-decl attr))))))))
      ;; other processing instructions
      ((: "<?" (* (out ">")) ">")
       (cons 'instruction (the-string)))
      ;; start tag without attributes
      ((: "<" id ">")
       (let* ((t (the-substring 1 (-fx (the-length) 1)))
	      (ts (string->symbol t))
	      (p (the-port)))
	  (collect-up-to ignore ts '() p make specials strict decoder encoding)))
      ;; empty element without attributes
      ((: "<" id "/>")
       (let ((t (the-substring 1 (-fx (the-length) 2))))
	  (make (string->symbol t) '() '())))
      ;; start tag with attributes: lex them, then collect the children
      ((: "<" id (in " \n\t\r"))
       (let* ((t (the-substring 1 (-fx (the-length) 1)))
	      (ts (string->symbol t))
	      (p (the-port)))
	  (let loop ((attr '()))
	     (let ((obj (read/rp attribute-grammar p t strict decoder)))
		(cond
		   ((pair? obj)
		    (loop (cons obj attr)))
		   ((eq? obj '>)
		    (collect-up-to ignore ts (reverse! attr) p make specials strict decoder encoding))
		   ((eq? obj '/>)
		    (make ts (reverse! attr) '())))))))
      ;; closing tag: returned as a bare symbol for collect-up-to
      ((: "</" id ">")
       (string->symbol (the-substring 2 (-fx (the-length) 1))))
      ;; text content
      ((+ (out "<"))
       (decoder (the-string)))
      (else
       (let ((c (the-failure)))
	  (cond
	     ((not (eof-object? c))
	      (xml-parse-error "Illegal character"
		 (error-line c (the-port))
		 (input-port-name (the-port))
		 (input-port-position (the-port))))
	     (else
	      c))))))
;*---------------------------------------------------------------------*/
;* char-hexnumeric? ... */
;*---------------------------------------------------------------------*/
(define (char-hexnumeric? c)
   ;; True iff C is a hexadecimal digit: 0-9, A-F or a-f.
   (if (char-numeric? c)
       #t
       (or (and (char<=? #\A c) (char<=? c #\F))
	   (and (char<=? #\a c) (char<=? c #\f)))))
;*---------------------------------------------------------------------*/
;* xml-string-decode-inner! ... */
;*---------------------------------------------------------------------*/
;; Percent-decode STR (of length OL) into the preallocated string RES
;; (of length NL), translating each valid %XY hexadecimal escape into
;; its character and copying everything else verbatim.  Returns RES.
;; NL must be the exact decoded length.
(define (xml-string-decode-inner! str ol nl res)
   ;; numeric value of one hexadecimal digit (assumes char-hexnumeric?)
   (define (char-value c)
      (cond
	 ((char-numeric? c)
	  (-fx (char->integer c) (char->integer #\0)))
	 ((char<=? c #\F)
	  (+fx 10 (-fx (char->integer c) (char->integer #\A))))
	 (else
	  (+fx 10 (-fx (char->integer c) (char->integer #\a))))))
   (let ((ol-2 (-fx ol 2)))
      (let loop ((i 0)
		 (j 0))
	 (if (=fx j nl)
	     res
	     (let ((c (string-ref str i)))
		;; only "%" with two characters of lookahead can escape
		(if (and (char=? c #\%) (<fx i ol-2))
		    (let ((c1 (string-ref str (+fx i 1)))
			  (c2 (string-ref str (+fx i 2))))
		       (if (and (char-hexnumeric? c1) (char-hexnumeric? c2))
			   (let* ((v1 (char-value c1))
				  (v2 (char-value c2))
				  (d (integer->char (+fx (*fx v1 16) v2))))
			      (string-set! res j d)
			      (loop (+fx i 3) (+fx j 1)))
			   ;; malformed escape: keep the "%" literally
			   (begin
			      (string-set! res j c)
			      (loop (+fx i 1) (+fx j 1)))))
		    (begin
		       (string-set! res j c)
		       (loop (+fx i 1) (+fx j 1)))))))))
;*---------------------------------------------------------------------*/
;* xml-count ... */
;*---------------------------------------------------------------------*/
;; Return the length STR (of length OL) will have once its XML entities
;; are decoded: each recognized entity (&lt; &gt; &amp; &quot; &nbsp;
;; and numeric &#NNN; references) counts for a single character.
;; NOTE(review): the entity string literals below were HTML-unescaped
;; in a previous copy (the skip widths 4/5/6 prove the intended text);
;; they are restored here.
(define (xml-count str ol)
   (let loop ((i 0)
	      (c 0))
      (cond
	 ((=fx i ol)
	  c)
	 ((char=? (string-ref str i) #\&)
	  (cond
	     ((substring-at? str "&lt;" i)
	      (loop (+fx i 4) (+fx c 1)))
	     ((substring-at? str "&gt;" i)
	      (loop (+fx i 4) (+fx c 1)))
	     ((substring-at? str "&amp;" i)
	      (loop (+fx i 5) (+fx c 1)))
	     ((substring-at? str "&quot;" i)
	      (loop (+fx i 6) (+fx c 1)))
	     ((substring-at? str "&nbsp;" i)
	      (loop (+fx i 6) (+fx c 1)))
	     ((substring-at? str "&#" i)
	      ;; numeric reference: all digits plus the terminator
	      ;; collapse into one character
	      (let liip ((i (+fx i 2)))
		 (cond
		    ((=fx i ol)
		     c)
		    ((char-numeric? (string-ref str i))
		     (liip (+fx i 1)))
		    (else
		     (loop (+fx i 1) (+fx c 1))))))
	     (else
	      ;; a bare "&" stands for itself
	      (loop (+fx i 1) (+fx c 1)))))
	 (else
	  (loop (+fx i 1) (+fx c 1))))))
;*---------------------------------------------------------------------*/
;* xml-decode ... */
;*---------------------------------------------------------------------*/
;; Decode the XML entities of STR (length OL) into the preallocated
;; string RES, whose length NL must have been computed by xml-count.
;; RES may be STR itself for in-place decoding.  Returns RES.
;; NOTE(review): the entity string literals below were HTML-unescaped
;; in a previous copy (the 4/5/6 index skips prove the intended text);
;; they are restored here.
(define (xml-decode! str res ol nl)
   (let loop ((i 0)
	      (j 0))
      (cond
	 ((=fx i ol)
	  res)
	 ((char=? (string-ref str i) #\&)
	  (cond
	     ((substring-at? str "&lt;" i)
	      (string-set! res j #\<)
	      (loop (+fx i 4) (+fx j 1)))
	     ((substring-at? str "&gt;" i)
	      (string-set! res j #\>)
	      (loop (+fx i 4) (+fx j 1)))
	     ((substring-at? str "&amp;" i)
	      (string-set! res j #\&)
	      (loop (+fx i 5) (+fx j 1)))
	     ((substring-at? str "&quot;" i)
	      (string-set! res j #\")
	      (loop (+fx i 6) (+fx j 1)))
	     ((substring-at? str "&nbsp;" i)
	      (string-set! res j #\space)
	      (loop (+fx i 6) (+fx j 1)))
	     ((substring-at? str "&#" i)
	      ;; numeric reference: accumulate the decimal value N
	      (let liip ((i (+fx i 2))
			 (n 0))
		 (if (=fx i ol)
		     res
		     (let ((c (string-ref str i)))
			(if (char-numeric? c)
			    (liip (+fx i 1)
			       (+fx (*fx n 10)
				  (-fx (char->integer c)
				     (char->integer #\0))))
			    (begin
			       (string-set! res j (integer->char n))
			       (loop (+fx i 1) (+fx j 1))))))))
	     (else
	      ;; a bare "&" stands for itself
	      (string-set! res j (string-ref str i))
	      (loop (+fx i 1) (+fx j 1)))))
	 (else
	  (string-set! res j (string-ref str i))
	  (loop (+fx i 1) (+fx j 1))))))
;*---------------------------------------------------------------------*/
;* xml-string-decode ... */
;*---------------------------------------------------------------------*/
(define (xml-string-decode str)
   ;; Return a fresh string with every XML entity of STR decoded.
   ;; Strings shorter than 3 characters cannot contain an entity and
   ;; are simply copied.
   (let ((ol (string-length str)))
      (if (<fx ol 3)
	  (string-copy str)
	  (let ((nl (xml-count str ol)))
	     (if (=fx nl ol)
		 ;; no entity found: plain copy
		 (string-copy str)
		 (let ((dst (make-string nl)))
		    (xml-decode! str dst ol nl)
		    dst))))))
;*---------------------------------------------------------------------*/
;* xml-string-decode! ... */
;*---------------------------------------------------------------------*/
(define (xml-string-decode! str)
   ;; In-place variant of xml-string-decode: decode STR's XML entities
   ;; into STR itself and shrink it to the decoded length.  Returns the
   ;; (possibly unchanged) string.
   (let ((ol (string-length str)))
      (if (<fx ol 3)
	  str
	  (let ((nl (xml-count str ol)))
	     (if (=fx nl ol)
		 str
		 (begin
		    (xml-decode! str str ol nl)
		    (string-shrink! str nl)))))))
;*---------------------------------------------------------------------*/
;* xml-string-encode ... */
;*---------------------------------------------------------------------*/
;; Return STR with the XML meta-characters <, >, & and " escaped as
;; entities.  When nothing needs escaping STR itself is returned.
;; NOTE(review): the entity literals in the blit-string! calls were
;; HTML-unescaped in a previous copy (the blit widths 4/5/6 prove the
;; intended text); they are restored here.
(define (xml-string-encode str)
   ;; length of the encoded form of STR (OL is STR's length)
   (define (count str ol)
      (let loop ((i 0)
		 (n 0))
	 (if (=fx i ol)
	     n
	     (let ((c (string-ref str i)))
		(case c
		   ((#\")
		    (loop (+fx i 1) (+fx n 6)))
		   ((#\&)
		    (loop (+fx i 1) (+fx n 5)))
		   ((#\< #\>)
		    (loop (+fx i 1) (+fx n 4)))
		   (else
		    (loop (+fx i 1) (+fx n 1))))))))
   ;; copy STR into a fresh string of length NL, expanding the escapes
   (define (encode str ol nl)
      (if (=fx nl ol)
	  str
	  (let ((res (make-string nl)))
	     (let loop ((i 0)
			(j 0))
		(if (=fx j nl)
		    res
		    (let ((c (string-ref str i)))
		       (case c
			  ((#\<)
			   (blit-string! "&lt;" 0 res j 4)
			   (loop (+fx i 1) (+fx j 4)))
			  ((#\>)
			   (blit-string! "&gt;" 0 res j 4)
			   (loop (+fx i 1) (+fx j 4)))
			  ((#\&)
			   (blit-string! "&amp;" 0 res j 5)
			   (loop (+fx i 1) (+fx j 5)))
			  ((#\")
			   (blit-string! "&quot;" 0 res j 6)
			   (loop (+fx i 1) (+fx j 6)))
			  (else
			   (string-set! res j c)
			   (loop (+fx i 1) (+fx j 1))))))))))
   (let ((ol (string-length str)))
      (encode str ol (count str ol))))
;*---------------------------------------------------------------------*/
;* read-xml ... */
;*---------------------------------------------------------------------*/
;; Convenience wrapper: parse a whole XML document from PORT
;; (defaulting to the current input port) with xml-parse's defaults.
(define (read-xml #!optional (port::input-port (current-input-port)))
   (xml-parse port))
;*---------------------------------------------------------------------*/
;* xml-metadata ... */
;*---------------------------------------------------------------------*/
;; Scan a parsed XML tree (as produced by xml-parse) and extract its
;; document-level metadata.  Returns six values:
;;   - the version string of the <?xml ...?> declaration, or #f
;;   - the declared encoding, or #f when unknown
;;   - the xml:lang value of the root markup, or #f
;;   - the root markup (a symbol, or (default-namespace . symbol) when
;;     an xmlns attribute is present)
;;   - the version attribute of the root markup (0.0 when unspecified)
;;   - the list of prefixed namespaces as (path . prefix) pairs
;; Raises an error when the document holds no data markup at all.
(define (xml-metadata xml)
   (let ((xml-ver #f)
	 (xml-enc #f)
	 (xml-lang #f)
	 (root-ver 0.0)
	 (xml-root #f)
	 (xml-ns '()))
      (let loop1 ((l xml))
	 (when (pair? l)
	    (match-case (car l)
	       ((xml-decl . (and ?attr (?- . ?-)))
		;; bug fix: read the value of the attribute AT being
		;; visited, not the tail of the whole list ATTR
		(for-each (lambda (at)
			     (case (car at)
				((version) (set! xml-ver (cdr at)))
				((encoding) (set! xml-enc (cdr at)))))
		   attr))
	       ((?mark ?lattr . ?-)
		(let loop3 ((lattr lattr))
		   ;; the first data markup found becomes the root
		   (unless xml-root (set! xml-root mark))
		   (when (pair? lattr)
		      (let ((attr (car lattr)))
			 (case (car attr)
			    ((xml:lang)
			     (set! xml-lang (cdr attr)))
			    ((xmlns)
			     ;; default namespace: pair it with the root
			     (set! xml-root (cons (cdr attr) xml-root)))
			    ((version)
			     (set! root-ver (string->number (cdr attr))))
			    (else
			     ;; xmlns:PREFIX attributes declare prefixed
			     ;; namespaces
			     (let ((str (symbol->string (car attr))))
				(when (substring=? str "xmlns:" 6)
				   (let* ((l (string-length str))
					  (s (substring str 6 l))
					  (si (string->symbol s)))
				      (set! xml-ns
					 (cons (cons (cdr attr) si)
					    xml-ns)))))))
			 (loop3 (cdr lattr)))))))
	    (loop1 (cdr l))))
      (unless xml-root
	 (error "xml-metadata" "Empty XML document !" xml))
      (values xml-ver xml-enc xml-lang xml-root root-ver xml-ns)))
| null | https://raw.githubusercontent.com/donaldsonjw/bigloo/a4d06e409d0004e159ce92b9908719510a18aed5/api/web/src/Llib/xml.scm | scheme | *=====================================================================*/
* ------------------------------------------------------------- */
* ------------------------------------------------------------- */
* XML parsing */
*=====================================================================*/
*---------------------------------------------------------------------*/
* The module */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* xml-parse ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* xml-parse-error ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* error-line ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* special ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* collect-up-to ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* attribute-value-grammar ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* attribute-grammar ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* get-decoder ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* xml-grammar ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* char-hexnumeric? ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* xml-string-decode-inner! ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* xml-count ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* xml-decode ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* xml-string-decode ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* xml-string-decode! ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* xml-string-encode ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* read-xml ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* xml-metadata ... */
*---------------------------------------------------------------------*/
Values are :
- XML Encoding (#f if unknown)
- xml:lang value
cdr the default namespace
- list of prefixed namespaces (prefix . path) | * serrano / prgm / project / bigloo / api / web / src / Llib / xml.scm * /
* Author : * /
* Creation : Fri Mar 11 16:23:53 2005 * /
* Last change : Mon Aug 10 16:53:05 2015 ( serrano ) * /
* Copyright : 2005 - 15 * /
(module __web_xml
(option (set! *dlopen-init-gc* #t))
(export (xml-parse::pair-nil port::input-port
#!key
(content-length 0)
(procedure list)
(specials '())
(strict #t)
(encoding 'UTF-8))
(xml-string-decode::bstring ::bstring)
(xml-string-decode!::bstring ::bstring)
(xml-string-encode::bstring ::bstring)
(read-xml #!optional (port::input-port (current-input-port)))
(xml-metadata xml-tree::pair-nil)))
(define (xml-parse::pair-nil port::input-port
#!key
(content-length 0)
(procedure list)
(specials '())
(strict #t)
(encoding 'UTF-8))
(when (elong? content-length)
(set! content-length (elong->fixnum content-length)))
(when (and (fixnum? content-length) (>fx content-length 0))
(input-port-fill-barrier-set! port content-length))
(when (>fx content-length 0)
(set! content-length (+fx content-length (input-port-position port))))
(let loop ((decoder (lambda (x) x)))
(let ((obj (read/rp xml-grammar port procedure procedure specials strict decoder encoding)))
(when (and (fixnum? content-length) (>fx content-length 0))
(input-port-fill-barrier-set! port -1))
(cond
((eof-object? obj)
'())
((and (>fx content-length 0)
(>=fx (input-port-position port) content-length))
(list obj))
((and (pair? obj) (eq? 'xml-decl (car obj)))
(let ((enc (assq 'encoding (cdr obj))))
(if enc
(cons obj (loop (get-decoder (cdr enc) encoding)))
(cons obj (loop decoder)))))
(else
(cons obj (loop decoder)))))))
(define (xml-parse-error msg obj name pos)
(raise
(instantiate::&io-parse-error
(proc 'xml-parse)
(msg msg)
(obj obj)
(fname name)
(location pos))))
(define (error-line c port)
(let ((line (read-line port)))
(string-append "{" (string c) "}" (if (string? line) line ""))))
(define-struct special tag attributes body owner)
(define (collect-up-to ignore tag attributes port make specials strict decoder encoding)
(define (collect ignore tags)
(let ((name (input-port-name port))
(po (input-port-position port)))
(let loop ((acc '())
(item (ignore)))
(cond
((symbol? item)
(cond
((eq? item tag)
(make tag attributes (reverse! acc)))
(strict
(xml-parse-error "Illegal closing tag"
(format "`~a' expected, `~a' provided"
tag item)
name po))
(else
(make tag attributes (reverse! acc)))))
((special? item)
(let ((nitem (make (special-tag item)
(special-attributes item)
(special-body item))))
(if (memq (special-tag item) tags)
(loop acc nitem)
(begin
(list (make tag attributes (reverse! acc)) nitem)))))
((eof-object? item)
(if strict
(xml-parse-error
(format "Premature end of line, expecting tag `~a'"
tag)
item name po)
(make tag attributes (reverse! acc))))
(else
(let ((po (input-port-last-token-position port)))
(loop (econs item acc (list 'at name po)) (ignore))))))))
(let ((spec (assq tag specials)))
(cond
((not spec)
(collect ignore '()))
((null? (cdr spec))
(make tag attributes '()))
((procedure? (cdr spec))
(make tag attributes ((cdr spec) port)))
((pair? (cdr spec))
(let ((ignore (lambda ()
(read/rp xml-grammar port
(lambda (t a b) (special t a b tag))
make
specials strict decoder encoding))))
(collect ignore (cdr spec))))
(else
(error "xml-parse" "Illegal special handler" spec)))))
(define attribute-value-grammar
(regular-grammar (strict tag)
((+ (in " \t\n\r"))
(ignore))
((: #\" (* (or (out #\\ #\") (: #\\ all))) #\")
(the-substring 1 (-fx (the-length) 1)))
((: #\' (* (or (out #\\ #\') (: #\\ all))) #\')
(the-substring 1 (-fx (the-length) 1)))
((: (+ digit) (? (or "%" "px" "cm" "em" "mm" "inch")))
(if strict
(xml-parse-error (format "Illegal `~a' attribute value" tag)
(the-string)
(input-port-name (the-port))
(input-port-position (the-port)))
(the-string)))
((+ (out " \t\n\r<>(){}[]@!\"'"))
(if strict
(xml-parse-error (format "Illegal `~a' attribute character" tag)
(the-string)
(input-port-name (the-port))
(input-port-position (the-port)))
(the-string)))
(else
(let ((c (the-failure)))
(if (not (eof-object? c))
(if (or strict
(not (or (char=? c #\space)
(char=? c #\Newline)
(char=? c #\>))))
(xml-parse-error
(format "Illegal `~a' attribute character" tag)
(error-line c (the-port))
(input-port-name (the-port))
(input-port-position (the-port)))
" ")
(xml-parse-error
(format "Premature end of line for tag `~a' attribute" tag)
c
(input-port-name (the-port))
(-fx (input-port-position (the-port)) 1)))))))
(define attribute-grammar
(regular-grammar ((id (: (in ("azAZ") "_") (* (in ("azAZ09") ":_-"))))
tag
strict
decoder)
((+ (in " \t\n\r"))
(ignore))
((: id "=")
(let* ((key (the-substring 0 (-fx (the-length) 1)))
(val (read/rp attribute-value-grammar (the-port) strict tag)))
(cons (string->symbol (decoder key)) (decoder val))))
((: id (+ blank) "=")
(let* ((key (the-substring 0 (-fx (the-length) 2)))
(val (read/rp attribute-value-grammar (the-port) strict tag)))
(let loop ((i (-fx (string-length key) 1)))
(case (string-ref key i)
((#\space #\tab #\Newline)
(loop (-fx i 1)))
(else
(set! key (substring key 0 (+ i 1))))))
(cons (string->symbol (decoder key)) (decoder val))))
((: id)
(let* ((key (decoder (the-string))))
(cons (string->symbol key) key)))
((or "/>" ">")
(the-symbol))
(else
(let ((c (the-failure)))
(if (not (eof-object? c))
(xml-parse-error "Illegal attribute character"
(error-line c (the-port))
(input-port-name (the-port))
(input-port-position (the-port)))
(xml-parse-error
(format "Premature end of line, expecting tag `~a'" tag)
c
(input-port-name (the-port))
(-fx (input-port-position (the-port)) 1)))))))
* cdata - grammar ... * /
(define cdata-grammar
(regular-grammar (decoder)
((* (out "]"))
(let* ((res (decoder (the-string)))
(rest (ignore)))
(string-append res rest)))
("]"
(string-append "]" (ignore)))
((: "]]>" (? "\n"))
"")
(else
(let* ((c (the-failure))
(msg (if (not (eof-object? c))
"Illegal <![CDATA[ character"
"Premature end of line, expecting tag `]]>'")))
(xml-parse-error msg
c
(input-port-name (the-port))
(input-port-position (the-port)))))))
(define (get-decoder::procedure enc::bstring dst-enc)
(let ((src-enc (string->symbol (string-upcase enc))))
(cond
((or (not src-enc) (eq? src-enc dst-enc))
(lambda (x) x))
((eq? src-enc 'UTF-8)
(cond
((memq dst-enc '(ISO-8859-1 ISO-8859-2 ISO-8859-15))
utf8->iso-latin)
((eq? dst-enc 'UCS-2)
utf8-string->ucs2-string)
(else
(lambda (x) x))))
((memq src-enc '(ISO-8859-1 ISO-8859-2 ISO-8859-15))
(cond
((eq? dst-enc 'UTF-8)
iso-latin->utf8)
((eq? dst-enc 'UCS-2)
(lambda (x)
(utf8-string->ucs2-string (iso-latin->utf8 x))))
(else
(lambda (x) x))))
(else
(lambda (x) x)))))
(define xml-grammar
(regular-grammar ((id (: (in ("azAZ") "!?") (* (in ("azAZ09") ":_-"))))
next
make
specials
strict
decoder
encoding)
((+ (in " \t\n\r"))
(the-string))
((: "<!--"
(* (or (out "-") (: "-" (out "-")) (: "--" (out ">"))))
"-->")
(cons 'comment (the-string)))
((: "<!" (: (or (out "[-") (: "-" (out "-")))
(* (out ">]"))
(? (: "[" (* (out "]")) "]"))
(* (out ">"))) ">")
(cons 'declaration (the-string)))
("<![CDATA["
(cons 'cdata (read/rp cdata-grammar (the-port) decoder)))
((: "<?xml " (* (out "?>")) "?>")
(let ((s (the-substring 6 (the-length))))
(string-set! s (-fx (string-length s) 2) #\space)
(let ((p (open-input-string s)))
(let loop ((attr '()))
(let ((obj (read/rp attribute-grammar p 'xml #t decoder)))
(cond
((pair? obj)
(loop (cons obj attr)))
((eq? obj '>)
(cons 'xml-decl attr))))))))
((: "<?" (* (out ">")) ">")
(cons 'instruction (the-string)))
((: "<" id ">")
(let* ((t (the-substring 1 (-fx (the-length) 1)))
(ts (string->symbol t))
(p (the-port)))
(collect-up-to ignore ts '() p make specials strict decoder encoding)))
((: "<" id "/>")
(let ((t (the-substring 1 (-fx (the-length) 2))))
(make (string->symbol t) '() '())))
((: "<" id (in " \n\t\r"))
(let* ((t (the-substring 1 (-fx (the-length) 1)))
(ts (string->symbol t))
(p (the-port)))
(let loop ((attr '()))
(let ((obj (read/rp attribute-grammar p t strict decoder)))
(cond
((pair? obj)
(loop (cons obj attr)))
((eq? obj '>)
(collect-up-to ignore ts (reverse! attr) p make specials strict decoder encoding))
((eq? obj '/>)
(make ts (reverse! attr) '())))))))
((: "</" id ">")
(string->symbol (the-substring 2 (-fx (the-length) 1))))
((+ (out "<"))
(decoder (the-string)))
(else
(let ((c (the-failure)))
(cond
((not (eof-object? c))
(xml-parse-error "Illegal character"
(error-line c (the-port))
(input-port-name (the-port))
(input-port-position (the-port))))
(else
c))))))
(define (char-hexnumeric? c)
(or (char-numeric? c)
(and (char>=? c #\A) (char<=? c #\F))
(and (char>=? c #\a) (char<=? c #\f))))
(define (xml-string-decode-inner! str ol nl res)
(define (char-value c)
(cond
((char-numeric? c)
(-fx (char->integer c) (char->integer #\0)))
((char<=? c #\F)
(+fx 10 (-fx (char->integer c) (char->integer #\A))))
(else
(+fx 10 (-fx (char->integer c) (char->integer #\a))))))
(let ((ol-2 (-fx ol 2)))
(let loop ((i 0)
(j 0))
(if (=fx j nl)
res
(let ((c (string-ref str i)))
(if (and (char=? c #\%) (<fx i ol-2))
(let ((c1 (string-ref str (+fx i 1)))
(c2 (string-ref str (+fx i 2))))
(if (and (char-hexnumeric? c1) (char-hexnumeric? c2))
(let* ((v1 (char-value c1))
(v2 (char-value c2))
(d (integer->char (+fx (*fx v1 16) v2))))
(string-set! res j d)
(loop (+fx i 3) (+fx j 1)))
(begin
(string-set! res j c)
(loop (+fx i 1) (+fx j 1)))))
(begin
(string-set! res j c)
(loop (+fx i 1) (+fx j 1)))))))))
(define (xml-count str ol)
(let loop ((i 0)
(c 0))
(cond
((=fx i ol)
c)
((char=? (string-ref str i) #\&)
(cond
((substring-at? str "<" i)
(loop (+fx i 4) (+fx c 1)))
((substring-at? str ">" i)
(loop (+fx i 4) (+fx c 1)))
((substring-at? str "&" i)
(loop (+fx i 5) (+fx c 1)))
((substring-at? str """ i)
(loop (+fx i 6) (+fx c 1)))
((substring-at? str " " i)
(loop (+fx i 6) (+fx c 1)))
((substring-at? str "&#" i)
(let liip ((i (+fx i 2)))
(cond
((=fx i ol)
c)
((char-numeric? (string-ref str i))
(liip (+fx i 1)))
(else
(loop (+fx i 1) (+fx c 1))))))
(else
(loop (+fx i 1) (+fx c 1)))))
(else
(loop (+fx i 1) (+fx c 1))))))
(define (xml-decode! str res ol nl)
(let loop ((i 0)
(j 0))
(cond
((=fx i ol)
res)
((char=? (string-ref str i) #\&)
(cond
((substring-at? str "<" i)
(string-set! res j #\<)
(loop (+fx i 4) (+fx j 1)))
((substring-at? str ">" i)
(string-set! res j #\>)
(loop (+fx i 4) (+fx j 1)))
((substring-at? str "&" i)
(string-set! res j #\&)
(loop (+fx i 5) (+fx j 1)))
((substring-at? str """ i)
(string-set! res j #\")
(loop (+fx i 6) (+fx j 1)))
((substring-at? str " " i)
(string-set! res j #\space)
(loop (+fx i 6) (+fx j 1)))
((substring-at? str "&#" i)
(let liip ((i (+fx i 2))
(n 0))
(if (=fx i ol)
res
(let ((c (string-ref str i)))
(if (char-numeric? c)
(liip (+fx i 1)
(+fx (*fx n 10)
(-fx (char->integer c)
(char->integer #\0))))
(begin
(string-set! res j (integer->char n))
(loop (+fx i 1) (+fx j 1))))))))
(else
(string-set! res j (string-ref str i))
(loop (+fx i 1) (+fx j 1)))))
(else
(string-set! res j (string-ref str i))
(loop (+fx i 1) (+fx j 1))))))
(define (xml-string-decode str)
(let ((ol (string-length str)))
(if (>=fx ol 3)
(let ((nl (xml-count str ol)))
(if (=fx nl ol)
(string-copy str)
(let ((res (make-string nl)))
(xml-decode! str res ol nl)
res)))
(string-copy str))))
(define (xml-string-decode! str)
(let ((ol (string-length str)))
(if (>=fx ol 3)
(let ((nl (xml-count str ol)))
(if (=fx nl ol)
str
(begin
(xml-decode! str str ol nl)
(string-shrink! str nl))))
str)))
(define (xml-string-encode str)
(define (count str ol)
(let loop ((i 0)
(n 0))
(if (=fx i ol)
n
(let ((c (string-ref str i)))
(case c
((#\")
(loop (+fx i 1) (+fx n 6)))
((#\&)
(loop (+fx i 1) (+fx n 5)))
((#\< #\>)
(loop (+fx i 1) (+fx n 4)))
(else
(loop (+fx i 1) (+fx n 1))))))))
(define (encode str ol nl)
(if (=fx nl ol)
str
(let ((res (make-string nl)))
(let loop ((i 0)
(j 0))
(if (=fx j nl)
res
(let ((c (string-ref str i)))
(case c
((#\<)
(blit-string! "<" 0 res j 4)
(loop (+fx i 1) (+fx j 4)))
((#\>)
(blit-string! ">" 0 res j 4)
(loop (+fx i 1) (+fx j 4)))
((#\&)
(blit-string! "&" 0 res j 5)
(loop (+fx i 1) (+fx j 5)))
((#\")
(blit-string! """ 0 res j 6)
(loop (+fx i 1) (+fx j 6)))
(else
(string-set! res j c)
(loop (+fx i 1) (+fx j 1))))))))))
(let ((ol (string-length str)))
(encode str ol (count str ol))))
(define (read-xml #!optional (port::input-port (current-input-port)))
(xml-parse port))
(define (xml-metadata xml)
(let ((xml-ver #f)
(xml-enc #f)
(xml-lang #f)
(root-ver 0.0)
(xml-root #f)
(xml-ns '()))
(let loop1 ((l xml))
(when (pair? l)
(match-case (car l)
((xml-decl . (and ?attr (?- . ?-)))
(for-each (lambda (at)
(case (car at)
((version) (set! xml-ver (cdr attr)))
((encoding) (set! xml-enc (cdr attr)))))
attr))
((?mark ?lattr . ?-)
(let loop3 ((lattr lattr))
(unless xml-root (set! xml-root mark))
(when (pair? lattr)
(let ((attr (car lattr)))
(case (car attr)
((xml:lang)
(set! xml-lang (cdr attr)))
((xmlns)
(set! xml-root (cons (cdr attr) xml-root)))
((version)
(set! root-ver (string->number (cdr attr))))
(else
(let ((str (symbol->string (car attr))))
(when (substring=? str "xmlns:" 6)
(let* ((l (string-length str))
(s (substring str 6 l))
(si (string->symbol s)))
(set! xml-ns
(cons (cons (cdr attr) si)
xml-ns)))))))
(loop3 (cdr lattr)))))))
(loop1 (cdr l))))
(unless xml-root
(error "xml-metadata" "Empty XML document !" xml))
- XML Version ( 1.0 or 1.1 ) or # f
- Pair , which car is the first data markup , and
- xml first data markup version attribute ( 0 if unspecified )
(values xml-ver xml-enc xml-lang xml-root root-ver xml-ns)))
|
658039f5b3f01394e55d66c43072ed64d4c01060844132b6deeab6ee51c32b61 | matlux/lambda-game-engine | main.clj | (ns zone.lambda.game.main
(:require
[zone.lambda.game.chess.core :as chess]
[zone.lambda.game.connect4.core :as connect4]
[clojure.core.reducers :as r]))
(defn helper []
(println "use one of the following parameters:
\tchess
\tconnect4"
))
(defn -main
([]
(helper))
([app]
(case app
"chess" (chess/-main)
"connect4" (connect4/-main)
(helper))))
| null | https://raw.githubusercontent.com/matlux/lambda-game-engine/07109206714d80db2e3a2b65c84cd987a81006c4/src/zone/lambda/game/main.clj | clojure | (ns zone.lambda.game.main
(:require
[zone.lambda.game.chess.core :as chess]
[zone.lambda.game.connect4.core :as connect4]
[clojure.core.reducers :as r]))
(defn helper []
(println "use one of the following parameters:
\tchess
\tconnect4"
))
(defn -main
([]
(helper))
([app]
(case app
"chess" (chess/-main)
"connect4" (connect4/-main)
(helper))))
|
|
df4dd16b4acf435dbdf1b29a01dc27b6810a0f7bb917855c7d0901deea8392ce | cedlemo/OCaml-GI-ctypes-bindings-generator | Unit.mli | open Ctypes
type t = None | Points | Inch | Mm
val of_value:
Unsigned.uint32 -> t
val to_value:
t -> Unsigned.uint32
val t_view: t typ
| null | https://raw.githubusercontent.com/cedlemo/OCaml-GI-ctypes-bindings-generator/21a4d449f9dbd6785131979b91aa76877bad2615/tools/Gtk3/Unit.mli | ocaml | open Ctypes
type t = None | Points | Inch | Mm
val of_value:
Unsigned.uint32 -> t
val to_value:
t -> Unsigned.uint32
val t_view: t typ
|
|
85fd11b43d20f175e92f87d59c7dcdbbb5a6576c74561597604859ca4126744d | janestreet/async_smtp | headers.mli | open! Core
open! Async
open! Async_smtp_types
module Config : sig
module Header_cond : sig
type t =
{ name : Email_headers.Name.t
; if_ : [ `Contains of string ] option
}
[@@deriving sexp]
end
module Listed_header_cond : sig
type t =
{ name : Email_headers.Name.t
; if_ : [ `Contains of string ] option
; remove_duplicates : unit option
}
[@@deriving sexp]
end
type t =
{ strip_whitespace : unit option
; normalize_whitespace : Header_cond.t list
; filter : Header_cond.t list
; mask : Header_cond.t list
; hash : Header_cond.t list
; dedup : Header_cond.t list
; (* read in as list of emails and sort *)
sort_emails : Listed_header_cond.t list
; (* read in as list of whitespace-separated words and sort *)
sort_words : Listed_header_cond.t list
; sort : bool
}
[@@deriving sexp]
val default : t
val load : string -> t Deferred.t
end
module Header : sig
type t = Email_headers.Name.t * Email_headers.Value.t [@@deriving compare]
end
val transform : Config.t -> Smtp_envelope.t -> Smtp_envelope.t
| null | https://raw.githubusercontent.com/janestreet/async_smtp/c2c1f8b7b27f571a99d2f21e8a31ce150fbd6ced/tools/src/headers.mli | ocaml | read in as list of emails and sort
read in as list of whitespace-separated words and sort | open! Core
open! Async
open! Async_smtp_types
module Config : sig
module Header_cond : sig
type t =
{ name : Email_headers.Name.t
; if_ : [ `Contains of string ] option
}
[@@deriving sexp]
end
module Listed_header_cond : sig
type t =
{ name : Email_headers.Name.t
; if_ : [ `Contains of string ] option
; remove_duplicates : unit option
}
[@@deriving sexp]
end
type t =
{ strip_whitespace : unit option
; normalize_whitespace : Header_cond.t list
; filter : Header_cond.t list
; mask : Header_cond.t list
; hash : Header_cond.t list
; dedup : Header_cond.t list
sort_emails : Listed_header_cond.t list
sort_words : Listed_header_cond.t list
; sort : bool
}
[@@deriving sexp]
val default : t
val load : string -> t Deferred.t
end
module Header : sig
type t = Email_headers.Name.t * Email_headers.Value.t [@@deriving compare]
end
val transform : Config.t -> Smtp_envelope.t -> Smtp_envelope.t
|
5a7079c14162a890c350ed912ca9fdff5f353de4e9f7ba201256e2705294a7a4 | alexkazik/qrcode | Image.hs | # LANGUAGE NoImplicitPrelude #
# LANGUAGE RecordWildCards #
module Codec.QRCode.Code.Image
( drawFunctionPatterns
, drawFormatBits
, drawCodeWords
) where
import Codec.QRCode.Base
import Control.Monad.Primitive (PrimMonad, PrimState)
import qualified Data.Vector.Unboxed as UV
import qualified Data.Vector.Unboxed.Mutable as MUV
import Codec.QRCode.Data.ErrorLevel
import Codec.QRCode.Data.Mask
import Codec.QRCode.Data.MQRImage
import Codec.QRCode.Data.Version
--
-- Draw (almost) all function patterns into an image
--
-- | Draw all function patterns
drawFunctionPatterns :: PrimMonad m => MQRImage1 (PrimState m) -> m ()
drawFunctionPatterns img@MQRImage1{..} = do
drawTimingPatterns img -- will be overwritten by finder and alignment patterns
let
(alignmentPatternPositions, maxAlignmentPosition) = calculateAlignmentPatternPositions mqrImage1Version
forM_ [(x,y) | x <- alignmentPatternPositions, y <- alignmentPatternPositions] $ \(x,y) ->
unless (x == 6 && y == 6 || x == maxAlignmentPosition && y == 6 || x == 6 && y == maxAlignmentPosition) $
drawAlignmentPattern img x y
drawFinderPattern img 3 3
drawFinderPattern img (mqrImage1Size-4) 3
drawFinderPattern img 3 (mqrImage1Size-4)
when (unVersion mqrImage1Version >= 7) $
drawVersion img
will be overwritten later with
where
-- | Calculate all alignment pattern positions
calculateAlignmentPatternPositions :: Version -> ([Int], Int)
calculateAlignmentPatternPositions ver
| unVersion ver == 1 = ([], 0)
| otherwise =
let
numAlign = unVersion ver `div` 7 + 2
step
| unVersion ver == 32 = 26
| otherwise = (unVersion ver * 4 + numAlign * 2 + 1) `div` (2 * numAlign - 2) * 2
pos p = unVersion ver * 4 + 10 - p * step
in
(6 : [ pos p | p <- [0 .. numAlign-2]], pos 0)
-- | Draw both timing patterns (alternate black/white modules)
drawTimingPatterns :: PrimMonad m => MQRImage1 (PrimState m) -> m ()
drawTimingPatterns img@MQRImage1{..} =
forM_ [0 .. mqrImage1Size-1] $ \i -> do
setFunctionModule img 6 i (i `mod` 2 == 0)
setFunctionModule img i 6 (i `mod` 2 == 0)
| Draws a 5 * 5 alignment pattern , with the center module at ( x , y )
drawAlignmentPattern :: PrimMonad m => MQRImage1 (PrimState m) -> Int -> Int -> m ()
drawAlignmentPattern img x y =
forM_ [-2 .. 2] $ \i ->
forM_ [-2 .. 2] $ \j ->
setFunctionModule img (x+j) (y+i) ((abs i `max` abs j) /= 1)
| Draws a 9 * 9 finder pattern including the border separator , with the center module at ( x , y )
drawFinderPattern :: PrimMonad m => MQRImage1 (PrimState m) -> Int -> Int -> m ()
drawFinderPattern img@MQRImage1{..} x y =
forM_ [-4 .. 4] $ \i ->
forM_ [-4 .. 4] $ \j -> do
let
dist = abs i `max` abs j
x' = x + j
y' = y + i
when (x' >= 0 && x' < mqrImage1Size && y' >= 0 && y' < mqrImage1Size) $
setFunctionModule img (x+j) (y+i) (dist /= 2 && dist /= 4)
-- | Draw the version information into the image
drawVersion :: PrimMonad m => MQRImage1 (PrimState m) -> m ()
drawVersion img@MQRImage1{..} = do
let
v = unVersion mqrImage1Version
-- Calculate error correction code and pack bits
rem' = iterateN 12 v (\r -> (r `shiftL` 1) `xor` ((r `shiftR` 11) * 0x1F25))
da = (v `shiftL` 12) .|. rem'
Draw two copies
forM_ [0 .. 17] $ \i -> do
let
d = testBit da i
a = mqrImage1Size - 11 + (i `mod` 3)
b = i `div` 3
setFunctionModule img a b d
setFunctionModule img b a d
-- | Mark all modules which will be used by the format bits as a function pattern
-- (but don't actually write anything into it yet).
reserveFormatBits :: PrimMonad m => MQRImage1 (PrimState m) -> m ()
reserveFormatBits img@MQRImage1{..} = do
let
fn x y = MUV.write mqrImage1Fixed (y * mqrImage1Size + x) True
-- Reserve first copy
forM_ [0 .. 5] $ \i ->
fn 8 i
fn 8 7
fn 8 8
fn 7 8
forM_ [9 .. 14] $ \i ->
fn (14 - i) 8
Reserve second copy
forM_ [0 .. 7] $ \i ->
fn (mqrImage1Size - 1 - i) 8
forM_ [8 .. 14] $ \i ->
fn 8 (mqrImage1Size - 15 + i)
-- Draw fixed set module
setFunctionModule img 8 (mqrImage1Size - 8) True
--
-- Functions to be used later (once the format / data is determined)
--
-- | Draw the actual format bits into the image
drawFormatBits :: PrimMonad m => MQRImage3 (PrimState m) -> Mask -> m ()
drawFormatBits MQRImage3{..} m = do
let
daSource = (errorLevelMask mqrImage3ErrorLevel `shiftL` 3) .|. fromEnum m
rem' = iterateN 10 daSource (\r -> (r `shiftL` 1) `xor` ((r `shiftR` 9) * 0x537))
da = ((daSource `shiftL` 10) .|. rem') `xor` 0x5412
fn x y = MUV.write mqrImage3Data (x + y * mqrImage3Size)
Draw first copy
forM_ [0 .. 5] $ \i ->
fn 8 i (testBit da i)
fn 8 7 (testBit da 6)
fn 8 8 (testBit da 7)
fn 7 8 (testBit da 8)
forM_ [9 .. 14] $ \i ->
fn (14 - i) 8 (testBit da i)
Draw second copy
forM_ [0 .. 7] $ \i ->
fn (mqrImage3Size - 1 - i) 8 (testBit da i)
forM_ [8 .. 14] $ \i ->
fn 8 (mqrImage3Size - 15 + i) (testBit da i)
-- | Draw the code words (data and error correction) into the image
drawCodeWords :: PrimMonad m => MQRImage2 (PrimState m) -> [Bool] -> m ()
drawCodeWords MQRImage2{..} d = do
ffoldlM_ d ([mqrImage2Size-1, mqrImage2Size-3 .. 8] ++ [5, 3, 1]) $ \d' right -> do
let
upward = ((right + 1) .&. 2) == 0
ffoldlM d' (bool [0 .. mqrImage2Size-1] [mqrImage2Size-1, mqrImage2Size-2 .. 0] upward) $ \d'' y ->
ffoldlM d'' [right, right-1] $ \d''' x -> do
let
f = mqrImage2Fixed UV.! (x + y * mqrImage2Size)
case d''' of
(isBlack:xs)
| not f -> do
when isBlack $
MUV.write mqrImage2Data (x + y * mqrImage2Size) True -- all unused pixels are already white and do not need to be set
return xs
xxs -> return xxs
return ()
where
ffoldlM d' i f = foldlM f d' i
ffoldlM_ d' i f = void $ foldlM f d' i
--
-- Helper
--
-- | Sets the color of a module and marks it as a function module
setFunctionModule :: PrimMonad m => MQRImage1 (PrimState m) -> Int -> Int -> Bool -> m ()
# INLINABLE setFunctionModule #
setFunctionModule MQRImage1{..} x y isBlack = do
MUV.write mqrImage1Data (y * mqrImage1Size + x) isBlack
MUV.write mqrImage1Fixed (y * mqrImage1Size + x) True
-- | Execute an action n times
iterateN :: Int -> a -> (a -> a) -> a
{-# INLINABLE iterateN #-}
iterateN n0 i0 f = go n0 i0
where
go n i
| n <= 0 = i
| otherwise = go (n-1) (f i)
-- | The mask value of an ErrorLevel
errorLevelMask :: ErrorLevel -> Int
errorLevelMask L = 1
errorLevelMask M = 0
errorLevelMask Q = 3
errorLevelMask H = 2
| null | https://raw.githubusercontent.com/alexkazik/qrcode/7ee12de893c856a968dc1397602a7f81f8ea2c68/qrcode-core/src/Codec/QRCode/Code/Image.hs | haskell |
Draw (almost) all function patterns into an image
| Draw all function patterns
will be overwritten by finder and alignment patterns
| Calculate all alignment pattern positions
| Draw both timing patterns (alternate black/white modules)
| Draw the version information into the image
Calculate error correction code and pack bits
| Mark all modules which will be used by the format bits as a function pattern
(but don't actually write anything into it yet).
Reserve first copy
Draw fixed set module
Functions to be used later (once the format / data is determined)
| Draw the actual format bits into the image
| Draw the code words (data and error correction) into the image
all unused pixels are already white and do not need to be set
Helper
| Sets the color of a module and marks it as a function module
| Execute an action n times
# INLINABLE iterateN #
| The mask value of an ErrorLevel | # LANGUAGE NoImplicitPrelude #
# LANGUAGE RecordWildCards #
module Codec.QRCode.Code.Image
( drawFunctionPatterns
, drawFormatBits
, drawCodeWords
) where
import Codec.QRCode.Base
import Control.Monad.Primitive (PrimMonad, PrimState)
import qualified Data.Vector.Unboxed as UV
import qualified Data.Vector.Unboxed.Mutable as MUV
import Codec.QRCode.Data.ErrorLevel
import Codec.QRCode.Data.Mask
import Codec.QRCode.Data.MQRImage
import Codec.QRCode.Data.Version
drawFunctionPatterns :: PrimMonad m => MQRImage1 (PrimState m) -> m ()
drawFunctionPatterns img@MQRImage1{..} = do
let
(alignmentPatternPositions, maxAlignmentPosition) = calculateAlignmentPatternPositions mqrImage1Version
forM_ [(x,y) | x <- alignmentPatternPositions, y <- alignmentPatternPositions] $ \(x,y) ->
unless (x == 6 && y == 6 || x == maxAlignmentPosition && y == 6 || x == 6 && y == maxAlignmentPosition) $
drawAlignmentPattern img x y
drawFinderPattern img 3 3
drawFinderPattern img (mqrImage1Size-4) 3
drawFinderPattern img 3 (mqrImage1Size-4)
when (unVersion mqrImage1Version >= 7) $
drawVersion img
will be overwritten later with
where
calculateAlignmentPatternPositions :: Version -> ([Int], Int)
calculateAlignmentPatternPositions ver
| unVersion ver == 1 = ([], 0)
| otherwise =
let
numAlign = unVersion ver `div` 7 + 2
step
| unVersion ver == 32 = 26
| otherwise = (unVersion ver * 4 + numAlign * 2 + 1) `div` (2 * numAlign - 2) * 2
pos p = unVersion ver * 4 + 10 - p * step
in
(6 : [ pos p | p <- [0 .. numAlign-2]], pos 0)
drawTimingPatterns :: PrimMonad m => MQRImage1 (PrimState m) -> m ()
drawTimingPatterns img@MQRImage1{..} =
forM_ [0 .. mqrImage1Size-1] $ \i -> do
setFunctionModule img 6 i (i `mod` 2 == 0)
setFunctionModule img i 6 (i `mod` 2 == 0)
| Draws a 5 * 5 alignment pattern , with the center module at ( x , y )
drawAlignmentPattern :: PrimMonad m => MQRImage1 (PrimState m) -> Int -> Int -> m ()
drawAlignmentPattern img x y =
forM_ [-2 .. 2] $ \i ->
forM_ [-2 .. 2] $ \j ->
setFunctionModule img (x+j) (y+i) ((abs i `max` abs j) /= 1)
| Draws a 9 * 9 finder pattern including the border separator , with the center module at ( x , y )
drawFinderPattern :: PrimMonad m => MQRImage1 (PrimState m) -> Int -> Int -> m ()
drawFinderPattern img@MQRImage1{..} x y =
forM_ [-4 .. 4] $ \i ->
forM_ [-4 .. 4] $ \j -> do
let
dist = abs i `max` abs j
x' = x + j
y' = y + i
when (x' >= 0 && x' < mqrImage1Size && y' >= 0 && y' < mqrImage1Size) $
setFunctionModule img (x+j) (y+i) (dist /= 2 && dist /= 4)
drawVersion :: PrimMonad m => MQRImage1 (PrimState m) -> m ()
drawVersion img@MQRImage1{..} = do
let
v = unVersion mqrImage1Version
rem' = iterateN 12 v (\r -> (r `shiftL` 1) `xor` ((r `shiftR` 11) * 0x1F25))
da = (v `shiftL` 12) .|. rem'
Draw two copies
forM_ [0 .. 17] $ \i -> do
let
d = testBit da i
a = mqrImage1Size - 11 + (i `mod` 3)
b = i `div` 3
setFunctionModule img a b d
setFunctionModule img b a d
reserveFormatBits :: PrimMonad m => MQRImage1 (PrimState m) -> m ()
reserveFormatBits img@MQRImage1{..} = do
let
fn x y = MUV.write mqrImage1Fixed (y * mqrImage1Size + x) True
forM_ [0 .. 5] $ \i ->
fn 8 i
fn 8 7
fn 8 8
fn 7 8
forM_ [9 .. 14] $ \i ->
fn (14 - i) 8
Reserve second copy
forM_ [0 .. 7] $ \i ->
fn (mqrImage1Size - 1 - i) 8
forM_ [8 .. 14] $ \i ->
fn 8 (mqrImage1Size - 15 + i)
setFunctionModule img 8 (mqrImage1Size - 8) True
drawFormatBits :: PrimMonad m => MQRImage3 (PrimState m) -> Mask -> m ()
drawFormatBits MQRImage3{..} m = do
let
daSource = (errorLevelMask mqrImage3ErrorLevel `shiftL` 3) .|. fromEnum m
rem' = iterateN 10 daSource (\r -> (r `shiftL` 1) `xor` ((r `shiftR` 9) * 0x537))
da = ((daSource `shiftL` 10) .|. rem') `xor` 0x5412
fn x y = MUV.write mqrImage3Data (x + y * mqrImage3Size)
Draw first copy
forM_ [0 .. 5] $ \i ->
fn 8 i (testBit da i)
fn 8 7 (testBit da 6)
fn 8 8 (testBit da 7)
fn 7 8 (testBit da 8)
forM_ [9 .. 14] $ \i ->
fn (14 - i) 8 (testBit da i)
Draw second copy
forM_ [0 .. 7] $ \i ->
fn (mqrImage3Size - 1 - i) 8 (testBit da i)
forM_ [8 .. 14] $ \i ->
fn 8 (mqrImage3Size - 15 + i) (testBit da i)
drawCodeWords :: PrimMonad m => MQRImage2 (PrimState m) -> [Bool] -> m ()
drawCodeWords MQRImage2{..} d = do
ffoldlM_ d ([mqrImage2Size-1, mqrImage2Size-3 .. 8] ++ [5, 3, 1]) $ \d' right -> do
let
upward = ((right + 1) .&. 2) == 0
ffoldlM d' (bool [0 .. mqrImage2Size-1] [mqrImage2Size-1, mqrImage2Size-2 .. 0] upward) $ \d'' y ->
ffoldlM d'' [right, right-1] $ \d''' x -> do
let
f = mqrImage2Fixed UV.! (x + y * mqrImage2Size)
case d''' of
(isBlack:xs)
| not f -> do
when isBlack $
return xs
xxs -> return xxs
return ()
where
ffoldlM d' i f = foldlM f d' i
ffoldlM_ d' i f = void $ foldlM f d' i
setFunctionModule :: PrimMonad m => MQRImage1 (PrimState m) -> Int -> Int -> Bool -> m ()
# INLINABLE setFunctionModule #
setFunctionModule MQRImage1{..} x y isBlack = do
MUV.write mqrImage1Data (y * mqrImage1Size + x) isBlack
MUV.write mqrImage1Fixed (y * mqrImage1Size + x) True
iterateN :: Int -> a -> (a -> a) -> a
iterateN n0 i0 f = go n0 i0
where
go n i
| n <= 0 = i
| otherwise = go (n-1) (f i)
errorLevelMask :: ErrorLevel -> Int
errorLevelMask L = 1
errorLevelMask M = 0
errorLevelMask Q = 3
errorLevelMask H = 2
|
60c9de7049053719f6dcf0ba30f832dee14eac6945e2a9746b3638e763b9652c | dyzsr/ocaml-selectml | polling.ml | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
and , projet Cambium , INRIA Paris
, OCaml Labs Consultancy Ltd
and , Jane Street Europe
(* *)
Copyright 2021 Institut National de Recherche en Informatique et
(* en Automatique. *)
Copyright 2021 OCaml Labs Consultancy Ltd
Copyright 2021 Jane Street Group LLC
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
open Mach
open Format
module Int = Numbers.Int
module String = Misc.Stdlib.String
let function_is_assumed_to_never_poll func =
String.starts_with ~prefix:"caml_apply" func
|| String.starts_with ~prefix:"caml_send" func
(* These are used for the poll error annotation later on*)
type polling_point = Alloc | Poll | Function_call | External_call
type error = Poll_error of (polling_point * Debuginfo.t) list
exception Error of error
(* Detection of recursive handlers that are not guaranteed to poll
at every loop iteration. *)
We use a backwards dataflow analysis to compute a mapping from handlers H
(= loop heads ) to either " safe " or " unsafe " .
H is " safe " if every path starting from H goes through an Ialloc ,
Ipoll , Ireturn , Itailcall_ind or Itailcall_imm instruction .
H is " unsafe " , therefore , if starting from H we can loop infinitely
without crossing an Ialloc or Ipoll instruction .
(= loop heads) to either "safe" or "unsafe".
H is "safe" if every path starting from H goes through an Ialloc,
Ipoll, Ireturn, Itailcall_ind or Itailcall_imm instruction.
H is "unsafe", therefore, if starting from H we can loop infinitely
without crossing an Ialloc or Ipoll instruction.
*)
type unsafe_or_safe = Unsafe | Safe
module Unsafe_or_safe = struct
type t = unsafe_or_safe
let bot = Unsafe
let join t1 t2 =
match t1, t2 with
| Unsafe, Unsafe
| Unsafe, Safe
| Safe, Unsafe -> Unsafe
| Safe, Safe -> Safe
let lessequal t1 t2 =
match t1, t2 with
| Unsafe, Unsafe
| Unsafe, Safe
| Safe, Safe -> true
| Safe, Unsafe -> false
end
module PolledLoopsAnalysis = Dataflow.Backward(Unsafe_or_safe)
let polled_loops_analysis funbody =
let transfer i ~next ~exn =
match i.desc with
| Iend -> next
| Iop (Ialloc _ | Ipoll _)
| Iop (Itailcall_ind | Itailcall_imm _) -> Safe
| Iop op ->
if operation_can_raise op
then Unsafe_or_safe.join next exn
else next
| Ireturn -> Safe
| Iifthenelse _ | Iswitch _ | Icatch _ | Iexit _ | Itrywith _ -> next
| Iraise _ -> exn
in
(* [exnescape] is [Safe] because we can't loop infinitely having
returned from the function via an unhandled exception. *)
snd (PolledLoopsAnalysis.analyze ~exnescape:Safe ~transfer funbody)
(* Detection of functions that can loop via a tail-call without going
through a poll point. *)
We use a backwards dataflow analysis to compute a single value : either
" Might_not_poll " or " Always_polls " .
" Might_not_poll " means there exists a path from the function entry to a
Potentially Recursive Tail Call ( an Itailcall_ind or
Itailcall_imm to a forward function )
that does not go through an Ialloc or Ipoll instruction .
" Always_polls " , therefore , means the function always polls ( via Ialloc or
Ipoll ) before doing a PRTC . This includes the case where it does not
perform any PRTC .
A note on Potentially Recursive Tail Calls
------------------------------------------
Tail calls can create infinite loops , of course . ( Consider a function
that tail - calls itself . ) But not all tail calls need to be flagged
as potential infinite loops .
We optimise by making a partial ordering over Mach functions : in
definition order within a compilation unit , and dependency
order between compilation units . This order is acyclic , as
OCaml does not allow circular dependencies between modules .
It 's also finite , so if there 's an infinite sequence of
function calls then something has to make a forward reference .
Also , in such an infinite sequence of function calls , at most finitely
many of them can be non - tail calls . ( If there are infinitely many
non - tail calls , then the program soon terminates with a stack
overflow ) .
So , every such infinite sequence must contain many forward - referencing
tail calls . These tail calls are the Potentially Recursive Tail Calls
( ) . Polling only on those calls suffices .
Several functions below take a parameter [ future_funcnames ]
which is the set of functions defined " after " the current function
in the current compilation unit . The PTRCs are tail calls
to known functions in [ future_funcnames ] , or tail calls to
unknown functions .
"Might_not_poll" or "Always_polls".
"Might_not_poll" means there exists a path from the function entry to a
Potentially Recursive Tail Call (an Itailcall_ind or
Itailcall_imm to a forward function)
that does not go through an Ialloc or Ipoll instruction.
"Always_polls", therefore, means the function always polls (via Ialloc or
Ipoll) before doing a PRTC. This includes the case where it does not
perform any PRTC.
A note on Potentially Recursive Tail Calls
------------------------------------------
Tail calls can create infinite loops, of course. (Consider a function
that tail-calls itself.) But not all tail calls need to be flagged
as potential infinite loops.
We optimise by making a partial ordering over Mach functions: in
definition order within a compilation unit, and dependency
order between compilation units. This order is acyclic, as
OCaml does not allow circular dependencies between modules.
It's also finite, so if there's an infinite sequence of
function calls then something has to make a forward reference.
Also, in such an infinite sequence of function calls, at most finitely
many of them can be non-tail calls. (If there are infinitely many
non-tail calls, then the program soon terminates with a stack
overflow).
So, every such infinite sequence must contain many forward-referencing
tail calls. These tail calls are the Potentially Recursive Tail Calls
(PTRCs). Polling only on those calls suffices.
Several functions below take a parameter [future_funcnames]
which is the set of functions defined "after" the current function
in the current compilation unit. The PTRCs are tail calls
to known functions in [future_funcnames], or tail calls to
unknown functions.
*)
type polls_before_prtc = Might_not_poll | Always_polls
module Polls_before_prtc = struct
type t = polls_before_prtc
let bot = Always_polls
let join t1 t2 =
match t1, t2 with
| Might_not_poll, Might_not_poll
| Might_not_poll, Always_polls
| Always_polls, Might_not_poll -> Might_not_poll
| Always_polls, Always_polls -> Always_polls
let lessequal t1 t2 =
match t1, t2 with
| Always_polls, Always_polls
| Always_polls, Might_not_poll
| Might_not_poll, Might_not_poll -> true
| Might_not_poll, Always_polls -> false
end
module PTRCAnalysis = Dataflow.Backward(Polls_before_prtc)
let potentially_recursive_tailcall ~future_funcnames funbody =
let transfer i ~next ~exn =
match i.desc with
| Iend -> next
| Iop (Ialloc _ | Ipoll _) -> Always_polls
| Iop (Itailcall_ind) -> Might_not_poll (* this is a PTRC *)
| Iop (Itailcall_imm { func }) ->
if String.Set.mem func future_funcnames
|| function_is_assumed_to_never_poll func
then Might_not_poll (* this is a PTRC *)
else Always_polls (* this is not a PTRC *)
| Iop op ->
if operation_can_raise op
then Polls_before_prtc.join next exn
else next
| Ireturn -> Always_polls
| Iifthenelse _ | Iswitch _ | Icatch _ | Iexit _ | Itrywith _ -> next
| Iraise _ -> exn
in
fst (PTRCAnalysis.analyze ~transfer funbody)
We refer to the set of recursive handler labels that need extra polling
as the " unguarded back edges " ( " ube " ) .
Given the result of the analysis of recursive handlers , add [ Ipoll ]
instructions at the [ Iexit ] instructions before unguarded back edges ,
thus ensuring that every loop contains a poll point . Also compute whether
the resulting function contains any [ Ipoll ] instructions .
as the "unguarded back edges" ("ube").
Given the result of the analysis of recursive handlers, add [Ipoll]
instructions at the [Iexit] instructions before unguarded back edges,
thus ensuring that every loop contains a poll point. Also compute whether
the resulting function contains any [Ipoll] instructions.
*)
let contains_polls = ref false
let add_poll i =
contains_polls := true;
Mach.instr_cons_debug (Iop (Ipoll { return_label = None })) [||] [||] i.dbg i
let instr_body handler_safe i =
let add_unsafe_handler ube (k, _) =
match handler_safe k with
| Safe -> ube
| Unsafe -> Int.Set.add k ube
in
let rec instr ube i =
match i.desc with
| Iifthenelse (test, i0, i1) ->
{ i with
desc = Iifthenelse (test, instr ube i0, instr ube i1);
next = instr ube i.next;
}
| Iswitch (index, cases) ->
{ i with
desc = Iswitch (index, Array.map (instr ube) cases);
next = instr ube i.next;
}
| Icatch (rc, hdl, body) ->
let ube' =
match rc with
| Cmm.Recursive -> List.fold_left add_unsafe_handler ube hdl
| Cmm.Nonrecursive -> ube in
let instr_handler (k, i0) =
let i1 = instr ube' i0 in
(k, i1) in
(* Since we are only interested in unguarded _back_ edges, we don't
use [ube'] for instrumenting [body], but just [ube] instead. *)
let body = instr ube body in
{ i with
desc = Icatch (rc,
List.map instr_handler hdl,
body);
next = instr ube i.next;
}
| Iexit k ->
if Int.Set.mem k ube
then add_poll i
else i
| Itrywith (body, hdl) ->
{ i with
desc = Itrywith (instr ube body, instr ube hdl);
next = instr ube i.next;
}
| Iend | Ireturn | Iraise _ -> i
| Iop op ->
begin match op with
| Ipoll _ -> contains_polls := true
| _ -> ()
end;
{ i with next = instr ube i.next }
in
instr Int.Set.empty i
let find_poll_alloc_or_calls instr =
let f_match i =
match i.desc with
| Iop(Ipoll _) -> Some (Poll, i.dbg)
| Iop(Ialloc _) -> Some (Alloc, i.dbg)
| Iop(Icall_ind | Icall_imm _ |
Itailcall_ind | Itailcall_imm _ ) -> Some (Function_call, i.dbg)
| Iop(Iextcall { alloc = true }) -> Some (External_call, i.dbg)
| Iop(Imove | Ispill | Ireload | Iconst_int _ | Iconst_float _ |
Iconst_symbol _ | Iextcall { alloc = false } | Istackoffset _ |
Iload _ | Istore _ | Iintop _ | Iintop_imm _ | Ifloatofint |
Iintoffloat | Inegf | Iabsf | Iaddf | Isubf | Imulf | Idivf |
Iopaque | Ispecific _)-> None
| Iend | Ireturn | Iifthenelse _ | Iswitch _ | Icatch _ | Iexit _ |
Itrywith _ | Iraise _ -> None
in
let matches = ref [] in
Mach.instr_iter
(fun i ->
match f_match i with
| Some(x) -> matches := x :: !matches
| None -> ())
instr;
List.rev !matches
let instrument_fundecl ~future_funcnames:_ (f : Mach.fundecl) : Mach.fundecl =
if function_is_assumed_to_never_poll f.fun_name then f
else begin
let handler_needs_poll = polled_loops_analysis f.fun_body in
contains_polls := false;
let new_body = instr_body handler_needs_poll f.fun_body in
begin match f.fun_poll with
| Error_poll -> begin
match find_poll_alloc_or_calls new_body with
| [] -> ()
| poll_error_instrs -> raise (Error(Poll_error poll_error_instrs))
end
| Default_poll -> () end;
let new_contains_calls = f.fun_contains_calls || !contains_polls in
{ f with fun_body = new_body; fun_contains_calls = new_contains_calls }
end
let requires_prologue_poll ~future_funcnames ~fun_name i =
if function_is_assumed_to_never_poll fun_name then false
else
match potentially_recursive_tailcall ~future_funcnames i with
| Might_not_poll -> true
| Always_polls -> false
(* Error report *)
let instr_type p =
match p with
| Poll -> "inserted poll"
| Alloc -> "allocation"
| Function_call -> "function call"
| External_call -> "external call that allocates"
(* Pretty-printer for [Poll_error]: list the polling points present in
   a function that carries the [@poll error] attribute. *)
let report_error ppf = function
  | Poll_error instrs ->
    (* Split compiler-inserted polls from user-visible polling points
       (allocations and calls); List.partition preserves order. *)
    let inserted_polls, user_polls =
      List.partition
        (fun (p, _) ->
           match p with
           | Poll -> true
           | Alloc | Function_call | External_call -> false)
        instrs
    in
    begin match user_polls with
    | [] ->
      fprintf ppf "Function with poll-error attribute contains polling \
                   points (inserted by the compiler)\n"
    | _ :: _ ->
      fprintf ppf
        "Function with poll-error attribute contains polling points:\n";
      List.iter
        (fun (p, dbg) ->
           fprintf ppf "\t%s at " (instr_type p);
           Location.print_loc ppf (Debuginfo.to_location dbg);
           fprintf ppf "\n")
        user_polls;
      if inserted_polls <> [] then
        fprintf ppf "\t(plus compiler-inserted polling point(s) in prologue \
                     and/or loop back edges)\n"
    end
(* Hook [report_error] into the compiler's generic error-reporting
   machinery so that [Error _] exceptions are rendered via it. *)
let () =
  let handler exn =
    match exn with
    | Error err -> Some (Location.error_of_printer_file report_error err)
    | _ -> None
  in
  Location.register_error_of_exn handler
| null | https://raw.githubusercontent.com/dyzsr/ocaml-selectml/875544110abb3350e9fb5ec9bbadffa332c270d2/asmcomp/polling.ml | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
These are used for the poll error annotation later on
Detection of recursive handlers that are not guaranteed to poll
at every loop iteration.
[exnescape] is [Safe] because we can't loop infinitely having
returned from the function via an unhandled exception.
Detection of functions that can loop via a tail-call without going
through a poll point.
this is a PTRC
this is a PTRC
this is not a PTRC
Since we are only interested in unguarded _back_ edges, we don't
use [ube'] for instrumenting [body], but just [ube] instead.
Error report | and , projet Cambium , INRIA Paris
, OCaml Labs Consultancy Ltd
and , Jane Street Europe
Copyright 2021 Institut National de Recherche en Informatique et
Copyright 2021 OCaml Labs Consultancy Ltd
Copyright 2021 Jane Street Group LLC
the GNU Lesser General Public License version 2.1 , with the
open Mach
open Format
module Int = Numbers.Int
module String = Misc.Stdlib.String
(* The runtime support functions caml_apply* and caml_send* are known
   never to poll, so they are exempt from instrumentation. *)
let function_is_assumed_to_never_poll func =
  List.exists
    (fun prefix -> String.starts_with ~prefix func)
    ["caml_apply"; "caml_send"]
(* These are used for the poll error annotation later on. *)
(* Kind of polling point that can occur in an instrumented body. *)
type polling_point = Alloc | Poll | Function_call | External_call
(* [Poll_error ps]: the polling points (with debug info) found in a
   function carrying the [@poll error] attribute. *)
type error = Poll_error of (polling_point * Debuginfo.t) list
exception Error of error
(* We use a backwards dataflow analysis to compute a mapping from handlers H
   (= loop heads) to either "safe" or "unsafe".

   H is "safe" if every path starting from H goes through an Ialloc,
   Ipoll, Ireturn, Itailcall_ind or Itailcall_imm instruction.

   H is "unsafe", therefore, if starting from H we can loop infinitely
   without crossing an Ialloc or Ipoll instruction. *)
(* Lattice element for the polled-loops analysis: [Safe] when every
   path from a point crosses a poll point, [Unsafe] otherwise. *)
type unsafe_or_safe = Unsafe | Safe

(* Join-semilattice instance used by the backward dataflow engine.
   [Unsafe] is absorbing for [join]; the order is Unsafe <= Safe. *)
module Unsafe_or_safe = struct
  type t = unsafe_or_safe

  (* Bottom of the lattice: assume the worst until proven safe. *)
  let bot = Unsafe

  (* A merge point is safe only if both incoming paths are safe. *)
  let join t1 t2 =
    match t1, t2 with
    | Safe, Safe -> Safe
    | Unsafe, _ | _, Unsafe -> Unsafe

  (* Partial order: only [Safe <= Unsafe] is false. *)
  let lessequal t1 t2 =
    match t1, t2 with
    | Safe, Unsafe -> false
    | Unsafe, _ | _, Safe -> true
end
(* Backward dataflow over the [Unsafe_or_safe] lattice. *)
module PolledLoopsAnalysis = Dataflow.Backward(Unsafe_or_safe)

(* Compute, for the recursive handlers of [funbody], whether every path
   from the handler crosses a poll point ([Safe]) or the handler may
   loop without polling ([Unsafe]).  Returns the [snd] component of the
   dataflow result (the per-handler verdicts). *)
let polled_loops_analysis funbody =
  let transfer i ~next ~exn =
    match i.desc with
    | Iend -> next
    (* Allocations and polls poll; tail calls leave the frame, so any
       path through them is Safe. *)
    | Iop (Ialloc _ | Ipoll _)
    | Iop (Itailcall_ind | Itailcall_imm _) -> Safe
    | Iop op ->
      (* A raising operation may continue at the exception handler. *)
      if operation_can_raise op
      then Unsafe_or_safe.join next exn
      else next
    | Ireturn -> Safe
    | Iifthenelse _ | Iswitch _ | Icatch _ | Iexit _ | Itrywith _ -> next
    | Iraise _ -> exn
  in
  (* [exnescape:Safe]: we can't loop infinitely having escaped the
     function via an unhandled exception. *)
  snd (PolledLoopsAnalysis.analyze ~exnescape:Safe ~transfer funbody)
(* We use a backwards dataflow analysis to compute a single value: either
   "Might_not_poll" or "Always_polls".

   "Might_not_poll" means there exists a path from the function entry to a
   Potentially Recursive Tail Call (an Itailcall_ind or Itailcall_imm to a
   forward function) that does not go through an Ialloc or Ipoll
   instruction.

   "Always_polls", therefore, means the function always polls (via Ialloc
   or Ipoll) before doing a PRTC. This includes the case where it does not
   perform any PRTC.

   A note on Potentially Recursive Tail Calls
   ------------------------------------------

   Tail calls can create infinite loops, of course. (Consider a function
   that tail-calls itself.) But not all tail calls need to be flagged
   as potential infinite loops.

   We optimise by making a partial ordering over Mach functions: in
   definition order within a compilation unit, and dependency
   order between compilation units. This order is acyclic, as
   OCaml does not allow circular dependencies between modules.
   It's also finite, so if there's an infinite sequence of
   function calls then something has to make a forward reference.

   Also, in such an infinite sequence of function calls, at most finitely
   many of them can be non-tail calls. (If there are infinitely many
   non-tail calls, then the program soon terminates with a stack
   overflow).

   So, every such infinite sequence must contain many forward-referencing
   tail calls. These tail calls are the Potentially Recursive Tail Calls
   (PTRCs). Polling only on those calls suffices.

   Several functions below take a parameter [future_funcnames]
   which is the set of functions defined "after" the current function
   in the current compilation unit. The PTRCs are tail calls
   to known functions in [future_funcnames], or tail calls to
   unknown functions. *)
(* Lattice element for the prologue-poll analysis: [Might_not_poll]
   when some path may reach a potentially recursive tail call without
   polling first. *)
type polls_before_prtc = Might_not_poll | Always_polls

(* Join-semilattice instance for the backward dataflow engine.
   [Might_not_poll] is absorbing for [join]; the order is
   Always_polls <= Might_not_poll. *)
module Polls_before_prtc = struct
  type t = polls_before_prtc

  (* Bottom: optimistically assume polling until shown otherwise. *)
  let bot = Always_polls

  (* A merge point always polls only if both incoming paths do. *)
  let join t1 t2 =
    match t1, t2 with
    | Always_polls, Always_polls -> Always_polls
    | Might_not_poll, _ | _, Might_not_poll -> Might_not_poll

  (* Partial order: only [Might_not_poll <= Always_polls] is false. *)
  let lessequal t1 t2 =
    match t1, t2 with
    | Might_not_poll, Always_polls -> false
    | Always_polls, _ | _, Might_not_poll -> true
end
module PTRCAnalysis = Dataflow.Backward(Polls_before_prtc)

(* Determine whether [funbody] may reach a Potentially Recursive Tail
   Call (PTRC) -- an indirect tail call, or a direct tail call to a
   function defined later in this unit or assumed to never poll --
   without first crossing an Ialloc or Ipoll.  [Might_not_poll] means a
   prologue poll is required.
   NOTE(review): the extracted text was missing the [Itailcall_ind]
   case and the [then]/[else] branches of the [Itailcall_imm] case
   (leaving an [if] without a [then]); they are restored here to match
   the upstream pass and its "this is a PTRC" comments. *)
let potentially_recursive_tailcall ~future_funcnames funbody =
  let transfer i ~next ~exn =
    match i.desc with
    | Iend -> next
    | Iop (Ialloc _ | Ipoll _) -> Always_polls
    | Iop (Itailcall_ind) -> Might_not_poll (* this is a PTRC *)
    | Iop (Itailcall_imm { func }) ->
      if String.Set.mem func future_funcnames (* this is a PTRC *)
         || function_is_assumed_to_never_poll func
      then Might_not_poll
      else Always_polls (* this is not a PTRC *)
    | Iop op ->
      (* A raising operation may continue at the exception handler. *)
      if operation_can_raise op
      then Polls_before_prtc.join next exn
      else next
    | Ireturn -> Always_polls
    | Iifthenelse _ | Iswitch _ | Icatch _ | Iexit _ | Itrywith _ -> next
    | Iraise _ -> exn
  in
  fst (PTRCAnalysis.analyze ~transfer funbody)
(* We refer to the set of recursive handler labels that need extra polling
   as the "unguarded back edges" ("ube").

   Given the result of the analysis of recursive handlers, add [Ipoll]
   instructions at the [Iexit] instructions before unguarded back edges,
   thus ensuring that every loop contains a poll point. Also compute
   whether the resulting function contains any [Ipoll] instructions. *)
(* Set to [true] whenever a poll is inserted in, or already present in,
   the function currently being instrumented; reset per function by
   [instrument_fundecl]. *)
let contains_polls = ref false

(* Prepend a poll instruction (carrying [i]'s debug info) in front of
   [i], recording that the current function now contains a poll. *)
let add_poll i =
  contains_polls := true;
  Mach.instr_cons_debug (Iop (Ipoll { return_label = None })) [||] [||] i.dbg i
(* Instrument body [i]: insert a poll before every [Iexit k] that jumps
   back to a recursive handler [k] marked [Unsafe] by the analysis
   (i.e. an unguarded loop back edge).  [handler_safe] maps handler
   labels to the verdict of [polled_loops_analysis].  Also sets
   [contains_polls] when a pre-existing poll is encountered. *)
let instr_body handler_safe i =
  (* Accumulate into [ube] the labels of unsafe recursive handlers. *)
  let add_unsafe_handler ube (k, _) =
    match handler_safe k with
    | Safe -> ube
    | Unsafe -> Int.Set.add k ube
  in
  let rec instr ube i =
    match i.desc with
    | Iifthenelse (test, i0, i1) ->
      { i with
        desc = Iifthenelse (test, instr ube i0, instr ube i1);
        next = instr ube i.next;
      }
    | Iswitch (index, cases) ->
      { i with
        desc = Iswitch (index, Array.map (instr ube) cases);
        next = instr ube i.next;
      }
    | Icatch (rc, hdl, body) ->
      (* Only recursive handlers introduce back edges. *)
      let ube' =
        match rc with
        | Cmm.Recursive -> List.fold_left add_unsafe_handler ube hdl
        | Cmm.Nonrecursive -> ube in
      let instr_handler (k, i0) =
        let i1 = instr ube' i0 in
        (k, i1) in
      (* Since we are only interested in unguarded _back_ edges, we
         don't use [ube'] for instrumenting [body], but just [ube]. *)
      let body = instr ube body in
      { i with
        desc = Icatch (rc,
                 List.map instr_handler hdl,
                 body);
        next = instr ube i.next;
      }
    | Iexit k ->
      (* Exit to an unsafe recursive handler = unguarded back edge:
         poll here. *)
      if Int.Set.mem k ube
      then add_poll i
      else i
    | Itrywith (body, hdl) ->
      { i with
        desc = Itrywith (instr ube body, instr ube hdl);
        next = instr ube i.next;
      }
    | Iend | Ireturn | Iraise _ -> i
    | Iop op ->
      (* Record polls already present in the source body. *)
      begin match op with
      | Ipoll _ -> contains_polls := true
      | _ -> ()
      end;
      { i with next = instr ube i.next }
  in
  instr Int.Set.empty i
(* Collect, in iteration order, every polling point in [instr] -- poll,
   allocation, function call, or allocating external call -- paired
   with its debug info.  Used to diagnose [@poll error] violations. *)
let find_poll_alloc_or_calls instr =
  (* Classify one instruction; [None] for non-polling instructions. *)
  let f_match i =
    match i.desc with
    | Iop(Ipoll _) -> Some (Poll, i.dbg)
    | Iop(Ialloc _) -> Some (Alloc, i.dbg)
    | Iop(Icall_ind | Icall_imm _ |
          Itailcall_ind | Itailcall_imm _ ) -> Some (Function_call, i.dbg)
    | Iop(Iextcall { alloc = true }) -> Some (External_call, i.dbg)
    | Iop(Imove | Ispill | Ireload | Iconst_int _ | Iconst_float _ |
          Iconst_symbol _ | Iextcall { alloc = false } | Istackoffset _ |
          Iload _ | Istore _ | Iintop _ | Iintop_imm _ | Ifloatofint |
          Iintoffloat | Inegf | Iabsf | Iaddf | Isubf | Imulf | Idivf |
          Iopaque | Ispecific _)-> None
    | Iend | Ireturn | Iifthenelse _ | Iswitch _ | Icatch _ | Iexit _ |
      Itrywith _ | Iraise _ -> None
  in
  (* Accumulate in reverse, then restore program order. *)
  let matches = ref [] in
  Mach.instr_iter
    (fun i ->
       match f_match i with
       | Some(x) -> matches := x :: !matches
       | None -> ())
    instr;
  List.rev !matches
(* Insert poll points into [f] (at unguarded loop back edges), unless
   [f] is one of the runtime support functions (caml_apply*/caml_send*)
   assumed to never poll.
   @raise Error [(Poll_error _)] if [f] carries the [@poll error]
   attribute yet polling points remain after instrumentation. *)
let instrument_fundecl ~future_funcnames:_ (f : Mach.fundecl) : Mach.fundecl =
  if function_is_assumed_to_never_poll f.fun_name then f
  else begin
    (* Per-handler Safe/Unsafe verdicts for recursive handlers. *)
    let handler_needs_poll = polled_loops_analysis f.fun_body in
    (* [contains_polls] is a module-level ref updated by [instr_body];
       reset it before instrumenting this function. *)
    contains_polls := false;
    let new_body = instr_body handler_needs_poll f.fun_body in
    begin match f.fun_poll with
    | Error_poll -> begin
      (* [@poll error]: report every remaining poll/alloc/call. *)
      match find_poll_alloc_or_calls new_body with
      | [] -> ()
      | poll_error_instrs -> raise (Error(Poll_error poll_error_instrs))
    end
    | Default_poll -> () end;
    (* A function into which polls were inserted is flagged as
       containing calls. *)
    let new_contains_calls = f.fun_contains_calls || !contains_polls in
    { f with fun_body = new_body; fun_contains_calls = new_contains_calls }
  end
(* Decide whether a poll must be inserted in the prologue of the
   function named [fun_name] whose body is [i]: true exactly when the
   body may reach a potentially recursive tail call without polling,
   and the function is not one assumed to never poll. *)
let requires_prologue_poll ~future_funcnames ~fun_name i =
  (not (function_is_assumed_to_never_poll fun_name))
  && (match potentially_recursive_tailcall ~future_funcnames i with
      | Might_not_poll -> true
      | Always_polls -> false)
(* Human-readable description of a polling point, used in the
   [@poll error] diagnostic below. *)
let instr_type = function
  | Poll -> "inserted poll"
  | Alloc -> "allocation"
  | Function_call -> "function call"
  | External_call -> "external call that allocates"
(* Pretty-printer for [Poll_error]: list the polling points present in
   a function that carries the [@poll error] attribute. *)
let report_error ppf = function
  | Poll_error instrs ->
    (* Split compiler-inserted polls from user-visible polling points
       (allocations and calls); List.partition preserves order. *)
    let inserted_polls, user_polls =
      List.partition
        (fun (p, _) ->
           match p with
           | Poll -> true
           | Alloc | Function_call | External_call -> false)
        instrs
    in
    begin match user_polls with
    | [] ->
      fprintf ppf "Function with poll-error attribute contains polling \
                   points (inserted by the compiler)\n"
    | _ :: _ ->
      fprintf ppf
        "Function with poll-error attribute contains polling points:\n";
      List.iter
        (fun (p, dbg) ->
           fprintf ppf "\t%s at " (instr_type p);
           Location.print_loc ppf (Debuginfo.to_location dbg);
           fprintf ppf "\n")
        user_polls;
      if inserted_polls <> [] then
        fprintf ppf "\t(plus compiler-inserted polling point(s) in prologue \
                     and/or loop back edges)\n"
    end
(* Hook [report_error] into the compiler's generic error-reporting
   machinery so that [Error _] exceptions are rendered via it. *)
let () =
  let handler exn =
    match exn with
    | Error err -> Some (Location.error_of_printer_file report_error err)
    | _ -> None
  in
  Location.register_error_of_exn handler
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.