_id (stringlengths 64-64) | repository (stringlengths 6-84) | name (stringlengths 4-110) | content (stringlengths 0-248k) | license (null) | download_url (stringlengths 89-454) | language (stringclasses 7 values) | comments (stringlengths 0-74.6k) | code (stringlengths 0-248k) |
---|---|---|---|---|---|---|---|---|
da230af67506d2d3d42916800a23b541a2b78354593debcde91a3d202d8de81a | manuel-serrano/hop | notepad.scm | ;*=====================================================================*/
;*    serrano/prgm/project/hop/hop/widget/notepad.scm                  */
;*    -------------------------------------------------------------    */
;*    Author      :                                                    */
;*    Creation    :  Thu Aug 18 10:01:02 2005                          */
;*    Last change :  Tue May  7 12:03:13 2019 (serrano)                */
;*    Copyright   :  2005-19                                           */
;* ------------------------------------------------------------- */
;* The HOP implementation of notepads. */
;*=====================================================================*/
;*---------------------------------------------------------------------*/
;* The module */
;*---------------------------------------------------------------------*/
(module __hopwidget-notepad
(library hop)
(static (class xml-nphead-element::xml-element)
(class xml-nptabhead-element::xml-element)
(class xml-nptab-element::xml-element
(idtab::bstring read-only)
(head::xml-nptabhead-element read-only)
(onselect read-only)
klass::bstring ))
(export (<NOTEPAD> . ::obj)
(<NPHEAD> . ::obj)
(<NPTAB> . ::obj)
(<NPTABHEAD> . ::obj)))
;*---------------------------------------------------------------------*/
;* object-serializer ::html-foldlist ... */
;*---------------------------------------------------------------------*/
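;; Serialization compiles the element to a JavaScript expression string on a string port;
;; unserialization is the identity, since the value is rebuilt on the client side.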
(define (serialize o ctx)
(let ((p (open-output-string)))
(obj->javascript-expr o p ctx)
(close-output-port p)))
(define (unserialize o ctx)
o)
(register-class-serialization! xml-nphead-element serialize unserialize)
(register-class-serialization! xml-nptabhead-element serialize unserialize)
(register-class-serialization! xml-nptab-element serialize unserialize)
;*---------------------------------------------------------------------*/
;* <NOTEPAD> ... */
;* ------------------------------------------------------------- */
;*    See __ for HSS types.                                            */
;*---------------------------------------------------------------------*/
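;; <NOTEPAD> keeps an optional leading <NPHEAD> and the <NPTAB> children; other body elements are dropped.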
(define-tag <NOTEPAD> ((id #unspecified string)
(%context #f)
(class #unspecified string)
(history #unspecified)
(onchange #f)
(%location #f)
(attrs)
body)
(let ((id (xml-make-id id 'NOTEPAD))
(history (if (boolean? history) history (not (eq? id #unspecified))))
(body (xml-body body %context))
head)
(if (and (pair? body) (isa? (car body) xml-nphead-element))
(begin
(set! head (car body))
(set! body (filter (lambda (e) (isa? e xml-nptab-element))
(cdr body))))
(begin
(set! head #f)
(set! body (filter (lambda (e) (isa? e xml-nptab-element))
body))))
(if (null? body)
(error "<NOTEPAD>" "Missing <NPTAB> elements" id)
(notepad id class history
(map (lambda (a) (xml-primitive-value a %context)) attrs)
head body onchange %context))))
;*---------------------------------------------------------------------*/
;*    nptab-get-body ...                                               */
;*---------------------------------------------------------------------*/
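;; Return a tab's body, forcing the xml-delay thunk when the body is a single delayed element.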
(define (nptab-get-body tab)
(with-access::xml-nptab-element tab (body)
(if (and (isa? (car body) xml-delay) (null? (cdr body)))
(with-access::xml-delay (car body) (thunk)
(thunk))
body)))
;*---------------------------------------------------------------------*/
;* make-class-name ... */
;*---------------------------------------------------------------------*/
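;; Append the user-supplied class (when given) to the default class string.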
(define (make-class-name::bstring default::bstring name)
(if (string? name)
(string-append default " " name)
default))
;*---------------------------------------------------------------------*/
;* notepad ... */
;*---------------------------------------------------------------------*/
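;; Build the notepad markup: a container <div> holding the head, a table with one row of tab
;; headers and one row of tab bodies, and a <script> wiring the onchange callback and the
;; service used to fetch delayed tab bodies.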
(define (notepad id klass history attrs head tabs onchange ctx)
(define svc
(call-with-output-string
(lambda (op)
(obj->javascript-attr
(procedure->service
(lambda (i)
(nptab-get-body (list-ref tabs i))))
op))))
(define (make-tab-div tab i)
(with-access::xml-nptab-element tab (attributes (idt id) idtab body klass)
(let ((click (format "hop_notepad_select( '~a', '~a', ~a )"
id idt (if history "true" "false"))))
(set! attributes
`(:onclick ,(secure-javascript-attr click)
:class ,(string-append klass
(if (=fx i 0)
" hop-nptab-active"
" hop-nptab-inactive"))
,@attributes)))
(with-access::xml-element tab (body)
(when (and (pair? body)
(isa? (car body) xml-delay)
(null? (cdr body)))
(set! attributes `(:lang "delay" ,@attributes))))
(<DIV> :data-hss-tag "hop-notepad-tab-body"
:style (if (=fx i 0) "display: block" "display: none")
:id idt
:data-idtab idtab
(cond
((=fx i 0)
(nptab-get-body tab))
((and (isa? (car body) xml-delay) (null? (cdr body)))
;; we must not eagerly evaluate the tab...
"")
(else
body)))))
(let ((bodies (map (lambda (t i) (make-tab-div t i))
tabs (iota (length tabs))))
(attrs (append-map (lambda (a)
(let ((a (xml-primitive-value a ctx)))
(list (symbol->keyword (car a)) (cdr a))))
attrs)))
(apply <DIV>
:id id
:data-hss-tag "hop-notepad"
:class (make-class-name "hop-notepad" klass)
head
(<TABLE> :data-hss-tag "hop-notepad"
(<TR>
(<TD> :id (string-append id "-tabs")
:data-hss-tag "hop-notepad-tabs"
tabs))
(<TR>
(<TD> :id (string-append id "-body")
:data-hss-tag "hop-notepad-body" bodies)))
(<SCRIPT>
(when onchange
(format "document.getElementById('~a').onchange = ~a"
id (hop->js-callback onchange)))
(format "document.getElementById('~a').onkeyup = function(_) { return ~a;}"
id svc))
attrs)))
;*---------------------------------------------------------------------*/
;* <NPHEAD> ... */
;*---------------------------------------------------------------------*/
(define-tag <NPHEAD> ((id #unspecified string)
(%location #f)
(attr)
body)
(instantiate::xml-nphead-element
(tag 'div)
(id (xml-make-id id 'NPHEAD))
(attributes `(:data-hss-tag "hop-nphead" ,@attr))
(body body)))
;*---------------------------------------------------------------------*/
;* <NPTABHEAD> ... */
;*---------------------------------------------------------------------*/
(define-tag <NPTABHEAD> ((id #unspecified string)
(%location #f)
(attr)
body)
(instantiate::xml-nptabhead-element
(tag 'span)
(id (xml-make-id id 'NPTABHEAD))
(attributes `(:data-hss-tag "hop-nptab-head" ,@attr))
(body body)))
;*---------------------------------------------------------------------*/
;*    <NPTAB> ...                                                      */
;*---------------------------------------------------------------------*/
(define-tag <NPTAB> ((id #unspecified string)
(%context #f)
(class #unspecified string)
(selected #f)
(onselect #f)
(%location #f)
(attr)
body)
(let ((head (filter (lambda (b) (isa? b xml-nptabhead-element)) body))
(body (filter (lambda (x) (not (isa? x xml-nptabhead-element))) body)))
(cond
((null? head)
(error "<NPTAB>" "Missing <NPTABHEAD> " id))
((null? body)
(error "<NPTAB>" "Illegal <NPTABHEAD> " body))
(else
(let ((cla (make-class-name "hop-nptab " class)))
(instantiate::xml-nptab-element
(tag 'span)
(id (xml-make-id id 'NPTAB))
(idtab (xml-make-id #f 'NPTABTAG))
(attributes `(:data-hss-tag "hop-nptab"
,@(map (lambda (a)
(xml-primitive-value a %context))
attr)))
(klass cla)
(onselect onselect)
(head (car head))
(body body)))))))
;*---------------------------------------------------------------------*/
;* xml-write ... */
;*---------------------------------------------------------------------*/
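;; Write a tab as a <span>; when an onselect handler is present, also emit a <script> that installs it.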
(define-method (xml-write obj::xml-nptab-element p backend)
(with-access::xml-nptab-element obj (idtab head attributes onselect)
(display "<span id='" p)
(display idtab p)
(display "'" p)
(xml-write-attributes attributes p backend)
(display ">" p)
(when onselect
(display "<script>" p)
(display "document.getElementById( '" p)
(display idtab p)
(display "' ).onselect = " p)
(display (hop->js-callback onselect) p)
(display "</script>" p))
(xml-write head p backend)
(display "</span>" p)))
;*---------------------------------------------------------------------*/
;* xml-compare ::xml-nphead-element ... */
;*---------------------------------------------------------------------*/
(define-method (xml-compare a1::xml-nphead-element a2)
(if (and (isa? a2 xml-markup)
(with-access::xml-markup a2 (tag)
(eq? tag 'div))
(equal? (dom-get-attribute a2 "class") "hop-nphead"))
(with-access::xml-markup a1 ((body1 body))
(with-access::xml-markup a2 ((body2 body))
(xml-compare body1 body2)))
(call-next-method)))
;*---------------------------------------------------------------------*/
;* xml-compare ::xml-nptabhead-element ... */
;*---------------------------------------------------------------------*/
(define-method (xml-compare a1::xml-nptabhead-element a2)
(if (and (isa? a2 xml-markup)
(with-access::xml-markup a2 (tag)
(eq? tag 'span))
(equal? (dom-get-attribute a2 "data-hss-tag") "hop-nptab-head"))
(with-access::xml-markup a1 ((body1 body))
(with-access::xml-markup a2 ((body2 body))
(xml-compare body1 body2)))
(call-next-method)))
;*---------------------------------------------------------------------*/
;* xml-compare ::xml-nptab-element ... */
;*---------------------------------------------------------------------*/
(define-method (xml-compare a1::xml-nptab-element a2)
(if (and (isa? a2 xml-markup)
(with-access::xml-markup a2 (tag)
(eq? tag 'span))
(equal? (dom-get-attribute a2 "data-hss-tag") "hop-nptab")
(let ((head (dom-first-child a2))
(body (cadr (dom-child-nodes a2))))
(xml-compare (cadr (dom-child-nodes a1)) head)))
(call-next-method)))
| null | https://raw.githubusercontent.com/manuel-serrano/hop/481cb10478286796addd2ec9ee29c95db27aa390/widget/notepad.scm | scheme | *=====================================================================*/
* ------------------------------------------------------------- */
* ------------------------------------------------------------- */
* The HOP implementation of notepads. */
*=====================================================================*/
*---------------------------------------------------------------------*/
* The module */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* object-serializer ::html-foldlist ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* <NOTEPAD> ... */
* ------------------------------------------------------------- */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* make-class-name ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* notepad ... */
*---------------------------------------------------------------------*/
we must not eagerly evaluate the tab...
*---------------------------------------------------------------------*/
* <NPHEAD> ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* <NPTABHEAD> ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* xml-write ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* xml-compare ::xml-nphead-element ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* xml-compare ::xml-nptabhead-element ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* xml-compare ::xml-nptab-element ... */
*---------------------------------------------------------------------*/ | * serrano / prgm / project / hop / hop / widget / notepad.scm * /
* Author : * /
* Creation : Thu Aug 18 10:01:02 2005 * /
* Last change : Tue May 7 12:03:13 2019 ( serrano ) * /
* Copyright : 2005 - 19 * /
(module __hopwidget-notepad
(library hop)
(static (class xml-nphead-element::xml-element)
(class xml-nptabhead-element::xml-element)
(class xml-nptab-element::xml-element
(idtab::bstring read-only)
(head::xml-nptabhead-element read-only)
(onselect read-only)
klass::bstring ))
(export (<NOTEPAD> . ::obj)
(<NPHEAD> . ::obj)
(<NPTAB> . ::obj)
(<NPTABHEAD> . ::obj)))
(define (serialize o ctx)
(let ((p (open-output-string)))
(obj->javascript-expr o p ctx)
(close-output-port p)))
(define (unserialize o ctx)
o)
(register-class-serialization! xml-nphead-element serialize unserialize)
(register-class-serialization! xml-nptabhead-element serialize unserialize)
(register-class-serialization! xml-nptab-element serialize unserialize)
* See _ _ for HSS types . * /
(define-tag <NOTEPAD> ((id #unspecified string)
(%context #f)
(class #unspecified string)
(history #unspecified)
(onchange #f)
(%location #f)
(attrs)
body)
(let ((id (xml-make-id id 'NOTEPAD))
(history (if (boolean? history) history (not (eq? id #unspecified))))
(body (xml-body body %context))
head)
(if (and (pair? body) (isa? (car body) xml-nphead-element))
(begin
(set! head (car body))
(set! body (filter (lambda (e) (isa? e xml-nptab-element))
(cdr body))))
(begin
(set! head #f)
(set! body (filter (lambda (e) (isa? e xml-nptab-element))
body))))
(if (null? body)
(error "<NOTEPAD>" "Missing <NPTAB> elements" id)
(notepad id class history
(map (lambda (a) (xml-primitive-value a %context)) attrs)
head body onchange %context))))
* nptab - get - body ... * /
(define (nptab-get-body tab)
(with-access::xml-nptab-element tab (body)
(if (and (isa? (car body) xml-delay) (null? (cdr body)))
(with-access::xml-delay (car body) (thunk)
(thunk))
body)))
(define (make-class-name::bstring default::bstring name)
(if (string? name)
(string-append default " " name)
default))
(define (notepad id klass history attrs head tabs onchange ctx)
(define svc
(call-with-output-string
(lambda (op)
(obj->javascript-attr
(procedure->service
(lambda (i)
(nptab-get-body (list-ref tabs i))))
op))))
(define (make-tab-div tab i)
(with-access::xml-nptab-element tab (attributes (idt id) idtab body klass)
(let ((click (format "hop_notepad_select( '~a', '~a', ~a )"
id idt (if history "true" "false"))))
(set! attributes
`(:onclick ,(secure-javascript-attr click)
:class ,(string-append klass
(if (=fx i 0)
" hop-nptab-active"
" hop-nptab-inactive"))
,@attributes)))
(with-access::xml-element tab (body)
(when (and (pair? body)
(isa? (car body) xml-delay)
(null? (cdr body)))
(set! attributes `(:lang "delay" ,@attributes))))
(<DIV> :data-hss-tag "hop-notepad-tab-body"
:style (if (=fx i 0) "display: block" "display: none")
:id idt
:data-idtab idtab
(cond
((=fx i 0)
(nptab-get-body tab))
((and (isa? (car body) xml-delay) (null? (cdr body)))
"")
(else
body)))))
(let ((bodies (map (lambda (t i) (make-tab-div t i))
tabs (iota (length tabs))))
(attrs (append-map (lambda (a)
(let ((a (xml-primitive-value a ctx)))
(list (symbol->keyword (car a)) (cdr a))))
attrs)))
(apply <DIV>
:id id
:data-hss-tag "hop-notepad"
:class (make-class-name "hop-notepad" klass)
head
(<TABLE> :data-hss-tag "hop-notepad"
(<TR>
(<TD> :id (string-append id "-tabs")
:data-hss-tag "hop-notepad-tabs"
tabs))
(<TR>
(<TD> :id (string-append id "-body")
:data-hss-tag "hop-notepad-body" bodies)))
(<SCRIPT>
(when onchange
(format "document.getElementById('~a').onchange = ~a"
id (hop->js-callback onchange)))
(format "document.getElementById('~a').onkeyup = function(_) { return ~a;}"
id svc))
attrs)))
(define-tag <NPHEAD> ((id #unspecified string)
(%location #f)
(attr)
body)
(instantiate::xml-nphead-element
(tag 'div)
(id (xml-make-id id 'NPHEAD))
(attributes `(:data-hss-tag "hop-nphead" ,@attr))
(body body)))
(define-tag <NPTABHEAD> ((id #unspecified string)
(%location #f)
(attr)
body)
(instantiate::xml-nptabhead-element
(tag 'span)
(id (xml-make-id id 'NPTABHEAD))
(attributes `(:data-hss-tag "hop-nptab-head" ,@attr))
(body body)))
* < NPTAB > ... * /
(define-tag <NPTAB> ((id #unspecified string)
(%context #f)
(class #unspecified string)
(selected #f)
(onselect #f)
(%location #f)
(attr)
body)
(let ((head (filter (lambda (b) (isa? b xml-nptabhead-element)) body))
(body (filter (lambda (x) (not (isa? x xml-nptabhead-element))) body)))
(cond
((null? head)
(error "<NPTAB>" "Missing <NPTABHEAD> " id))
((null? body)
(error "<NPTAB>" "Illegal <NPTABHEAD> " body))
(else
(let ((cla (make-class-name "hop-nptab " class)))
(instantiate::xml-nptab-element
(tag 'span)
(id (xml-make-id id 'NPTAB))
(idtab (xml-make-id #f 'NPTABTAG))
(attributes `(:data-hss-tag "hop-nptab"
,@(map (lambda (a)
(xml-primitive-value a %context))
attr)))
(klass cla)
(onselect onselect)
(head (car head))
(body body)))))))
(define-method (xml-write obj::xml-nptab-element p backend)
(with-access::xml-nptab-element obj (idtab head attributes onselect)
(display "<span id='" p)
(display idtab p)
(display "'" p)
(xml-write-attributes attributes p backend)
(display ">" p)
(when onselect
(display "<script>" p)
(display "document.getElementById( '" p)
(display idtab p)
(display "' ).onselect = " p)
(display (hop->js-callback onselect) p)
(display "</script>" p))
(xml-write head p backend)
(display "</span>" p)))
(define-method (xml-compare a1::xml-nphead-element a2)
(if (and (isa? a2 xml-markup)
(with-access::xml-markup a2 (tag)
(eq? tag 'div))
(equal? (dom-get-attribute a2 "class") "hop-nphead"))
(with-access::xml-markup a1 ((body1 body))
(with-access::xml-markup a2 ((body2 body))
(xml-compare body1 body2)))
(call-next-method)))
(define-method (xml-compare a1::xml-nptabhead-element a2)
(if (and (isa? a2 xml-markup)
(with-access::xml-markup a2 (tag)
(eq? tag 'span))
(equal? (dom-get-attribute a2 "data-hss-tag") "hop-nptab-head"))
(with-access::xml-markup a1 ((body1 body))
(with-access::xml-markup a2 ((body2 body))
(xml-compare body1 body2)))
(call-next-method)))
(define-method (xml-compare a1::xml-nptab-element a2)
(if (and (isa? a2 xml-markup)
(with-access::xml-markup a2 (tag)
(eq? tag 'span))
(equal? (dom-get-attribute a2 "data-hss-tag") "hop-nptab")
(let ((head (dom-first-child a2))
(body (cadr (dom-child-nodes a2))))
(xml-compare (cadr (dom-child-nodes a1)) head)))
(call-next-method)))
|
d8163e6f69b18368c1d46d0e3146868deb46e7236b2b658ff3abbbf202286ae2 | WhatsApp/eqwalizer | behave.erl | %%% Copyright (c) Meta Platforms, Inc. and affiliates. All rights reserved.
%%%
%%% This source code is licensed under the Apache 2.0 license found in
%%% the LICENSE file in the root directory of this source tree.
-module(behave).
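%% Behaviour callback declarations for type-checker tests; invalid() references the unbound type variable _T.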
-callback foo() -> behave1:test().
-type invalid() :: _T.
-callback use_invalid() -> invalid().
-callback use_invalid2() -> invalid().
| null | https://raw.githubusercontent.com/WhatsApp/eqwalizer/9935940d71ef65c7bf7a9dfad77d89c0006c288e/eqwalizer/test_projects/check/src/behave.erl | erlang |
the LICENSE file in the root directory of this source tree. | Copyright ( c ) Meta Platforms , Inc. and affiliates . All rights reserved .
This source code is licensed under the Apache 2.0 license found in
-module(behave).
-callback foo() -> behave1:test().
-type invalid() :: _T.
-callback use_invalid() -> invalid().
-callback use_invalid2() -> invalid().
|
2cd9be98142ddd043665eb6e3d92ae82f19c924f296fc276f1646f621bd7c60a | AshleyYakeley/Truth | Main.hs | module Main
( main
) where
import Changes.Core
import Changes.World.File
import Changes.World.GNOME.GTK
import qualified Options.Applicative as O
import Shapes
import System.FilePath
textCodec :: ReasonCodec LazyByteString Text
textCodec = hoistCodec (mapResultFailure $ pack . show) utf8Codec . bijectionCodec strictBytestringBijection
textLens :: ChangeLens ByteStringUpdate (WholeUpdate ((Result Text) Text))
textLens = (wholeChangeLens $ injectionLens $ toInjection $ codecInjection textCodec) . convertChangeLens
optParser :: O.Parser ([FilePath], Bool, Bool, Bool)
optParser =
(,,,) <$> (O.many $ O.strArgument mempty) <*> O.switch (O.short '2') <*> O.switch (O.long "seltest") <*>
O.switch (O.long "save")
newtype AppUI =
MkAppUI (GViewState 'Locked -> UIWindow -> GView 'Locked Widget -> (MenuBar, GView 'Locked Widget))
main :: IO ()
main = do
(paths, double, selTest, saveOpt) <- O.execParser (O.info optParser mempty)
runLifecycle $
runGTK $ \gtkContext ->
runNewView $
runGView gtkContext $
gvRunLocked $ do
for_ paths $ \path -> do
let
bsObj :: Reference ByteStringEdit
bsObj = fileReference path
wholeTextObj :: Reference (WholeEdit ((Result Text) Text))
wholeTextObj = mapReference textLens bsObj
ui :: Model (FullResultOneUpdate (Result Text) (StringUpdate Text))
-> Maybe (Model (FullResultOneUpdate (Result Text) (StringUpdate Text)))
-> (GViewState 'Locked -> UIWindow -> GView 'Locked Widget -> (MenuBar, GView 'Locked Widget))
-> GView 'Locked Widget
ui sub1 msub2 extraui = do
(selModel, setsel) <- gvLiftLifecycleNoUI $ makeSharedModel makePremodelSelectNotify
let
openSelection ::
Model (FullResultOneUpdate (Result Text) (StringUpdate Text)) -> GView 'Locked ()
openSelection sub = do
mflens <- gvRunResource selModel $ \selAModel -> aModelRead selAModel ReadWhole
case mflens of
Nothing -> return ()
Just flens -> do
subSub <-
gvLiftViewNoUI $
viewFloatMapModel (liftFullResultOneFloatingChangeLens flens) sub
makeWindow "section" subSub Nothing extraui
rTextSpec :: Result Text (Model (StringUpdate Text)) -> GView 'Locked Widget
rTextSpec (SuccessResult sub) = createTextArea sub setsel
rTextSpec (FailureResult err) = createLabel $ constantModel err
makeSpecs sub = do
viewButton <-
createButton (constantModel "GView 'Locked") $
constantModel $ Just $ openSelection sub
textContent <- createOneWhole sub rTextSpec
scrolledTextContent <- createScrolled textContent
return
[ (defaultLayoutOptions, viewButton)
, (defaultLayoutOptions {loGrow = True}, scrolledTextContent)
]
allSpecs =
case msub2 of
Nothing -> makeSpecs sub1
Just sub2 -> liftA2 (<>) (makeSpecs sub1) (makeSpecs sub2)
specs <- allSpecs
createLayout OrientationVertical specs
makeWindow ::
Text
-> Model (FullResultOneUpdate (Result Text) (StringUpdate Text))
-> Maybe (Model (FullResultOneUpdate (Result Text) (StringUpdate Text)))
-> (GViewState 'Locked -> UIWindow -> GView 'Locked Widget -> ( MenuBar
, GView 'Locked Widget))
-> GView 'Locked ()
makeWindow title sub msub2 extraui = do
rec
let (mbar, cuic) = extraui closer r $ ui sub msub2 extraui
(r, closer) <-
gvGetState $
createWindow $ let
wsPosition = WindowPositionCenter
wsSize = (300, 400)
wsCloseBoxAction :: GView 'Locked ()
wsCloseBoxAction = gvCloseState closer
wsTitle :: Model (ROWUpdate Text)
wsTitle = constantModel title
wsContent :: AccelGroup -> GView 'Locked Widget
wsContent ag = do
mb <- createMenuBar ag mbar
uic <- cuic
createLayout
OrientationVertical
[ (defaultLayoutOptions, mb)
, (defaultLayoutOptions {loGrow = True}, uic)
]
in MkWindowSpec {..}
return ()
simpleUI ::
GViewState 'Locked -> UIWindow -> GView 'Locked Widget -> (MenuBar, GView 'Locked Widget)
simpleUI closer _ spec = let
mbar :: MenuBar
mbar =
[ SubMenuEntry
"File"
[ simpleActionMenuItem "Close" (Just $ MkMenuAccelerator [KMCtrl] 'W') $
gvCloseState closer
, SeparatorMenuEntry
, simpleActionMenuItem "Exit" (Just $ MkMenuAccelerator [KMCtrl] 'Q') gvExitUI
]
]
in (mbar, spec)
extraUI ::
SaveActions
-> UndoHandler
-> GViewState 'Locked
-> UIWindow
-> GView 'Locked Widget
-> (MenuBar, GView 'Locked Widget)
extraUI (MkSaveActions saveActions) uh closer _ spec = let
saveAction = do
mactions <- saveActions
_ <-
case mactions of
Just (action, _) -> action emptyResourceContext noEditSource
_ -> return False
return ()
revertAction = do
mactions <- saveActions
_ <-
case mactions of
Just (_, action) -> action emptyResourceContext noEditSource
_ -> return False
return ()
mbar :: [MenuEntry]
mbar =
[ SubMenuEntry
"File"
[ simpleActionMenuItem "Save" (Just $ MkMenuAccelerator [KMCtrl] 'S') $
liftIO saveAction
, simpleActionMenuItem "Revert" Nothing $ liftIO revertAction
, simpleActionMenuItem "Close" (Just $ MkMenuAccelerator [KMCtrl] 'W') $
gvCloseState closer
, SeparatorMenuEntry
, simpleActionMenuItem "Exit" (Just $ MkMenuAccelerator [KMCtrl] 'Q') gvExitUI
]
, SubMenuEntry
"Edit"
[ simpleActionMenuItem "Undo" (Just $ MkMenuAccelerator [KMCtrl] 'Z') $
liftIO $ undoHandlerUndo uh emptyResourceContext noEditSource >> return ()
, simpleActionMenuItem "Redo" (Just $ MkMenuAccelerator [KMCtrl] 'Y') $
liftIO $ undoHandlerRedo uh emptyResourceContext noEditSource >> return ()
]
]
in (mbar, spec)
action <- do
(textSub, MkAppUI appUI) <-
if saveOpt
then do
(bufferSub, saveActions) <-
gvLiftLifecycleNoUI $
makeSharedModel $ saveBufferReference emptyResourceContext wholeTextObj
uh <- liftIO newUndoHandler
return (undoHandlerModel uh bufferSub, MkAppUI $ extraUI saveActions uh)
else do
textSub <- gvLiftLifecycleNoUI $ makeReflectingModel $ convertReference wholeTextObj
return (textSub, MkAppUI simpleUI)
mTextSub2 <-
case selTest of
False -> return Nothing
True -> do
bsObj2 <- liftIO $ makeMemoryReference mempty $ \_ -> True
textSub2 <-
gvLiftLifecycleNoUI $
makeReflectingModel $
convertReference $ mapReference textLens $ convertReference bsObj2
return $ Just textSub2
return $ makeWindow (fromString $ takeFileName path) textSub mTextSub2 appUI
action
if double
then action
else return ()
| null | https://raw.githubusercontent.com/AshleyYakeley/Truth/2817d5e36bd1dc5de932d808026098b6c35e7185/Changes/changes-gnome/examples/text/Main.hs | haskell | module Main
( main
) where
import Changes.Core
import Changes.World.File
import Changes.World.GNOME.GTK
import qualified Options.Applicative as O
import Shapes
import System.FilePath
textCodec :: ReasonCodec LazyByteString Text
textCodec = hoistCodec (mapResultFailure $ pack . show) utf8Codec . bijectionCodec strictBytestringBijection
textLens :: ChangeLens ByteStringUpdate (WholeUpdate ((Result Text) Text))
textLens = (wholeChangeLens $ injectionLens $ toInjection $ codecInjection textCodec) . convertChangeLens
optParser :: O.Parser ([FilePath], Bool, Bool, Bool)
optParser =
(,,,) <$> (O.many $ O.strArgument mempty) <*> O.switch (O.short '2') <*> O.switch (O.long "seltest") <*>
O.switch (O.long "save")
newtype AppUI =
MkAppUI (GViewState 'Locked -> UIWindow -> GView 'Locked Widget -> (MenuBar, GView 'Locked Widget))
main :: IO ()
main = do
(paths, double, selTest, saveOpt) <- O.execParser (O.info optParser mempty)
runLifecycle $
runGTK $ \gtkContext ->
runNewView $
runGView gtkContext $
gvRunLocked $ do
for_ paths $ \path -> do
let
bsObj :: Reference ByteStringEdit
bsObj = fileReference path
wholeTextObj :: Reference (WholeEdit ((Result Text) Text))
wholeTextObj = mapReference textLens bsObj
ui :: Model (FullResultOneUpdate (Result Text) (StringUpdate Text))
-> Maybe (Model (FullResultOneUpdate (Result Text) (StringUpdate Text)))
-> (GViewState 'Locked -> UIWindow -> GView 'Locked Widget -> (MenuBar, GView 'Locked Widget))
-> GView 'Locked Widget
ui sub1 msub2 extraui = do
(selModel, setsel) <- gvLiftLifecycleNoUI $ makeSharedModel makePremodelSelectNotify
let
openSelection ::
Model (FullResultOneUpdate (Result Text) (StringUpdate Text)) -> GView 'Locked ()
openSelection sub = do
mflens <- gvRunResource selModel $ \selAModel -> aModelRead selAModel ReadWhole
case mflens of
Nothing -> return ()
Just flens -> do
subSub <-
gvLiftViewNoUI $
viewFloatMapModel (liftFullResultOneFloatingChangeLens flens) sub
makeWindow "section" subSub Nothing extraui
rTextSpec :: Result Text (Model (StringUpdate Text)) -> GView 'Locked Widget
rTextSpec (SuccessResult sub) = createTextArea sub setsel
rTextSpec (FailureResult err) = createLabel $ constantModel err
makeSpecs sub = do
viewButton <-
createButton (constantModel "GView 'Locked") $
constantModel $ Just $ openSelection sub
textContent <- createOneWhole sub rTextSpec
scrolledTextContent <- createScrolled textContent
return
[ (defaultLayoutOptions, viewButton)
, (defaultLayoutOptions {loGrow = True}, scrolledTextContent)
]
allSpecs =
case msub2 of
Nothing -> makeSpecs sub1
Just sub2 -> liftA2 (<>) (makeSpecs sub1) (makeSpecs sub2)
specs <- allSpecs
createLayout OrientationVertical specs
makeWindow ::
Text
-> Model (FullResultOneUpdate (Result Text) (StringUpdate Text))
-> Maybe (Model (FullResultOneUpdate (Result Text) (StringUpdate Text)))
-> (GViewState 'Locked -> UIWindow -> GView 'Locked Widget -> ( MenuBar
, GView 'Locked Widget))
-> GView 'Locked ()
makeWindow title sub msub2 extraui = do
rec
let (mbar, cuic) = extraui closer r $ ui sub msub2 extraui
(r, closer) <-
gvGetState $
createWindow $ let
wsPosition = WindowPositionCenter
wsSize = (300, 400)
wsCloseBoxAction :: GView 'Locked ()
wsCloseBoxAction = gvCloseState closer
wsTitle :: Model (ROWUpdate Text)
wsTitle = constantModel title
wsContent :: AccelGroup -> GView 'Locked Widget
wsContent ag = do
mb <- createMenuBar ag mbar
uic <- cuic
createLayout
OrientationVertical
[ (defaultLayoutOptions, mb)
, (defaultLayoutOptions {loGrow = True}, uic)
]
in MkWindowSpec {..}
return ()
simpleUI ::
GViewState 'Locked -> UIWindow -> GView 'Locked Widget -> (MenuBar, GView 'Locked Widget)
simpleUI closer _ spec = let
mbar :: MenuBar
mbar =
[ SubMenuEntry
"File"
[ simpleActionMenuItem "Close" (Just $ MkMenuAccelerator [KMCtrl] 'W') $
gvCloseState closer
, SeparatorMenuEntry
, simpleActionMenuItem "Exit" (Just $ MkMenuAccelerator [KMCtrl] 'Q') gvExitUI
]
]
in (mbar, spec)
extraUI ::
SaveActions
-> UndoHandler
-> GViewState 'Locked
-> UIWindow
-> GView 'Locked Widget
-> (MenuBar, GView 'Locked Widget)
extraUI (MkSaveActions saveActions) uh closer _ spec = let
saveAction = do
mactions <- saveActions
_ <-
case mactions of
Just (action, _) -> action emptyResourceContext noEditSource
_ -> return False
return ()
revertAction = do
mactions <- saveActions
_ <-
case mactions of
Just (_, action) -> action emptyResourceContext noEditSource
_ -> return False
return ()
mbar :: [MenuEntry]
mbar =
[ SubMenuEntry
"File"
[ simpleActionMenuItem "Save" (Just $ MkMenuAccelerator [KMCtrl] 'S') $
liftIO saveAction
, simpleActionMenuItem "Revert" Nothing $ liftIO revertAction
, simpleActionMenuItem "Close" (Just $ MkMenuAccelerator [KMCtrl] 'W') $
gvCloseState closer
, SeparatorMenuEntry
, simpleActionMenuItem "Exit" (Just $ MkMenuAccelerator [KMCtrl] 'Q') gvExitUI
]
, SubMenuEntry
"Edit"
[ simpleActionMenuItem "Undo" (Just $ MkMenuAccelerator [KMCtrl] 'Z') $
liftIO $ undoHandlerUndo uh emptyResourceContext noEditSource >> return ()
, simpleActionMenuItem "Redo" (Just $ MkMenuAccelerator [KMCtrl] 'Y') $
liftIO $ undoHandlerRedo uh emptyResourceContext noEditSource >> return ()
]
]
in (mbar, spec)
action <- do
(textSub, MkAppUI appUI) <-
if saveOpt
then do
(bufferSub, saveActions) <-
gvLiftLifecycleNoUI $
makeSharedModel $ saveBufferReference emptyResourceContext wholeTextObj
uh <- liftIO newUndoHandler
return (undoHandlerModel uh bufferSub, MkAppUI $ extraUI saveActions uh)
else do
textSub <- gvLiftLifecycleNoUI $ makeReflectingModel $ convertReference wholeTextObj
return (textSub, MkAppUI simpleUI)
mTextSub2 <-
case selTest of
False -> return Nothing
True -> do
bsObj2 <- liftIO $ makeMemoryReference mempty $ \_ -> True
textSub2 <-
gvLiftLifecycleNoUI $
makeReflectingModel $
convertReference $ mapReference textLens $ convertReference bsObj2
return $ Just textSub2
return $ makeWindow (fromString $ takeFileName path) textSub mTextSub2 appUI
action
if double
then action
else return ()
|
|
33747123ff16b28873c477913c415e524aa8d875cb88cf7bed22de00cfd7fea6 | antoniogarrote/clj-control | utils.clj | (ns clj-control.test.utils
(:use [clj-control.utils]
[clojure.test]))
(deftest should-apply-the-function-after-partial-application-of-n-args
(let [fx (curry 3 +)
fx1 (apply fx [1])
fx2 (apply fx1 [2])
v (apply fx2 [3])]
(is (= 6 v))
(is (fn? fx))
(is (fn? fx1))
(is (fn? fx2))))
(deftest should-apply-function-application
(is (= (map ($ 3) [(partial + 4)
(partial * 10)
(fn [x] (Math/pow x 2))
(fn [x] (Math/sqrt x))])
(list 7 30 9.0 1.7320508075688772))))
| null | https://raw.githubusercontent.com/antoniogarrote/clj-control/175ab9f3d0157355b082b3f68472b507261eb941/test/clj_control/test/utils.clj | clojure | (ns clj-control.test.utils
(:use [clj-control.utils]
[clojure.test]))
(deftest should-apply-the-function-after-partial-application-of-n-args
(let [fx (curry 3 +)
fx1 (apply fx [1])
fx2 (apply fx1 [2])
v (apply fx2 [3])]
(is (= 6 v))
(is (fn? fx))
(is (fn? fx1))
(is (fn? fx2))))
(deftest should-apply-function-application
(is (= (map ($ 3) [(partial + 4)
(partial * 10)
(fn [x] (Math/pow x 2))
(fn [x] (Math/sqrt x))])
(list 7 30 9.0 1.7320508075688772))))
|
|
cdf06191691e329495134d51b697ff6e8a1e15dd42bf86a21df192e8d05f831e | aryx/xix | hellogui.ml | open Common
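(* Minimal GUI smoke test: initialise a display named "Hello GUI", then stop at Todo. *)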
let main () =
let display = Draw.init "Hello GUI" in
raise Todo
let _ = main ()
| null | https://raw.githubusercontent.com/aryx/xix/60ce1bd9a3f923e0e8bb2192f8938a9aa49c739c/lib_gui/tests/hellogui.ml | ocaml | open Common
let main () =
let display = Draw.init "Hello GUI" in
raise Todo
let _ = main ()
|
|
a70796e136df325dc3d3347a8c2427979aec06ae49ed43273c2011e149c4892e | sjl/cl-digraph | make-quickutils.lisp | (ql:quickload 'quickutil)
(qtlc:save-utils-as
"quickutils.lisp"
:utilities '(
:appendf
:compose
:curry
:dohash
:ensure-boolean
:ensure-gethash
:ensure-list
:hash-table-keys
:maphash-keys
:mkstr
:once-only
:rcurry
:removef
:symb
:with-gensyms
)
:package "DIGRAPH.QUICKUTILS")
| null | https://raw.githubusercontent.com/sjl/cl-digraph/380177f204bb531c13052502b2b453cb1a1fc60a/vendor/make-quickutils.lisp | lisp | (ql:quickload 'quickutil)
(qtlc:save-utils-as
"quickutils.lisp"
:utilities '(
:appendf
:compose
:curry
:dohash
:ensure-boolean
:ensure-gethash
:ensure-list
:hash-table-keys
:maphash-keys
:mkstr
:once-only
:rcurry
:removef
:symb
:with-gensyms
)
:package "DIGRAPH.QUICKUTILS")
|
|
626a8783a3d3eb8fa600cc99d90a6dbd3fd4f6c5536f4df68f9485c6ee880329 | rizo/streaming-zoo | sequence.ml |
(* Original *)
module A = struct
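  (* A sequence is push-based: a function that feeds every element to the callback it is given. *)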
type 'a t = ('a -> unit) -> unit
let count n k =
let count = ref 0 in
while true do
k !count;
incr count
done
let rec init n f k =
let i = ref 0 in
while !i < n do
k (f !i);
incr i
done
let map f seq k =
seq (fun x -> k (f x))
let filter p seq k =
seq (fun x -> if p x then k x)
exception ExitTake
let take n seq k =
let count = ref 0 in
try
seq
(fun x ->
if !count = n then raise ExitTake;
incr count;
k x)
with ExitTake -> ()
exception Fold_stop_sequence
let fold f acc seq =
let r = ref acc in
seq (fun elt -> r := f !r elt);
!r
let to_list seq =
let r = ref [] in
seq (fun elt -> r := elt :: !r);
!r
let rec of_list l k =
match l with
| [] -> ()
| x :: xs ->
k x; of_list xs k
end
(* Safe *)
module B = struct
type 'a t = ('a -> bool) -> unit
let rec count n k =
if k n then count (n + 1) k
let rec of_list l (k: 'a -> bool) : unit =
match l with
| [] -> ()
| x :: xs ->
if k x
then of_list xs k
else ()
let to_list (seq: ('a -> bool) -> unit) =
let r = ref [] in
seq (fun x -> r := x :: !r; true);
List.rev !r
let take n seq k =
let count = ref 0 in
seq (fun x ->
if !count = n
then false
else let () = incr count in k x)
let fold f acc seq =
let r = ref acc in
seq (fun elt -> r := f !r elt; true);
!r
let map f seq k =
seq (fun x -> k (f x))
let filter p seq k =
seq (fun x -> if p x then k x else true)
end
(* Safe 2 *)
module C = struct
type 'r reduced = Continue of 'r | Done of 'r
let rec count n =
fun step r ->
match step r n with
| Continue r' -> count (n + 1) step r'
| Done r' -> r'
let fold f acc seq =
seq (fun r a -> Continue (f r a)) acc
let map f seq =
fun step ->
seq (fun r a -> step r (f a))
let filter p seq =
fun step ->
seq (fun r a -> if p a then step r a else Continue r)
let take n seq =
fun step ->
let i = ref 0 in
seq (fun r a ->
if !i = n then Done r
else (incr i; step r a))
end
(* Safe 3 *)
module D = struct
let rec count n k =
match k n with
| `Continue -> count (n + 1) k
| `Done -> ()
let fold f acc seq =
let r = ref acc in
seq (fun a -> r := f !r a; `Continue);
!r
let map f seq k =
seq (fun a -> k (f a))
let filter p seq k =
seq (fun a -> if p a then k a else `Continue)
let take n seq k =
let i = ref 0 in
seq (fun a ->
if !i = n then `Done
else (incr i; k a))
end
(* Fast *)
module E = struct
type 'a reduced = Continue of 'a | Stop of 'a
type ('a, 'r) seq = ('r -> 'a -> 'r reduced) -> 'r -> 'r
let rec count n : (int, 'r) seq =
fun step init ->
match step init n with
| Continue r -> count (n + 1) step r
| Stop r -> r
let take n seq =
fun step init ->
let count = ref 0 in
seq (fun r a ->
if !count = n
then Stop r
else (incr count; step r a)) init
let fold f acc seq =
seq (fun r a -> Continue (f r a)) acc
let map f seq =
fun step init ->
seq (fun r a -> step r (f a)) init
let filter p seq =
fun step init ->
seq (fun r a -> if p a then step r a else Continue r) init
end
(* Fast state *)
module F = struct
type ('s, 'r) reduced = Continue of ('s * 'r) | Done of 'r
type ('a, 's, 'r) reducer = 'r -> 'a -> ('s, 'r) reduced
type ('a, 's, 'r) seq = ('a, 's, 'r) reducer -> 'r -> 'r
let rec count n =
fun step s0 r0 ->
match step s0 r0 n with
| Continue (s, r) -> count (n + 1) step s r
| Done r -> r
let take n seq =
fun step s0 r0 ->
let s0' = (s0, 0) in
let step' (s, i) r a =
if i = n then Done r
else step (s, i + 1) r a in
seq step' s0' r0
let fold f r0 seq =
let step s r a = Continue (s, f r a) in
seq step () r0
let map f seq =
fun step s0 r0 ->
let step' s r a = step s r (f a) in
seq step' s0 r0
let filter p seq =
fun step s0 r0 ->
let step' s r a =
if p a then step s r a
else Continue (s, r) in
seq step' s0 r0
end
include A
| null | https://raw.githubusercontent.com/rizo/streaming-zoo/bf586c8b986a41353f5e3963de01047b83e7649a/src/sequence.ml | ocaml | Safe
Fast
Fast state |
Original
module A = struct
type 'a t = ('a -> unit) -> unit
let count n k =
let count = ref 0 in
while true do
k !count;
incr count
done
let rec init n f k =
let i = ref 0 in
while !i < n do
k (f !i);
incr i
done
let map f seq k =
seq (fun x -> k (f x))
let filter p seq k =
seq (fun x -> if p x then k x)
exception ExitTake
let take n seq k =
let count = ref 0 in
try
seq
(fun x ->
if !count = n then raise ExitTake;
incr count;
k x)
with ExitTake -> ()
exception Fold_stop_sequence
let fold f acc seq =
let r = ref acc in
seq (fun elt -> r := f !r elt);
!r
let to_list seq =
let r = ref [] in
seq (fun elt -> r := elt :: !r);
!r
let rec of_list l k =
match l with
| [] -> ()
| x :: xs ->
k x; of_list xs k
end
module B = struct
type 'a t = ('a -> bool) -> unit
let rec count n k =
if k n then count (n + 1) k
let rec of_list l (k: 'a -> bool) : unit =
match l with
| [] -> ()
| x :: xs ->
if k x
then of_list xs k
else ()
let to_list (seq: ('a -> bool) -> unit) =
let r = ref [] in
seq (fun x -> r := x :: !r; true);
List.rev !r
let take n seq k =
let count = ref 0 in
seq (fun x ->
if !count = n
then false
else let () = incr count in k x)
let fold f acc seq =
let r = ref acc in
seq (fun elt -> r := f !r elt; true);
!r
let map f seq k =
seq (fun x -> k (f x))
let filter p seq k =
seq (fun x -> if p x then k x else true)
end
Safe 2
module C = struct
type 'r reduced = Continue of 'r | Done of 'r
let rec count n =
fun step r ->
match step r n with
| Continue r' -> count (n + 1) step r'
| Done r' -> r'
let fold f acc seq =
seq (fun r a -> Continue (f r a)) acc
let map f seq =
fun step ->
seq (fun r a -> step r (f a))
let filter p seq =
fun step ->
seq (fun r a -> if p a then step r a else Continue r)
let take n seq =
fun step ->
let i = ref 0 in
seq (fun r a ->
if !i = n then Done r
else (incr i; step r a))
end
Safe 3
module D = struct
let rec count n k =
match k n with
| `Continue -> count (n + 1) k
| `Done -> ()
let fold f acc seq =
let r = ref acc in
seq (fun a -> r := f !r a; `Continue);
!r
let map f seq k =
seq (fun a -> k (f a))
let filter p seq k =
seq (fun a -> if p a then k a else `Continue)
let take n seq k =
let i = ref 0 in
seq (fun a ->
if !i = n then `Done
else (incr i; k a))
end
module E = struct
type 'a reduced = Continue of 'a | Stop of 'a
type ('a, 'r) seq = ('r -> 'a -> 'r reduced) -> 'r -> 'r
let rec count n : (int, 'r) seq =
fun step init ->
match step init n with
| Continue r -> count (n + 1) step r
| Stop r -> r
let take n seq =
fun step init ->
let count = ref 0 in
seq (fun r a ->
if !count = n
then Stop r
else (incr count; step r a)) init
let fold f acc seq =
seq (fun r a -> Continue (f r a)) acc
let map f seq =
fun step init ->
seq (fun r a -> step r (f a)) init
let filter p seq =
fun step init ->
seq (fun r a -> if p a then step r a else Continue r) init
end
module F = struct
type ('s, 'r) reduced = Continue of ('s * 'r) | Done of 'r
type ('a, 's, 'r) reducer = 'r -> 'a -> ('s, 'r) reduced
type ('a, 's, 'r) seq = ('a, 's, 'r) reducer -> 'r -> 'r
let rec count n =
fun step s0 r0 ->
match step s0 r0 n with
| Continue (s, r) -> count (n + 1) step s r
| Done r -> r
let take n seq =
fun step s0 r0 ->
let s0' = (s0, 0) in
let step' (s, i) r a =
if i = n then Done r
else step (s, i + 1) r a in
seq step' s0' r0
let fold f r0 seq =
let step s r a = Continue (s, f r a) in
seq step () r0
let map f seq =
fun step s0 r0 ->
let step' s r a = step s r (f a) in
seq step' s0 r0
let filter p seq =
fun step s0 r0 ->
let step' s r a =
if p a then step s r a
else Continue (s, r) in
seq step' s0 r0
end
include A
|
969e8565facc9c08292fdb5df1787f65ce0dda01518c57e4f015649758b21fda | GaloisInc/saw-script | Name.hs | {- |
Module : Verifier.SAW.Name
Copyright : Galois, Inc. 2012-2015
License : BSD3
Maintainer :
Stability : experimental
Portability : non-portable (language extensions)
Various kinds of names.
-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveLift #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
module Verifier.SAW.Name
( -- * Module names
ModuleName, mkModuleName
, preludeName
, moduleNameText
, moduleNamePieces
-- * Identifiers
, Ident(identModule, identBaseName), identName, mkIdent
, parseIdent
, isIdent
, identText
, identPieces
    -- * NameInfo
, NameInfo(..)
, toShortName
, toAbsoluteName
, moduleIdentToURI
, nameURI
, nameAliases
-- * ExtCns
, VarIndex
, ExtCns(..)
, scFreshNameURI
, PrimName(..)
, primNameToExtCns
-- * Naming Environments
, SAWNamingEnv(..)
, emptySAWNamingEnv
, registerName
, resolveURI
, resolveName
, bestAlias
) where
import Control.Exception (assert)
import Data.Char
import Data.Hashable
import Data.List.NonEmpty (NonEmpty(..))
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe
import Data.Set (Set)
import qualified Data.Set as Set
import Data.String (IsString(..))
import Data.Text (Text)
import qualified Data.Text as Text
import Data.Word
import GHC.Generics (Generic)
import Text.URI
import qualified Language.Haskell.TH.Syntax as TH
import Instances.TH.Lift () -- for instance TH.Lift Text
import Verifier.SAW.Utils (panic, internalError)
-- Module Names ----------------------------------------------------------------
newtype ModuleName = ModuleName Text
deriving (Eq, Ord, Generic, TH.Lift)
instance Hashable ModuleName -- automatically derived
instance Show ModuleName where
show (ModuleName s) = Text.unpack s
moduleNameText :: ModuleName -> Text
moduleNameText (ModuleName x) = x
moduleNamePieces :: ModuleName -> [Text]
moduleNamePieces (ModuleName x) = Text.splitOn (Text.pack ".") x
-- | Create a module name given a list of strings with the top-most
-- module name given first.
mkModuleName :: [Text] -> ModuleName
mkModuleName [] = error "internal: mkModuleName given empty module name"
mkModuleName nms = assert (all (isCtor . Text.unpack) nms) $ ModuleName s
where s = Text.intercalate "." (reverse nms)
preludeName :: ModuleName
preludeName = mkModuleName ["Prelude"]
-- Identifiers -----------------------------------------------------------------
data Ident =
Ident
{ identModule :: ModuleName
, identBaseName :: Text
}
deriving (Eq, Ord, Generic)
instance Hashable Ident -- automatically derived
instance Show Ident where
show (Ident m s) = shows m ('.' : Text.unpack s)
identText :: Ident -> Text
identText i = moduleNameText (identModule i) <> Text.pack "." <> identBaseName i
identPieces :: Ident -> NonEmpty Text
identPieces i =
case moduleNamePieces (identModule i) of
[] -> identBaseName i :| []
(x:xs) -> x :| (xs ++ [identBaseName i])
identName :: Ident -> String
identName = Text.unpack . identBaseName
instance Read Ident where
readsPrec _ str =
let (str1, str2) = break (not . isIdChar) str in
[(parseIdent str1, str2)]
mkIdent :: ModuleName -> Text -> Ident
mkIdent m s = Ident m s
-- | Parse a fully qualified identifier.
parseIdent :: String -> Ident
parseIdent s0 =
case reverse (breakEach s0) of
(_:[]) -> internalError $ "parseIdent given empty module name."
(nm:rMod) -> mkIdent (mkModuleName (reverse rMod)) nm
_ -> internalError $ "parseIdent given bad identifier " ++ show s0
where breakEach s =
case break (=='.') s of
(h, []) -> [Text.pack h]
(h, _ : r) -> Text.pack h : breakEach r
instance IsString Ident where
fromString = parseIdent
isIdent :: String -> Bool
isIdent (c:l) = isAlpha c && all isIdChar l
isIdent [] = False
isCtor :: String -> Bool
isCtor (c:l) = isUpper c && all isIdChar l
isCtor [] = False
-- | Returns true if character can appear in identifier.
isIdChar :: Char -> Bool
isIdChar c = isAlphaNum c || (c == '_') || (c == '\'') || (c == '.')
--------------------------------------------------------------------------------
-- NameInfo
-- | Descriptions of the origins of names that may be in scope
data NameInfo
= -- | This name arises from an exported declaration from a module
ModuleIdentifier Ident
| -- | This name was imported from some other programming language/scope
ImportedName
URI -- ^ An absolutely-qualified name, which is required to be unique
[Text] -- ^ A collection of aliases for this name. Shorter or "less-qualified"
-- aliases should be nearer the front of the list
deriving (Eq,Ord,Show)
nameURI :: NameInfo -> URI
nameURI =
\case
ModuleIdentifier i -> moduleIdentToURI i
ImportedName uri _ -> uri
nameAliases :: NameInfo -> [Text]
nameAliases =
\case
ModuleIdentifier i -> [identBaseName i, identText i]
ImportedName _ aliases -> aliases
toShortName :: NameInfo -> Text
toShortName (ModuleIdentifier i) = identBaseName i
toShortName (ImportedName uri []) = render uri
toShortName (ImportedName _ (x:_)) = x
toAbsoluteName :: NameInfo -> Text
toAbsoluteName (ModuleIdentifier i) = identText i
toAbsoluteName (ImportedName uri _) = render uri
moduleIdentToURI :: Ident -> URI
moduleIdentToURI ident = fromMaybe (panic "moduleIdentToURI" ["Failed to constructed ident URI", show ident]) $
do sch <- mkScheme "sawcore"
path <- mapM mkPathPiece (identPieces ident)
pure URI
{ uriScheme = Just sch
, uriAuthority = Left True -- absolute path
, uriPath = Just (False, path)
, uriQuery = []
, uriFragment = Nothing
}
-- External Constants ----------------------------------------------------------
type VarIndex = Word64
-- | An external constant with a name.
-- Names are not necessarily unique, but the var index should be.
data ExtCns e =
EC
{ ecVarIndex :: !VarIndex
, ecName :: !NameInfo
, ecType :: !e
}
deriving (Show, Functor, Foldable, Traversable)
instance Eq (ExtCns e) where
x == y = ecVarIndex x == ecVarIndex y
instance Ord (ExtCns e) where
compare x y = compare (ecVarIndex x) (ecVarIndex y)
instance Hashable (ExtCns e) where
hashWithSalt x ec = hashWithSalt x (ecVarIndex ec)
-- Primitive Names ------------------------------------------------------------
-- | Names of SAWCore primitives, data types and data type constructors.
data PrimName e =
PrimName
{ primVarIndex :: !VarIndex
, primName :: !Ident
, primType :: e
}
deriving (Show, Functor, Foldable, Traversable)
instance Eq (PrimName e) where
x == y = primVarIndex x == primVarIndex y
instance Ord (PrimName e) where
compare x y = compare (primVarIndex x) (primVarIndex y)
instance Hashable (PrimName e) where
hashWithSalt x pn = hashWithSalt x (primVarIndex pn)
primNameToExtCns :: PrimName e -> ExtCns e
primNameToExtCns (PrimName varIdx nm tp) = EC varIdx (ModuleIdentifier nm) tp
scFreshNameURI :: Text -> VarIndex -> URI
scFreshNameURI nm i = fromMaybe (panic "scFreshNameURI" ["Failed to constructed name URI", show nm, show i]) $
do sch <- mkScheme "fresh"
nm' <- mkPathPiece (if Text.null nm then "_" else nm)
i' <- mkFragment (Text.pack (show i))
pure URI
{ uriScheme = Just sch
, uriAuthority = Left False -- relative path
, uriPath = Just (False, (nm' :| []))
, uriQuery = []
, uriFragment = Just i'
}
-- Naming Environments ---------------------------------------------------------
data SAWNamingEnv = SAWNamingEnv
{ resolvedNames :: !(Map VarIndex NameInfo)
, absoluteNames :: !(Map URI VarIndex)
, aliasNames :: !(Map Text (Set VarIndex))
}
-- Invariants: The 'resolvedNames' and 'absoluteNames' maps should be
-- inverses of each other. That is, 'resolvedNames' maps @i@ to @n@ if
-- and only if 'absoluteNames' maps @nameURI n@ to @i@. Also, every
-- 'VarIndex' appearing in 'aliasNames' must be present as a key in
-- 'resolvedNames'.
emptySAWNamingEnv :: SAWNamingEnv
emptySAWNamingEnv = SAWNamingEnv mempty mempty mempty
-- | Add a new name entry in a 'SAWNamingEnv'. Returns 'Left' if
-- there is already an entry under the same URI.
registerName :: VarIndex -> NameInfo -> SAWNamingEnv -> Either URI SAWNamingEnv
registerName i nmi env =
case Map.lookup uri (absoluteNames env) of
Just _ -> Left uri
Nothing ->
Right $
SAWNamingEnv
{ resolvedNames = Map.insert i nmi (resolvedNames env)
, absoluteNames = Map.insert uri i (absoluteNames env)
, aliasNames = foldr insertAlias (aliasNames env) aliases
}
where
uri = nameURI nmi
aliases = render uri : nameAliases nmi
insertAlias :: Text -> Map Text (Set VarIndex) -> Map Text (Set VarIndex)
insertAlias x m = Map.insertWith Set.union x (Set.singleton i) m
resolveURI :: SAWNamingEnv -> URI -> Maybe VarIndex
resolveURI env uri = Map.lookup uri (absoluteNames env)
resolveName :: SAWNamingEnv -> Text -> [(VarIndex, NameInfo)]
resolveName env nm =
case Map.lookup nm (aliasNames env) of
Nothing -> []
Just vs -> [ (v, findName v (resolvedNames env)) | v <- Set.toList vs ]
where
findName v m =
case Map.lookup v m of
Just nmi -> nmi
Nothing -> panic "resolveName" ["Unbound VarIndex when resolving name", show nm, show v]
-- | Return the first alias (according to 'nameAliases') that is
-- unambiguous in the naming environment. If there is no unambiguous
-- alias, then return the URI.
bestAlias :: SAWNamingEnv -> NameInfo -> Either URI Text
bestAlias env nmi = go (nameAliases nmi)
where
go [] = Left (nameURI nmi)
go (x : xs) =
case Map.lookup x (aliasNames env) of
Nothing -> go xs
Just vs
| Set.size vs == 1 -> Right x
| otherwise -> go xs
| null | https://raw.githubusercontent.com/GaloisInc/saw-script/0312fd3d5c9ea721a374463150e47caa1e3524e3/saw-core/src/Verifier/SAW/Name.hs | haskell | # LANGUAGE DeriveLift #
# LANGUAGE DeriveTraversable #
# LANGUAGE OverloadedStrings #
* Module names
* Identifiers
* ExtCns
* Naming Environments
for instance TH.Lift Text
Module Names ----------------------------------------------------------------
automatically derived
| Create a module name given a list of strings with the top-most
Identifiers -----------------------------------------------------------------
automatically derived
| Parse a fully qualified identifier.
| Returns true if character can appear in identifier.
------------------------------------------------------------------------------
| Descriptions of the origins of names that may be in scope
| This name arises from an exported declaration from a module
| This name was imported from some other programming language/scope
^ An absolutely-qualified name, which is required to be unique
^ A collection of aliases for this name. Sorter or "less-qualified"
aliases should be nearer the front of the list
absolute path
External Constants ----------------------------------------------------------
| An external constant with a name.
Names are not necessarily unique, but the var index should be.
Primitive Names ------------------------------------------------------------
| Names of SAWCore primitives, data types and data type constructors.
relative path
Naming Environments ---------------------------------------------------------
Invariants: The 'resolvedNames' and 'absoluteNames' maps should be
'resolvedNames'.
unambiguous in the naming environment. If there is no unambiguous | |
Module : Verifier . SAW.Name
Copyright : Galois , Inc. 2012 - 2015
License : :
Stability : experimental
Portability : non - portable ( language extensions )
Various kinds of names .
Module : Verifier.SAW.Name
Copyright : Galois, Inc. 2012-2015
License : BSD3
Maintainer :
Stability : experimental
Portability : non-portable (language extensions)
Various kinds of names.
-}
# LANGUAGE DeriveFoldable #
# LANGUAGE DeriveFunctor #
# LANGUAGE DeriveGeneric #
# LANGUAGE LambdaCase #
module Verifier.SAW.Name
ModuleName, mkModuleName
, preludeName
, moduleNameText
, moduleNamePieces
, Ident(identModule, identBaseName), identName, mkIdent
, parseIdent
, isIdent
, identText
, identPieces
* NameInfo
, NameInfo(..)
, toShortName
, toAbsoluteName
, moduleIdentToURI
, nameURI
, nameAliases
, VarIndex
, ExtCns(..)
, scFreshNameURI
, PrimName(..)
, primNameToExtCns
, SAWNamingEnv(..)
, emptySAWNamingEnv
, registerName
, resolveURI
, resolveName
, bestAlias
) where
import Control.Exception (assert)
import Data.Char
import Data.Hashable
import Data.List.NonEmpty (NonEmpty(..))
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe
import Data.Set (Set)
import qualified Data.Set as Set
import Data.String (IsString(..))
import Data.Text (Text)
import qualified Data.Text as Text
import Data.Word
import GHC.Generics (Generic)
import Text.URI
import qualified Language.Haskell.TH.Syntax as TH
import Verifier.SAW.Utils (panic, internalError)
newtype ModuleName = ModuleName Text
deriving (Eq, Ord, Generic, TH.Lift)
instance Show ModuleName where
show (ModuleName s) = Text.unpack s
moduleNameText :: ModuleName -> Text
moduleNameText (ModuleName x) = x
moduleNamePieces :: ModuleName -> [Text]
moduleNamePieces (ModuleName x) = Text.splitOn (Text.pack ".") x
module name given first .
mkModuleName :: [Text] -> ModuleName
mkModuleName [] = error "internal: mkModuleName given empty module name"
mkModuleName nms = assert (all (isCtor . Text.unpack) nms) $ ModuleName s
where s = Text.intercalate "." (reverse nms)
preludeName :: ModuleName
preludeName = mkModuleName ["Prelude"]
data Ident =
Ident
{ identModule :: ModuleName
, identBaseName :: Text
}
deriving (Eq, Ord, Generic)
instance Show Ident where
show (Ident m s) = shows m ('.' : Text.unpack s)
identText :: Ident -> Text
identText i = moduleNameText (identModule i) <> Text.pack "." <> identBaseName i
identPieces :: Ident -> NonEmpty Text
identPieces i =
case moduleNamePieces (identModule i) of
[] -> identBaseName i :| []
(x:xs) -> x :| (xs ++ [identBaseName i])
identName :: Ident -> String
identName = Text.unpack . identBaseName
instance Read Ident where
readsPrec _ str =
let (str1, str2) = break (not . isIdChar) str in
[(parseIdent str1, str2)]
mkIdent :: ModuleName -> Text -> Ident
mkIdent m s = Ident m s
parseIdent :: String -> Ident
parseIdent s0 =
case reverse (breakEach s0) of
(_:[]) -> internalError $ "parseIdent given empty module name."
(nm:rMod) -> mkIdent (mkModuleName (reverse rMod)) nm
_ -> internalError $ "parseIdent given bad identifier " ++ show s0
where breakEach s =
case break (=='.') s of
(h, []) -> [Text.pack h]
(h, _ : r) -> Text.pack h : breakEach r
instance IsString Ident where
fromString = parseIdent
isIdent :: String -> Bool
isIdent (c:l) = isAlpha c && all isIdChar l
isIdent [] = False
isCtor :: String -> Bool
isCtor (c:l) = isUpper c && all isIdChar l
isCtor [] = False
isIdChar :: Char -> Bool
isIdChar c = isAlphaNum c || (c == '_') || (c == '\'') || (c == '.')
NameInfo
data NameInfo
ModuleIdentifier Ident
ImportedName
deriving (Eq,Ord,Show)
nameURI :: NameInfo -> URI
nameURI =
\case
ModuleIdentifier i -> moduleIdentToURI i
ImportedName uri _ -> uri
nameAliases :: NameInfo -> [Text]
nameAliases =
\case
ModuleIdentifier i -> [identBaseName i, identText i]
ImportedName _ aliases -> aliases
toShortName :: NameInfo -> Text
toShortName (ModuleIdentifier i) = identBaseName i
toShortName (ImportedName uri []) = render uri
toShortName (ImportedName _ (x:_)) = x
toAbsoluteName :: NameInfo -> Text
toAbsoluteName (ModuleIdentifier i) = identText i
toAbsoluteName (ImportedName uri _) = render uri
moduleIdentToURI :: Ident -> URI
moduleIdentToURI ident = fromMaybe (panic "moduleIdentToURI" ["Failed to construct ident URI", show ident]) $
do sch <- mkScheme "sawcore"
path <- mapM mkPathPiece (identPieces ident)
pure URI
{ uriScheme = Just sch
, uriPath = Just (False, path)
, uriQuery = []
, uriFragment = Nothing
}
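-- Editor's note (illustrative, not in the original source): for an identifier
-- such as @parseIdent "Prelude.Nat"@ this produces a URI with scheme "sawcore"
-- whose path pieces are ["Prelude", "Nat"], mirroring 'identPieces'.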
type VarIndex = Word64
data ExtCns e =
EC
{ ecVarIndex :: !VarIndex
, ecName :: !NameInfo
, ecType :: !e
}
deriving (Show, Functor, Foldable, Traversable)
instance Eq (ExtCns e) where
x == y = ecVarIndex x == ecVarIndex y
instance Ord (ExtCns e) where
compare x y = compare (ecVarIndex x) (ecVarIndex y)
instance Hashable (ExtCns e) where
hashWithSalt x ec = hashWithSalt x (ecVarIndex ec)
data PrimName e =
PrimName
{ primVarIndex :: !VarIndex
, primName :: !Ident
, primType :: e
}
deriving (Show, Functor, Foldable, Traversable)
instance Eq (PrimName e) where
x == y = primVarIndex x == primVarIndex y
instance Ord (PrimName e) where
compare x y = compare (primVarIndex x) (primVarIndex y)
instance Hashable (PrimName e) where
hashWithSalt x pn = hashWithSalt x (primVarIndex pn)
primNameToExtCns :: PrimName e -> ExtCns e
primNameToExtCns (PrimName varIdx nm tp) = EC varIdx (ModuleIdentifier nm) tp
scFreshNameURI :: Text -> VarIndex -> URI
scFreshNameURI nm i = fromMaybe (panic "scFreshNameURI" ["Failed to construct name URI", show nm, show i]) $
do sch <- mkScheme "fresh"
nm' <- mkPathPiece (if Text.null nm then "_" else nm)
i' <- mkFragment (Text.pack (show i))
pure URI
{ uriScheme = Just sch
, uriPath = Just (False, (nm' :| []))
, uriQuery = []
, uriFragment = Just i'
}
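-- Editor's note (illustrative, not in the original source): fresh names are
-- identified by a "fresh"-scheme URI whose single path piece is the textual
-- name (or "_" when the name is empty) and whose fragment is the 'VarIndex'.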
data SAWNamingEnv = SAWNamingEnv
{ resolvedNames :: !(Map VarIndex NameInfo)
, absoluteNames :: !(Map URI VarIndex)
, aliasNames :: !(Map Text (Set VarIndex))
}
-- Invariant: the 'resolvedNames' and 'absoluteNames' maps should be
-- inverses of each other. That is, 'resolvedNames' maps @i@ to @n@ if
-- and only if 'absoluteNames' maps @nameURI n@ to @i@. Also, every
-- 'VarIndex' appearing in 'aliasNames' must be present as a key in
-- 'resolvedNames'.
emptySAWNamingEnv :: SAWNamingEnv
emptySAWNamingEnv = SAWNamingEnv mempty mempty mempty
-- | Add a new name entry in a 'SAWNamingEnv'. Returns 'Left' if
-- there is already an entry under the same URI.
registerName :: VarIndex -> NameInfo -> SAWNamingEnv -> Either URI SAWNamingEnv
registerName i nmi env =
case Map.lookup uri (absoluteNames env) of
Just _ -> Left uri
Nothing ->
Right $
SAWNamingEnv
{ resolvedNames = Map.insert i nmi (resolvedNames env)
, absoluteNames = Map.insert uri i (absoluteNames env)
, aliasNames = foldr insertAlias (aliasNames env) aliases
}
where
uri = nameURI nmi
aliases = render uri : nameAliases nmi
insertAlias :: Text -> Map Text (Set VarIndex) -> Map Text (Set VarIndex)
insertAlias x m = Map.insertWith Set.union x (Set.singleton i) m
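-- Editor's sketch (illustrative, not part of the original source): after a
-- successful registration the URI resolves back to the same index, e.g.
--   case registerName i nmi env of
--     Right env' -> resolveURI env' (nameURI nmi)  -- == Just i
--     Left uri   -> ...                            -- URI was already registered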
resolveURI :: SAWNamingEnv -> URI -> Maybe VarIndex
resolveURI env uri = Map.lookup uri (absoluteNames env)
resolveName :: SAWNamingEnv -> Text -> [(VarIndex, NameInfo)]
resolveName env nm =
case Map.lookup nm (aliasNames env) of
Nothing -> []
Just vs -> [ (v, findName v (resolvedNames env)) | v <- Set.toList vs ]
where
findName v m =
case Map.lookup v m of
Just nmi -> nmi
Nothing -> panic "resolveName" ["Unbound VarIndex when resolving name", show nm, show v]
-- | Return the first alias (according to 'nameAliases') that is
-- unambiguous in the naming environment. If there is no unambiguous
-- alias, then return the URI.
bestAlias :: SAWNamingEnv -> NameInfo -> Either URI Text
bestAlias env nmi = go (nameAliases nmi)
where
go [] = Left (nameURI nmi)
go (x : xs) =
case Map.lookup x (aliasNames env) of
Nothing -> go xs
Just vs
| Set.size vs == 1 -> Right x
| otherwise -> go xs
|
8dbc3a76e39b41beb2512eec47b3ab2e9d860d2687cb2bcb77c22777ab737958 | realworldocaml/book | parametric.ml | (******************************************************************************)
(* *)
(* First we duplicate some of the examples from test.mlt, but changing their *)
(* use of fresh abstract types for type parameters. *)
(* *)
(******************************************************************************)
module Basic_record = struct
module V1 = struct
type ('a, 'b, 'c, 'd) t =
{ a : 'a
; b1 : 'b
; c : 'c
; d : 'd
}
end
module V2 = struct
type ('a, 'b, 'c, 'd) t =
{ a : 'a
; b2 : 'b
; c : 'c
; d : 'd * 'a
}
[@@deriving
stable_record
~version:[%stable: ('a, 'b, 'c, 'd) V1.t]
~add:[ b1 ]
~remove:[ b2 ]
~modify:[ c ]
~set:[ d ]]
end
end
module Basic_variant = struct
module V1 = struct
type ('a, 'b, 'c, 'd, 'e, 'f, 'j, 'k, 'l) t =
| I0
| I1 of 'a
| I2 of 'b * 'c
| X1
| X2 of 'j
| X3 of 'k * 'l
| Z1 of 'd * 'e
| Z2 of 'f
| Z3
[@@deriving_inline stable_variant]
include struct
[@@@ocaml.warning "-60"]
let _ = fun (_ : ('a, 'b, 'c, 'd, 'e, 'f, 'j, 'k, 'l) t) -> ()
module Stable_variant = struct
module Helper = struct
let map
~i0:i0_fun
~i1:i1_fun
~i2:i2_fun
~x1:x1_fun
~x2:x2_fun
~x3:x3_fun
~z1:z1_fun
~z2:z2_fun
~z3:z3_fun
= function
| I0 -> i0_fun ()
| I1 v0 -> i1_fun v0
| I2 (v0, v1) -> i2_fun v0 v1
| X1 -> x1_fun ()
| X2 v0 -> x2_fun v0
| X3 (v0, v1) -> x3_fun v0 v1
| Z1 (v0, v1) -> z1_fun v0 v1
| Z2 v0 -> z2_fun v0
| Z3 -> z3_fun ()
;;
let _ = map
end
end
end [@@ocaml.doc "@inline"]
[@@@end]
end
module V2 = struct
type ('a, 'b, 'c, 'd, 'e, 'f, 'g, 'h, 'i) t =
| I0
| I1 of 'a
| I2 of 'b * 'c
| Y1
| Y2 of 'g
| Y3 of 'h * 'i
| Z1
| Z2 of 'f
| Z3 of 'd * 'e
[@@deriving
stable_variant
~version:[%stable: ('a, 'b, 'c, 'd, 'e, 'f, 'g, 'h, 'i) V1.t]
~remove:[ Y1; Y2; Y3 ]
~add:[ X1; X2; X3 ]
~modify:[ Z1; Z2; Z3 ]]
end
end
(******************************************************************************)
(* *)
(* Now we add some more interesting examples *)
(* *)
(******************************************************************************)
(******************)
(* First: records *)
(******************)
module Add_type_parameter_record = struct
module V1 = struct
type t = { value : int }
end
module V2 = struct
type 'a t =
{ value : int
; stuff : 'a
}
[@@deriving_inline stable_record ~version:V1.t ~remove:[ stuff ]]
let _ = fun (_ : 'a t) -> ()
let to_V1_t (_t : 'a t) =
let ({ stuff = _; value } : 'a t) = _t in
({ value } : V1.t)
;;
let _ = to_V1_t
let of_V1_t (_t : V1.t) ~stuff =
let ({ value } : V1.t) = _t in
({ stuff; value } : 'a t)
;;
let _ = of_V1_t
[@@@end]
end
end
module Change_type_parameter_record = struct
module V1 = struct
type 'a t = { lst : 'a list }
end
module V2 = struct
type ('a, 'b) t = { pair_lst : ('a * 'b) list }
[@@deriving
stable_record ~version:[%stable: 'a V1.t] ~add:[ lst ] ~remove:[ pair_lst ]]
end
end
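(* Editor's note: illustrative only, not part of the original test file. With
   ~add:[ lst ] and ~remove:[ pair_lst ], the generated [to_V1_t] expects the
   added field as a labelled argument (~lst) and [of_V1_t] expects ~pair_lst,
   mirroring the [@@deriving_inline] expansions shown elsewhere in this file. *)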
(******************)
(* Then: variants *)
(******************)
module Add_type_parameter_variant = struct
module V1 = struct
type t = Int of int [@@deriving_inline stable_variant]
include struct
[@@@ocaml.warning "-60"]
let _ = fun (_ : t) -> ()
module Stable_variant = struct
module Helper = struct
let map ~int:int_fun = function
| Int v0 -> int_fun v0
;;
let _ = map
end
end
end [@@ocaml.doc "@inline"]
[@@@end]
end
module V2 = struct
type 'a t =
| Int of int
| Otherwise of 'a
[@@deriving_inline stable_variant ~version:V1.t ~remove:[ Otherwise ]]
include struct
[@@@ocaml.warning "-60"]
let _ = fun (_ : 'a t) -> ()
module Stable_variant = struct
module Helper = struct
let map ~int:int_fun ~otherwise:otherwise_fun = function
| Int v0 -> int_fun v0
| Otherwise v0 -> otherwise_fun v0
;;
let _ = map
end
end
let to_V1_t ~remove_Otherwise (v : 'a t) : V1.t =
Stable_variant.Helper.map v ~int:(fun v0 -> V1.Int v0) ~otherwise:remove_Otherwise
;;
let _ = to_V1_t
let of_V1_t (v : V1.t) : 'a t =
V1.Stable_variant.Helper.map v ~int:(fun v0 -> Int v0)
;;
let _ = of_V1_t
end [@@ocaml.doc "@inline"]
[@@@end]
end
end
module Change_type_parameter_variant = struct
module V1 = struct
type 'a t = Foo of 'a [@@deriving_inline stable_variant]
include struct
[@@@ocaml.warning "-60"]
let _ = fun (_ : 'a t) -> ()
module Stable_variant = struct
module Helper = struct
let map ~foo:foo_fun = function
| Foo v0 -> foo_fun v0
;;
let _ = map
end
end
end [@@ocaml.doc "@inline"]
[@@@end]
end
module V2 = struct
type ('a, 'b) t =
| Foo of 'a * int
| Bar of 'b
[@@deriving
stable_variant ~version:[%stable: ('a * int) V1.t] ~modify:[ Foo ] ~remove:[ Bar ]]
end
end
module Change_type_parameter_variations = struct
module V1 = struct
type 'a t = { foo : 'a }
end
module V2a = struct
type 'b t = { foo : 'b }
[@@deriving_inline stable_record ~version:[%stable: 'x V1.t] ~modify:[ foo ]]
let _ = fun (_ : 'b t) -> ()
let to_V1_t (_t : 'b t) ~modify_foo =
let ({ foo } : 'b t) = _t in
({ foo = modify_foo foo } : 'x V1.t)
;;
let _ = to_V1_t
let of_V1_t (_t : 'x V1.t) ~modify_foo =
let ({ foo } : 'x V1.t) = _t in
({ foo = modify_foo foo } : 'b t)
;;
let _ = of_V1_t
[@@@end]
end
module V2b = struct
type 'b t = { foo : 'b }
[@@deriving_inline stable_record ~version:[%stable: 'b V1.t] ~modify:[ foo ]]
let _ = fun (_ : 'b t) -> ()
let to_V1_t (_t : 'b t) ~modify_foo =
let ({ foo } : 'b t) = _t in
({ foo = modify_foo foo } : 'b V1.t)
;;
let _ = to_V1_t
let of_V1_t (_t : 'b V1.t) ~modify_foo =
let ({ foo } : 'b V1.t) = _t in
({ foo = modify_foo foo } : 'b t)
;;
let _ = of_V1_t
[@@@end]
end
module V2c = struct
type 'b t = { foo : 'b }
[@@deriving_inline stable_record ~version:[%stable: _ V1.t] ~modify:[ foo ]]
let _ = fun (_ : 'b t) -> ()
let to_V1_t (_t : 'b t) ~modify_foo =
let ({ foo } : 'b t) = _t in
({ foo = modify_foo foo } : _ V1.t)
;;
let _ = to_V1_t
let of_V1_t (_t : _ V1.t) ~modify_foo =
let ({ foo } : _ V1.t) = _t in
({ foo = modify_foo foo } : 'b t)
;;
let _ = of_V1_t
[@@@end]
end
module V2d = struct
type 'b t = { foo : 'b }
[@@deriving_inline
stable_record ~version:[%stable: (int * string) V1.t] ~modify:[ foo ]]
let _ = fun (_ : 'b t) -> ()
let to_V1_t (_t : 'b t) ~modify_foo =
let ({ foo } : 'b t) = _t in
({ foo = modify_foo foo } : (int * string) V1.t)
;;
let _ = to_V1_t
let of_V1_t (_t : (int * string) V1.t) ~modify_foo =
let ({ foo } : (int * string) V1.t) = _t in
({ foo = modify_foo foo } : 'b t)
;;
let _ = of_V1_t
[@@@end]
end
end
| null | https://raw.githubusercontent.com/realworldocaml/book/d822fd065f19dbb6324bf83e0143bc73fd77dbf9/duniverse/ppx_stable/tests/parametric.ml | ocaml | ****************************************************************************
First we duplicate some of the examples from test.mlt, but changing their
use of fresh abstract types for type parameters.
****************************************************************************
****************************************************************************
Now we add some more interesting examples
****************************************************************************
****************
****************
****************
Then: variants
**************** |
module Basic_record = struct
module V1 = struct
type ('a, 'b, 'c, 'd) t =
{ a : 'a
; b1 : 'b
; c : 'c
; d : 'd
}
end
module V2 = struct
type ('a, 'b, 'c, 'd) t =
{ a : 'a
; b2 : 'b
; c : 'c
; d : 'd * 'a
}
[@@deriving
stable_record
~version:[%stable: ('a, 'b, 'c, 'd) V1.t]
~add:[ b1 ]
~remove:[ b2 ]
~modify:[ c ]
~set:[ d ]]
end
end
module Basic_variant = struct
module V1 = struct
type ('a, 'b, 'c, 'd, 'e, 'f, 'j, 'k, 'l) t =
| I0
| I1 of 'a
| I2 of 'b * 'c
| X1
| X2 of 'j
| X3 of 'k * 'l
| Z1 of 'd * 'e
| Z2 of 'f
| Z3
[@@deriving_inline stable_variant]
include struct
[@@@ocaml.warning "-60"]
let _ = fun (_ : ('a, 'b, 'c, 'd, 'e, 'f, 'j, 'k, 'l) t) -> ()
module Stable_variant = struct
module Helper = struct
let map
~i0:i0_fun
~i1:i1_fun
~i2:i2_fun
~x1:x1_fun
~x2:x2_fun
~x3:x3_fun
~z1:z1_fun
~z2:z2_fun
~z3:z3_fun
= function
| I0 -> i0_fun ()
| I1 v0 -> i1_fun v0
| I2 (v0, v1) -> i2_fun v0 v1
| X1 -> x1_fun ()
| X2 v0 -> x2_fun v0
| X3 (v0, v1) -> x3_fun v0 v1
| Z1 (v0, v1) -> z1_fun v0 v1
| Z2 v0 -> z2_fun v0
| Z3 -> z3_fun ()
;;
let _ = map
end
end
end [@@ocaml.doc "@inline"]
[@@@end]
end
module V2 = struct
type ('a, 'b, 'c, 'd, 'e, 'f, 'g, 'h, 'i) t =
| I0
| I1 of 'a
| I2 of 'b * 'c
| Y1
| Y2 of 'g
| Y3 of 'h * 'i
| Z1
| Z2 of 'f
| Z3 of 'd * 'e
[@@deriving
stable_variant
~version:[%stable: ('a, 'b, 'c, 'd, 'e, 'f, 'g, 'h, 'i) V1.t]
~remove:[ Y1; Y2; Y3 ]
~add:[ X1; X2; X3 ]
~modify:[ Z1; Z2; Z3 ]]
end
end
module Add_type_parameter_record = struct
module V1 = struct
type t = { value : int }
end
module V2 = struct
type 'a t =
{ value : int
; stuff : 'a
}
[@@deriving_inline stable_record ~version:V1.t ~remove:[ stuff ]]
let _ = fun (_ : 'a t) -> ()
let to_V1_t (_t : 'a t) =
let ({ stuff = _; value } : 'a t) = _t in
({ value } : V1.t)
;;
let _ = to_V1_t
let of_V1_t (_t : V1.t) ~stuff =
let ({ value } : V1.t) = _t in
({ stuff; value } : 'a t)
;;
let _ = of_V1_t
[@@@end]
end
end
module Change_type_parameter_record = struct
module V1 = struct
type 'a t = { lst : 'a list }
end
module V2 = struct
type ('a, 'b) t = { pair_lst : ('a * 'b) list }
[@@deriving
stable_record ~version:[%stable: 'a V1.t] ~add:[ lst ] ~remove:[ pair_lst ]]
end
end
module Add_type_parameter_variant = struct
module V1 = struct
type t = Int of int [@@deriving_inline stable_variant]
include struct
[@@@ocaml.warning "-60"]
let _ = fun (_ : t) -> ()
module Stable_variant = struct
module Helper = struct
let map ~int:int_fun = function
| Int v0 -> int_fun v0
;;
let _ = map
end
end
end [@@ocaml.doc "@inline"]
[@@@end]
end
module V2 = struct
type 'a t =
| Int of int
| Otherwise of 'a
[@@deriving_inline stable_variant ~version:V1.t ~remove:[ Otherwise ]]
include struct
[@@@ocaml.warning "-60"]
let _ = fun (_ : 'a t) -> ()
module Stable_variant = struct
module Helper = struct
let map ~int:int_fun ~otherwise:otherwise_fun = function
| Int v0 -> int_fun v0
| Otherwise v0 -> otherwise_fun v0
;;
let _ = map
end
end
let to_V1_t ~remove_Otherwise (v : 'a t) : V1.t =
Stable_variant.Helper.map v ~int:(fun v0 -> V1.Int v0) ~otherwise:remove_Otherwise
;;
let _ = to_V1_t
let of_V1_t (v : V1.t) : 'a t =
V1.Stable_variant.Helper.map v ~int:(fun v0 -> Int v0)
;;
let _ = of_V1_t
end [@@ocaml.doc "@inline"]
[@@@end]
end
end
module Change_type_parameter_variant = struct
module V1 = struct
type 'a t = Foo of 'a [@@deriving_inline stable_variant]
include struct
[@@@ocaml.warning "-60"]
let _ = fun (_ : 'a t) -> ()
module Stable_variant = struct
module Helper = struct
let map ~foo:foo_fun = function
| Foo v0 -> foo_fun v0
;;
let _ = map
end
end
end [@@ocaml.doc "@inline"]
[@@@end]
end
module V2 = struct
type ('a, 'b) t =
| Foo of 'a * int
| Bar of 'b
[@@deriving
stable_variant ~version:[%stable: ('a * int) V1.t] ~modify:[ Foo ] ~remove:[ Bar ]]
end
end
module Change_type_parameter_variations = struct
module V1 = struct
type 'a t = { foo : 'a }
end
module V2a = struct
type 'b t = { foo : 'b }
[@@deriving_inline stable_record ~version:[%stable: 'x V1.t] ~modify:[ foo ]]
let _ = fun (_ : 'b t) -> ()
let to_V1_t (_t : 'b t) ~modify_foo =
let ({ foo } : 'b t) = _t in
({ foo = modify_foo foo } : 'x V1.t)
;;
let _ = to_V1_t
let of_V1_t (_t : 'x V1.t) ~modify_foo =
let ({ foo } : 'x V1.t) = _t in
({ foo = modify_foo foo } : 'b t)
;;
let _ = of_V1_t
[@@@end]
end
module V2b = struct
type 'b t = { foo : 'b }
[@@deriving_inline stable_record ~version:[%stable: 'b V1.t] ~modify:[ foo ]]
let _ = fun (_ : 'b t) -> ()
let to_V1_t (_t : 'b t) ~modify_foo =
let ({ foo } : 'b t) = _t in
({ foo = modify_foo foo } : 'b V1.t)
;;
let _ = to_V1_t
let of_V1_t (_t : 'b V1.t) ~modify_foo =
let ({ foo } : 'b V1.t) = _t in
({ foo = modify_foo foo } : 'b t)
;;
let _ = of_V1_t
[@@@end]
end
module V2c = struct
type 'b t = { foo : 'b }
[@@deriving_inline stable_record ~version:[%stable: _ V1.t] ~modify:[ foo ]]
let _ = fun (_ : 'b t) -> ()
let to_V1_t (_t : 'b t) ~modify_foo =
let ({ foo } : 'b t) = _t in
({ foo = modify_foo foo } : _ V1.t)
;;
let _ = to_V1_t
let of_V1_t (_t : _ V1.t) ~modify_foo =
let ({ foo } : _ V1.t) = _t in
({ foo = modify_foo foo } : 'b t)
;;
let _ = of_V1_t
[@@@end]
end
module V2d = struct
type 'b t = { foo : 'b }
[@@deriving_inline
stable_record ~version:[%stable: (int * string) V1.t] ~modify:[ foo ]]
let _ = fun (_ : 'b t) -> ()
let to_V1_t (_t : 'b t) ~modify_foo =
let ({ foo } : 'b t) = _t in
({ foo = modify_foo foo } : (int * string) V1.t)
;;
let _ = to_V1_t
let of_V1_t (_t : (int * string) V1.t) ~modify_foo =
let ({ foo } : (int * string) V1.t) = _t in
({ foo = modify_foo foo } : 'b t)
;;
let _ = of_V1_t
[@@@end]
end
end
|
4c158f2579f9bc12155310da77f1776d16f820a985e4af89024766e7ed9d2b81 | facebook/flow | trace.mli |
(*
 * Copyright (c) Meta Platforms, Inc. and affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 *)
val compare : Type.trace -> Type.trace -> int
val trace_depth : Type.trace -> int
val unit_trace : Type.t -> Type.use_t -> Type.trace
val rec_trace : max:int -> Type.t -> Type.use_t -> Type.trace -> Type.trace
val concat_trace : max:int -> Type.trace list -> Type.trace
val dummy_trace : Type.trace
val reasons_of_trace : ?level:int -> Type.trace -> Reason.reason list
| null | https://raw.githubusercontent.com/facebook/flow/741104e69c43057ebd32804dd6bcc1b5e97548ea/src/typing/trace.mli | ocaml |
* Copyright ( c ) Meta Platforms , Inc. and affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
val compare : Type.trace -> Type.trace -> int
val trace_depth : Type.trace -> int
val unit_trace : Type.t -> Type.use_t -> Type.trace
val rec_trace : max:int -> Type.t -> Type.use_t -> Type.trace -> Type.trace
val concat_trace : max:int -> Type.trace list -> Type.trace
val dummy_trace : Type.trace
val reasons_of_trace : ?level:int -> Type.trace -> Reason.reason list
|
|
e94aee940a165daabd0737cb54fa1a53ed216a3b221210c6fcff2884a3231090 | david-broman/modelyze | utils.ml |
module IntSet = Set.Make(
struct
let compare = Stdlib.compare
type t = int
end)
type intset = IntSet.t
(* Returns the last element *)
let rec last xs =
match xs with
| [] -> raise (Invalid_argument "Utils.last")
| [x] -> x
| _::xs -> last xs
let findindex x l =
let rec findidx l c =
match l with
| [] -> raise Not_found
| y::ys -> if x = y then c else findidx ys (c+1)
in findidx l 0
let find_associndex x l =
let rec findidx l c =
match l with
| [] -> raise Not_found
| (y,v)::ys -> if x = y then (v,c) else findidx ys (c+1)
in findidx l 0
let (<|) f x = f x
let (>>) f g x = g(f x)
let map_option f op =
match op with
| Some t -> Some (f t)
| None -> None
let rec map2sc f l1 l2 =
match l1,l2 with
| [],_ -> []
| _,[] -> []
| (x::xs),(y::ys) -> (f x y)::(map2sc f xs ys)
let rec filtermap f ls =
match ls with
| x::xs -> (match f x with
| Some y -> y::(filtermap f xs)
| None -> filtermap f xs)
| [] -> []
let foldmap f k ls =
let rec work f k ls a =
match ls with
| x::xs ->
let (k',x') = f k x in
work f k' xs (x'::a)
| [] -> (k,List.rev a)
in work f k ls []
let rec option_split lst =
match lst with
| (Some x)::xs ->
(match option_split xs with
| Some xs' -> Some (x::xs')
| None -> None)
| (None)::_ -> None
| [] -> Some []
let string_of_intlist il =
let s = Bytes.create (List.length il) in
il |> List.fold_left (fun i x -> (Bytes.set s i (char_of_int x)); i+1) 0 |> ignore;
Bytes.to_string s
let intlist_of_string s =
let rec work n a = if n >= 0
then work (n-1) ((int_of_char (s.[n]))::a) else a in
work (String.length s) []
let write_binfile filename str =
let f = open_out_bin filename in
output_bytes f str;
flush f;
close_out f
let read_binfile filename =
let f = open_in_bin filename in
let size = in_channel_length f in
let s = Bytes.create size in
try
let rec readinput pos size =
let read = input f s pos size in
if read != 0 then readinput (pos+read) (size-read) else ()
in
readinput 0 size;
close_in f;
s
with
| Invalid_argument _ -> raise (Sys_error "Cannot read file")
let rec fold_interval f a s e =
if s = e then (f a s) else fold_interval f (f a s) (s+1) e
let genlist f n =
let rec work i a =
if i >= 0 then work (i-1) ((f (i-1))::a) else a
in work n []
let xor b1 b2 = (b1 || b2) && (not (b1 && b2))
let sign_extension v n =
if ((v lsr (n-1)) land 1) = 0 then v else (-1 lsl n) lor v
type 'a list_zipper =
| ZipLeftEnd of 'a list
| ZipRightEnd of 'a list
| Zipper of 'a list * 'a * 'a list
let list_to_zipper l = ZipLeftEnd l
let list_zipper_right ls = function
| [] -> ZipRightEnd ls
| x :: xs -> Zipper (ls, x, xs)
let list_zip_right = function
| ZipLeftEnd [] -> ZipRightEnd []
| ZipLeftEnd (x :: xs) -> Zipper ([], x, xs)
| ZipRightEnd xs -> ZipRightEnd xs
| Zipper(ls, x, r :: rs) -> Zipper (x :: ls, r, rs)
| Zipper(ls, x, []) -> ZipRightEnd (x :: ls)
let normalize_path p =
let delim = Str.regexp_string Filename.dir_sep in
let rec recur = function
| Zipper (ls, d, rs) when d = Filename.current_dir_name ->
list_zipper_right ls rs |> recur
| Zipper (l :: ls, dd, rs) when dd = Filename.parent_dir_name && l <> dd ->
list_zipper_right ls rs |> recur
| ZipRightEnd xs -> List.rev xs
| zipper -> list_zip_right zipper |> recur
in Str.split_delim delim p
|> list_to_zipper
|> recur
|> String.concat Filename.dir_sep
module Int =
struct
type t = int
let compare = compare
end
| null | https://raw.githubusercontent.com/david-broman/modelyze/e48c934283e683e268a9dfd0fed49d3c10277298/ext/ucamlib/src/utils.ml | ocaml | Returns the last element |
module IntSet = Set.Make(
struct
let compare = Stdlib.compare
type t = int
end)
type intset = IntSet.t
let rec last xs =
match xs with
| [] -> raise (Invalid_argument "Utils.last")
| [x] -> x
| _::xs -> last xs
let findindex x l =
let rec findidx l c =
match l with
| [] -> raise Not_found
| y::ys -> if x = y then c else findidx ys (c+1)
in findidx l 0
let find_associndex x l =
let rec findidx l c =
match l with
| [] -> raise Not_found
| (y,v)::ys -> if x = y then (v,c) else findidx ys (c+1)
in findidx l 0
let (<|) f x = f x
let (>>) f g x = g(f x)
let map_option f op =
match op with
| Some t -> Some (f t)
| None -> None
let rec map2sc f l1 l2 =
match l1,l2 with
| [],_ -> []
| _,[] -> []
| (x::xs),(y::ys) -> (f x y)::(map2sc f xs ys)
let rec filtermap f ls =
match ls with
| x::xs -> (match f x with
| Some y -> y::(filtermap f xs)
| None -> filtermap f xs)
| [] -> []
let foldmap f k ls =
let rec work f k ls a =
match ls with
| x::xs ->
let (k',x') = f k x in
work f k' xs (x'::a)
| [] -> (k,List.rev a)
in work f k ls []
let rec option_split lst =
match lst with
| (Some x)::xs ->
(match option_split xs with
| Some xs' -> Some (x::xs')
| None -> None)
| (None)::_ -> None
| [] -> Some []
let string_of_intlist il =
let s = Bytes.create (List.length il) in
il |> List.fold_left (fun i x -> (Bytes.set s i (char_of_int x)); i+1) 0 |> ignore;
Bytes.to_string s
let intlist_of_string s =
let rec work n a = if n >= 0
then work (n-1) ((int_of_char (s.[n]))::a) else a in
work (String.length s) []
let write_binfile filename str =
let f = open_out_bin filename in
output_bytes f str;
flush f;
close_out f
let read_binfile filename =
let f = open_in_bin filename in
let size = in_channel_length f in
let s = Bytes.create size in
try
let rec readinput pos size =
let read = input f s pos size in
if read != 0 then readinput (pos+read) (size-read) else ()
in
readinput 0 size;
close_in f;
s
with
| Invalid_argument _ -> raise (Sys_error "Cannot read file")
let rec fold_interval f a s e =
if s = e then (f a s) else fold_interval f (f a s) (s+1) e
let genlist f n =
let rec work i a =
if i >= 0 then work (i-1) ((f (i-1))::a) else a
in work n []
let xor b1 b2 = (b1 || b2) && (not (b1 && b2))
let sign_extension v n =
if ((v lsr (n-1)) land 1) = 0 then v else (-1 lsl n) lor v
type 'a list_zipper =
| ZipLeftEnd of 'a list
| ZipRightEnd of 'a list
| Zipper of 'a list * 'a * 'a list
let list_to_zipper l = ZipLeftEnd l
let list_zipper_right ls = function
| [] -> ZipRightEnd ls
| x :: xs -> Zipper (ls, x, xs)
let list_zip_right = function
| ZipLeftEnd [] -> ZipRightEnd []
| ZipLeftEnd (x :: xs) -> Zipper ([], x, xs)
| ZipRightEnd xs -> ZipRightEnd xs
| Zipper(ls, x, r :: rs) -> Zipper (x :: ls, r, rs)
| Zipper(ls, x, []) -> ZipRightEnd (x :: ls)
let normalize_path p =
let delim = Str.regexp_string Filename.dir_sep in
let rec recur = function
| Zipper (ls, d, rs) when d = Filename.current_dir_name ->
list_zipper_right ls rs |> recur
| Zipper (l :: ls, dd, rs) when dd = Filename.parent_dir_name && l <> dd ->
list_zipper_right ls rs |> recur
| ZipRightEnd xs -> List.rev xs
| zipper -> list_zip_right zipper |> recur
in Str.split_delim delim p
|> list_to_zipper
|> recur
|> String.concat Filename.dir_sep
module Int =
struct
type t = int
let compare = compare
end
|
7378930f595cf193285d50cded42275a4082180b92263b1f72203e6563822147 | FreeAndFair/STAR-Vote | BallotStyle.hs | {-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-|
Module      : Application.Star.BallotStyle
Description : Type for ballot templates
The `BallotStyle` type represents a ballot that is waiting to be filled out.
It consists of an identifier and a list of races,
where each race includes a list of candidates.
-}
module Application.Star.BallotStyle where
import Control.Lens
import qualified Data.List as List
import Data.Maybe (fromMaybe)
import Data.SafeCopy (base, deriveSafeCopy)
import Data.Text (Text)
import qualified Data.Text as T
import Data.Typeable
type BallotStyleId = Text
type RaceId = Text
type OptionId = Text
type BallotKey = Text
data Option = Option
{ _oId :: OptionId
, _oName :: Text
, _oImg :: Maybe Text
, _oParty :: Maybe Text
, _oOccupation :: Maybe Text
}
deriving (Show, Typeable)
$(deriveSafeCopy 0 'base ''Option)
$(makeLenses ''Option)
data Race = Race
{ _rDescription :: Text
, _rId :: RaceId
, _rOptions :: [Option]
}
deriving (Show, Typeable)
$(deriveSafeCopy 0 'base ''Race)
$(makeLenses ''Race)
data BallotStyle = BallotStyle
{ _bId :: BallotStyleId
, _bRaces :: [Race]
}
deriving (Show, Typeable)
$(deriveSafeCopy 0 'base ''BallotStyle)
$(makeLenses ''BallotStyle)
type BallotStyles = [(BallotStyle)]
lookup :: BallotStyleId -> BallotStyles -> Maybe BallotStyle
lookup bId styles = safeHead (filter ((== bId) . _bId) styles)
bRace :: RaceId -> BallotStyle -> Maybe Race
bRace rId style = safeHead (filter ((== rId) . _rId) (_bRaces style))
-- | Given a ballot style and a race,
-- returns the next race defined by the ballot style.
nextRace :: BallotStyle -> Race -> Maybe Race
nextRace = incRace 1
-- | Given a ballot style and a race,
-- returns the previous race defined by the ballot style.
prevRace :: BallotStyle -> Race -> Maybe Race
prevRace = incRace (-1)
incRace :: Int -> BallotStyle -> Race -> Maybe Race
incRace n style race = if idx + n < length races && idx + n >= 0 then
Just (races !! (idx + n))
else
Nothing
where
races = view bRaces style
idx = fromMaybe (-1) $ List.findIndex ((== _rId race) . _rId) races
option :: Text -> Race -> Maybe Option
option optId race = safeHead (filter ((== optId) . _oId) (_rOptions race))
-- | Produces a key suitable for uniquely identifying a race in a given election.
-- `Ballot` values use keys produced by this function.
key :: BallotStyle -> Race -> BallotKey
key style race = key' (_bId style) (_rId race)
-- | Variant of `key` that takes a ballot style ID instead of a ballot style
-- value`
key' :: BallotStyleId -> RaceId -> BallotKey
key' bId rId = T.concat [bId, "---", rId]
fromKey :: BallotKey -> Maybe (BallotStyleId, RaceId)
fromKey t = params
where
parts = T.splitOn "---" t
params = case parts of
(bId:rId:_) -> Just (bId, rId)
_ -> Nothing
safeHead :: [a] -> Maybe a
safeHead (x:_) = Just x
safeHead _ = Nothing
| null | https://raw.githubusercontent.com/FreeAndFair/STAR-Vote/2555cbae8794ec6f34889fdabac314ff9f22b437/star-types/src/Application/Star/BallotStyle.hs | haskell | # LANGUAGE DeriveDataTypeable #
# LANGUAGE OverloadedStrings #
# LANGUAGE TemplateHaskell #
| Given a ballot style and a race,
returns the next race defined by the ballot style.
| Given a ballot style and a race,
returns the previous race defined by the ballot style.
| Produces a key suitable for uniquely identifying a race in a given election.
`Ballot` values use keys produced by this function.
| Variant of `key` that takes a ballot style ID instead of a ballot style
value` |
|
Module : Application . Star . BallotStyle
Description : Type for ballot templates
The ` BallotStyle ` type represents a ballot that is waiting to be filled out .
It consists of an identifier and a list of races ,
where each race includes a list of candidates .
Module : Application.Star.BallotStyle
Description : Type for ballot templates
The `BallotStyle` type represents a ballot that is waiting to be filled out.
It consists of an identifier and a list of races,
where each race includes a list of candidates.
-}
module Application.Star.BallotStyle where
import Control.Lens
import qualified Data.List as List
import Data.Maybe (fromMaybe)
import Data.SafeCopy (base, deriveSafeCopy)
import Data.Text (Text)
import qualified Data.Text as T
import Data.Typeable
type BallotStyleId = Text
type RaceId = Text
type OptionId = Text
type BallotKey = Text
data Option = Option
{ _oId :: OptionId
, _oName :: Text
, _oImg :: Maybe Text
, _oParty :: Maybe Text
, _oOccupation :: Maybe Text
}
deriving (Show, Typeable)
$(deriveSafeCopy 0 'base ''Option)
$(makeLenses ''Option)
data Race = Race
{ _rDescription :: Text
, _rId :: RaceId
, _rOptions :: [Option]
}
deriving (Show, Typeable)
$(deriveSafeCopy 0 'base ''Race)
$(makeLenses ''Race)
data BallotStyle = BallotStyle
{ _bId :: BallotStyleId
, _bRaces :: [Race]
}
deriving (Show, Typeable)
$(deriveSafeCopy 0 'base ''BallotStyle)
$(makeLenses ''BallotStyle)
type BallotStyles = [(BallotStyle)]
lookup :: BallotStyleId -> BallotStyles -> Maybe BallotStyle
lookup bId styles = safeHead (filter ((== bId) . _bId) styles)
bRace :: RaceId -> BallotStyle -> Maybe Race
bRace rId style = safeHead (filter ((== rId) . _rId) (_bRaces style))
nextRace :: BallotStyle -> Race -> Maybe Race
nextRace = incRace 1
prevRace :: BallotStyle -> Race -> Maybe Race
prevRace = incRace (-1)
incRace :: Int -> BallotStyle -> Race -> Maybe Race
incRace n style race = if idx + n < length races && idx + n >= 0 then
Just (races !! (idx + n))
else
Nothing
where
races = view bRaces style
idx = fromMaybe (-1) $ List.findIndex ((== _rId race) . _rId) races
option :: Text -> Race -> Maybe Option
option optId race = safeHead (filter ((== optId) . _oId) (_rOptions race))
key :: BallotStyle -> Race -> BallotKey
key style race = key' (_bId style) (_rId race)
key' :: BallotStyleId -> RaceId -> BallotKey
key' bId rId = T.concat [bId, "---", rId]
fromKey :: BallotKey -> Maybe (BallotStyleId, RaceId)
fromKey t = params
where
parts = T.splitOn "---" t
params = case parts of
(bId:rId:_) -> Just (bId, rId)
_ -> Nothing
safeHead :: [a] -> Maybe a
safeHead (x:_) = Just x
safeHead _ = Nothing
|
83bccbac66ab54a8338d85bce7b6f925af0aa3e7ab3070de35b1227eaa553129 | andreas/ocaml-graphql-server | error_test.ml | open Graphql
open Test_common
let suite =
[
( "nullable error",
`Quick,
fun () ->
let schema =
Schema.(
schema
[
io_field "nullable" ~typ:int
~args:Arg.[]
~resolve:(fun _ () -> Error "boom");
])
in
let query = "{ nullable }" in
test_query schema () query
(`Assoc
[
( "errors",
`List
[
`Assoc
[
("message", `String "boom");
("path", `List [ `String "nullable" ]);
];
] );
("data", `Assoc [ ("nullable", `Null) ]);
]) );
( "non-nullable error",
`Quick,
fun () ->
let schema =
Schema.(
schema
[
io_field "non_nullable" ~typ:(non_null int)
~args:Arg.[]
~resolve:(fun _ () -> Error "boom");
])
in
let query = "{ non_nullable }" in
test_query schema () query
(`Assoc
[
( "errors",
`List
[
`Assoc
[
("message", `String "boom");
("path", `List [ `String "non_nullable" ]);
];
] );
("data", `Null);
]) );
( "nested nullable error",
`Quick,
fun () ->
let obj_with_non_nullable_field =
Schema.(
obj "obj" ~fields: [
io_field "non_nullable" ~typ:(non_null int)
~args:Arg.[]
~resolve:(fun _ () -> Error "boom");
])
in
let schema =
Schema.(
schema
[
field "nullable" ~typ:obj_with_non_nullable_field
~args:Arg.[]
~resolve:(fun _ () -> Some ());
])
in
let query = "{ nullable { non_nullable } }" in
test_query schema () query
(`Assoc
[
( "errors",
`List
[
`Assoc
[
("message", `String "boom");
( "path",
`List [ `String "nullable"; `String "non_nullable" ]
);
];
] );
("data", `Assoc [ ("nullable", `Null) ]);
]) );
( "error in list",
`Quick,
fun () ->
let foo =
Schema.(
obj "Foo" ~fields:[
io_field "id" ~typ:int
~args:Arg.[]
~resolve:(fun _ (id, should_fail) ->
if should_fail then Error "boom" else Ok (Some id));
])
in
let schema =
Schema.(
schema
[
field "foos"
~typ:(non_null (list (non_null foo)))
~args:Arg.[]
~resolve:(fun _ () -> [ (0, false); (1, false); (2, true) ]);
])
in
let query = "{ foos { id } }" in
test_query schema () query
(`Assoc
[
( "errors",
`List
[
`Assoc
[
("message", `String "boom");
("path", `List [ `String "foos"; `Int 2; `String "id" ]);
];
] );
( "data",
`Assoc
[
( "foos",
`List
[
`Assoc [ ("id", `Int 0) ];
`Assoc [ ("id", `Int 1) ];
`Assoc [ ("id", `Null) ];
] );
] );
]) );
]
| null | https://raw.githubusercontent.com/andreas/ocaml-graphql-server/d615cbb164d4ddfdc2efeb246a198dfe114adf24/graphql/test/error_test.ml | ocaml | open Graphql
open Test_common
let suite =
[
( "nullable error",
`Quick,
fun () ->
let schema =
Schema.(
schema
[
io_field "nullable" ~typ:int
~args:Arg.[]
~resolve:(fun _ () -> Error "boom");
])
in
let query = "{ nullable }" in
test_query schema () query
(`Assoc
[
( "errors",
`List
[
`Assoc
[
("message", `String "boom");
("path", `List [ `String "nullable" ]);
];
] );
("data", `Assoc [ ("nullable", `Null) ]);
]) );
( "non-nullable error",
`Quick,
fun () ->
let schema =
Schema.(
schema
[
io_field "non_nullable" ~typ:(non_null int)
~args:Arg.[]
~resolve:(fun _ () -> Error "boom");
])
in
let query = "{ non_nullable }" in
test_query schema () query
(`Assoc
[
( "errors",
`List
[
`Assoc
[
("message", `String "boom");
("path", `List [ `String "non_nullable" ]);
];
] );
("data", `Null);
]) );
( "nested nullable error",
`Quick,
fun () ->
let obj_with_non_nullable_field =
Schema.(
obj "obj" ~fields: [
io_field "non_nullable" ~typ:(non_null int)
~args:Arg.[]
~resolve:(fun _ () -> Error "boom");
])
in
let schema =
Schema.(
schema
[
field "nullable" ~typ:obj_with_non_nullable_field
~args:Arg.[]
~resolve:(fun _ () -> Some ());
])
in
let query = "{ nullable { non_nullable } }" in
test_query schema () query
(`Assoc
[
( "errors",
`List
[
`Assoc
[
("message", `String "boom");
( "path",
`List [ `String "nullable"; `String "non_nullable" ]
);
];
] );
("data", `Assoc [ ("nullable", `Null) ]);
]) );
( "error in list",
`Quick,
fun () ->
let foo =
Schema.(
obj "Foo" ~fields:[
io_field "id" ~typ:int
~args:Arg.[]
~resolve:(fun _ (id, should_fail) ->
if should_fail then Error "boom" else Ok (Some id));
])
in
let schema =
Schema.(
schema
[
field "foos"
~typ:(non_null (list (non_null foo)))
~args:Arg.[]
~resolve:(fun _ () -> [ (0, false); (1, false); (2, true) ]);
])
in
let query = "{ foos { id } }" in
test_query schema () query
(`Assoc
[
( "errors",
`List
[
`Assoc
[
("message", `String "boom");
("path", `List [ `String "foos"; `Int 2; `String "id" ]);
];
] );
( "data",
`Assoc
[
( "foos",
`List
[
`Assoc [ ("id", `Int 0) ];
`Assoc [ ("id", `Int 1) ];
`Assoc [ ("id", `Null) ];
] );
] );
]) );
]
|
|
518c4d04a369ecd3b2b56aa14c86b96b9202a1aed8d45c12bfb16179d13cb33e | jellelicht/guix | lego.scm | ;;; GNU Guix --- Functional package management for GNU
Copyright © 2016 < >
;;;
;;; This file is part of GNU Guix.
;;;
GNU is free software ; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 3 of the License , or ( at
;;; your option) any later version.
;;;
;;; GNU Guix is distributed in the hope that it will be useful, but
;;; WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU General Public License for more details.
;;;
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(define-module (gnu packages lego)
#:use-module ((guix licenses) #:prefix license:)
#:use-module (guix download)
#:use-module (guix packages)
#:use-module (guix build-system gnu)
#:use-module (gnu packages)
#:use-module (gnu packages bison)
#:use-module (gnu packages flex))
(define-public nqc
(package
(name "nqc")
(version "3.1.r6")
(source (origin
(method url-fetch)
(uri (string-append "/"
"nqc-" version ".tgz"))
(sha256
(base32
"0rp7pzr8xrdxpv75c2mi8zszzz2ypli4vvzxiic7mbrryrafdmdz"))))
(build-system gnu-build-system)
(native-inputs
`(("bison" ,bison)
("flex" ,flex)))
(arguments
'(#:tests? #f ;no tests
#:make-flags (list (string-append "PREFIX=" %output))
#:phases (modify-phases %standard-phases
(delete 'configure)
(add-before 'build 'rm-generated
;; Regenerating compiler/lexer.cpp avoids an 'undefined
;; reference to `isatty(int)'' error.
(lambda _
(for-each delete-file
'("compiler/lexer.cpp"
"compiler/parse.cpp"))
#t))
(add-after 'unpack 'deal-with-tarbomb
(lambda _
tarbomb
#t)))))
(home-page "/")
(synopsis "C-like language for Lego's MINDSTORMS")
(description
"Not Quite C (NQC) is a simple language for programming several Lego
MINDSTORMS products. The preprocessor and control structures of NQC are very
similar to C. NQC is not a general purpose language -- there are many
restrictions that stem from limitations of the standard RCX firmware.")
(license license:mpl1.0)))
| null | https://raw.githubusercontent.com/jellelicht/guix/83cfc9414fca3ab57c949e18c1ceb375a179b59c/gnu/packages/lego.scm | scheme | GNU Guix --- Functional package management for GNU
This file is part of GNU Guix.
you can redistribute it and/or modify it
either version 3 of the License , or ( at
your option) any later version.
GNU Guix is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
no tests
Regenerating compiler/lexer.cpp avoids an 'undefined
reference to `isatty(int)'' error. | Copyright © 2016 < >
under the terms of the GNU General Public License as published by
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(define-module (gnu packages lego)
#:use-module ((guix licenses) #:prefix license:)
#:use-module (guix download)
#:use-module (guix packages)
#:use-module (guix build-system gnu)
#:use-module (gnu packages)
#:use-module (gnu packages bison)
#:use-module (gnu packages flex))
(define-public nqc
(package
(name "nqc")
(version "3.1.r6")
(source (origin
(method url-fetch)
(uri (string-append "/"
"nqc-" version ".tgz"))
(sha256
(base32
"0rp7pzr8xrdxpv75c2mi8zszzz2ypli4vvzxiic7mbrryrafdmdz"))))
(build-system gnu-build-system)
(native-inputs
`(("bison" ,bison)
("flex" ,flex)))
(arguments
#:make-flags (list (string-append "PREFIX=" %output))
#:phases (modify-phases %standard-phases
(delete 'configure)
(add-before 'build 'rm-generated
(lambda _
(for-each delete-file
'("compiler/lexer.cpp"
"compiler/parse.cpp"))
#t))
(add-after 'unpack 'deal-with-tarbomb
(lambda _
tarbomb
#t)))))
(home-page "/")
(synopsis "C-like language for Lego's MINDSTORMS")
(description
"Not Quite C (NQC) is a simple language for programming several Lego
MINDSTORMS products. The preprocessor and control structures of NQC are very
similar to C. NQC is not a general purpose language -- there are many
restrictions that stem from limitations of the standard RCX firmware.")
(license license:mpl1.0)))
|
23af304c8f0e45c84f91239955ca37373c5bedf657eaa8555f78e0a0ba19858a | GaloisInc/what4 | Normalize.hs | # LANGUAGE DataKinds #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
{-# LANGUAGE GADTs #-}
# LANGUAGE LambdaCase #
# LANGUAGE MultiParamTypeClasses #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE PolyKinds #
{-# LANGUAGE RankNTypes #-}
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TupleSections #
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
# LANGUAGE TypeOperators #
# LANGUAGE UndecidableInstances #
# LANGUAGE ViewPatterns #
{-# LANGUAGE MultiWayIf #-}
-- | Normalization and equivalence checking for expressions
module What4.Serialize.Normalize
( normSymFn
, normExpr
, testEquivSymFn
, testEquivExpr
, ExprEquivResult(..)
) where
import qualified Data.Parameterized.Context as Ctx
import qualified Data.Parameterized.TraversableFC as FC
import qualified What4.Interface as S
import qualified What4.Expr as S
import qualified What4.Expr.Builder as B
import qualified What4.Expr.WeightedSum as WSum
import Data.Parameterized.Classes
-- | Apply some normalizations to make function call arguments more readable. Examples include:
--
-- * Avoid wrapping single literals in a 'B.SemiRingLiteral' and just represent them as a bare integer literals
-- * Attempt to reduce function calls with constant arguments where possible
normSymFn :: forall sym st fs t args ret. sym ~ B.ExprBuilder t st fs
=> sym
-> B.ExprSymFn t args ret
-> Ctx.Assignment (S.Expr t) args
-> IO (S.Expr t ret)
normSymFn sym symFn argEs = case B.symFnInfo symFn of
B.DefinedFnInfo argBVs expr _ -> do
argEs' <- FC.traverseFC (normExpr sym) argEs
expr' <- B.evalBoundVars sym expr argBVs argEs'
normExpr sym expr'
_ -> S.applySymFn sym symFn argEs
normExpr :: forall sym st fs t tp
. sym ~ B.ExprBuilder t st fs
=> sym
-> B.Expr t tp -> IO (B.Expr t tp)
normExpr sym e = go e
where go :: B.Expr t tp -> IO (B.Expr t tp)
go (B.SemiRingLiteral S.SemiRingIntegerRepr val _) = S.intLit sym val
go (B.AppExpr appExpr) = normAppExpr sym appExpr
go x@(B.NonceAppExpr nae) =
case B.nonceExprApp nae of
B.FnApp fn args -> normSymFn sym fn args
_ -> return x
go x = return x
-- | Normalize an expression by passing it back through the builder
--
-- NOTE: We may want to audit the cases here for completeness
normAppExpr :: forall sym st fs t tp
. sym ~ S.ExprBuilder t st fs
=> sym
-> S.AppExpr t tp
-> IO (S.Expr t tp)
normAppExpr sym ae = do
e' <- go (S.appExprApp ae)
B.sbMakeExpr sym e'
where norm2 :: forall tp' tp'' tp'''
. (S.Expr t tp' -> S.Expr t tp'' -> IO (S.Expr t tp'''))
-> S.Expr t tp' -> S.Expr t tp'' -> IO (S.Expr t tp''')
norm2 f e1 e2 = do
e1' <- normExpr sym e1
e2' <- normExpr sym e2
f e1' e2'
go :: forall tp'. S.App (S.Expr t) tp' -> IO (S.App (S.Expr t) tp')
go (S.BaseIte _ _ test then_ else_) = do
test' <- normExpr sym test
then' <- normExpr sym then_
else' <- normExpr sym else_
Just sm' <- B.asApp <$> S.baseTypeIte sym test' then' else'
return sm'
go x@(S.SemiRingSum sm) =
case WSum.sumRepr sm of
S.SemiRingIntegerRepr -> do
let
smul si i = do
i' <- normExpr sym i
si' <- S.intLit sym si
S.intMul sym si' i'
Just sm' <- B.asApp <$> WSum.evalM (norm2 $ S.intAdd sym) smul (S.intLit sym) sm
return sm'
_ -> return x
go x@(S.SemiRingProd pd) =
case WSum.prodRepr pd of
S.SemiRingIntegerRepr -> do
maybeS <- WSum.prodEvalM (norm2 $ S.intMul sym) return pd
case maybeS of
Just s | Just sm' <- B.asApp s -> return sm'
_ -> return x
_ -> return x
go x@(S.SemiRingLe sr e1 e2) = do
case sr of
S.OrderedSemiRingIntegerRepr -> do
Just sm' <- B.asApp <$> (norm2 $ S.intLe sym) e1 e2
return sm'
_ -> return x
go x = return x
data ExprEquivResult = ExprEquivalent | ExprNormEquivalent | ExprUnequal
testEquivExpr :: forall sym st fs t tp tp'. sym ~ S.ExprBuilder t st fs => sym -> B.Expr t tp -> B.Expr t tp' -> IO (ExprEquivResult)
testEquivExpr sym e1 e2 = case testEquality e1 e2 of
Just Refl -> return ExprEquivalent
_ -> do
e1' <- normExpr sym e1
e2' <- normExpr sym e2
case testEquality e1' e2' of
Just Refl -> return ExprNormEquivalent
_ -> return ExprUnequal
testEquivSymFn :: forall sym st fs t args ret args' ret'. sym ~ S.ExprBuilder t st fs => sym -> S.SymFn sym args ret -> S.SymFn sym args' ret' -> IO (ExprEquivResult)
testEquivSymFn sym fn1 fn2 =
let
argTypes1 = S.fnArgTypes fn1
argTypes2 = S.fnArgTypes fn2
retType1 = S.fnReturnType fn1
retType2 = S.fnReturnType fn2
in if | Just Refl <- testEquality argTypes1 argTypes2
, Just Refl <- testEquality retType1 retType2
, B.symFnName fn1 == B.symFnName fn2 ->
case (S.symFnInfo fn1, S.symFnInfo fn2) of
(S.DefinedFnInfo argBVs1 efn1 _, S.DefinedFnInfo argBVs2 efn2 _) -> do
args <- FC.traverseFC (\bv -> S.freshConstant sym (S.bvarName bv) (B.bvarType bv)) argBVs1
expr1 <- B.evalBoundVars sym efn1 argBVs1 args
expr2 <- B.evalBoundVars sym efn2 argBVs2 args
case testEquality expr1 expr2 of
Just Refl -> return ExprEquivalent
Nothing -> do
expr1' <- normExpr sym expr1
expr2' <- normExpr sym expr2
case testEquality expr1' expr2' of
Just Refl -> return ExprNormEquivalent
Nothing -> return ExprUnequal
(S.UninterpFnInfo _ _, S.UninterpFnInfo _ _) -> return ExprEquivalent
(S.MatlabSolverFnInfo _ _ _, _) -> fail "Unsupported function type for equivalence check."
(_, S.MatlabSolverFnInfo _ _ _) -> fail "Unsupported function type for equivalence check."
(_, _) -> return ExprUnequal
| otherwise -> return ExprUnequal
| null | https://raw.githubusercontent.com/GaloisInc/what4/17483418396afa8384de4089cdfaab9e5bbf912d/what4/src/What4/Serialize/Normalize.hs | haskell | # LANGUAGE GADTs #
# LANGUAGE OverloadedStrings #
# LANGUAGE RankNTypes #
# LANGUAGE MultiWayIf #
| Normalization and equivalence checking for expressions
| Apply some normalizations to make function call arguments more readable. Examples include:
* Avoid wrapping single literals in a 'B.SemiRingLiteral' and just represent them as a bare integer literals
* Attempt to reduce function calls with constant arguments where possible
| Normalize an expression by passing it back through the builder
NOTE: We may want to audit the cases here for completeness | # LANGUAGE DataKinds #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE LambdaCase #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE PolyKinds #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TupleSections #
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
# LANGUAGE TypeOperators #
# LANGUAGE UndecidableInstances #
# LANGUAGE ViewPatterns #
module What4.Serialize.Normalize
( normSymFn
, normExpr
, testEquivSymFn
, testEquivExpr
, ExprEquivResult(..)
) where
import qualified Data.Parameterized.Context as Ctx
import qualified Data.Parameterized.TraversableFC as FC
import qualified What4.Interface as S
import qualified What4.Expr as S
import qualified What4.Expr.Builder as B
import qualified What4.Expr.WeightedSum as WSum
import Data.Parameterized.Classes
normSymFn :: forall sym st fs t args ret. sym ~ B.ExprBuilder t st fs
=> sym
-> B.ExprSymFn t args ret
-> Ctx.Assignment (S.Expr t) args
-> IO (S.Expr t ret)
normSymFn sym symFn argEs = case B.symFnInfo symFn of
B.DefinedFnInfo argBVs expr _ -> do
argEs' <- FC.traverseFC (normExpr sym) argEs
expr' <- B.evalBoundVars sym expr argBVs argEs'
normExpr sym expr'
_ -> S.applySymFn sym symFn argEs
normExpr :: forall sym st fs t tp
. sym ~ B.ExprBuilder t st fs
=> sym
-> B.Expr t tp -> IO (B.Expr t tp)
normExpr sym e = go e
where go :: B.Expr t tp -> IO (B.Expr t tp)
go (B.SemiRingLiteral S.SemiRingIntegerRepr val _) = S.intLit sym val
go (B.AppExpr appExpr) = normAppExpr sym appExpr
go x@(B.NonceAppExpr nae) =
case B.nonceExprApp nae of
B.FnApp fn args -> normSymFn sym fn args
_ -> return x
go x = return x
normAppExpr :: forall sym st fs t tp
. sym ~ S.ExprBuilder t st fs
=> sym
-> S.AppExpr t tp
-> IO (S.Expr t tp)
normAppExpr sym ae = do
e' <- go (S.appExprApp ae)
B.sbMakeExpr sym e'
where norm2 :: forall tp' tp'' tp'''
. (S.Expr t tp' -> S.Expr t tp'' -> IO (S.Expr t tp'''))
-> S.Expr t tp' -> S.Expr t tp'' -> IO (S.Expr t tp''')
norm2 f e1 e2 = do
e1' <- normExpr sym e1
e2' <- normExpr sym e2
f e1' e2'
go :: forall tp'. S.App (S.Expr t) tp' -> IO (S.App (S.Expr t) tp')
go (S.BaseIte _ _ test then_ else_) = do
test' <- normExpr sym test
then' <- normExpr sym then_
else' <- normExpr sym else_
Just sm' <- B.asApp <$> S.baseTypeIte sym test' then' else'
return sm'
go x@(S.SemiRingSum sm) =
case WSum.sumRepr sm of
S.SemiRingIntegerRepr -> do
let
smul si i = do
i' <- normExpr sym i
si' <- S.intLit sym si
S.intMul sym si' i'
Just sm' <- B.asApp <$> WSum.evalM (norm2 $ S.intAdd sym) smul (S.intLit sym) sm
return sm'
_ -> return x
go x@(S.SemiRingProd pd) =
case WSum.prodRepr pd of
S.SemiRingIntegerRepr -> do
maybeS <- WSum.prodEvalM (norm2 $ S.intMul sym) return pd
case maybeS of
Just s | Just sm' <- B.asApp s -> return sm'
_ -> return x
_ -> return x
go x@(S.SemiRingLe sr e1 e2) = do
case sr of
S.OrderedSemiRingIntegerRepr -> do
Just sm' <- B.asApp <$> (norm2 $ S.intLe sym) e1 e2
return sm'
_ -> return x
go x = return x
data ExprEquivResult = ExprEquivalent | ExprNormEquivalent | ExprUnequal
testEquivExpr :: forall sym st fs t tp tp'. sym ~ S.ExprBuilder t st fs => sym -> B.Expr t tp -> B.Expr t tp' -> IO (ExprEquivResult)
testEquivExpr sym e1 e2 = case testEquality e1 e2 of
Just Refl -> return ExprEquivalent
_ -> do
e1' <- normExpr sym e1
e2' <- normExpr sym e2
case testEquality e1' e2' of
Just Refl -> return ExprNormEquivalent
_ -> return ExprUnequal
testEquivSymFn :: forall sym st fs t args ret args' ret'. sym ~ S.ExprBuilder t st fs => sym -> S.SymFn sym args ret -> S.SymFn sym args' ret' -> IO (ExprEquivResult)
testEquivSymFn sym fn1 fn2 =
let
argTypes1 = S.fnArgTypes fn1
argTypes2 = S.fnArgTypes fn2
retType1 = S.fnReturnType fn1
retType2 = S.fnReturnType fn2
in if | Just Refl <- testEquality argTypes1 argTypes2
, Just Refl <- testEquality retType1 retType2
, B.symFnName fn1 == B.symFnName fn2 ->
case (S.symFnInfo fn1, S.symFnInfo fn2) of
(S.DefinedFnInfo argBVs1 efn1 _, S.DefinedFnInfo argBVs2 efn2 _) -> do
args <- FC.traverseFC (\bv -> S.freshConstant sym (S.bvarName bv) (B.bvarType bv)) argBVs1
expr1 <- B.evalBoundVars sym efn1 argBVs1 args
expr2 <- B.evalBoundVars sym efn2 argBVs2 args
case testEquality expr1 expr2 of
Just Refl -> return ExprEquivalent
Nothing -> do
expr1' <- normExpr sym expr1
expr2' <- normExpr sym expr2
case testEquality expr1' expr2' of
Just Refl -> return ExprNormEquivalent
Nothing -> return ExprUnequal
(S.UninterpFnInfo _ _, S.UninterpFnInfo _ _) -> return ExprEquivalent
(S.MatlabSolverFnInfo _ _ _, _) -> fail "Unsupported function type for equivalence check."
(_, S.MatlabSolverFnInfo _ _ _) -> fail "Unsupported function type for equivalence check."
(_, _) -> return ExprUnequal
| otherwise -> return ExprUnequal
|
f401e6406077706ba6305d1235ced501e75f1f2e8f03f7a23b0c9df47b68cd0d | dlowe-net/orcabot | credit.lisp | (in-package #:orcabot)
(defparameter +starting-balance+ 100)
(defclass transaction ()
((source :reader source-of :initarg :source)
(dest :reader dest-of :initarg :dest)
(amount :reader amount-of :initarg :amount)
(channel :reader channel-of :initarg :channel)
(source-status :accessor source-status-of :initform :unknown)
(dest-status :accessor dest-status-of :initform :unknown)))
(defmodule credit credit-module ("credits" "give")
(balances :accessor balances-of :initform (make-hash-table :test 'equalp))
(pending :accessor pending-of :initform nil))
(defun balance-for-nick (module nick)
(gethash (normalize-nick nick)
(balances-of module)
100))
(defmethod initialize-module ((module credit-module) config)
(clrhash (balances-of module))
(with-open-file (inf (data-path "credits.lisp")
:direction :input
:if-does-not-exist nil)
(when inf
(loop for tuple = (read inf nil)
while tuple
do (setf (gethash (first tuple) (balances-of module))
(second tuple))))))
(defun save-balances (module)
(with-open-file (ouf (data-path "credits.lisp")
:direction :output
:if-exists :supersede
:if-does-not-exist :create)
(maphash (lambda (k v)
(write (list k v) :stream ouf)
(terpri ouf))
(balances-of module))))
(defun update-pending-txns (module nick status)
(dolist (txn (pending-of module))
(when (string= nick (source-of txn))
(setf (source-status-of txn) status))
(when (string= nick (dest-of txn))
(setf (dest-status-of txn) status))))
(defun process-eligible-txns (module)
(dolist (txn (pending-of module))
(unless (or (eql (source-status-of txn) :unknown)
(eql (dest-status-of txn) :unknown))
(let ((balance (balance-for-nick module (source-of txn))))
(cond
((eql (source-status-of txn) :invalid)
(irc:privmsg (conn-of module) (source-of txn)
"You must be logged into NickServ to transfer credits."))
((eql (dest-status-of txn) :invalid)
(irc:privmsg (conn-of module) (source-of txn)
(format nil "~a must be logged into NickServ to receive credits." (dest-of txn))))
((< balance (amount-of txn))
(irc:privmsg (conn-of module) (source-of txn)
(format nil "You don't have ~d to give to ~a." (amount-of txn) (dest-of txn))))
(t
(decf (gethash (normalize-nick (source-of txn))
(balances-of module)
100)
(amount-of txn))
(incf (gethash (normalize-nick (dest-of txn))
(balances-of module)
100)
(amount-of txn))
(irc:privmsg (conn-of module) (channel-of txn)
(format nil "~a gives ~d credit~:p to ~a."
(source-of txn)
(amount-of txn)
(dest-of txn)))
(irc:privmsg (conn-of module) (source-of txn)
(format nil "You now have ~d credit~:p." (balance-for-nick module (source-of txn))))
(irc:privmsg (conn-of module) (dest-of txn)
(format nil "~a has given you ~d credit~:p. You now have ~d credit~:p."
(source-of txn)
(amount-of txn)
(balance-for-nick module (dest-of txn))))
(irc:privmsg (conn-of module) (dest-of txn)
(format nil "You now have ~d credit~:p." (balance-for-nick module (dest-of txn))))))
(save-balances module))))
;; now delete the transactions just processed
(setf (pending-of module)
(delete-if (lambda (txn)
(not (or (eql (source-status-of txn) :unknown)
(eql (dest-status-of txn) :unknown))))
(pending-of module))))
(defmethod handle-message ((module credit-module)
(message irc:irc-notice-message))
(when (string= (source message) "NickServ")
(multiple-value-bind (match regs)
(ppcre:scan-to-strings "STATUS (\\S+) ([0-3])" (second (arguments message)))
(when match
(let ((nick (aref regs 0))
(status (if (member (aref regs 1) '("2" "3") :test #'string=)
:valid
:invalid)))
(update-pending-txns module nick status)
(process-eligible-txns module))
t))))
(defmethod handle-command ((module credit-module)
(cmd (eql 'give))
message args)
"give <credits> <nick> - transfer your credits to another person"
(multiple-value-bind (amt target)
(let ((first-amt (parse-integer (or (first args) "") :junk-allowed t)))
(if first-amt
(values first-amt (second args))
(values (parse-integer (or (second args) "") :junk-allowed t)
(first args))))
(cond
((or (null amt)
(null target))
(reply-to message "Usage: .give <credits> <nick>"))
((string= (normalize-nick (source message))
(normalize-nick target))
(reply-to message "Sure... Okay..."))
((zerop amt)
(reply-to message "Done."))
((minusp amt)
(reply-to message "Ha, ha. Very funny."))
(t
(irc:privmsg (conn-of module)
"NickServ"
(format nil "STATUS ~a ~a" (source message) target))
(push (make-instance 'transaction
:source (source message)
:dest target
:amount amt
:channel (first (arguments message)))
(pending-of module))))))
(defmethod handle-command ((module credit-module)
(cmd (eql 'credits))
message args)
"credits - check your credit balance"
(irc:privmsg (conn-of module)
(source message)
(format nil "You have ~a credit~:p."
(balance-for-nick module (source message))))) | null | https://raw.githubusercontent.com/dlowe-net/orcabot/bf3c799337531e6b16086e8105906cc9f8808313/src/credit.lisp | lisp | now delete the transactions just processed | (in-package #:orcabot)
(defparameter +starting-balance+ 100)
(defclass transaction ()
((source :reader source-of :initarg :source)
(dest :reader dest-of :initarg :dest)
(amount :reader amount-of :initarg :amount)
(channel :reader channel-of :initarg :channel)
(source-status :accessor source-status-of :initform :unknown)
(dest-status :accessor dest-status-of :initform :unknown)))
(defmodule credit credit-module ("credits" "give")
(balances :accessor balances-of :initform (make-hash-table :test 'equalp))
(pending :accessor pending-of :initform nil))
(defun balance-for-nick (module nick)
(gethash (normalize-nick nick)
(balances-of module)
100))
(defmethod initialize-module ((module credit-module) config)
(clrhash (balances-of module))
(with-open-file (inf (data-path "credits.lisp")
:direction :input
:if-does-not-exist nil)
(when inf
(loop for tuple = (read inf nil)
while tuple
do (setf (gethash (first tuple) (balances-of module))
(second tuple))))))
(defun save-balances (module)
(with-open-file (ouf (data-path "credits.lisp")
:direction :output
:if-exists :supersede
:if-does-not-exist :create)
(maphash (lambda (k v)
(write (list k v) :stream ouf)
(terpri ouf))
(balances-of module))))
(defun update-pending-txns (module nick status)
(dolist (txn (pending-of module))
(when (string= nick (source-of txn))
(setf (source-status-of txn) status))
(when (string= nick (dest-of txn))
(setf (dest-status-of txn) status))))
(defun process-eligible-txns (module)
(dolist (txn (pending-of module))
(unless (or (eql (source-status-of txn) :unknown)
(eql (dest-status-of txn) :unknown))
(let ((balance (balance-for-nick module (source-of txn))))
(cond
((eql (source-status-of txn) :invalid)
(irc:privmsg (conn-of module) (source-of txn)
"You must be logged into NickServ to transfer credits."))
((eql (dest-status-of txn) :invalid)
(irc:privmsg (conn-of module) (source-of txn)
(format nil "~a must be logged into NickServ to receive credits." (dest-of txn))))
((< balance (amount-of txn))
(irc:privmsg (conn-of module) (source-of txn)
(format nil "You don't have ~d to give to ~a." (amount-of txn) (dest-of txn))))
(t
(decf (gethash (normalize-nick (source-of txn))
(balances-of module)
100)
(amount-of txn))
(incf (gethash (normalize-nick (dest-of txn))
(balances-of module)
100)
(amount-of txn))
(irc:privmsg (conn-of module) (channel-of txn)
(format nil "~a gives ~d credit~:p to ~a."
(source-of txn)
(amount-of txn)
(dest-of txn)))
(irc:privmsg (conn-of module) (source-of txn)
(format nil "You now have ~d credit~:p." (balance-for-nick module (source-of txn))))
(irc:privmsg (conn-of module) (dest-of txn)
(format nil "~a has given you ~d credit~:p. You now have ~d credit~:p."
(source-of txn)
(amount-of txn)
(balance-for-nick module (dest-of txn))))
(irc:privmsg (conn-of module) (dest-of txn)
(format nil "You now have ~d credit~:p." (balance-for-nick module (dest-of txn))))))
(save-balances module))))
(setf (pending-of module)
(delete-if (lambda (txn)
(not (or (eql (source-status-of txn) :unknown)
(eql (dest-status-of txn) :unknown))))
(pending-of module))))
(defmethod handle-message ((module credit-module)
(message irc:irc-notice-message))
(when (string= (source message) "NickServ")
(multiple-value-bind (match regs)
(ppcre:scan-to-strings "STATUS (\\S+) ([0-3])" (second (arguments message)))
(when match
(let ((nick (aref regs 0))
(status (if (member (aref regs 1) '("2" "3") :test #'string=)
:valid
:invalid)))
(update-pending-txns module nick status)
(process-eligible-txns module))
t))))
(defmethod handle-command ((module credit-module)
(cmd (eql 'give))
message args)
"give <credits> <nick> - transfer your credits to another person"
(multiple-value-bind (amt target)
(let ((first-amt (parse-integer (or (first args) "") :junk-allowed t)))
(if first-amt
(values first-amt (second args))
(values (parse-integer (or (second args) "") :junk-allowed t)
(first args))))
(cond
((or (null amt)
(null target))
(reply-to message "Usage: .give <credits> <nick>"))
((string= (normalize-nick (source message))
(normalize-nick target))
(reply-to message "Sure... Okay..."))
((zerop amt)
(reply-to message "Done."))
((minusp amt)
(reply-to message "Ha, ha. Very funny."))
(t
(irc:privmsg (conn-of module)
"NickServ"
(format nil "STATUS ~a ~a" (source message) target))
(push (make-instance 'transaction
:source (source message)
:dest target
:amount amt
:channel (first (arguments message)))
(pending-of module))))))
(defmethod handle-command ((module credit-module)
(cmd (eql 'credits))
message args)
"credits - check your credit balance"
(irc:privmsg (conn-of module)
(source message)
(format nil "You have ~a credit~:p."
(balance-for-nick module (source message))))) |
ebee013116067007a77bd9780d687d4a55950f64eded3a0e8f8599cd94d4f105 | marcoheisig/numpy-file-format | python-parser.lisp | (in-package #:numpy-file-format)
;;; This parser is not very sophisticated, but it gets the job done.
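;;; READ-PYTHON-OBJECT dispatches on the next character.  SKIP names a
;;; separator that may be consumed once before the value (e.g. the comma
;;; between collection elements); STOP names a closing delimiter which is
;;; returned as-is so the caller can detect the end of a tuple, list or dict.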
(defun read-python-object (stream &optional (skip #\,) (stop nil))
(loop for c = (read-char stream) do
(case c
((#\space #\tab) (values))
((#\' #\") (return (read-python-string c stream)))
(#\( (return (read-python-tuple stream)))
(#\[ (return (read-python-list stream)))
(#\{ (return (read-python-dict stream)))
((#\T #\F)
(unread-char c stream)
(return (read-python-boolean stream)))
(otherwise
(cond ((eql c skip)
(return (read-python-object stream nil stop)))
((eql c stop)
(return stop))
((digit-char-p c)
(unread-char c stream)
(return (read-python-integer stream)))
(t
(error "Invalid character: ~S" c)))))))
(defun read-python-string (delimiter stream)
(coerce
(loop for c = (read-char stream)
while (char/= c delimiter)
collect c)
'string))
(defun read-python-integer (stream)
(let ((result 0))
(loop for c = (read-char stream) do
(let ((weight (digit-char-p c)))
(if (null weight)
(progn
(unread-char c stream)
(loop-finish))
(setf result (+ (* result 10) weight)))))
result))
(defun read-python-boolean (stream)
(flet ((skip (string)
(loop for c across string do
(assert (char= (read-char stream) c)))))
(ecase (read-char stream)
(#\T (skip "rue") t)
(#\F (skip "alse") nil))))
(defun read-python-tuple (stream)
(loop for object = (read-python-object stream nil #\))
then (read-python-object stream #\, #\))
until (eql object #\))
collect object))
(defun read-python-list (stream)
(coerce
(loop for object = (read-python-object stream nil #\])
then (read-python-object stream #\, #\])
until (eql object #\])
collect object)
'vector))
(defun read-python-dict (stream)
(let ((dict (make-hash-table :test #'equal)))
(loop
(let ((key (read-python-object stream #\, #\})))
(when (eql key #\})
(return dict))
(setf (gethash key dict)
(read-python-object stream #\:))))))
(defun read-python-object-from-string (string)
(with-input-from-string (stream string)
(read-python-object stream)))
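;;; Illustrative example (e.g. a .npy header dictionary):
;;;   (read-python-object-from-string
;;;    "{'descr': '<f8', 'fortran_order': False, 'shape': (3, 4)}")
;;; returns an EQUAL hash table mapping "descr" to "<f8", "fortran_order"
;;; to NIL and "shape" to the list (3 4).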
| null | https://raw.githubusercontent.com/marcoheisig/numpy-file-format/e97aef6c592a412fdd1afa9a5f09d0b1ce134510/code/python-parser.lisp | lisp | This parser is not very sophisticated, but it gets the job done. | (in-package #:numpy-file-format)
(defun read-python-object (stream &optional (skip #\,) (stop nil))
(loop for c = (read-char stream) do
(case c
((#\space #\tab) (values))
((#\' #\") (return (read-python-string c stream)))
(#\( (return (read-python-tuple stream)))
(#\[ (return (read-python-list stream)))
(#\{ (return (read-python-dict stream)))
((#\T #\F)
(unread-char c stream)
(return (read-python-boolean stream)))
(otherwise
(cond ((eql c skip)
(return (read-python-object stream nil stop)))
((eql c stop)
(return stop))
((digit-char-p c)
(unread-char c stream)
(return (read-python-integer stream)))
(t
(error "Invalid character: ~S" c)))))))
(defun read-python-string (delimiter stream)
(coerce
(loop for c = (read-char stream)
while (char/= c delimiter)
collect c)
'string))
(defun read-python-integer (stream)
(let ((result 0))
(loop for c = (read-char stream) do
(let ((weight (digit-char-p c)))
(if (null weight)
(progn
(unread-char c stream)
(loop-finish))
(setf result (+ (* result 10) weight)))))
result))
(defun read-python-boolean (stream)
(flet ((skip (string)
(loop for c across string do
(assert (char= (read-char stream) c)))))
(ecase (read-char stream)
(#\T (skip "rue") t)
(#\F (skip "alse") nil))))
(defun read-python-tuple (stream)
(loop for object = (read-python-object stream nil #\))
then (read-python-object stream #\, #\))
until (eql object #\))
collect object))
(defun read-python-list (stream)
(coerce
(loop for object = (read-python-object stream nil #\])
then (read-python-object stream #\, #\])
until (eql object #\])
collect object)
'vector))
(defun read-python-dict (stream)
(let ((dict (make-hash-table :test #'equal)))
(loop
(let ((key (read-python-object stream #\, #\})))
(when (eql key #\})
(return dict))
(setf (gethash key dict)
(read-python-object stream #\:))))))
(defun read-python-object-from-string (string)
(with-input-from-string (stream string)
(read-python-object stream)))
|
98ece82418130eca07afe1f30a8cc0fc06918197c613c91978b267a104a73c2b | fpco/ide-backend | Hpc.hs | -----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.Program.Hpc
-- Copyright : Thomas Tuegel 2011
--
-- Maintainer :
-- Portability : portable
--
-- This module provides a library interface to the @hpc@ program.
module Distribution.Simple.Program.Hpc
( markup
, union
) where
import Distribution.ModuleName ( ModuleName )
import Distribution.Simple.Program.Run
( ProgramInvocation, programInvocation, runProgramInvocation )
import Distribution.Simple.Program.Types ( ConfiguredProgram )
import Distribution.Text ( display )
import Distribution.Verbosity ( Verbosity )
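-- | Run @hpc markup@ to generate an HTML coverage report from a @.tix@ file.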
markup :: ConfiguredProgram
-> Verbosity
-> FilePath -- ^ Path to .tix file
-> FilePath -- ^ Path to directory with .mix files
-> FilePath -- ^ Path where html output should be located
-> [ModuleName] -- ^ List of modules to exclude from report
-> IO ()
markup hpc verbosity tixFile hpcDir destDir excluded =
runProgramInvocation verbosity
(markupInvocation hpc tixFile hpcDir destDir excluded)
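-- | Build the @hpc markup@ command line without running it.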
markupInvocation :: ConfiguredProgram
-> FilePath -- ^ Path to .tix file
-> FilePath -- ^ Path to directory with .mix files
-> FilePath -- ^ Path where html output should be
-- located
-> [ModuleName] -- ^ List of modules to exclude from
-- report
-> ProgramInvocation
markupInvocation hpc tixFile hpcDir destDir excluded =
let args = [ "markup", tixFile
, "--hpcdir=" ++ hpcDir
, "--destdir=" ++ destDir
]
++ ["--exclude=" ++ display moduleName
| moduleName <- excluded ]
in programInvocation hpc args
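-- | Run @hpc sum --union@ to merge several @.tix@ files into one.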
union :: ConfiguredProgram
-> Verbosity
-> [FilePath] -- ^ Paths to .tix files
-> FilePath -- ^ Path to resultant .tix file
-> [ModuleName] -- ^ List of modules to exclude from union
-> IO ()
union hpc verbosity tixFiles outFile excluded =
runProgramInvocation verbosity
(unionInvocation hpc tixFiles outFile excluded)
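-- | Build the @hpc sum --union@ command line without running it.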
unionInvocation :: ConfiguredProgram
-> [FilePath] -- ^ Paths to .tix files
-> FilePath -- ^ Path to resultant .tix file
-> [ModuleName] -- ^ List of modules to exclude from union
-> ProgramInvocation
unionInvocation hpc tixFiles outFile excluded =
programInvocation hpc $ concat
[ ["sum", "--union"]
, tixFiles
, ["--output=" ++ outFile]
, ["--exclude=" ++ display moduleName
| moduleName <- excluded ]
]
| null | https://raw.githubusercontent.com/fpco/ide-backend/860636f2d0e872e9481569236bce690637e0016e/ide-backend/TestSuite/inputs/Cabal-1.14.0/Distribution/Simple/Program/Hpc.hs | haskell | ---------------------------------------------------------------------------
|
Module : Distribution.Simple.Program.Hpc
Copyright : Thomas Tuegel 2011
Maintainer :
Portability : portable
^ Path to .tix file
^ Path where html output should be located
^ List of modules to exclude from report
^ Path to .tix file
^ Path where html output should be
located
^ List of modules to exclude from
report
^ Paths to .tix files
^ Path to resultant .tix file
^ List of modules to exclude from union
^ Paths to .tix files
^ Path to resultant .tix file
^ List of modules to exclude from union | This module provides an library interface to the @hpc@ program .
module Distribution.Simple.Program.Hpc
( markup
, union
) where
import Distribution.ModuleName ( ModuleName )
import Distribution.Simple.Program.Run
( ProgramInvocation, programInvocation, runProgramInvocation )
import Distribution.Simple.Program.Types ( ConfiguredProgram )
import Distribution.Text ( display )
import Distribution.Verbosity ( Verbosity )
markup :: ConfiguredProgram
-> Verbosity
-> FilePath
-> FilePath
-> FilePath
-> [ModuleName]
-> IO ()
markup hpc verbosity tixFile hpcDir destDir excluded =
runProgramInvocation verbosity
(markupInvocation hpc tixFile hpcDir destDir excluded)
markupInvocation :: ConfiguredProgram
-> FilePath
-> FilePath
-> FilePath
-> [ModuleName]
-> ProgramInvocation
markupInvocation hpc tixFile hpcDir destDir excluded =
let args = [ "markup", tixFile
, "--hpcdir=" ++ hpcDir
, "--destdir=" ++ destDir
]
++ ["--exclude=" ++ display moduleName
| moduleName <- excluded ]
in programInvocation hpc args
union :: ConfiguredProgram
-> Verbosity
-> IO ()
union hpc verbosity tixFiles outFile excluded =
runProgramInvocation verbosity
(unionInvocation hpc tixFiles outFile excluded)
unionInvocation :: ConfiguredProgram
-> ProgramInvocation
unionInvocation hpc tixFiles outFile excluded =
programInvocation hpc $ concat
[ ["sum", "--union"]
, tixFiles
, ["--output=" ++ outFile]
, ["--exclude=" ++ display moduleName
| moduleName <- excluded ]
]
|
ecdf772c898926ebbe851fb91950dfebd2312a0e0629590b0bfbd72f4a68977e | szktty/esca | config.mli | val version : string
val debug_mode : bool ref
val verbose_mode : bool ref
val runlib_path : string ref
val runtime_package : string -> string
| null | https://raw.githubusercontent.com/szktty/esca/11be06b4a9810cdae4ccfd4ce7c5d85597bd1999/src/config.mli | ocaml | val version : string
val debug_mode : bool ref
val verbose_mode : bool ref
val runlib_path : string ref
val runtime_package : string -> string
|
|
d289d3e48f48e8705eff2f3c4517e9be1c9a97dc5596e2ca0dbdc4acecf55dbd | ucsd-progsys/nate | ocaml_tools.mli | (***********************************************************************)
(* ocamlbuild *)
(* *)
(* , , projet Gallium, INRIA Rocquencourt *)
(* *)
(* Copyright 2007 Institut National de Recherche en Informatique et *)
(* en Automatique. All rights reserved. This file is distributed *)
(* under the terms of the Q Public License version 1.0. *)
(* *)
(***********************************************************************)
(* $Id: ocaml_tools.mli,v 1.2.4.3 2007/11/21 20:46:46 ertai Exp $ *)
(* Original author: *)
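(* Command builders and Rule.action constructors for the OCaml tools
(ocamldoc, ocamldep, ocamlyacc, ocamllex, menhir) driven by ocamlbuild. *)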
val ocamldoc_c : Tags.t -> string -> string -> Command.t
val ocamldoc_l_dir : Tags.t -> string list -> string -> string -> Command.t
val ocamldoc_l_file : Tags.t -> string list -> string -> string -> Command.t
val ocamldep_command : string -> string -> Rule.action
val menhir_ocamldep_command : string -> string -> Rule.action
val menhir_modular_ocamldep_command : string -> string -> Rule.action
val menhir_modular : string -> string -> string -> Rule.action
val ocamlyacc : string -> Rule.action
val ocamllex : string -> Rule.action
val menhir : string -> Rule.action
val infer_interface : string -> string -> Rule.action
val document_ocaml_interf : string -> string -> Rule.action
val document_ocaml_implem : string -> string -> Rule.action
val document_ocaml_project :
?ocamldoc:(Tags.t -> string list -> string -> string -> Command.t) ->
string -> string -> string -> Rule.action
| null | https://raw.githubusercontent.com/ucsd-progsys/nate/8b1267cd8b10283d8bc239d16a28c654a4cb8942/eval/sherrloc/easyocaml%2B%2B/ocamlbuild/ocaml_tools.mli | ocaml | *********************************************************************
ocamlbuild
********************************************************************* | , , projet Gallium , INRIA Rocquencourt
Copyright 2007 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
$ I d : ocaml_tools.mli , v 1.2.4.3 2007/11/21 20:46:46 ertai Exp $
Original author :
val ocamldoc_c : Tags.t -> string -> string -> Command.t
val ocamldoc_l_dir : Tags.t -> string list -> string -> string -> Command.t
val ocamldoc_l_file : Tags.t -> string list -> string -> string -> Command.t
val ocamldep_command : string -> string -> Rule.action
val menhir_ocamldep_command : string -> string -> Rule.action
val menhir_modular_ocamldep_command : string -> string -> Rule.action
val menhir_modular : string -> string -> string -> Rule.action
val ocamlyacc : string -> Rule.action
val ocamllex : string -> Rule.action
val menhir : string -> Rule.action
val infer_interface : string -> string -> Rule.action
val document_ocaml_interf : string -> string -> Rule.action
val document_ocaml_implem : string -> string -> Rule.action
val document_ocaml_project :
?ocamldoc:(Tags.t -> string list -> string -> string -> Command.t) ->
string -> string -> string -> Rule.action
|
14570acd7146c76cbd0f088a412d8ba7bd77f605bdc1a7a1441b4931a0645021 | jhidding/lyonesse | ranges.scm | (library (lyonesse ranges)
(export range for-range map-range reduce-range)
(import (rnrs base (6))
(rnrs control (6)))
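;; Each procedure walks the half-open interval [a, b) with step dx
;; (defaults: a = 0, dx = 1); e.g. (range 1 7 2) => (1 3 5) and
;; (reduce-range + 0 0 5) => 10.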
(define range
(case-lambda
[(b) (range 0 b 1)]
[(a b) (range a b 1)]
[(a b dx) (let loop ([lst '()]
[x a])
(if (>= x b)
(reverse lst)
(loop (cons x lst) (+ x dx))))]))
(define for-range
(case-lambda
[(f b) (for-range f 0 b 1)]
[(f a b) (for-range f a b 1)]
[(f a b dx) (unless (>= a b)
(f a)
(for-range f (+ a dx) b dx))]))
(define map-range
(case-lambda
[(f b) (map-range f 0 b 1)]
[(f a b) (map-range f a b 1)]
[(f a b dx) (let loop ([x a]
[result '()])
(if (>= x b)
(reverse result)
(loop (+ x dx) (cons (f x) result))))]))
(define reduce-range
(case-lambda
[(f start b) (reduce-range f start 0 b 1)]
[(f start a b) (reduce-range f start a b 1)]
[(f start a b dx) (if (>= a b)
start
(reduce-range f (f start a) (+ a dx) b dx))]))
)
| null | https://raw.githubusercontent.com/jhidding/lyonesse/9d9624e3141ea3acaa670526cbe52c2d6546beef/lyonesse/ranges.scm | scheme | (library (lyonesse ranges)
(export range for-range map-range reduce-range)
(import (rnrs base (6))
(rnrs control (6)))
(define range
(case-lambda
[(b) (range 0 b 1)]
[(a b) (range a b 1)]
[(a b dx) (let loop ([lst '()]
[x a])
(if (>= x b)
(reverse lst)
(loop (cons x lst) (+ x dx))))]))
(define for-range
(case-lambda
[(f b) (for-range f 0 b 1)]
[(f a b) (for-range f a b 1)]
[(f a b dx) (unless (>= a b)
(f a)
(for-range f (+ a dx) b dx))]))
(define map-range
(case-lambda
[(f b) (map-range f 0 b 1)]
[(f a b) (map-range f a b 1)]
[(f a b dx) (let loop ([x a]
[result '()])
(if (>= x b)
(reverse result)
(loop (+ x dx) (cons (f x) result))))]))
(define reduce-range
(case-lambda
[(f start b) (reduce-range f start 0 b 1)]
[(f start a b) (reduce-range f start a b 1)]
[(f start a b dx) (if (>= a b)
start
(reduce-range f (f start a) (+ a dx) b dx))]))
)
|
|
d7cfa539ef2268916b41149302ac7944a7b665fec77387222fdaca60c33df5b2 | ghcjs/ghcjs-base | CaseMapping.hs | {-# LANGUAGE Rank2Types #-}
-- AUTOMATICALLY GENERATED - DO NOT EDIT
Generated by scripts / SpecialCasing.hs
-- CaseFolding-6.3.0.txt
Date : 2012 - 12 - 20 , 22:14:35 GMT [ MD ]
SpecialCasing-6.3.0.txt
Date : 2013 - 05 - 08 , GMT [ MD ]
module Data.JSString.Internal.Fusion.CaseMapping where
import Data.Char
import Data.JSString.Internal.Fusion.Types
upperMapping :: forall s. Char -> s -> Step (CC s) Char
# INLINE upperMapping #
-- LATIN SMALL LETTER SHARP S
upperMapping '\x00df' s = Yield '\x0053' (CC s '\x0053' '\x0000')
-- LATIN SMALL LIGATURE FF
upperMapping '\xfb00' s = Yield '\x0046' (CC s '\x0046' '\x0000')
-- LATIN SMALL LIGATURE FI
upperMapping '\xfb01' s = Yield '\x0046' (CC s '\x0049' '\x0000')
-- LATIN SMALL LIGATURE FL
upperMapping '\xfb02' s = Yield '\x0046' (CC s '\x004c' '\x0000')
LATIN SMALL LIGATURE
upperMapping '\xfb03' s = Yield '\x0046' (CC s '\x0046' '\x0049')
-- LATIN SMALL LIGATURE FFL
upperMapping '\xfb04' s = Yield '\x0046' (CC s '\x0046' '\x004c')
-- LATIN SMALL LIGATURE LONG S T
upperMapping '\xfb05' s = Yield '\x0053' (CC s '\x0054' '\x0000')
-- LATIN SMALL LIGATURE ST
upperMapping '\xfb06' s = Yield '\x0053' (CC s '\x0054' '\x0000')
-- ARMENIAN SMALL LIGATURE ECH YIWN
upperMapping '\x0587' s = Yield '\x0535' (CC s '\x0552' '\x0000')
ARMENIAN SMALL LIGATURE MEN NOW
upperMapping '\xfb13' s = Yield '\x0544' (CC s '\x0546' '\x0000')
-- ARMENIAN SMALL LIGATURE MEN ECH
upperMapping '\xfb14' s = Yield '\x0544' (CC s '\x0535' '\x0000')
-- ARMENIAN SMALL LIGATURE MEN INI
upperMapping '\xfb15' s = Yield '\x0544' (CC s '\x053b' '\x0000')
ARMENIAN SMALL LIGATURE VEW NOW
upperMapping '\xfb16' s = Yield '\x054e' (CC s '\x0546' '\x0000')
-- ARMENIAN SMALL LIGATURE MEN XEH
upperMapping '\xfb17' s = Yield '\x0544' (CC s '\x053d' '\x0000')
LATIN SMALL LETTER N PRECEDED BY
upperMapping '\x0149' s = Yield '\x02bc' (CC s '\x004e' '\x0000')
GREEK SMALL LETTER IOTA WITH DIALYTIKA AND TONOS
upperMapping '\x0390' s = Yield '\x0399' (CC s '\x0308' '\x0301')
GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND TONOS
upperMapping '\x03b0' s = Yield '\x03a5' (CC s '\x0308' '\x0301')
-- LATIN SMALL LETTER J WITH CARON
upperMapping '\x01f0' s = Yield '\x004a' (CC s '\x030c' '\x0000')
-- LATIN SMALL LETTER H WITH LINE BELOW
upperMapping '\x1e96' s = Yield '\x0048' (CC s '\x0331' '\x0000')
LATIN SMALL LETTER T WITH
upperMapping '\x1e97' s = Yield '\x0054' (CC s '\x0308' '\x0000')
-- LATIN SMALL LETTER W WITH RING ABOVE
upperMapping '\x1e98' s = Yield '\x0057' (CC s '\x030a' '\x0000')
-- LATIN SMALL LETTER Y WITH RING ABOVE
upperMapping '\x1e99' s = Yield '\x0059' (CC s '\x030a' '\x0000')
-- LATIN SMALL LETTER A WITH RIGHT HALF RING
upperMapping '\x1e9a' s = Yield '\x0041' (CC s '\x02be' '\x0000')
-- GREEK SMALL LETTER UPSILON WITH PSILI
upperMapping '\x1f50' s = Yield '\x03a5' (CC s '\x0313' '\x0000')
-- GREEK SMALL LETTER UPSILON WITH PSILI AND VARIA
upperMapping '\x1f52' s = Yield '\x03a5' (CC s '\x0313' '\x0300')
GREEK SMALL LETTER UPSILON WITH PSILI AND OXIA
upperMapping '\x1f54' s = Yield '\x03a5' (CC s '\x0313' '\x0301')
-- GREEK SMALL LETTER UPSILON WITH PSILI AND PERISPOMENI
upperMapping '\x1f56' s = Yield '\x03a5' (CC s '\x0313' '\x0342')
-- GREEK SMALL LETTER ALPHA WITH PERISPOMENI
upperMapping '\x1fb6' s = Yield '\x0391' (CC s '\x0342' '\x0000')
-- GREEK SMALL LETTER ETA WITH PERISPOMENI
upperMapping '\x1fc6' s = Yield '\x0397' (CC s '\x0342' '\x0000')
-- GREEK SMALL LETTER IOTA WITH DIALYTIKA AND VARIA
upperMapping '\x1fd2' s = Yield '\x0399' (CC s '\x0308' '\x0300')
GREEK SMALL LETTER IOTA WITH DIALYTIKA AND OXIA
upperMapping '\x1fd3' s = Yield '\x0399' (CC s '\x0308' '\x0301')
-- GREEK SMALL LETTER IOTA WITH PERISPOMENI
upperMapping '\x1fd6' s = Yield '\x0399' (CC s '\x0342' '\x0000')
-- GREEK SMALL LETTER IOTA WITH DIALYTIKA AND PERISPOMENI
upperMapping '\x1fd7' s = Yield '\x0399' (CC s '\x0308' '\x0342')
-- GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND VARIA
upperMapping '\x1fe2' s = Yield '\x03a5' (CC s '\x0308' '\x0300')
GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND OXIA
upperMapping '\x1fe3' s = Yield '\x03a5' (CC s '\x0308' '\x0301')
-- GREEK SMALL LETTER RHO WITH PSILI
upperMapping '\x1fe4' s = Yield '\x03a1' (CC s '\x0313' '\x0000')
-- GREEK SMALL LETTER UPSILON WITH PERISPOMENI
upperMapping '\x1fe6' s = Yield '\x03a5' (CC s '\x0342' '\x0000')
GREEK SMALL LETTER UPSILON WITH AND PERISPOMENI
upperMapping '\x1fe7' s = Yield '\x03a5' (CC s '\x0308' '\x0342')
-- GREEK SMALL LETTER OMEGA WITH PERISPOMENI
upperMapping '\x1ff6' s = Yield '\x03a9' (CC s '\x0342' '\x0000')
GREEK SMALL LETTER ALPHA WITH PSILI AND
upperMapping '\x1f80' s = Yield '\x1f08' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER ALPHA WITH DASIA AND
upperMapping '\x1f81' s = Yield '\x1f09' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER ALPHA WITH PSILI AND VARIA AND
upperMapping '\x1f82' s = Yield '\x1f0a' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER ALPHA WITH DASIA AND VARIA AND
upperMapping '\x1f83' s = Yield '\x1f0b' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER ALPHA WITH PSILI AND OXIA AND
upperMapping '\x1f84' s = Yield '\x1f0c' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER ALPHA WITH DASIA AND OXIA AND
upperMapping '\x1f85' s = Yield '\x1f0d' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER ALPHA WITH PSILI AND PERISPOMENI AND
upperMapping '\x1f86' s = Yield '\x1f0e' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER ALPHA WITH DASIA AND PERISPOMENI AND
upperMapping '\x1f87' s = Yield '\x1f0f' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER ALPHA WITH PSILI AND PROSGEGRAMMENI
upperMapping '\x1f88' s = Yield '\x1f08' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER ALPHA WITH DASIA AND PROSGEGRAMMENI
upperMapping '\x1f89' s = Yield '\x1f09' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER ALPHA WITH PSILI AND VARIA AND PROSGEGRAMMENI
upperMapping '\x1f8a' s = Yield '\x1f0a' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER ALPHA WITH DASIA AND VARIA AND PROSGEGRAMMENI
upperMapping '\x1f8b' s = Yield '\x1f0b' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER ALPHA WITH PSILI AND OXIA AND PROSGEGRAMMENI
upperMapping '\x1f8c' s = Yield '\x1f0c' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER ALPHA WITH DASIA AND OXIA AND PROSGEGRAMMENI
upperMapping '\x1f8d' s = Yield '\x1f0d' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER ALPHA WITH PSILI AND PERISPOMENI AND PROSGEGRAMMENI
upperMapping '\x1f8e' s = Yield '\x1f0e' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER ALPHA WITH DASIA AND PERISPOMENI AND PROSGEGRAMMENI
upperMapping '\x1f8f' s = Yield '\x1f0f' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER ETA WITH PSILI AND
upperMapping '\x1f90' s = Yield '\x1f28' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER ETA WITH DASIA AND
upperMapping '\x1f91' s = Yield '\x1f29' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER ETA WITH PSILI AND VARIA AND
upperMapping '\x1f92' s = Yield '\x1f2a' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER ETA WITH DASIA AND VARIA AND
upperMapping '\x1f93' s = Yield '\x1f2b' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER ETA WITH PSILI AND OXIA AND
upperMapping '\x1f94' s = Yield '\x1f2c' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER ETA WITH DASIA AND OXIA AND
upperMapping '\x1f95' s = Yield '\x1f2d' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER ETA WITH PSILI AND PERISPOMENI AND
upperMapping '\x1f96' s = Yield '\x1f2e' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER ETA WITH DASIA AND PERISPOMENI AND
upperMapping '\x1f97' s = Yield '\x1f2f' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER ETA WITH PSILI AND PROSGEGRAMMENI
upperMapping '\x1f98' s = Yield '\x1f28' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER ETA WITH DASIA AND PROSGEGRAMMENI
upperMapping '\x1f99' s = Yield '\x1f29' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER ETA WITH PSILI AND VARIA AND PROSGEGRAMMENI
upperMapping '\x1f9a' s = Yield '\x1f2a' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER ETA WITH DASIA AND VARIA AND PROSGEGRAMMENI
upperMapping '\x1f9b' s = Yield '\x1f2b' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER ETA WITH PSILI AND OXIA AND PROSGEGRAMMENI
upperMapping '\x1f9c' s = Yield '\x1f2c' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER ETA WITH DASIA AND OXIA AND PROSGEGRAMMENI
upperMapping '\x1f9d' s = Yield '\x1f2d' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER ETA WITH PSILI AND PERISPOMENI AND PROSGEGRAMMENI
upperMapping '\x1f9e' s = Yield '\x1f2e' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER ETA WITH DASIA AND PERISPOMENI AND PROSGEGRAMMENI
upperMapping '\x1f9f' s = Yield '\x1f2f' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER OMEGA WITH PSILI AND
upperMapping '\x1fa0' s = Yield '\x1f68' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER OMEGA WITH DASIA AND YPOGEGRAMMENI
upperMapping '\x1fa1' s = Yield '\x1f69' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER OMEGA WITH PSILI AND VARIA AND
upperMapping '\x1fa2' s = Yield '\x1f6a' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER OMEGA WITH DASIA AND VARIA AND
upperMapping '\x1fa3' s = Yield '\x1f6b' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER OMEGA WITH PSILI AND OXIA AND
upperMapping '\x1fa4' s = Yield '\x1f6c' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER OMEGA WITH DASIA AND OXIA AND
upperMapping '\x1fa5' s = Yield '\x1f6d' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER OMEGA WITH PSILI AND PERISPOMENI AND
upperMapping '\x1fa6' s = Yield '\x1f6e' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER OMEGA WITH DASIA AND PERISPOMENI AND
upperMapping '\x1fa7' s = Yield '\x1f6f' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER OMEGA WITH PSILI AND PROSGEGRAMMENI
upperMapping '\x1fa8' s = Yield '\x1f68' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER OMEGA WITH DASIA AND PROSGEGRAMMENI
upperMapping '\x1fa9' s = Yield '\x1f69' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER OMEGA WITH PSILI AND VARIA AND PROSGEGRAMMENI
upperMapping '\x1faa' s = Yield '\x1f6a' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER OMEGA WITH DASIA AND VARIA AND PROSGEGRAMMENI
upperMapping '\x1fab' s = Yield '\x1f6b' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER OMEGA WITH PSILI AND OXIA AND PROSGEGRAMMENI
upperMapping '\x1fac' s = Yield '\x1f6c' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER OMEGA WITH DASIA AND OXIA AND PROSGEGRAMMENI
upperMapping '\x1fad' s = Yield '\x1f6d' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER OMEGA WITH PSILI AND PERISPOMENI AND PROSGEGRAMMENI
upperMapping '\x1fae' s = Yield '\x1f6e' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER OMEGA WITH DASIA AND PERISPOMENI AND PROSGEGRAMMENI
upperMapping '\x1faf' s = Yield '\x1f6f' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER ALPHA WITH YPOGEGRAMMENI
upperMapping '\x1fb3' s = Yield '\x0391' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER ALPHA WITH PROSGEGRAMMENI
upperMapping '\x1fbc' s = Yield '\x0391' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER ETA WITH YPOGEGRAMMENI
upperMapping '\x1fc3' s = Yield '\x0397' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER ETA WITH PROSGEGRAMMENI
upperMapping '\x1fcc' s = Yield '\x0397' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER OMEGA WITH YPOGEGRAMMENI
upperMapping '\x1ff3' s = Yield '\x03a9' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER OMEGA WITH PROSGEGRAMMENI
upperMapping '\x1ffc' s = Yield '\x03a9' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER ALPHA WITH VARIA AND
upperMapping '\x1fb2' s = Yield '\x1fba' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER ALPHA WITH OXIA AND
upperMapping '\x1fb4' s = Yield '\x0386' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER ETA WITH VARIA AND
upperMapping '\x1fc2' s = Yield '\x1fca' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER ETA WITH OXIA AND
upperMapping '\x1fc4' s = Yield '\x0389' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER OMEGA WITH VARIA AND
upperMapping '\x1ff2' s = Yield '\x1ffa' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER OMEGA WITH OXIA AND
upperMapping '\x1ff4' s = Yield '\x038f' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER ALPHA WITH PERISPOMENI AND
upperMapping '\x1fb7' s = Yield '\x0391' (CC s '\x0342' '\x0399')
GREEK SMALL LETTER ETA WITH PERISPOMENI AND
upperMapping '\x1fc7' s = Yield '\x0397' (CC s '\x0342' '\x0399')
GREEK SMALL LETTER OMEGA WITH PERISPOMENI AND
upperMapping '\x1ff7' s = Yield '\x03a9' (CC s '\x0342' '\x0399')
upperMapping c s = Yield (toUpper c) (CC s '\0' '\0')
lowerMapping :: forall s. Char -> s -> Step (CC s) Char
{-# INLINE lowerMapping #-}
-- LATIN CAPITAL LETTER I WITH DOT ABOVE
lowerMapping '\x0130' s = Yield '\x0069' (CC s '\x0307' '\x0000')
lowerMapping c s = Yield (toLower c) (CC s '\0' '\0')
titleMapping :: forall s. Char -> s -> Step (CC s) Char
{-# INLINE titleMapping #-}
-- LATIN SMALL LETTER SHARP S
titleMapping '\x00df' s = Yield '\x0053' (CC s '\x0073' '\x0000')
-- LATIN SMALL LIGATURE FF
titleMapping '\xfb00' s = Yield '\x0046' (CC s '\x0066' '\x0000')
-- LATIN SMALL LIGATURE FI
titleMapping '\xfb01' s = Yield '\x0046' (CC s '\x0069' '\x0000')
-- LATIN SMALL LIGATURE FL
titleMapping '\xfb02' s = Yield '\x0046' (CC s '\x006c' '\x0000')
LATIN SMALL LIGATURE
titleMapping '\xfb03' s = Yield '\x0046' (CC s '\x0066' '\x0069')
-- LATIN SMALL LIGATURE FFL
titleMapping '\xfb04' s = Yield '\x0046' (CC s '\x0066' '\x006c')
-- LATIN SMALL LIGATURE LONG S T
titleMapping '\xfb05' s = Yield '\x0053' (CC s '\x0074' '\x0000')
-- LATIN SMALL LIGATURE ST
titleMapping '\xfb06' s = Yield '\x0053' (CC s '\x0074' '\x0000')
-- ARMENIAN SMALL LIGATURE ECH YIWN
titleMapping '\x0587' s = Yield '\x0535' (CC s '\x0582' '\x0000')
ARMENIAN SMALL LIGATURE MEN NOW
titleMapping '\xfb13' s = Yield '\x0544' (CC s '\x0576' '\x0000')
-- ARMENIAN SMALL LIGATURE MEN ECH
titleMapping '\xfb14' s = Yield '\x0544' (CC s '\x0565' '\x0000')
-- ARMENIAN SMALL LIGATURE MEN INI
titleMapping '\xfb15' s = Yield '\x0544' (CC s '\x056b' '\x0000')
ARMENIAN SMALL LIGATURE VEW NOW
titleMapping '\xfb16' s = Yield '\x054e' (CC s '\x0576' '\x0000')
-- ARMENIAN SMALL LIGATURE MEN XEH
titleMapping '\xfb17' s = Yield '\x0544' (CC s '\x056d' '\x0000')
LATIN SMALL LETTER N PRECEDED BY
titleMapping '\x0149' s = Yield '\x02bc' (CC s '\x004e' '\x0000')
GREEK SMALL LETTER IOTA WITH DIALYTIKA AND TONOS
titleMapping '\x0390' s = Yield '\x0399' (CC s '\x0308' '\x0301')
GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND TONOS
titleMapping '\x03b0' s = Yield '\x03a5' (CC s '\x0308' '\x0301')
-- LATIN SMALL LETTER J WITH CARON
titleMapping '\x01f0' s = Yield '\x004a' (CC s '\x030c' '\x0000')
-- LATIN SMALL LETTER H WITH LINE BELOW
titleMapping '\x1e96' s = Yield '\x0048' (CC s '\x0331' '\x0000')
LATIN SMALL LETTER T WITH
titleMapping '\x1e97' s = Yield '\x0054' (CC s '\x0308' '\x0000')
-- LATIN SMALL LETTER W WITH RING ABOVE
titleMapping '\x1e98' s = Yield '\x0057' (CC s '\x030a' '\x0000')
-- LATIN SMALL LETTER Y WITH RING ABOVE
titleMapping '\x1e99' s = Yield '\x0059' (CC s '\x030a' '\x0000')
-- LATIN SMALL LETTER A WITH RIGHT HALF RING
titleMapping '\x1e9a' s = Yield '\x0041' (CC s '\x02be' '\x0000')
-- GREEK SMALL LETTER UPSILON WITH PSILI
titleMapping '\x1f50' s = Yield '\x03a5' (CC s '\x0313' '\x0000')
-- GREEK SMALL LETTER UPSILON WITH PSILI AND VARIA
titleMapping '\x1f52' s = Yield '\x03a5' (CC s '\x0313' '\x0300')
GREEK SMALL LETTER UPSILON WITH PSILI AND OXIA
titleMapping '\x1f54' s = Yield '\x03a5' (CC s '\x0313' '\x0301')
-- GREEK SMALL LETTER UPSILON WITH PSILI AND PERISPOMENI
titleMapping '\x1f56' s = Yield '\x03a5' (CC s '\x0313' '\x0342')
-- GREEK SMALL LETTER ALPHA WITH PERISPOMENI
titleMapping '\x1fb6' s = Yield '\x0391' (CC s '\x0342' '\x0000')
-- GREEK SMALL LETTER ETA WITH PERISPOMENI
titleMapping '\x1fc6' s = Yield '\x0397' (CC s '\x0342' '\x0000')
-- GREEK SMALL LETTER IOTA WITH DIALYTIKA AND VARIA
titleMapping '\x1fd2' s = Yield '\x0399' (CC s '\x0308' '\x0300')
GREEK SMALL LETTER IOTA WITH DIALYTIKA AND OXIA
titleMapping '\x1fd3' s = Yield '\x0399' (CC s '\x0308' '\x0301')
-- GREEK SMALL LETTER IOTA WITH PERISPOMENI
titleMapping '\x1fd6' s = Yield '\x0399' (CC s '\x0342' '\x0000')
-- GREEK SMALL LETTER IOTA WITH DIALYTIKA AND PERISPOMENI
titleMapping '\x1fd7' s = Yield '\x0399' (CC s '\x0308' '\x0342')
-- GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND VARIA
titleMapping '\x1fe2' s = Yield '\x03a5' (CC s '\x0308' '\x0300')
GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND OXIA
titleMapping '\x1fe3' s = Yield '\x03a5' (CC s '\x0308' '\x0301')
-- GREEK SMALL LETTER RHO WITH PSILI
titleMapping '\x1fe4' s = Yield '\x03a1' (CC s '\x0313' '\x0000')
-- GREEK SMALL LETTER UPSILON WITH PERISPOMENI
titleMapping '\x1fe6' s = Yield '\x03a5' (CC s '\x0342' '\x0000')
GREEK SMALL LETTER UPSILON WITH AND PERISPOMENI
titleMapping '\x1fe7' s = Yield '\x03a5' (CC s '\x0308' '\x0342')
-- GREEK SMALL LETTER OMEGA WITH PERISPOMENI
titleMapping '\x1ff6' s = Yield '\x03a9' (CC s '\x0342' '\x0000')
GREEK SMALL LETTER ALPHA WITH VARIA AND
titleMapping '\x1fb2' s = Yield '\x1fba' (CC s '\x0345' '\x0000')
GREEK SMALL LETTER ALPHA WITH OXIA AND
titleMapping '\x1fb4' s = Yield '\x0386' (CC s '\x0345' '\x0000')
GREEK SMALL LETTER ETA WITH VARIA AND
titleMapping '\x1fc2' s = Yield '\x1fca' (CC s '\x0345' '\x0000')
GREEK SMALL LETTER ETA WITH OXIA AND
titleMapping '\x1fc4' s = Yield '\x0389' (CC s '\x0345' '\x0000')
GREEK SMALL LETTER OMEGA WITH VARIA AND
titleMapping '\x1ff2' s = Yield '\x1ffa' (CC s '\x0345' '\x0000')
GREEK SMALL LETTER OMEGA WITH OXIA AND
titleMapping '\x1ff4' s = Yield '\x038f' (CC s '\x0345' '\x0000')
GREEK SMALL LETTER ALPHA WITH PERISPOMENI AND
titleMapping '\x1fb7' s = Yield '\x0391' (CC s '\x0342' '\x0345')
GREEK SMALL LETTER ETA WITH PERISPOMENI AND
titleMapping '\x1fc7' s = Yield '\x0397' (CC s '\x0342' '\x0345')
GREEK SMALL LETTER OMEGA WITH PERISPOMENI AND
titleMapping '\x1ff7' s = Yield '\x03a9' (CC s '\x0342' '\x0345')
titleMapping c s = Yield (toTitle c) (CC s '\0' '\0')
foldMapping :: forall s. Char -> s -> Step (CC s) Char
{-# INLINE foldMapping #-}
-- MICRO SIGN
foldMapping '\x00b5' s = Yield '\x03bc' (CC s '\x0000' '\x0000')
-- LATIN SMALL LETTER SHARP S
foldMapping '\x00df' s = Yield '\x0073' (CC s '\x0073' '\x0000')
-- LATIN CAPITAL LETTER I WITH DOT ABOVE
foldMapping '\x0130' s = Yield '\x0069' (CC s '\x0307' '\x0000')
LATIN SMALL LETTER N PRECEDED BY
foldMapping '\x0149' s = Yield '\x02bc' (CC s '\x006e' '\x0000')
-- LATIN SMALL LETTER LONG S
foldMapping '\x017f' s = Yield '\x0073' (CC s '\x0000' '\x0000')
-- LATIN SMALL LETTER J WITH CARON
foldMapping '\x01f0' s = Yield '\x006a' (CC s '\x030c' '\x0000')
-- COMBINING GREEK YPOGEGRAMMENI
foldMapping '\x0345' s = Yield '\x03b9' (CC s '\x0000' '\x0000')
GREEK SMALL LETTER IOTA WITH DIALYTIKA AND TONOS
foldMapping '\x0390' s = Yield '\x03b9' (CC s '\x0308' '\x0301')
GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND TONOS
foldMapping '\x03b0' s = Yield '\x03c5' (CC s '\x0308' '\x0301')
-- GREEK SMALL LETTER FINAL SIGMA
foldMapping '\x03c2' s = Yield '\x03c3' (CC s '\x0000' '\x0000')
-- GREEK BETA SYMBOL
foldMapping '\x03d0' s = Yield '\x03b2' (CC s '\x0000' '\x0000')
-- GREEK THETA SYMBOL
foldMapping '\x03d1' s = Yield '\x03b8' (CC s '\x0000' '\x0000')
-- GREEK PHI SYMBOL
foldMapping '\x03d5' s = Yield '\x03c6' (CC s '\x0000' '\x0000')
-- GREEK PI SYMBOL
foldMapping '\x03d6' s = Yield '\x03c0' (CC s '\x0000' '\x0000')
-- GREEK KAPPA SYMBOL
foldMapping '\x03f0' s = Yield '\x03ba' (CC s '\x0000' '\x0000')
-- GREEK RHO SYMBOL
foldMapping '\x03f1' s = Yield '\x03c1' (CC s '\x0000' '\x0000')
-- GREEK LUNATE EPSILON SYMBOL
foldMapping '\x03f5' s = Yield '\x03b5' (CC s '\x0000' '\x0000')
-- ARMENIAN SMALL LIGATURE ECH YIWN
foldMapping '\x0587' s = Yield '\x0565' (CC s '\x0582' '\x0000')
-- GEORGIAN CAPITAL LETTER YN
foldMapping '\x10c7' s = Yield '\x2d27' (CC s '\x0000' '\x0000')
-- GEORGIAN CAPITAL LETTER AEN
foldMapping '\x10cd' s = Yield '\x2d2d' (CC s '\x0000' '\x0000')
-- LATIN SMALL LETTER H WITH LINE BELOW
foldMapping '\x1e96' s = Yield '\x0068' (CC s '\x0331' '\x0000')
LATIN SMALL LETTER T WITH
foldMapping '\x1e97' s = Yield '\x0074' (CC s '\x0308' '\x0000')
-- LATIN SMALL LETTER W WITH RING ABOVE
foldMapping '\x1e98' s = Yield '\x0077' (CC s '\x030a' '\x0000')
-- LATIN SMALL LETTER Y WITH RING ABOVE
foldMapping '\x1e99' s = Yield '\x0079' (CC s '\x030a' '\x0000')
-- LATIN SMALL LETTER A WITH RIGHT HALF RING
foldMapping '\x1e9a' s = Yield '\x0061' (CC s '\x02be' '\x0000')
-- LATIN SMALL LETTER LONG S WITH DOT ABOVE
foldMapping '\x1e9b' s = Yield '\x1e61' (CC s '\x0000' '\x0000')
-- LATIN CAPITAL LETTER SHARP S
foldMapping '\x1e9e' s = Yield '\x0073' (CC s '\x0073' '\x0000')
-- GREEK SMALL LETTER UPSILON WITH PSILI
foldMapping '\x1f50' s = Yield '\x03c5' (CC s '\x0313' '\x0000')
-- GREEK SMALL LETTER UPSILON WITH PSILI AND VARIA
foldMapping '\x1f52' s = Yield '\x03c5' (CC s '\x0313' '\x0300')
GREEK SMALL LETTER UPSILON WITH PSILI AND OXIA
foldMapping '\x1f54' s = Yield '\x03c5' (CC s '\x0313' '\x0301')
-- GREEK SMALL LETTER UPSILON WITH PSILI AND PERISPOMENI
foldMapping '\x1f56' s = Yield '\x03c5' (CC s '\x0313' '\x0342')
GREEK SMALL LETTER ALPHA WITH PSILI AND
foldMapping '\x1f80' s = Yield '\x1f00' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER ALPHA WITH DASIA AND
foldMapping '\x1f81' s = Yield '\x1f01' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER ALPHA WITH PSILI AND VARIA AND
foldMapping '\x1f82' s = Yield '\x1f02' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER ALPHA WITH DASIA AND VARIA AND
foldMapping '\x1f83' s = Yield '\x1f03' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER ALPHA WITH PSILI AND OXIA AND
foldMapping '\x1f84' s = Yield '\x1f04' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER ALPHA WITH DASIA AND OXIA AND
foldMapping '\x1f85' s = Yield '\x1f05' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER ALPHA WITH PSILI AND PERISPOMENI AND
foldMapping '\x1f86' s = Yield '\x1f06' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER ALPHA WITH DASIA AND PERISPOMENI AND
foldMapping '\x1f87' s = Yield '\x1f07' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER ALPHA WITH PSILI AND PROSGEGRAMMENI
foldMapping '\x1f88' s = Yield '\x1f00' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER ALPHA WITH DASIA AND PROSGEGRAMMENI
foldMapping '\x1f89' s = Yield '\x1f01' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER ALPHA WITH PSILI AND VARIA AND PROSGEGRAMMENI
foldMapping '\x1f8a' s = Yield '\x1f02' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER ALPHA WITH DASIA AND VARIA AND PROSGEGRAMMENI
foldMapping '\x1f8b' s = Yield '\x1f03' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER ALPHA WITH PSILI AND OXIA AND PROSGEGRAMMENI
foldMapping '\x1f8c' s = Yield '\x1f04' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER ALPHA WITH DASIA AND OXIA AND PROSGEGRAMMENI
foldMapping '\x1f8d' s = Yield '\x1f05' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER ALPHA WITH PSILI AND PERISPOMENI AND PROSGEGRAMMENI
foldMapping '\x1f8e' s = Yield '\x1f06' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER ALPHA WITH DASIA AND PERISPOMENI AND PROSGEGRAMMENI
foldMapping '\x1f8f' s = Yield '\x1f07' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER ETA WITH PSILI AND
foldMapping '\x1f90' s = Yield '\x1f20' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER ETA WITH DASIA AND
foldMapping '\x1f91' s = Yield '\x1f21' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER ETA WITH PSILI AND VARIA AND
foldMapping '\x1f92' s = Yield '\x1f22' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER ETA WITH DASIA AND VARIA AND
foldMapping '\x1f93' s = Yield '\x1f23' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER ETA WITH PSILI AND OXIA AND
foldMapping '\x1f94' s = Yield '\x1f24' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER ETA WITH DASIA AND OXIA AND
foldMapping '\x1f95' s = Yield '\x1f25' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER ETA WITH PSILI AND PERISPOMENI AND
foldMapping '\x1f96' s = Yield '\x1f26' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER ETA WITH DASIA AND PERISPOMENI AND
foldMapping '\x1f97' s = Yield '\x1f27' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER ETA WITH PSILI AND PROSGEGRAMMENI
foldMapping '\x1f98' s = Yield '\x1f20' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER ETA WITH DASIA AND PROSGEGRAMMENI
foldMapping '\x1f99' s = Yield '\x1f21' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER ETA WITH PSILI AND VARIA AND PROSGEGRAMMENI
foldMapping '\x1f9a' s = Yield '\x1f22' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER ETA WITH DASIA AND VARIA AND PROSGEGRAMMENI
foldMapping '\x1f9b' s = Yield '\x1f23' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER ETA WITH PSILI AND OXIA AND PROSGEGRAMMENI
foldMapping '\x1f9c' s = Yield '\x1f24' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER ETA WITH DASIA AND OXIA AND PROSGEGRAMMENI
foldMapping '\x1f9d' s = Yield '\x1f25' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER ETA WITH PSILI AND PERISPOMENI AND PROSGEGRAMMENI
foldMapping '\x1f9e' s = Yield '\x1f26' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER ETA WITH DASIA AND PERISPOMENI AND PROSGEGRAMMENI
foldMapping '\x1f9f' s = Yield '\x1f27' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER OMEGA WITH PSILI AND
foldMapping '\x1fa0' s = Yield '\x1f60' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER OMEGA WITH DASIA AND YPOGEGRAMMENI
foldMapping '\x1fa1' s = Yield '\x1f61' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER OMEGA WITH PSILI AND VARIA AND
foldMapping '\x1fa2' s = Yield '\x1f62' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER OMEGA WITH DASIA AND VARIA AND
foldMapping '\x1fa3' s = Yield '\x1f63' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER OMEGA WITH PSILI AND OXIA AND
foldMapping '\x1fa4' s = Yield '\x1f64' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER OMEGA WITH DASIA AND OXIA AND
foldMapping '\x1fa5' s = Yield '\x1f65' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER OMEGA WITH PSILI AND PERISPOMENI AND
foldMapping '\x1fa6' s = Yield '\x1f66' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER OMEGA WITH DASIA AND PERISPOMENI AND
foldMapping '\x1fa7' s = Yield '\x1f67' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER OMEGA WITH PSILI AND PROSGEGRAMMENI
foldMapping '\x1fa8' s = Yield '\x1f60' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER OMEGA WITH DASIA AND PROSGEGRAMMENI
foldMapping '\x1fa9' s = Yield '\x1f61' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER OMEGA WITH PSILI AND VARIA AND PROSGEGRAMMENI
foldMapping '\x1faa' s = Yield '\x1f62' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER OMEGA WITH DASIA AND VARIA AND PROSGEGRAMMENI
foldMapping '\x1fab' s = Yield '\x1f63' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER OMEGA WITH PSILI AND OXIA AND PROSGEGRAMMENI
foldMapping '\x1fac' s = Yield '\x1f64' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER OMEGA WITH DASIA AND OXIA AND PROSGEGRAMMENI
foldMapping '\x1fad' s = Yield '\x1f65' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER OMEGA WITH PSILI AND PERISPOMENI AND PROSGEGRAMMENI
foldMapping '\x1fae' s = Yield '\x1f66' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER OMEGA WITH DASIA AND PERISPOMENI AND PROSGEGRAMMENI
foldMapping '\x1faf' s = Yield '\x1f67' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER ALPHA WITH VARIA AND
foldMapping '\x1fb2' s = Yield '\x1f70' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER ALPHA WITH YPOGEGRAMMENI
foldMapping '\x1fb3' s = Yield '\x03b1' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER ALPHA WITH OXIA AND
foldMapping '\x1fb4' s = Yield '\x03ac' (CC s '\x03b9' '\x0000')
-- GREEK SMALL LETTER ALPHA WITH PERISPOMENI
foldMapping '\x1fb6' s = Yield '\x03b1' (CC s '\x0342' '\x0000')
GREEK SMALL LETTER ALPHA WITH PERISPOMENI AND
foldMapping '\x1fb7' s = Yield '\x03b1' (CC s '\x0342' '\x03b9')
GREEK CAPITAL LETTER ALPHA WITH PROSGEGRAMMENI
foldMapping '\x1fbc' s = Yield '\x03b1' (CC s '\x03b9' '\x0000')
-- GREEK PROSGEGRAMMENI
foldMapping '\x1fbe' s = Yield '\x03b9' (CC s '\x0000' '\x0000')
GREEK SMALL LETTER ETA WITH VARIA AND
foldMapping '\x1fc2' s = Yield '\x1f74' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER ETA WITH YPOGEGRAMMENI
foldMapping '\x1fc3' s = Yield '\x03b7' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER ETA WITH OXIA AND
foldMapping '\x1fc4' s = Yield '\x03ae' (CC s '\x03b9' '\x0000')
-- GREEK SMALL LETTER ETA WITH PERISPOMENI
foldMapping '\x1fc6' s = Yield '\x03b7' (CC s '\x0342' '\x0000')
GREEK SMALL LETTER ETA WITH PERISPOMENI AND
foldMapping '\x1fc7' s = Yield '\x03b7' (CC s '\x0342' '\x03b9')
GREEK CAPITAL LETTER ETA WITH PROSGEGRAMMENI
foldMapping '\x1fcc' s = Yield '\x03b7' (CC s '\x03b9' '\x0000')
-- GREEK SMALL LETTER IOTA WITH DIALYTIKA AND VARIA
foldMapping '\x1fd2' s = Yield '\x03b9' (CC s '\x0308' '\x0300')
GREEK SMALL LETTER IOTA WITH DIALYTIKA AND OXIA
foldMapping '\x1fd3' s = Yield '\x03b9' (CC s '\x0308' '\x0301')
-- GREEK SMALL LETTER IOTA WITH PERISPOMENI
foldMapping '\x1fd6' s = Yield '\x03b9' (CC s '\x0342' '\x0000')
-- GREEK SMALL LETTER IOTA WITH DIALYTIKA AND PERISPOMENI
foldMapping '\x1fd7' s = Yield '\x03b9' (CC s '\x0308' '\x0342')
-- GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND VARIA
foldMapping '\x1fe2' s = Yield '\x03c5' (CC s '\x0308' '\x0300')
GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND OXIA
foldMapping '\x1fe3' s = Yield '\x03c5' (CC s '\x0308' '\x0301')
-- GREEK SMALL LETTER RHO WITH PSILI
foldMapping '\x1fe4' s = Yield '\x03c1' (CC s '\x0313' '\x0000')
-- GREEK SMALL LETTER UPSILON WITH PERISPOMENI
foldMapping '\x1fe6' s = Yield '\x03c5' (CC s '\x0342' '\x0000')
GREEK SMALL LETTER UPSILON WITH AND PERISPOMENI
foldMapping '\x1fe7' s = Yield '\x03c5' (CC s '\x0308' '\x0342')
GREEK SMALL LETTER OMEGA WITH VARIA AND
foldMapping '\x1ff2' s = Yield '\x1f7c' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER OMEGA WITH YPOGEGRAMMENI
foldMapping '\x1ff3' s = Yield '\x03c9' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER OMEGA WITH OXIA AND
foldMapping '\x1ff4' s = Yield '\x03ce' (CC s '\x03b9' '\x0000')
-- GREEK SMALL LETTER OMEGA WITH PERISPOMENI
foldMapping '\x1ff6' s = Yield '\x03c9' (CC s '\x0342' '\x0000')
GREEK SMALL LETTER OMEGA WITH PERISPOMENI AND
foldMapping '\x1ff7' s = Yield '\x03c9' (CC s '\x0342' '\x03b9')
GREEK CAPITAL LETTER OMEGA WITH PROSGEGRAMMENI
foldMapping '\x1ffc' s = Yield '\x03c9' (CC s '\x03b9' '\x0000')
COPTIC CAPITAL LETTER
foldMapping '\x2cf2' s = Yield '\x2cf3' (CC s '\x0000' '\x0000')
-- LATIN CAPITAL LETTER C WITH BAR
foldMapping '\xa792' s = Yield '\xa793' (CC s '\x0000' '\x0000')
-- LATIN CAPITAL LETTER H WITH HOOK
foldMapping '\xa7aa' s = Yield '\x0266' (CC s '\x0000' '\x0000')
-- LATIN SMALL LIGATURE FF
foldMapping '\xfb00' s = Yield '\x0066' (CC s '\x0066' '\x0000')
-- LATIN SMALL LIGATURE FI
foldMapping '\xfb01' s = Yield '\x0066' (CC s '\x0069' '\x0000')
-- LATIN SMALL LIGATURE FL
foldMapping '\xfb02' s = Yield '\x0066' (CC s '\x006c' '\x0000')
LATIN SMALL LIGATURE
foldMapping '\xfb03' s = Yield '\x0066' (CC s '\x0066' '\x0069')
-- LATIN SMALL LIGATURE FFL
foldMapping '\xfb04' s = Yield '\x0066' (CC s '\x0066' '\x006c')
-- LATIN SMALL LIGATURE LONG S T
foldMapping '\xfb05' s = Yield '\x0073' (CC s '\x0074' '\x0000')
-- LATIN SMALL LIGATURE ST
foldMapping '\xfb06' s = Yield '\x0073' (CC s '\x0074' '\x0000')
ARMENIAN SMALL LIGATURE MEN NOW
foldMapping '\xfb13' s = Yield '\x0574' (CC s '\x0576' '\x0000')
-- ARMENIAN SMALL LIGATURE MEN ECH
foldMapping '\xfb14' s = Yield '\x0574' (CC s '\x0565' '\x0000')
-- ARMENIAN SMALL LIGATURE MEN INI
foldMapping '\xfb15' s = Yield '\x0574' (CC s '\x056b' '\x0000')
ARMENIAN SMALL LIGATURE VEW NOW
foldMapping '\xfb16' s = Yield '\x057e' (CC s '\x0576' '\x0000')
-- ARMENIAN SMALL LIGATURE MEN XEH
foldMapping '\xfb17' s = Yield '\x0574' (CC s '\x056d' '\x0000')
foldMapping c s = Yield (toLower c) (CC s '\0' '\0')
| null | https://raw.githubusercontent.com/ghcjs/ghcjs-base/18f31dec5d9eae1ef35ff8bbf163394942efd227/Data/JSString/Internal/Fusion/CaseMapping.hs | haskell | # LANGUAGE Rank2Types #
AUTOMATICALLY GENERATED - DO NOT EDIT
CaseFolding-6.3.0.txt
LATIN SMALL LETTER SHARP S
LATIN SMALL LIGATURE FF
LATIN SMALL LIGATURE FI
LATIN SMALL LIGATURE FL
LATIN SMALL LIGATURE FFL
LATIN SMALL LIGATURE LONG S T
LATIN SMALL LIGATURE ST
ARMENIAN SMALL LIGATURE ECH YIWN
ARMENIAN SMALL LIGATURE MEN ECH
ARMENIAN SMALL LIGATURE MEN INI
ARMENIAN SMALL LIGATURE MEN XEH
LATIN SMALL LETTER J WITH CARON
LATIN SMALL LETTER H WITH LINE BELOW
LATIN SMALL LETTER W WITH RING ABOVE
LATIN SMALL LETTER Y WITH RING ABOVE
LATIN SMALL LETTER A WITH RIGHT HALF RING
GREEK SMALL LETTER UPSILON WITH PSILI
GREEK SMALL LETTER UPSILON WITH PSILI AND VARIA
GREEK SMALL LETTER UPSILON WITH PSILI AND PERISPOMENI
GREEK SMALL LETTER ALPHA WITH PERISPOMENI
GREEK SMALL LETTER ETA WITH PERISPOMENI
GREEK SMALL LETTER IOTA WITH DIALYTIKA AND VARIA
GREEK SMALL LETTER IOTA WITH PERISPOMENI
GREEK SMALL LETTER IOTA WITH DIALYTIKA AND PERISPOMENI
GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND VARIA
GREEK SMALL LETTER RHO WITH PSILI
GREEK SMALL LETTER UPSILON WITH PERISPOMENI
GREEK SMALL LETTER OMEGA WITH PERISPOMENI
LATIN CAPITAL LETTER I WITH DOT ABOVE
LATIN SMALL LETTER SHARP S
LATIN SMALL LIGATURE FF
LATIN SMALL LIGATURE FI
LATIN SMALL LIGATURE FL
LATIN SMALL LIGATURE FFL
LATIN SMALL LIGATURE LONG S T
LATIN SMALL LIGATURE ST
ARMENIAN SMALL LIGATURE ECH YIWN
ARMENIAN SMALL LIGATURE MEN ECH
ARMENIAN SMALL LIGATURE MEN INI
ARMENIAN SMALL LIGATURE MEN XEH
LATIN SMALL LETTER J WITH CARON
LATIN SMALL LETTER H WITH LINE BELOW
LATIN SMALL LETTER W WITH RING ABOVE
LATIN SMALL LETTER Y WITH RING ABOVE
LATIN SMALL LETTER A WITH RIGHT HALF RING
GREEK SMALL LETTER UPSILON WITH PSILI
GREEK SMALL LETTER UPSILON WITH PSILI AND VARIA
GREEK SMALL LETTER UPSILON WITH PSILI AND PERISPOMENI
GREEK SMALL LETTER ALPHA WITH PERISPOMENI
GREEK SMALL LETTER ETA WITH PERISPOMENI
GREEK SMALL LETTER IOTA WITH DIALYTIKA AND VARIA
GREEK SMALL LETTER IOTA WITH PERISPOMENI
GREEK SMALL LETTER IOTA WITH DIALYTIKA AND PERISPOMENI
GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND VARIA
GREEK SMALL LETTER RHO WITH PSILI
GREEK SMALL LETTER UPSILON WITH PERISPOMENI
GREEK SMALL LETTER OMEGA WITH PERISPOMENI
MICRO SIGN
LATIN SMALL LETTER SHARP S
LATIN CAPITAL LETTER I WITH DOT ABOVE
LATIN SMALL LETTER LONG S
LATIN SMALL LETTER J WITH CARON
COMBINING GREEK YPOGEGRAMMENI
GREEK SMALL LETTER FINAL SIGMA
GREEK BETA SYMBOL
GREEK THETA SYMBOL
GREEK PHI SYMBOL
GREEK PI SYMBOL
GREEK KAPPA SYMBOL
GREEK RHO SYMBOL
GREEK LUNATE EPSILON SYMBOL
ARMENIAN SMALL LIGATURE ECH YIWN
GEORGIAN CAPITAL LETTER YN
GEORGIAN CAPITAL LETTER AEN
LATIN SMALL LETTER H WITH LINE BELOW
LATIN SMALL LETTER W WITH RING ABOVE
LATIN SMALL LETTER Y WITH RING ABOVE
LATIN SMALL LETTER A WITH RIGHT HALF RING
LATIN SMALL LETTER LONG S WITH DOT ABOVE
LATIN CAPITAL LETTER SHARP S
GREEK SMALL LETTER UPSILON WITH PSILI
GREEK SMALL LETTER UPSILON WITH PSILI AND VARIA
GREEK SMALL LETTER UPSILON WITH PSILI AND PERISPOMENI
GREEK SMALL LETTER ALPHA WITH PERISPOMENI
GREEK PROSGEGRAMMENI
GREEK SMALL LETTER ETA WITH PERISPOMENI
GREEK SMALL LETTER IOTA WITH DIALYTIKA AND VARIA
GREEK SMALL LETTER IOTA WITH PERISPOMENI
GREEK SMALL LETTER IOTA WITH DIALYTIKA AND PERISPOMENI
GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND VARIA
GREEK SMALL LETTER RHO WITH PSILI
GREEK SMALL LETTER UPSILON WITH PERISPOMENI
GREEK SMALL LETTER OMEGA WITH PERISPOMENI
LATIN CAPITAL LETTER C WITH BAR
LATIN CAPITAL LETTER H WITH HOOK
LATIN SMALL LIGATURE FF
LATIN SMALL LIGATURE FI
LATIN SMALL LIGATURE FL
LATIN SMALL LIGATURE FFL
LATIN SMALL LIGATURE LONG S T
LATIN SMALL LIGATURE ST
ARMENIAN SMALL LIGATURE MEN ECH
ARMENIAN SMALL LIGATURE MEN INI
ARMENIAN SMALL LIGATURE MEN XEH | Generated by scripts / SpecialCasing.hs
Date : 2012 - 12 - 20 , 22:14:35 GMT [ MD ]
SpecialCasing-6.3.0.txt
Date : 2013 - 05 - 08 , GMT [ MD ]
module Data.JSString.Internal.Fusion.CaseMapping where
import Data.Char
import Data.JSString.Internal.Fusion.Types
upperMapping :: forall s. Char -> s -> Step (CC s) Char
# INLINE upperMapping #
upperMapping '\x00df' s = Yield '\x0053' (CC s '\x0053' '\x0000')
upperMapping '\xfb00' s = Yield '\x0046' (CC s '\x0046' '\x0000')
upperMapping '\xfb01' s = Yield '\x0046' (CC s '\x0049' '\x0000')
upperMapping '\xfb02' s = Yield '\x0046' (CC s '\x004c' '\x0000')
LATIN SMALL LIGATURE
upperMapping '\xfb03' s = Yield '\x0046' (CC s '\x0046' '\x0049')
upperMapping '\xfb04' s = Yield '\x0046' (CC s '\x0046' '\x004c')
upperMapping '\xfb05' s = Yield '\x0053' (CC s '\x0054' '\x0000')
upperMapping '\xfb06' s = Yield '\x0053' (CC s '\x0054' '\x0000')
upperMapping '\x0587' s = Yield '\x0535' (CC s '\x0552' '\x0000')
ARMENIAN SMALL LIGATURE MEN NOW
upperMapping '\xfb13' s = Yield '\x0544' (CC s '\x0546' '\x0000')
upperMapping '\xfb14' s = Yield '\x0544' (CC s '\x0535' '\x0000')
upperMapping '\xfb15' s = Yield '\x0544' (CC s '\x053b' '\x0000')
ARMENIAN SMALL LIGATURE VEW NOW
upperMapping '\xfb16' s = Yield '\x054e' (CC s '\x0546' '\x0000')
upperMapping '\xfb17' s = Yield '\x0544' (CC s '\x053d' '\x0000')
LATIN SMALL LETTER N PRECEDED BY
upperMapping '\x0149' s = Yield '\x02bc' (CC s '\x004e' '\x0000')
GREEK SMALL LETTER IOTA WITH DIALYTIKA AND TONOS
upperMapping '\x0390' s = Yield '\x0399' (CC s '\x0308' '\x0301')
GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND TONOS
upperMapping '\x03b0' s = Yield '\x03a5' (CC s '\x0308' '\x0301')
upperMapping '\x01f0' s = Yield '\x004a' (CC s '\x030c' '\x0000')
upperMapping '\x1e96' s = Yield '\x0048' (CC s '\x0331' '\x0000')
LATIN SMALL LETTER T WITH
upperMapping '\x1e97' s = Yield '\x0054' (CC s '\x0308' '\x0000')
upperMapping '\x1e98' s = Yield '\x0057' (CC s '\x030a' '\x0000')
upperMapping '\x1e99' s = Yield '\x0059' (CC s '\x030a' '\x0000')
upperMapping '\x1e9a' s = Yield '\x0041' (CC s '\x02be' '\x0000')
upperMapping '\x1f50' s = Yield '\x03a5' (CC s '\x0313' '\x0000')
upperMapping '\x1f52' s = Yield '\x03a5' (CC s '\x0313' '\x0300')
GREEK SMALL LETTER UPSILON WITH PSILI AND OXIA
upperMapping '\x1f54' s = Yield '\x03a5' (CC s '\x0313' '\x0301')
upperMapping '\x1f56' s = Yield '\x03a5' (CC s '\x0313' '\x0342')
upperMapping '\x1fb6' s = Yield '\x0391' (CC s '\x0342' '\x0000')
upperMapping '\x1fc6' s = Yield '\x0397' (CC s '\x0342' '\x0000')
upperMapping '\x1fd2' s = Yield '\x0399' (CC s '\x0308' '\x0300')
GREEK SMALL LETTER IOTA WITH DIALYTIKA AND OXIA
upperMapping '\x1fd3' s = Yield '\x0399' (CC s '\x0308' '\x0301')
upperMapping '\x1fd6' s = Yield '\x0399' (CC s '\x0342' '\x0000')
upperMapping '\x1fd7' s = Yield '\x0399' (CC s '\x0308' '\x0342')
upperMapping '\x1fe2' s = Yield '\x03a5' (CC s '\x0308' '\x0300')
GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND OXIA
upperMapping '\x1fe3' s = Yield '\x03a5' (CC s '\x0308' '\x0301')
upperMapping '\x1fe4' s = Yield '\x03a1' (CC s '\x0313' '\x0000')
upperMapping '\x1fe6' s = Yield '\x03a5' (CC s '\x0342' '\x0000')
GREEK SMALL LETTER UPSILON WITH AND PERISPOMENI
upperMapping '\x1fe7' s = Yield '\x03a5' (CC s '\x0308' '\x0342')
upperMapping '\x1ff6' s = Yield '\x03a9' (CC s '\x0342' '\x0000')
GREEK SMALL LETTER ALPHA WITH PSILI AND
upperMapping '\x1f80' s = Yield '\x1f08' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER ALPHA WITH DASIA AND
upperMapping '\x1f81' s = Yield '\x1f09' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER ALPHA WITH PSILI AND VARIA AND
upperMapping '\x1f82' s = Yield '\x1f0a' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER ALPHA WITH DASIA AND VARIA AND
upperMapping '\x1f83' s = Yield '\x1f0b' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER ALPHA WITH PSILI AND OXIA AND
upperMapping '\x1f84' s = Yield '\x1f0c' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER ALPHA WITH DASIA AND OXIA AND
upperMapping '\x1f85' s = Yield '\x1f0d' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER ALPHA WITH PSILI AND PERISPOMENI AND
upperMapping '\x1f86' s = Yield '\x1f0e' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER ALPHA WITH DASIA AND PERISPOMENI AND
upperMapping '\x1f87' s = Yield '\x1f0f' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER ALPHA WITH PSILI AND PROSGEGRAMMENI
upperMapping '\x1f88' s = Yield '\x1f08' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER ALPHA WITH DASIA AND PROSGEGRAMMENI
upperMapping '\x1f89' s = Yield '\x1f09' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER ALPHA WITH PSILI AND VARIA AND PROSGEGRAMMENI
upperMapping '\x1f8a' s = Yield '\x1f0a' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER ALPHA WITH DASIA AND VARIA AND PROSGEGRAMMENI
upperMapping '\x1f8b' s = Yield '\x1f0b' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER ALPHA WITH PSILI AND OXIA AND PROSGEGRAMMENI
upperMapping '\x1f8c' s = Yield '\x1f0c' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER ALPHA WITH DASIA AND OXIA AND PROSGEGRAMMENI
upperMapping '\x1f8d' s = Yield '\x1f0d' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER ALPHA WITH PSILI AND PERISPOMENI AND PROSGEGRAMMENI
upperMapping '\x1f8e' s = Yield '\x1f0e' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER ALPHA WITH DASIA AND PERISPOMENI AND PROSGEGRAMMENI
upperMapping '\x1f8f' s = Yield '\x1f0f' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER ETA WITH PSILI AND
upperMapping '\x1f90' s = Yield '\x1f28' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER ETA WITH DASIA AND
upperMapping '\x1f91' s = Yield '\x1f29' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER ETA WITH PSILI AND VARIA AND
upperMapping '\x1f92' s = Yield '\x1f2a' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER ETA WITH DASIA AND VARIA AND
upperMapping '\x1f93' s = Yield '\x1f2b' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER ETA WITH PSILI AND OXIA AND
upperMapping '\x1f94' s = Yield '\x1f2c' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER ETA WITH DASIA AND OXIA AND
upperMapping '\x1f95' s = Yield '\x1f2d' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER ETA WITH PSILI AND PERISPOMENI AND
upperMapping '\x1f96' s = Yield '\x1f2e' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER ETA WITH DASIA AND PERISPOMENI AND
upperMapping '\x1f97' s = Yield '\x1f2f' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER ETA WITH PSILI AND PROSGEGRAMMENI
upperMapping '\x1f98' s = Yield '\x1f28' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER ETA WITH DASIA AND PROSGEGRAMMENI
upperMapping '\x1f99' s = Yield '\x1f29' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER ETA WITH PSILI AND VARIA AND PROSGEGRAMMENI
upperMapping '\x1f9a' s = Yield '\x1f2a' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER ETA WITH DASIA AND VARIA AND PROSGEGRAMMENI
upperMapping '\x1f9b' s = Yield '\x1f2b' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER ETA WITH PSILI AND OXIA AND PROSGEGRAMMENI
upperMapping '\x1f9c' s = Yield '\x1f2c' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER ETA WITH DASIA AND OXIA AND PROSGEGRAMMENI
upperMapping '\x1f9d' s = Yield '\x1f2d' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER ETA WITH PSILI AND PERISPOMENI AND PROSGEGRAMMENI
upperMapping '\x1f9e' s = Yield '\x1f2e' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER ETA WITH DASIA AND PERISPOMENI AND PROSGEGRAMMENI
upperMapping '\x1f9f' s = Yield '\x1f2f' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER OMEGA WITH PSILI AND
upperMapping '\x1fa0' s = Yield '\x1f68' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER OMEGA WITH DASIA AND YPOGEGRAMMENI
upperMapping '\x1fa1' s = Yield '\x1f69' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER OMEGA WITH PSILI AND VARIA AND
upperMapping '\x1fa2' s = Yield '\x1f6a' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER OMEGA WITH DASIA AND VARIA AND
upperMapping '\x1fa3' s = Yield '\x1f6b' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER OMEGA WITH PSILI AND OXIA AND
upperMapping '\x1fa4' s = Yield '\x1f6c' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER OMEGA WITH DASIA AND OXIA AND
upperMapping '\x1fa5' s = Yield '\x1f6d' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER OMEGA WITH PSILI AND PERISPOMENI AND
upperMapping '\x1fa6' s = Yield '\x1f6e' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER OMEGA WITH DASIA AND PERISPOMENI AND
upperMapping '\x1fa7' s = Yield '\x1f6f' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER OMEGA WITH PSILI AND PROSGEGRAMMENI
upperMapping '\x1fa8' s = Yield '\x1f68' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER OMEGA WITH DASIA AND PROSGEGRAMMENI
upperMapping '\x1fa9' s = Yield '\x1f69' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER OMEGA WITH PSILI AND VARIA AND PROSGEGRAMMENI
upperMapping '\x1faa' s = Yield '\x1f6a' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER OMEGA WITH DASIA AND VARIA AND PROSGEGRAMMENI
upperMapping '\x1fab' s = Yield '\x1f6b' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER OMEGA WITH PSILI AND OXIA AND PROSGEGRAMMENI
upperMapping '\x1fac' s = Yield '\x1f6c' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER OMEGA WITH DASIA AND OXIA AND PROSGEGRAMMENI
upperMapping '\x1fad' s = Yield '\x1f6d' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER OMEGA WITH PSILI AND PERISPOMENI AND PROSGEGRAMMENI
upperMapping '\x1fae' s = Yield '\x1f6e' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER OMEGA WITH DASIA AND PERISPOMENI AND PROSGEGRAMMENI
upperMapping '\x1faf' s = Yield '\x1f6f' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER ALPHA WITH YPOGEGRAMMENI
upperMapping '\x1fb3' s = Yield '\x0391' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER ALPHA WITH PROSGEGRAMMENI
upperMapping '\x1fbc' s = Yield '\x0391' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER ETA WITH YPOGEGRAMMENI
upperMapping '\x1fc3' s = Yield '\x0397' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER ETA WITH PROSGEGRAMMENI
upperMapping '\x1fcc' s = Yield '\x0397' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER OMEGA WITH YPOGEGRAMMENI
upperMapping '\x1ff3' s = Yield '\x03a9' (CC s '\x0399' '\x0000')
GREEK CAPITAL LETTER OMEGA WITH PROSGEGRAMMENI
upperMapping '\x1ffc' s = Yield '\x03a9' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER ALPHA WITH VARIA AND
upperMapping '\x1fb2' s = Yield '\x1fba' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER ALPHA WITH OXIA AND
upperMapping '\x1fb4' s = Yield '\x0386' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER ETA WITH VARIA AND
upperMapping '\x1fc2' s = Yield '\x1fca' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER ETA WITH OXIA AND
upperMapping '\x1fc4' s = Yield '\x0389' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER OMEGA WITH VARIA AND
upperMapping '\x1ff2' s = Yield '\x1ffa' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER OMEGA WITH OXIA AND
upperMapping '\x1ff4' s = Yield '\x038f' (CC s '\x0399' '\x0000')
GREEK SMALL LETTER ALPHA WITH PERISPOMENI AND
upperMapping '\x1fb7' s = Yield '\x0391' (CC s '\x0342' '\x0399')
GREEK SMALL LETTER ETA WITH PERISPOMENI AND
upperMapping '\x1fc7' s = Yield '\x0397' (CC s '\x0342' '\x0399')
GREEK SMALL LETTER OMEGA WITH PERISPOMENI AND
upperMapping '\x1ff7' s = Yield '\x03a9' (CC s '\x0342' '\x0399')
upperMapping c s = Yield (toUpper c) (CC s '\0' '\0')
lowerMapping :: forall s. Char -> s -> Step (CC s) Char
# INLINE lowerMapping #
lowerMapping '\x0130' s = Yield '\x0069' (CC s '\x0307' '\x0000')
lowerMapping c s = Yield (toLower c) (CC s '\0' '\0')
titleMapping :: forall s. Char -> s -> Step (CC s) Char
# INLINE titleMapping #
titleMapping '\x00df' s = Yield '\x0053' (CC s '\x0073' '\x0000')
titleMapping '\xfb00' s = Yield '\x0046' (CC s '\x0066' '\x0000')
titleMapping '\xfb01' s = Yield '\x0046' (CC s '\x0069' '\x0000')
titleMapping '\xfb02' s = Yield '\x0046' (CC s '\x006c' '\x0000')
LATIN SMALL LIGATURE
titleMapping '\xfb03' s = Yield '\x0046' (CC s '\x0066' '\x0069')
titleMapping '\xfb04' s = Yield '\x0046' (CC s '\x0066' '\x006c')
titleMapping '\xfb05' s = Yield '\x0053' (CC s '\x0074' '\x0000')
titleMapping '\xfb06' s = Yield '\x0053' (CC s '\x0074' '\x0000')
titleMapping '\x0587' s = Yield '\x0535' (CC s '\x0582' '\x0000')
ARMENIAN SMALL LIGATURE MEN NOW
titleMapping '\xfb13' s = Yield '\x0544' (CC s '\x0576' '\x0000')
titleMapping '\xfb14' s = Yield '\x0544' (CC s '\x0565' '\x0000')
titleMapping '\xfb15' s = Yield '\x0544' (CC s '\x056b' '\x0000')
ARMENIAN SMALL LIGATURE VEW NOW
titleMapping '\xfb16' s = Yield '\x054e' (CC s '\x0576' '\x0000')
titleMapping '\xfb17' s = Yield '\x0544' (CC s '\x056d' '\x0000')
LATIN SMALL LETTER N PRECEDED BY
titleMapping '\x0149' s = Yield '\x02bc' (CC s '\x004e' '\x0000')
GREEK SMALL LETTER IOTA WITH DIALYTIKA AND TONOS
titleMapping '\x0390' s = Yield '\x0399' (CC s '\x0308' '\x0301')
GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND TONOS
titleMapping '\x03b0' s = Yield '\x03a5' (CC s '\x0308' '\x0301')
titleMapping '\x01f0' s = Yield '\x004a' (CC s '\x030c' '\x0000')
titleMapping '\x1e96' s = Yield '\x0048' (CC s '\x0331' '\x0000')
LATIN SMALL LETTER T WITH
titleMapping '\x1e97' s = Yield '\x0054' (CC s '\x0308' '\x0000')
titleMapping '\x1e98' s = Yield '\x0057' (CC s '\x030a' '\x0000')
titleMapping '\x1e99' s = Yield '\x0059' (CC s '\x030a' '\x0000')
titleMapping '\x1e9a' s = Yield '\x0041' (CC s '\x02be' '\x0000')
titleMapping '\x1f50' s = Yield '\x03a5' (CC s '\x0313' '\x0000')
titleMapping '\x1f52' s = Yield '\x03a5' (CC s '\x0313' '\x0300')
GREEK SMALL LETTER UPSILON WITH PSILI AND OXIA
titleMapping '\x1f54' s = Yield '\x03a5' (CC s '\x0313' '\x0301')
titleMapping '\x1f56' s = Yield '\x03a5' (CC s '\x0313' '\x0342')
titleMapping '\x1fb6' s = Yield '\x0391' (CC s '\x0342' '\x0000')
titleMapping '\x1fc6' s = Yield '\x0397' (CC s '\x0342' '\x0000')
titleMapping '\x1fd2' s = Yield '\x0399' (CC s '\x0308' '\x0300')
GREEK SMALL LETTER IOTA WITH DIALYTIKA AND OXIA
titleMapping '\x1fd3' s = Yield '\x0399' (CC s '\x0308' '\x0301')
titleMapping '\x1fd6' s = Yield '\x0399' (CC s '\x0342' '\x0000')
titleMapping '\x1fd7' s = Yield '\x0399' (CC s '\x0308' '\x0342')
titleMapping '\x1fe2' s = Yield '\x03a5' (CC s '\x0308' '\x0300')
GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND OXIA
titleMapping '\x1fe3' s = Yield '\x03a5' (CC s '\x0308' '\x0301')
titleMapping '\x1fe4' s = Yield '\x03a1' (CC s '\x0313' '\x0000')
titleMapping '\x1fe6' s = Yield '\x03a5' (CC s '\x0342' '\x0000')
GREEK SMALL LETTER UPSILON WITH AND PERISPOMENI
titleMapping '\x1fe7' s = Yield '\x03a5' (CC s '\x0308' '\x0342')
titleMapping '\x1ff6' s = Yield '\x03a9' (CC s '\x0342' '\x0000')
GREEK SMALL LETTER ALPHA WITH VARIA AND
titleMapping '\x1fb2' s = Yield '\x1fba' (CC s '\x0345' '\x0000')
GREEK SMALL LETTER ALPHA WITH OXIA AND
titleMapping '\x1fb4' s = Yield '\x0386' (CC s '\x0345' '\x0000')
GREEK SMALL LETTER ETA WITH VARIA AND
titleMapping '\x1fc2' s = Yield '\x1fca' (CC s '\x0345' '\x0000')
GREEK SMALL LETTER ETA WITH OXIA AND
titleMapping '\x1fc4' s = Yield '\x0389' (CC s '\x0345' '\x0000')
GREEK SMALL LETTER OMEGA WITH VARIA AND
titleMapping '\x1ff2' s = Yield '\x1ffa' (CC s '\x0345' '\x0000')
GREEK SMALL LETTER OMEGA WITH OXIA AND
titleMapping '\x1ff4' s = Yield '\x038f' (CC s '\x0345' '\x0000')
GREEK SMALL LETTER ALPHA WITH PERISPOMENI AND
titleMapping '\x1fb7' s = Yield '\x0391' (CC s '\x0342' '\x0345')
GREEK SMALL LETTER ETA WITH PERISPOMENI AND
titleMapping '\x1fc7' s = Yield '\x0397' (CC s '\x0342' '\x0345')
GREEK SMALL LETTER OMEGA WITH PERISPOMENI AND
titleMapping '\x1ff7' s = Yield '\x03a9' (CC s '\x0342' '\x0345')
titleMapping c s = Yield (toTitle c) (CC s '\0' '\0')
foldMapping :: forall s. Char -> s -> Step (CC s) Char
# INLINE foldMapping #
foldMapping '\x00b5' s = Yield '\x03bc' (CC s '\x0000' '\x0000')
foldMapping '\x00df' s = Yield '\x0073' (CC s '\x0073' '\x0000')
foldMapping '\x0130' s = Yield '\x0069' (CC s '\x0307' '\x0000')
LATIN SMALL LETTER N PRECEDED BY
foldMapping '\x0149' s = Yield '\x02bc' (CC s '\x006e' '\x0000')
foldMapping '\x017f' s = Yield '\x0073' (CC s '\x0000' '\x0000')
foldMapping '\x01f0' s = Yield '\x006a' (CC s '\x030c' '\x0000')
foldMapping '\x0345' s = Yield '\x03b9' (CC s '\x0000' '\x0000')
GREEK SMALL LETTER IOTA WITH DIALYTIKA AND TONOS
foldMapping '\x0390' s = Yield '\x03b9' (CC s '\x0308' '\x0301')
GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND TONOS
foldMapping '\x03b0' s = Yield '\x03c5' (CC s '\x0308' '\x0301')
foldMapping '\x03c2' s = Yield '\x03c3' (CC s '\x0000' '\x0000')
foldMapping '\x03d0' s = Yield '\x03b2' (CC s '\x0000' '\x0000')
foldMapping '\x03d1' s = Yield '\x03b8' (CC s '\x0000' '\x0000')
foldMapping '\x03d5' s = Yield '\x03c6' (CC s '\x0000' '\x0000')
foldMapping '\x03d6' s = Yield '\x03c0' (CC s '\x0000' '\x0000')
foldMapping '\x03f0' s = Yield '\x03ba' (CC s '\x0000' '\x0000')
foldMapping '\x03f1' s = Yield '\x03c1' (CC s '\x0000' '\x0000')
foldMapping '\x03f5' s = Yield '\x03b5' (CC s '\x0000' '\x0000')
foldMapping '\x0587' s = Yield '\x0565' (CC s '\x0582' '\x0000')
foldMapping '\x10c7' s = Yield '\x2d27' (CC s '\x0000' '\x0000')
foldMapping '\x10cd' s = Yield '\x2d2d' (CC s '\x0000' '\x0000')
foldMapping '\x1e96' s = Yield '\x0068' (CC s '\x0331' '\x0000')
LATIN SMALL LETTER T WITH
foldMapping '\x1e97' s = Yield '\x0074' (CC s '\x0308' '\x0000')
foldMapping '\x1e98' s = Yield '\x0077' (CC s '\x030a' '\x0000')
foldMapping '\x1e99' s = Yield '\x0079' (CC s '\x030a' '\x0000')
foldMapping '\x1e9a' s = Yield '\x0061' (CC s '\x02be' '\x0000')
foldMapping '\x1e9b' s = Yield '\x1e61' (CC s '\x0000' '\x0000')
foldMapping '\x1e9e' s = Yield '\x0073' (CC s '\x0073' '\x0000')
foldMapping '\x1f50' s = Yield '\x03c5' (CC s '\x0313' '\x0000')
foldMapping '\x1f52' s = Yield '\x03c5' (CC s '\x0313' '\x0300')
GREEK SMALL LETTER UPSILON WITH PSILI AND OXIA
foldMapping '\x1f54' s = Yield '\x03c5' (CC s '\x0313' '\x0301')
foldMapping '\x1f56' s = Yield '\x03c5' (CC s '\x0313' '\x0342')
GREEK SMALL LETTER ALPHA WITH PSILI AND
foldMapping '\x1f80' s = Yield '\x1f00' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER ALPHA WITH DASIA AND
foldMapping '\x1f81' s = Yield '\x1f01' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER ALPHA WITH PSILI AND VARIA AND
foldMapping '\x1f82' s = Yield '\x1f02' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER ALPHA WITH DASIA AND VARIA AND
foldMapping '\x1f83' s = Yield '\x1f03' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER ALPHA WITH PSILI AND OXIA AND
foldMapping '\x1f84' s = Yield '\x1f04' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER ALPHA WITH DASIA AND OXIA AND
foldMapping '\x1f85' s = Yield '\x1f05' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER ALPHA WITH PSILI AND PERISPOMENI AND
foldMapping '\x1f86' s = Yield '\x1f06' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER ALPHA WITH DASIA AND PERISPOMENI AND
foldMapping '\x1f87' s = Yield '\x1f07' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER ALPHA WITH PSILI AND PROSGEGRAMMENI
foldMapping '\x1f88' s = Yield '\x1f00' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER ALPHA WITH DASIA AND PROSGEGRAMMENI
foldMapping '\x1f89' s = Yield '\x1f01' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER ALPHA WITH PSILI AND VARIA AND PROSGEGRAMMENI
foldMapping '\x1f8a' s = Yield '\x1f02' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER ALPHA WITH DASIA AND VARIA AND PROSGEGRAMMENI
foldMapping '\x1f8b' s = Yield '\x1f03' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER ALPHA WITH PSILI AND OXIA AND PROSGEGRAMMENI
foldMapping '\x1f8c' s = Yield '\x1f04' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER ALPHA WITH DASIA AND OXIA AND PROSGEGRAMMENI
foldMapping '\x1f8d' s = Yield '\x1f05' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER ALPHA WITH PSILI AND PERISPOMENI AND PROSGEGRAMMENI
foldMapping '\x1f8e' s = Yield '\x1f06' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER ALPHA WITH DASIA AND PERISPOMENI AND PROSGEGRAMMENI
foldMapping '\x1f8f' s = Yield '\x1f07' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER ETA WITH PSILI AND
foldMapping '\x1f90' s = Yield '\x1f20' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER ETA WITH DASIA AND
foldMapping '\x1f91' s = Yield '\x1f21' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER ETA WITH PSILI AND VARIA AND
foldMapping '\x1f92' s = Yield '\x1f22' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER ETA WITH DASIA AND VARIA AND
foldMapping '\x1f93' s = Yield '\x1f23' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER ETA WITH PSILI AND OXIA AND
foldMapping '\x1f94' s = Yield '\x1f24' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER ETA WITH DASIA AND OXIA AND
foldMapping '\x1f95' s = Yield '\x1f25' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER ETA WITH PSILI AND PERISPOMENI AND
foldMapping '\x1f96' s = Yield '\x1f26' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER ETA WITH DASIA AND PERISPOMENI AND
foldMapping '\x1f97' s = Yield '\x1f27' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER ETA WITH PSILI AND PROSGEGRAMMENI
foldMapping '\x1f98' s = Yield '\x1f20' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER ETA WITH DASIA AND PROSGEGRAMMENI
foldMapping '\x1f99' s = Yield '\x1f21' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER ETA WITH PSILI AND VARIA AND PROSGEGRAMMENI
foldMapping '\x1f9a' s = Yield '\x1f22' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER ETA WITH DASIA AND VARIA AND PROSGEGRAMMENI
foldMapping '\x1f9b' s = Yield '\x1f23' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER ETA WITH PSILI AND OXIA AND PROSGEGRAMMENI
foldMapping '\x1f9c' s = Yield '\x1f24' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER ETA WITH DASIA AND OXIA AND PROSGEGRAMMENI
foldMapping '\x1f9d' s = Yield '\x1f25' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER ETA WITH PSILI AND PERISPOMENI AND PROSGEGRAMMENI
foldMapping '\x1f9e' s = Yield '\x1f26' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER ETA WITH DASIA AND PERISPOMENI AND PROSGEGRAMMENI
foldMapping '\x1f9f' s = Yield '\x1f27' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER OMEGA WITH PSILI AND
foldMapping '\x1fa0' s = Yield '\x1f60' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER OMEGA WITH DASIA AND YPOGEGRAMMENI
foldMapping '\x1fa1' s = Yield '\x1f61' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER OMEGA WITH PSILI AND VARIA AND
foldMapping '\x1fa2' s = Yield '\x1f62' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER OMEGA WITH DASIA AND VARIA AND
foldMapping '\x1fa3' s = Yield '\x1f63' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER OMEGA WITH PSILI AND OXIA AND
foldMapping '\x1fa4' s = Yield '\x1f64' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER OMEGA WITH DASIA AND OXIA AND
foldMapping '\x1fa5' s = Yield '\x1f65' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER OMEGA WITH PSILI AND PERISPOMENI AND
foldMapping '\x1fa6' s = Yield '\x1f66' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER OMEGA WITH DASIA AND PERISPOMENI AND
foldMapping '\x1fa7' s = Yield '\x1f67' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER OMEGA WITH PSILI AND PROSGEGRAMMENI
foldMapping '\x1fa8' s = Yield '\x1f60' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER OMEGA WITH DASIA AND PROSGEGRAMMENI
foldMapping '\x1fa9' s = Yield '\x1f61' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER OMEGA WITH PSILI AND VARIA AND PROSGEGRAMMENI
foldMapping '\x1faa' s = Yield '\x1f62' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER OMEGA WITH DASIA AND VARIA AND PROSGEGRAMMENI
foldMapping '\x1fab' s = Yield '\x1f63' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER OMEGA WITH PSILI AND OXIA AND PROSGEGRAMMENI
foldMapping '\x1fac' s = Yield '\x1f64' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER OMEGA WITH DASIA AND OXIA AND PROSGEGRAMMENI
foldMapping '\x1fad' s = Yield '\x1f65' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER OMEGA WITH PSILI AND PERISPOMENI AND PROSGEGRAMMENI
foldMapping '\x1fae' s = Yield '\x1f66' (CC s '\x03b9' '\x0000')
GREEK CAPITAL LETTER OMEGA WITH DASIA AND PERISPOMENI AND PROSGEGRAMMENI
foldMapping '\x1faf' s = Yield '\x1f67' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER ALPHA WITH VARIA AND
foldMapping '\x1fb2' s = Yield '\x1f70' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER ALPHA WITH YPOGEGRAMMENI
foldMapping '\x1fb3' s = Yield '\x03b1' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER ALPHA WITH OXIA AND
foldMapping '\x1fb4' s = Yield '\x03ac' (CC s '\x03b9' '\x0000')
foldMapping '\x1fb6' s = Yield '\x03b1' (CC s '\x0342' '\x0000')
GREEK SMALL LETTER ALPHA WITH PERISPOMENI AND
foldMapping '\x1fb7' s = Yield '\x03b1' (CC s '\x0342' '\x03b9')
GREEK CAPITAL LETTER ALPHA WITH PROSGEGRAMMENI
foldMapping '\x1fbc' s = Yield '\x03b1' (CC s '\x03b9' '\x0000')
foldMapping '\x1fbe' s = Yield '\x03b9' (CC s '\x0000' '\x0000')
GREEK SMALL LETTER ETA WITH VARIA AND
foldMapping '\x1fc2' s = Yield '\x1f74' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER ETA WITH YPOGEGRAMMENI
foldMapping '\x1fc3' s = Yield '\x03b7' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER ETA WITH OXIA AND
foldMapping '\x1fc4' s = Yield '\x03ae' (CC s '\x03b9' '\x0000')
foldMapping '\x1fc6' s = Yield '\x03b7' (CC s '\x0342' '\x0000')
GREEK SMALL LETTER ETA WITH PERISPOMENI AND
foldMapping '\x1fc7' s = Yield '\x03b7' (CC s '\x0342' '\x03b9')
GREEK CAPITAL LETTER ETA WITH PROSGEGRAMMENI
foldMapping '\x1fcc' s = Yield '\x03b7' (CC s '\x03b9' '\x0000')
foldMapping '\x1fd2' s = Yield '\x03b9' (CC s '\x0308' '\x0300')
GREEK SMALL LETTER IOTA WITH DIALYTIKA AND OXIA
foldMapping '\x1fd3' s = Yield '\x03b9' (CC s '\x0308' '\x0301')
foldMapping '\x1fd6' s = Yield '\x03b9' (CC s '\x0342' '\x0000')
foldMapping '\x1fd7' s = Yield '\x03b9' (CC s '\x0308' '\x0342')
foldMapping '\x1fe2' s = Yield '\x03c5' (CC s '\x0308' '\x0300')
GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND OXIA
foldMapping '\x1fe3' s = Yield '\x03c5' (CC s '\x0308' '\x0301')
foldMapping '\x1fe4' s = Yield '\x03c1' (CC s '\x0313' '\x0000')
foldMapping '\x1fe6' s = Yield '\x03c5' (CC s '\x0342' '\x0000')
GREEK SMALL LETTER UPSILON WITH AND PERISPOMENI
foldMapping '\x1fe7' s = Yield '\x03c5' (CC s '\x0308' '\x0342')
GREEK SMALL LETTER OMEGA WITH VARIA AND
foldMapping '\x1ff2' s = Yield '\x1f7c' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER OMEGA WITH YPOGEGRAMMENI
foldMapping '\x1ff3' s = Yield '\x03c9' (CC s '\x03b9' '\x0000')
GREEK SMALL LETTER OMEGA WITH OXIA AND
foldMapping '\x1ff4' s = Yield '\x03ce' (CC s '\x03b9' '\x0000')
foldMapping '\x1ff6' s = Yield '\x03c9' (CC s '\x0342' '\x0000')
GREEK SMALL LETTER OMEGA WITH PERISPOMENI AND
foldMapping '\x1ff7' s = Yield '\x03c9' (CC s '\x0342' '\x03b9')
GREEK CAPITAL LETTER OMEGA WITH PROSGEGRAMMENI
foldMapping '\x1ffc' s = Yield '\x03c9' (CC s '\x03b9' '\x0000')
COPTIC CAPITAL LETTER
foldMapping '\x2cf2' s = Yield '\x2cf3' (CC s '\x0000' '\x0000')
foldMapping '\xa792' s = Yield '\xa793' (CC s '\x0000' '\x0000')
foldMapping '\xa7aa' s = Yield '\x0266' (CC s '\x0000' '\x0000')
foldMapping '\xfb00' s = Yield '\x0066' (CC s '\x0066' '\x0000')
foldMapping '\xfb01' s = Yield '\x0066' (CC s '\x0069' '\x0000')
foldMapping '\xfb02' s = Yield '\x0066' (CC s '\x006c' '\x0000')
LATIN SMALL LIGATURE
foldMapping '\xfb03' s = Yield '\x0066' (CC s '\x0066' '\x0069')
foldMapping '\xfb04' s = Yield '\x0066' (CC s '\x0066' '\x006c')
foldMapping '\xfb05' s = Yield '\x0073' (CC s '\x0074' '\x0000')
foldMapping '\xfb06' s = Yield '\x0073' (CC s '\x0074' '\x0000')
ARMENIAN SMALL LIGATURE MEN NOW
foldMapping '\xfb13' s = Yield '\x0574' (CC s '\x0576' '\x0000')
foldMapping '\xfb14' s = Yield '\x0574' (CC s '\x0565' '\x0000')
foldMapping '\xfb15' s = Yield '\x0574' (CC s '\x056b' '\x0000')
ARMENIAN SMALL LIGATURE VEW NOW
foldMapping '\xfb16' s = Yield '\x057e' (CC s '\x0576' '\x0000')
foldMapping '\xfb17' s = Yield '\x0574' (CC s '\x056d' '\x0000')
foldMapping c s = Yield (toLower c) (CC s '\0' '\0')
|
7877225acab99a5664b030e0999ff75faeabc249bcebd4f0815de84aa90e8bf5 | ssardina/ergo | two-towers.scm | This is a program for the Two Towers problem
;; possible initial values for stack A
(define stackA-values '(() (red) (blue) (red red blue red)))
the BAT
(include "red-blue-bat.scm")
;; make a tower of reds on stack B and a tower of blues on stack C
(define (goal?)
(and (eq? hand 'empty) (null? (stack 'A))
(for/and ((o (stack 'B))) (eq? o 'red))
(for/and ((o (stack 'C))) (eq? o 'blue))))
(define (main)
(ergo-genplan goal? (append (map pick! '(A B C)) (map put! '(A B C)))))
| null | https://raw.githubusercontent.com/ssardina/ergo/4225ebb95779d1748f377cf2e4d0a593d6a2a103/Examples/PlanningExamples/two-towers.scm | scheme | possible initial values for stack A
make a tower of reds on stack B and a tower of blues on stack C | This is a program for the Two Towers problem
(define stackA-values '(() (red) (blue) (red red blue red)))
the BAT
(include "red-blue-bat.scm")
(define (goal?)
(and (eq? hand 'empty) (null? (stack 'A))
(for/and ((o (stack 'B))) (eq? o 'red))
(for/and ((o (stack 'C))) (eq? o 'blue))))
(define (main)
(ergo-genplan goal? (append (map pick! '(A B C)) (map put! '(A B C)))))
|
9aafae25d0cf4f745c0e04a251e332ac86bb3b78601328754fab844f48804ca4 | bvaugon/ocapic | flash.ml | (*************************************************************************)
(* *)
(* OCaPIC *)
(* *)
(* *)
This file is distributed under the terms of the CeCILL license .
(* See file ../../LICENSE-en. *)
(* *)
(*************************************************************************)
open Hexfile
type t = {
program : int array;
config : int array;
}
let parse hexfile =
let len = Array.length hexfile in
let dontknow s =
failwith ("Don't know what to do with an hexfile line " ^ s)
in
let esar_error () = dontknow "'Extended Segment Address Record'" in
let ssar_error () = dontknow "'Start Segment Address Record'" in
let slar_error () = dontknow "'Start Linear Address Record'" in
let rec compute_sizes i ofs psize csize =
if i = len then
failwith "Invalid hexfile: no End of file at the end of hexfile";
match hexfile.(i) with
| Extended_LAR addr ->
compute_sizes (succ i) (addr lsl 16) psize csize
| Data (addr, data) ->
let size = ofs + addr + Array.length data in
if ofs = 0 then
compute_sizes (succ i) ofs (max size psize) csize
else if ofs = 0x300000 then
compute_sizes (succ i) ofs psize (max (size - 0x300000) csize)
else
failwith (Printf.sprintf
"Invalid hexfile: invalid address (0x%x)" ofs)
| Eof ->
if i <> pred len then
failwith "Invalid hexfile: End of file before the end of hexfile"
else (psize, csize)
| Extended_SAR _ -> esar_error ()
| Start_SAR (_, _) -> ssar_error ()
| Start_LAR _ -> slar_error ()
in
let (psize, csize) = compute_sizes 0 0 0 0 in
let program = Array.make psize (-1) in
let config = Array.make csize (-1) in
let rec fill_arrays i ofs =
match hexfile.(i) with
| Extended_LAR addr -> fill_arrays (succ i) (addr lsl 16);
| Data (addr, data) ->
if ofs = 0 then
Array.blit data 0 program (ofs+addr) (Array.length data)
else
Array.blit data 0 config (ofs+addr-0x300000) (Array.length data);
fill_arrays (succ i) ofs;
| Eof -> ()
| Extended_SAR _ -> esar_error ()
| Start_SAR (_, _) -> ssar_error ()
| Start_LAR _ -> slar_error ()
in
fill_arrays 0 0;
{ program = program ; config = config }
;;
let print oc { program = program ; config = config } =
let f i b =
if b <> -1 then Printf.fprintf oc " %02X" b
else Printf.fprintf oc " --";
if i mod 16 = 15 then Printf.fprintf oc "\n";
in
Printf.fprintf oc "Program:\n";
Array.iteri f program;
if Array.length program mod 16 <> 0 then Printf.fprintf oc "\n";
Printf.fprintf oc "Config:\n";
Array.iteri f config;
if Array.length config mod 16 <> 0 then Printf.fprintf oc "\n";
;;
| null | https://raw.githubusercontent.com/bvaugon/ocapic/a14cd9ec3f5022aeb5fe2264d595d7e8f1ddf58a/src/ocasim/flash.ml | ocaml | ***********************************************************************
OCaPIC
See file ../../LICENSE-en.
*********************************************************************** |
This file is distributed under the terms of the CeCILL license .
open Hexfile
type t = {
program : int array;
config : int array;
}
let parse hexfile =
let len = Array.length hexfile in
let dontknow s =
failwith ("Don't know what to do with an hexfile line " ^ s)
in
let esar_error () = dontknow "'Extended Segment Address Record'" in
let ssar_error () = dontknow "'Start Segment Address Record'" in
let slar_error () = dontknow "'Start Linear Address Record'" in
let rec compute_sizes i ofs psize csize =
if i = len then
failwith "Invalid hexfile: no End of file at the end of hexfile";
match hexfile.(i) with
| Extended_LAR addr ->
compute_sizes (succ i) (addr lsl 16) psize csize
| Data (addr, data) ->
let size = ofs + addr + Array.length data in
if ofs = 0 then
compute_sizes (succ i) ofs (max size psize) csize
else if ofs = 0x300000 then
compute_sizes (succ i) ofs psize (max (size - 0x300000) csize)
else
failwith (Printf.sprintf
"Invalid hexfile: invalid address (0x%x)" ofs)
| Eof ->
if i <> pred len then
failwith "Invalid hexfile: End of file before the end of hexfile"
else (psize, csize)
| Extended_SAR _ -> esar_error ()
| Start_SAR (_, _) -> ssar_error ()
| Start_LAR _ -> slar_error ()
in
let (psize, csize) = compute_sizes 0 0 0 0 in
let program = Array.make psize (-1) in
let config = Array.make csize (-1) in
let rec fill_arrays i ofs =
match hexfile.(i) with
| Extended_LAR addr -> fill_arrays (succ i) (addr lsl 16);
| Data (addr, data) ->
if ofs = 0 then
Array.blit data 0 program (ofs+addr) (Array.length data)
else
Array.blit data 0 config (ofs+addr-0x300000) (Array.length data);
fill_arrays (succ i) ofs;
| Eof -> ()
| Extended_SAR _ -> esar_error ()
| Start_SAR (_, _) -> ssar_error ()
| Start_LAR _ -> slar_error ()
in
fill_arrays 0 0;
{ program = program ; config = config }
;;
let print oc { program = program ; config = config } =
let f i b =
if b <> -1 then Printf.fprintf oc " %02X" b
else Printf.fprintf oc " --";
if i mod 16 = 15 then Printf.fprintf oc "\n";
in
Printf.fprintf oc "Program:\n";
Array.iteri f program;
if Array.length program mod 16 <> 0 then Printf.fprintf oc "\n";
Printf.fprintf oc "Config:\n";
Array.iteri f config;
if Array.length config mod 16 <> 0 then Printf.fprintf oc "\n";
;;
|
f7a68a4e1ca40c855484827ffb486e95fd17421fbc1086fd67200bafa7b11624 | clojang/clojang | node.clj | (ns clojang.node
(:require [clojang.util :as util]
[clojure.core.memoize :as memo]
[jiface.otp.nodes :as nodes]
[jiface.util :as ji-util]
[potemkin :refer [import-vars]]
[trifl.net :as net])
(:refer-clojure :exclude [new]))
(defn new
"An alias for ``jiface.otp.nodes/node`` but one that allows for
symbols and keywords to be used as node names, a closer match for BEAM
language nodes, which use atoms for their names."
[& args]
(apply #'nodes/node (util/->str-args args)))
(defn self
"An alias for for the constructor``jiface.otp.nodes/self``
but one that allows for symbols and keywords to be used as node names, a
closer match for BEAM language nodes, which use atoms for their names."
[& args]
(apply #'nodes/self (util/->str-args args)))
(defn peer
"An alias for the constructor ``jiface.otp.nodes/peer`` but
one that allows for symbols and keywords to be used as node names, a
closer match for BEAM language nodes, which use atoms for their names."
[& args]
(apply #'nodes/peer (util/->str-args args)))
(defn get-short-name
"Get the OTP-style short name for the default node of this JVM instance."
[]
(System/getProperty "node.sname"))
(defn get-long-name
"Get the OTP-style long name for the default node of this JVM instance."
[]
(System/getProperty "node.name"))
(defn get-default-name
"Get the node name for the default node of this JVM instance. First the
node's short name will be checked, and it that's null, the long name will
be used."
[]
(if-let [short-name (get-short-name)]
(format "%s@%s" short-name (net/get-local-hostname))
(get-long-name)))
(defn get-name
"Get the name of the given node. If no name is given, return the name of
the default node for the currently running JVM."
([]
(get-default-name))
([node]
(nodes/get-name node)))
(defn get-default-node
"Get the default node object for the currently running instance of the JVM.
In general, one should not need more than one node per JVM."
[]
(nodes/default-node (get-default-name)))
(defn get-names
"An alias for ``jiface.otp.nodes/get-names`` that returns a list
of a node's registered mailbox names as a list of strings."
([]
(get-names (get-default-node)))
([node]
(into [] (nodes/get-names node))))
(defn ping
"An alias ``jiface.otp.nodes/ping`` that also allows for a
2-arity call (with the default timeout set to 1000)."
([node-name]
(ping node-name 1000))
([node-name timeout]
(ping (get-default-node) node-name timeout))
([this-node node-name timeout]
(if (apply #'nodes/ping (util/->str-args [this-node node-name timeout]))
:pong
:pang)))
(defn whereis
"An alias for ``jiface.otp.nodes/whereis`` that also allows for
the mailbox name argument to be a symbol, keyword, or string."
[& args]
(apply #'nodes/whereis (util/->str-args args)))
(def connect
"An alias for the constructor ``jiface.otp.nodes/connect`` but one that
caches connections based on source and destination node name and allows for
symbols and keywords to be used as node names, a closer match for BEAM
language nodes, which use atoms for their names."
(memo/lru
(fn ([remote-node-name]
(connect (get-default-name) remote-node-name))
([local-node-name remote-node-name]
(let [self (self (util/->str-arg local-node-name))
peer (peer (util/->str-arg remote-node-name))]
(nodes/connect self peer))))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Aliases ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(def get-node #'get-default-node)
(def get-default #'get-default-node)
(import-vars
[nodes
;; abstract-node-behaviour
get-alivename
get-cookie
create-transport
create-server-transport
get-hostname
;; get-name -- see above
set-cookie
->str
;; local-node-behaviour
create-pid
create-port
create-ref
get-port
;; node-behaviour
close
close-mbox
create-mbox
;; get-names -- see above
;; ping -- see above
register-mbox
register-status-handler
set-flags
whereis -- see above
;; self-behaviour
accept
;; connect -- see above
get-pid
publish-port
unpublish-port])
| null | https://raw.githubusercontent.com/clojang/clojang/d248e22f3ba2488fd1f16f7bb99bdd63d324d3e6/src/clojure/clojang/node.clj | clojure |
Aliases ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
abstract-node-behaviour
get-name -- see above
local-node-behaviour
node-behaviour
get-names -- see above
ping -- see above
self-behaviour
connect -- see above | (ns clojang.node
(:require [clojang.util :as util]
[clojure.core.memoize :as memo]
[jiface.otp.nodes :as nodes]
[jiface.util :as ji-util]
[potemkin :refer [import-vars]]
[trifl.net :as net])
(:refer-clojure :exclude [new]))
(defn new
"An alias for ``jiface.otp.nodes/node`` but one that allows for
symbols and keywords to be used as node names, a closer match for BEAM
language nodes, which use atoms for their names."
[& args]
(apply #'nodes/node (util/->str-args args)))
(defn self
"An alias for for the constructor``jiface.otp.nodes/self``
but one that allows for symbols and keywords to be used as node names, a
closer match for BEAM language nodes, which use atoms for their names."
[& args]
(apply #'nodes/self (util/->str-args args)))
(defn peer
"An alias for the constructor ``jiface.otp.nodes/peer`` but
one that allows for symbols and keywords to be used as node names, a
closer match for BEAM language nodes, which use atoms for their names."
[& args]
(apply #'nodes/peer (util/->str-args args)))
(defn get-short-name
"Get the OTP-style short name for the default node of this JVM instance."
[]
(System/getProperty "node.sname"))
(defn get-long-name
"Get the OTP-style long name for the default node of this JVM instance."
[]
(System/getProperty "node.name"))
(defn get-default-name
"Get the node name for the default node of this JVM instance. First the
node's short name will be checked, and it that's null, the long name will
be used."
[]
(if-let [short-name (get-short-name)]
(format "%s@%s" short-name (net/get-local-hostname))
(get-long-name)))
(defn get-name
"Get the name of the given node. If no name is given, return the name of
the default node for the currently running JVM."
([]
(get-default-name))
([node]
(nodes/get-name node)))
(defn get-default-node
"Get the default node object for the currently running instance of the JVM.
In general, one should not need more than one node per JVM."
[]
(nodes/default-node (get-default-name)))
(defn get-names
"An alias for ``jiface.otp.nodes/get-names`` that returns a list
of a node's registered mailbox names as a list of strings."
([]
(get-names (get-default-node)))
([node]
(into [] (nodes/get-names node))))
(defn ping
"An alias ``jiface.otp.nodes/ping`` that also allows for a
2-arity call (with the default timeout set to 1000)."
([node-name]
(ping node-name 1000))
([node-name timeout]
(ping (get-default-node) node-name timeout))
([this-node node-name timeout]
(if (apply #'nodes/ping (util/->str-args [this-node node-name timeout]))
:pong
:pang)))
(defn whereis
"An alias for ``jiface.otp.nodes/whereis`` that also allows for
the mailbox name argument to be a symbol, keyword, or string."
[& args]
(apply #'nodes/whereis (util/->str-args args)))
(def connect
"An alias for the constructor ``jiface.otp.nodes/connect`` but one that
caches connections based on source and destination node name and allows for
symbols and keywords to be used as node names, a closer match for BEAM
language nodes, which use atoms for their names."
(memo/lru
(fn ([remote-node-name]
(connect (get-default-name) remote-node-name))
([local-node-name remote-node-name]
(let [self (self (util/->str-arg local-node-name))
peer (peer (util/->str-arg remote-node-name))]
(nodes/connect self peer))))))
(def get-node #'get-default-node)
(def get-default #'get-default-node)
(import-vars
[nodes
get-alivename
get-cookie
create-transport
create-server-transport
get-hostname
set-cookie
->str
create-pid
create-port
create-ref
get-port
close
close-mbox
create-mbox
register-mbox
register-status-handler
set-flags
whereis -- see above
accept
get-pid
publish-port
unpublish-port])
|
b5af3c91140bf59761f6d28832d98382b8f2b08ff34a62454fc589366de4c0cd | cljfx/cljfx | v_line_to.clj | (ns cljfx.fx.v-line-to
"Part of a public API"
(:require [cljfx.composite :as composite]
[cljfx.lifecycle :as lifecycle]
[cljfx.fx.path-element :as fx.path-element])
(:import [javafx.scene.shape VLineTo]))
(set! *warn-on-reflection* true)
(def props
(merge
fx.path-element/props
(composite/props VLineTo
:y [:setter lifecycle/scalar :coerce double :default 0])))
(def lifecycle
(lifecycle/annotate
(composite/describe VLineTo
:ctor []
:props props)
:v-line-to))
| null | https://raw.githubusercontent.com/cljfx/cljfx/543f7409290051e9444771d2cd86dadeb8cdce33/src/cljfx/fx/v_line_to.clj | clojure | (ns cljfx.fx.v-line-to
"Part of a public API"
(:require [cljfx.composite :as composite]
[cljfx.lifecycle :as lifecycle]
[cljfx.fx.path-element :as fx.path-element])
(:import [javafx.scene.shape VLineTo]))
(set! *warn-on-reflection* true)
(def props
(merge
fx.path-element/props
(composite/props VLineTo
:y [:setter lifecycle/scalar :coerce double :default 0])))
(def lifecycle
(lifecycle/annotate
(composite/describe VLineTo
:ctor []
:props props)
:v-line-to))
|
|
fea91abecc317a3ea4f8e468519536bb3b4e506c5f36dfaea99c1175dbc07c92 | freizl/dive-into-haskell | labweekexercise-solns.hs | Informatics 1 - Functional Programming
Lab Week Exercise
--
-- Solutions
--
-- Remember: there are many possible solutions, and if your solution produces
-- the right results, then it is (most likely) correct. However, if your code
-- looks far more complicated than these sample solutions, then you're probably
-- making things too difficult for yourself---try to keep it simple!
import Test.QuickCheck
Exercise 3 :
double :: Int -> Int
double x = x + x
square :: Int -> Int
square x = x * x
Exercise 4 :
isTriple :: Int -> Int -> Int -> Bool
isTriple a b c = square a + square b == square c
Exercise 5 :
leg1 :: Int -> Int -> Int
leg1 x y = square x - square y
leg2 :: Int -> Int -> Int
leg2 x y = 2 * x * y
hyp :: Int -> Int -> Int
hyp x y = square x + square y
Exercise 6 :
prop_triple :: Int -> Int -> Bool
prop_triple x y = isTriple (leg1 x y) (leg2 x y) (hyp x y)
| null | https://raw.githubusercontent.com/freizl/dive-into-haskell/b18a6bfe212db6c3a5d707b4a640170b8bcf9330/lectures/informatics1-FP/tutorials/Labweek-solutions/labweekexercise-solns.hs | haskell |
Solutions
Remember: there are many possible solutions, and if your solution produces
the right results, then it is (most likely) correct. However, if your code
looks far more complicated than these sample solutions, then you're probably
making things too difficult for yourself---try to keep it simple! | Informatics 1 - Functional Programming
Lab Week Exercise
import Test.QuickCheck
Exercise 3 :
double :: Int -> Int
double x = x + x
square :: Int -> Int
square x = x * x
Exercise 4 :
isTriple :: Int -> Int -> Int -> Bool
isTriple a b c = square a + square b == square c
Exercise 5 :
leg1 :: Int -> Int -> Int
leg1 x y = square x - square y
leg2 :: Int -> Int -> Int
leg2 x y = 2 * x * y
hyp :: Int -> Int -> Int
hyp x y = square x + square y
Exercise 6 :
prop_triple :: Int -> Int -> Bool
prop_triple x y = isTriple (leg1 x y) (leg2 x y) (hyp x y)
|
7ba38c3fcd77a4ff295dd436f9c639741a21558f05d3086f4e68d46356d98996 | chowells79/lrucache | MemTest.hs | import Prelude hiding ( lookup )
import Control.Monad
import Control.Concurrent
import Data.IORef
import Data.Cache.LRU.IO.Internal
main :: IO ()
main = do
v1 <- newAtomicLRU $ Just 10 -- for endless inserts
v2 <- newAtomicLRU $ Just 10 -- for endless lookups (miss)
v3 <- newAtomicLRU $ Just 10 -- for endless lookups (hit)
counter <- newIORef (0 :: Int)
insert 1 "bar" v3
forever $ do
c <- readIORef counter
writeIORef counter $ c + 1
insert c (show c) v1
lookup (1 :: Int) v2
lookup (1 :: Int) v3
| null | https://raw.githubusercontent.com/chowells79/lrucache/14992f1361cffb25652f4420f94438931f064890/MemTest.hs | haskell | for endless inserts
for endless lookups (miss)
for endless lookups (hit) | import Prelude hiding ( lookup )
import Control.Monad
import Control.Concurrent
import Data.IORef
import Data.Cache.LRU.IO.Internal
main :: IO ()
main = do
counter <- newIORef (0 :: Int)
insert 1 "bar" v3
forever $ do
c <- readIORef counter
writeIORef counter $ c + 1
insert c (show c) v1
lookup (1 :: Int) v2
lookup (1 :: Int) v3
|
f9b737a8e47214076091ac6c41cb8d82c0e1138d2ec9888541af3a32002522fd | facebook/Haxl | MemoizationTests.hs | Copyright ( c ) 2014 - present , Facebook , Inc.
-- All rights reserved.
--
This source code is distributed under the terms of a BSD license ,
-- found in the LICENSE file.
module MemoizationTests (tests) where
import Data.IORef
import Test.HUnit
import Haxl.Core
import Haxl.Core.Monad (unsafeLiftIO)
import ExampleDataSource
memoSoundness :: Test
memoSoundness = TestCase $ do
iEnv <- do
exState <- ExampleDataSource.initGlobalState
initEnv (stateSet exState stateEmpty) () :: IO (Env () ())
unMemoizedWombats <- runHaxl iEnv $ listWombats 100
(initialGet, subsequentGet) <- runHaxl iEnv $ do
wombatsMemo <- newMemoWith (listWombats 100)
let memoizedWombats = runMemo wombatsMemo
initialGet <- memoizedWombats
subsequentGet <- memoizedWombats
return (initialGet, subsequentGet)
assertBool "Memo Soundness 1" $ initialGet == unMemoizedWombats
assertBool "Memo Soundness 2" $ subsequentGet == unMemoizedWombats
let impure runCounterRef = unsafeLiftIO $ do
modifyIORef runCounterRef succ
readIORef runCounterRef
initialRunCounter = 0 :: Int
runCounterRef <- newIORef initialRunCounter
(initialImpureGet, subsequentImpureGet) <- runHaxl iEnv $ do
impureMemo <- newMemoWith (impure runCounterRef)
let memoizedImpure = runMemo impureMemo
initialImpureGet <- memoizedImpure
subsequentImpureGet <- memoizedImpure
return (initialImpureGet, subsequentImpureGet)
assertBool "Memo Soundness 3" $ initialImpureGet == succ initialRunCounter
assertBool "Memo Soundness 4" $ subsequentImpureGet == initialImpureGet
let fMemoVal = 42 :: Int
dependentResult <- runHaxl iEnv $ do
fMemoRef <- newMemo
gMemoRef <- newMemo
let f = runMemo fMemoRef
g = runMemo gMemoRef
prepareMemo fMemoRef $ return fMemoVal
prepareMemo gMemoRef $ succ <$> f
a <- f
b <- g
return (a + b)
assertBool "Memo Soundness 5" $ dependentResult == fMemoVal + succ fMemoVal
tests = TestList [TestLabel "Memo Soundness" memoSoundness]
| null | https://raw.githubusercontent.com/facebook/Haxl/260a97b757a6239df153b69b127ded5c47efa13c/tests/MemoizationTests.hs | haskell | All rights reserved.
found in the LICENSE file. | Copyright ( c ) 2014 - present , Facebook , Inc.
This source code is distributed under the terms of a BSD license ,
module MemoizationTests (tests) where
import Data.IORef
import Test.HUnit
import Haxl.Core
import Haxl.Core.Monad (unsafeLiftIO)
import ExampleDataSource
memoSoundness :: Test
memoSoundness = TestCase $ do
iEnv <- do
exState <- ExampleDataSource.initGlobalState
initEnv (stateSet exState stateEmpty) () :: IO (Env () ())
unMemoizedWombats <- runHaxl iEnv $ listWombats 100
(initialGet, subsequentGet) <- runHaxl iEnv $ do
wombatsMemo <- newMemoWith (listWombats 100)
let memoizedWombats = runMemo wombatsMemo
initialGet <- memoizedWombats
subsequentGet <- memoizedWombats
return (initialGet, subsequentGet)
assertBool "Memo Soundness 1" $ initialGet == unMemoizedWombats
assertBool "Memo Soundness 2" $ subsequentGet == unMemoizedWombats
let impure runCounterRef = unsafeLiftIO $ do
modifyIORef runCounterRef succ
readIORef runCounterRef
initialRunCounter = 0 :: Int
runCounterRef <- newIORef initialRunCounter
(initialImpureGet, subsequentImpureGet) <- runHaxl iEnv $ do
impureMemo <- newMemoWith (impure runCounterRef)
let memoizedImpure = runMemo impureMemo
initialImpureGet <- memoizedImpure
subsequentImpureGet <- memoizedImpure
return (initialImpureGet, subsequentImpureGet)
assertBool "Memo Soundness 3" $ initialImpureGet == succ initialRunCounter
assertBool "Memo Soundness 4" $ subsequentImpureGet == initialImpureGet
let fMemoVal = 42 :: Int
dependentResult <- runHaxl iEnv $ do
fMemoRef <- newMemo
gMemoRef <- newMemo
let f = runMemo fMemoRef
g = runMemo gMemoRef
prepareMemo fMemoRef $ return fMemoVal
prepareMemo gMemoRef $ succ <$> f
a <- f
b <- g
return (a + b)
assertBool "Memo Soundness 5" $ dependentResult == fMemoVal + succ fMemoVal
tests = TestList [TestLabel "Memo Soundness" memoSoundness]
|
e46c6fc5c5e6d00661d66a66608df4314b3a539a8ae63ebf1d2f6f1ab30ff68f | mrb/soundwave | Logger.hs | cribbed from
module Soundwave.Logger (start, info, Log) where
import System.IO (openFile, hPutStrLn, hFlush, hClose,
IOMode (AppendMode), Handle)
import Control.Concurrent (forkIO)
import Control.Monad (void, forever)
import Control.Monad.STM (atomically)
import Control.Concurrent.STM.TChan (writeTChan, readTChan, newTChanIO, TChan)
type Log = TChan String
logstdout :: TChan String -> IO ()
logstdout chan = do
line <- atomically $ readTChan chan
putStrLn line
start :: IO (TChan String)
start = do
chan <- newTChanIO
void $ forkIO $ forever $ logstdout chan
return chan
info :: TChan String -> String -> IO ()
info chan msg = do
let info_msg = "[info] " ++ msg
atomically $ writeTChan chan info_msg
| null | https://raw.githubusercontent.com/mrb/soundwave/5906f07310ffc2be7ccda550bf639e1d061262e8/Soundwave/Logger.hs | haskell | cribbed from
module Soundwave.Logger (start, info, Log) where
import System.IO (openFile, hPutStrLn, hFlush, hClose,
IOMode (AppendMode), Handle)
import Control.Concurrent (forkIO)
import Control.Monad (void, forever)
import Control.Monad.STM (atomically)
import Control.Concurrent.STM.TChan (writeTChan, readTChan, newTChanIO, TChan)
type Log = TChan String
logstdout :: TChan String -> IO ()
logstdout chan = do
line <- atomically $ readTChan chan
putStrLn line
start :: IO (TChan String)
start = do
chan <- newTChanIO
void $ forkIO $ forever $ logstdout chan
return chan
info :: TChan String -> String -> IO ()
info chan msg = do
let info_msg = "[info] " ++ msg
atomically $ writeTChan chan info_msg
|
|
fec03652721836a3cf70fdf23d8914ef2798292499a996dd09855782fac339c3 | UBTECH-Walker/WalkerSimulationFor2020WAIC | _package_cruiserBatteryInfo.lisp | (cl:in-package cruiser_msgs-msg)
(cl:export '(BATTERY_LEVEL-VAL
BATTERY_LEVEL
VOLTAGE-VAL
VOLTAGE
CHARGE_STATUS-VAL
CHARGE_STATUS
TEMPERATURE-VAL
TEMPERATURE
)) | null | https://raw.githubusercontent.com/UBTECH-Walker/WalkerSimulationFor2020WAIC/7cdb21dabb8423994ba3f6021bc7934290d5faa9/walker_WAIC_16.04_v1.2_20200616/walker_install/share/common-lisp/ros/cruiser_msgs/msg/_package_cruiserBatteryInfo.lisp | lisp | (cl:in-package cruiser_msgs-msg)
(cl:export '(BATTERY_LEVEL-VAL
BATTERY_LEVEL
VOLTAGE-VAL
VOLTAGE
CHARGE_STATUS-VAL
CHARGE_STATUS
TEMPERATURE-VAL
TEMPERATURE
)) |
|
5850d9c4f7b14036b11ad1ee10c6b94e6eba42ee15866c69e33d0d2f465fe973 | TerrorJack/ghc-alter | Error.hs | # LANGUAGE Trustworthy #
# LANGUAGE CPP , NoImplicitPrelude #
-----------------------------------------------------------------------------
-- |
-- Module : Foreign.Marshal.Error
Copyright : ( c ) The FFI task force 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
Maintainer :
-- Stability : provisional
-- Portability : portable
--
Routines for testing return values and raising a ' userError ' exception
-- in case of values indicating an error state.
--
-----------------------------------------------------------------------------
module Foreign.Marshal.Error (
throwIf,
throwIf_,
throwIfNeg,
throwIfNeg_,
throwIfNull,
-- Discard return value
--
void
) where
import Foreign.Ptr
import GHC.Base
import GHC.Num
import GHC.IO.Exception
-- exported functions
-- ------------------
|Execute an ' IO ' action , throwing a ' userError ' if the predicate yields
-- 'True' when applied to the result returned by the 'IO' action.
-- If no exception is raised, return the result of the computation.
--
throwIf :: (a -> Bool) -- ^ error condition on the result of the 'IO' action
-> (a -> String) -- ^ computes an error message from erroneous results
-- of the 'IO' action
-> IO a -- ^ the 'IO' action to be executed
-> IO a
throwIf pred msgfct act =
do
res <- act
(if pred res then ioError . userError . msgfct else return) res
-- |Like 'throwIf', but discarding the result
--
throwIf_ :: (a -> Bool) -> (a -> String) -> IO a -> IO ()
throwIf_ pred msgfct act = void $ throwIf pred msgfct act
-- |Guards against negative result values
--
throwIfNeg :: (Ord a, Num a) => (a -> String) -> IO a -> IO a
throwIfNeg = throwIf (< 0)
-- |Like 'throwIfNeg', but discarding the result
--
throwIfNeg_ :: (Ord a, Num a) => (a -> String) -> IO a -> IO ()
throwIfNeg_ = throwIf_ (< 0)
-- |Guards against null pointers
--
throwIfNull :: String -> IO (Ptr a) -> IO (Ptr a)
throwIfNull = throwIf (== nullPtr) . const
-- |Discard the return value of an 'IO' action
--
void :: IO a -> IO ()
void act = act >> return ()
deprecated in 7.6
| null | https://raw.githubusercontent.com/TerrorJack/ghc-alter/db736f34095eef416b7e077f9b26fc03aa78c311/ghc-alter/boot-lib/base/Foreign/Marshal/Error.hs | haskell | ---------------------------------------------------------------------------
|
Module : Foreign.Marshal.Error
License : BSD-style (see the file libraries/base/LICENSE)
Stability : provisional
Portability : portable
in case of values indicating an error state.
---------------------------------------------------------------------------
Discard return value
exported functions
------------------
'True' when applied to the result returned by the 'IO' action.
If no exception is raised, return the result of the computation.
^ error condition on the result of the 'IO' action
^ computes an error message from erroneous results
of the 'IO' action
^ the 'IO' action to be executed
|Like 'throwIf', but discarding the result
|Guards against negative result values
|Like 'throwIfNeg', but discarding the result
|Guards against null pointers
|Discard the return value of an 'IO' action
| # LANGUAGE Trustworthy #
# LANGUAGE CPP , NoImplicitPrelude #
Copyright : ( c ) The FFI task force 2001
Maintainer :
Routines for testing return values and raising a ' userError ' exception
module Foreign.Marshal.Error (
throwIf,
throwIf_,
throwIfNeg,
throwIfNeg_,
throwIfNull,
void
) where
import Foreign.Ptr
import GHC.Base
import GHC.Num
import GHC.IO.Exception
|Execute an ' IO ' action , throwing a ' userError ' if the predicate yields
-> IO a
throwIf pred msgfct act =
do
res <- act
(if pred res then ioError . userError . msgfct else return) res
throwIf_ :: (a -> Bool) -> (a -> String) -> IO a -> IO ()
throwIf_ pred msgfct act = void $ throwIf pred msgfct act
throwIfNeg :: (Ord a, Num a) => (a -> String) -> IO a -> IO a
throwIfNeg = throwIf (< 0)
throwIfNeg_ :: (Ord a, Num a) => (a -> String) -> IO a -> IO ()
throwIfNeg_ = throwIf_ (< 0)
throwIfNull :: String -> IO (Ptr a) -> IO (Ptr a)
throwIfNull = throwIf (== nullPtr) . const
void :: IO a -> IO ()
void act = act >> return ()
deprecated in 7.6
|
76435e7d74ca69caa66a0b396e282934955e63b57912eb10d423760cb7f4b80e | krohrer/caml-aesq | text.ml | , Fri Apr 23 00:11:58 CEST 2010
type justification = [`left | `center | `right | `block | `none]
let option_default a opt =
match opt with
| None -> a
| Some a -> a
let justification_to_string =
function
| `none -> "none"
| `left -> "left"
| `center -> "center"
| `right -> "right"
| `block -> "block"
type raw =
| RFrag of string
| RAttr of Ansi.t
| RBreak
| RLineBreak
type cooked =
| CFrag of string
| CAttr of Ansi.t
| CSpace of int
| CSeq of cooked array
type size = int
type line = cooked array * size
let rec measure_line_width elements =
let aux sum =
function
| CFrag f -> sum + String.length f
| CSpace n -> sum + n
| CSeq elements -> sum + measure_line_width elements
| CAttr _ -> sum
in
Array.fold_left aux 0 elements
let empty_line =
([||], 0)
let make_line elements =
(elements, measure_line_width elements)
let line_width (_,width) = width
let line_concat lines =
let elements_array, widths = List.split lines in
let elements = Array.concat elements_array in
let width = List.fold_left (+) 0 widths in
(elements, width)
let min_width = 1 (* Minimum width allowed for formatting *)
(* Chop text up into lines (lines still need to be cooked tough) *)
let rec chop attributes width stream =
let width = max min_width width in
chop_aux width attributes stream
(* Closures are a poor mans objects, and [width] is the only
constant field. *)
and chop_aux width =
(* Chop text so its lines are no longer than [width] *)
let rec chop_line
~attributes
(* Current attributes *)
?(rem_width=width)
(* Remaining width *)
?(line_rev=[RAttr attributes])
(* Reverse list of line elements, start each line explicitly
with the current attributes. Makes it easier to concat lines
later on. *)
?dismissables
(* Line elements including elements after the last printable
that might be dismissed, if no printable was to
follow before the end of the line. *)
(lazy cell)
=
match cell with
| LazyList.Nil ->
(* Done *)
chop_done
~attributes
~line_rev
~dismissables
()
| LazyList.Cons (x, stream) ->
Dispatch
begin match x with
| RFrag _ as x ->
chop_fragment
~attributes
~rem_width
~line_rev
~dismissables
x
stream
| RBreak ->
chop_break
~attributes
~rem_width
~line_rev
~dismissables
stream
| RLineBreak ->
chop_linebreak
~attributes
~line_rev
stream
| RAttr attributes as x ->
chop_set_attributes
~attributes
~rem_width
~line_rev
~dismissables
x
stream
end
(* Make partial line from the remaining elements *)
and chop_done
~attributes
~line_rev
~dismissables
()
=
let line =
make_chopped
~partial:true
(option_default line_rev dismissables)
in
LazyList.Cons (line, LazyList.nil)
(* Chop fragment and justify line if necessary *)
and chop_fragment
~attributes
~rem_width
~line_rev
~dismissables
fragment
stream
=
let frag = match fragment with RFrag f -> f | _ -> assert false in
let len = String.length frag in
if len <= rem_width then
(* Fragment still fits on this line *)
chop_line
~attributes
~rem_width:(rem_width - len)
~line_rev:(fragment :: option_default line_rev dismissables)
stream
else if len > width && rem_width >= min_width then
(* Fragment must be split anyway, may as well start on this
line, if possible. It must always be possible if the line
does not yet contain any printable elements. *)
let line =
let frag_left = String.sub frag 0 rem_width in
make_chopped
(RFrag frag_left :: option_default line_rev dismissables)
and cell =
let frag_right = String.sub frag rem_width (len - rem_width) in
(* Prefix stream for next line with left-overs
from current line *)
LazyList.Cons (RFrag frag_right,
stream)
in
LazyList.Cons (line,
lazy (chop_line
~attributes
(lazy cell)))
else
(* Fragment does not fit on current line, retry on
next line. *)
let line =
make_chopped line_rev
and cell =
(* This exact cell already exists, but we dont want to
pass it as an argument since it only gets used in this
case. So we simply construct it anew. (Thank science we
have immutable streams!) *)
LazyList.Cons (fragment, stream);
in
LazyList.Cons (line,
lazy (chop_line ~attributes (lazy cell)))
Break : Ignore at the beginning of the line , or add it to .
and chop_break
~attributes
~rem_width
~line_rev
~dismissables
stream
=
if width = rem_width then
(* Simply ignore break at the beginning of the line *)
chop_line
~attributes
~rem_width
~line_rev
stream
else
(* Line already has more than one column *)
begin match dismissables with
| None ->
(* Add a dismissable break at cost of (at least) one
   column, if there is still space left. *)
if rem_width > 1 then
let dismissables =
RBreak :: option_default line_rev dismissables
in
chop_line
~attributes
~rem_width:(rem_width - 1)
~line_rev
~dismissables
stream
else
let line =
make_chopped line_rev
in
LazyList.Cons (line,
lazy (chop_line attributes stream))
| Some _ ->
(* Already has dismissable break, ignore *)
chop_line
~attributes
~rem_width
~line_rev
?dismissables
stream
end
(* Linebreak: justify line without loose breaks and
   continue with next line *)
and chop_linebreak
~attributes
~line_rev
stream
=
let line =
make_chopped ~partial:true line_rev
in
LazyList.Cons (line,
lazy (chop_line attributes stream))
(* Set attributes: Update current attributes and add it to the other elements *)
and chop_set_attributes
~attributes
~rem_width
~line_rev
~dismissables
attr
stream
=
match dismissables with
| None ->
(* Simply add to current line if we have no
loose breaks *)
chop_line
~attributes
~rem_width
~line_rev:(attr :: line_rev)
stream
| Some breaks ->
(* We have one or more loose breaks after a
   fragment, so we add it to that list. *)
chop_line
~attributes
~rem_width
~line_rev
~dismissables:(attr :: breaks)
stream
(* Make a chopped line *)
and make_chopped
?(partial=false)
line_rev
=
let line = Array.of_list (List.rev line_rev) in
line, partial
in
fun attributes stream -> lazy (chop_line ~attributes stream)
(*------------------------------------*)
(* Convert break to space of width 1 *)
let break_to_space =
function
| RFrag f -> CFrag f
| RAttr a -> CAttr a
| RBreak -> CSpace 1
| RLineBreak -> CSpace 0
(* Convert breaks to spaces by distributing [break_space] evenly
   among all spaces. Because widths are integers, we use something
   similar to the Bresenham line-drawing algorithm *)
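(* For example, with break_space = 7 and break_count = 3 the three breaks
   come out as widths 2, 2 and 3, which sum back to 7. *)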
let break_to_space' ~break_space ~break_count () =
let a = ref break_space in
function
| RFrag f -> CFrag f
| RAttr a -> CAttr a
| RBreak ->
let x = CSpace (!a / break_count) in
a := !a mod break_count + break_space;
x
| RLineBreak -> CSpace 0
(* Measure total width of all printable elements *)
let measure_fragments elements =
let aux sum =
function
| RFrag f -> sum + String.length f
| _ -> sum
in
Array.fold_left aux 0 elements
(* Count number of breaks *)
let count_breaks elements =
let aux sum =
function
| RBreak -> sum + 1
| _ -> sum
in
Array.fold_left aux 0 elements
(* Return an array of justified line elements *)
let justify_line fill width justification (elements, partial) =
let break_count = count_breaks elements in
let fragment_width = measure_fragments elements in
let justification =
match justification with
| `block -> if partial || break_count = 0 then `left else `block
| j -> j
in
match justification with
| `none ->
(* Simply convert breaks to spaces, ignore desired width *)
(Array.map break_to_space elements, fragment_width + break_count)
| `left ->
(* Add left-over space on the right side *)
let space = max 0 (width - fragment_width - break_count) in
let line =
[|
CSeq (Array.map break_to_space elements);
CAttr fill;
CSpace space
|]
in
(line, width)
| `center ->
(* Add a bit of left-over space on both sides *)
let space = max 0 (width - fragment_width - break_count) in
let left_space = space / 2 in
let right_space = space - left_space in
let line =
[|
CAttr fill;
CSpace left_space;
CSeq (Array.map break_to_space elements);
CAttr fill;
CSpace right_space
|]
in
(line, width)
| `right ->
(* Add left-over space on the left side *)
let space = max 0 (width - fragment_width - break_count) in
let line =
[|
CAttr fill;
CSpace space;
CSeq (Array.map break_to_space elements)
|]
in
(line, width)
| `block ->
(* Distribute the left-over space among the breaks *)
assert (break_count > 0);
let break_space = width - fragment_width in
let aux = break_to_space' ~break_space ~break_count () in
let line = Array.map aux elements in
(line, width)
let format
?(attr=Ansi.default)
?(fill=Ansi.default)
?(width=78)
?(just=`none)
stream
=
let chopped_stream = chop attr width stream in
LazyList.map (justify_line fill width just) chopped_stream
(*----------------------------------------------------------------------------*)
let max_width_over_all_lines stream =
let aux m l = max m (line_width l) in
LazyList.fold aux 0 stream
let width_of_first_line (lazy cell) =
match cell with
| LazyList.Nil -> 0
| LazyList.Cons (x, stream) -> line_width x
(*----------------------------------------------------------------------------*)
type tab = line LazyList.t * size
let empty_tab width =
(LazyList.nil, width)
let make_tab width stream =
(stream, width)
let rec tabulate
?(attr=Ansi.default)
?(fill=Ansi.default)
streams
=
let streams = Array.of_list streams in
let widths = Array.map width_of_first_line streams in
tabulate_aux fill widths streams
and tabulate_aux fill widths streams =
(* Use mutable state to generate the data for a lazy stream. This
works because the function is only called once for each cons
cell and the order of invocations is implicitly given by the
definition of lazy-streams.
IDEA: Could this be applied to chop as well? Probably, but I do
not see any immediate benefits. *)
let count = Array.length streams in
let exhausted = ref false in
let rec gen () =
exhausted := true;
let elems = Array.make count (CSpace 0) in
for i = 0 to count - 1 do
match Lazy.force streams.(i) with
| LazyList.Nil ->
elems.(i) <-
CSeq [|
CAttr fill;
CSpace widths.(i)
|]
| LazyList.Cons ((elements,width), s) ->
widths.(i) <- width;
exhausted := false;
streams.(i) <- s;
elems.(i) <- CSeq elements;
done;
if !exhausted then
LazyList.Nil
else
LazyList.Cons (make_line elems,
Lazy.lazy_from_fun gen)
in
Lazy.lazy_from_fun gen
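(* Note: [Lazy.lazy_from_fun] is the older name of what is now
   [Lazy.from_fun] in the standard library. *)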
(*----------------------------------------------------------------------------*)
let rec pad
?(fill=Ansi.default)
?(left=1)
?(right=1)
?(top=1)
?(bottom=1)
stream
=
let width = width_of_first_line stream in
let filler =
let fill_line =
make_line [|
CAttr fill;
CSpace (width + left + right)
|]
in
LazyList.forever fill_line
in
let aux (elems,_) =
make_line [|
CAttr fill;
CSpace left;
CSeq elems;
CAttr fill;
CSpace right
|]
in
LazyList.flatten [
LazyList.take top filler;
LazyList.map aux stream;
LazyList.take bottom filler
]
let indent
?(fill=Ansi.default)
left
stream
=
pad ~fill ~left ~right:0 ~bottom:0 ~top:0 stream
(*----------------------------------------------------------------------------*)
open Printf
let rec dump_raw outc (lazy cell) =
match cell with
| LazyList.Nil ->
fprintf outc "\n";
Pervasives.flush outc
| LazyList.Cons (x, stream) ->
begin match x with
| RFrag f ->
fprintf outc "%S " f
| RBreak ->
fprintf outc "BR "
| RLineBreak ->
fprintf outc "LBR\n"
| RAttr c ->
fprintf outc "ATTRS(%s) " (Ansi.to_string c)
end;
dump_raw outc stream
let dump outc =
let rec dump_stream (lazy cell) =
match cell with
| LazyList.Nil ->
fprintf outc "\n";
Pervasives.flush outc
| LazyList.Cons ((elements,_), stream) ->
Array.iter dump_element elements;
fprintf outc "\n";
dump_stream stream
and dump_element =
function
| CFrag f ->
fprintf outc "%S " f
| CSpace n ->
fprintf outc "SP(%d) " n
| CAttr c ->
fprintf outc "ATTRS(%s) " (Ansi.to_string c)
| CSeq elements ->
fprintf outc "[[ ";
Array.iter dump_element elements;
fprintf outc "]] "
in
dump_stream
type printer = {
pr_ansi : bool;
mutable pr_attr : Ansi.t option;
mutable pr_outc : out_channel
}
let make_printer ?(ansi=true) outc =
{
pr_ansi = ansi;
pr_attr = None;
pr_outc = outc
}
let printer_attributes pr =
pr.pr_attr
let printer_set_attributes pr attr =
pr.pr_attr <- attr
let print_string pr str =
output_string pr.pr_outc str
let print_newline pr () =
output_string pr.pr_outc "\n"
let print_ansi pr attr =
let codes =
match pr.pr_attr with
| None -> Ansi.to_codes Ansi.default
| Some pr_attr -> Ansi.codes_of_transition pr_attr attr
in
let seq = Ansi.sequence_of_codes codes in
if pr.pr_ansi then print_string pr seq;
pr.pr_attr <- Some attr
let printf pr fmt =
Printf.kprintf (print_string pr) fmt
let flush pr =
if pr.pr_ansi then output_string stdout Ansi.reset_sequence;
pr.pr_attr <- None;
flush pr.pr_outc;
()
let print_lines pr stream =
let rec print_stream (lazy cell) =
match cell with
| LazyList.Nil ->
()
| LazyList.Cons ((elements,_), stream) ->
Array.iter print_element elements;
print_newline pr ();
print_stream stream
and print_element =
function
| CFrag f ->
print_string pr f
| CSpace n ->
for i = 0 to n-1 do print_string pr " " done
| CAttr attr ->
print_ansi pr attr
| CSeq elements ->
Array.iter print_element elements
in
print_stream stream
| null | https://raw.githubusercontent.com/krohrer/caml-aesq/4cdbd23c010ad3e8eb67ca6a86cdb6a9efafc9c6/text.ml | ocaml | Minimum width allowed for formatting
Chop text up into lines (lines still need to be cooked tough)
Closures are a poor mans objects, and [width] is the only
constant field.
Chop text so its lines are no longer than [width]
Current attributes
Remaining width
Reverse list of line elements, start each line explicitly
with the current attributes. Makes it easier to concat lines
later on.
Line elements including elements after the last printable
that might be dismissed, if no printable was to
follow before the end of the line.
Done
Make partial line from the remaining elements
Chop fragment and justify line if necessary
Fragment still fits on this line
Fragment must be split anyway, may as well start on this
line, if possible. It must always be possible if the line
does not yet contain any printable elements.
Prefix stream for next line with left-overs
from current line
Fragment does not fit on current line, retry on
next line.
This exact cell already exists, but we dont want to
pass it as an argument since it only gets used in this
case. So we simply construct it anew. (Thank science we
have immutable streams!)
Already has dismissable break, ignore
Set attributes: Update current attributes and add it to the other elements
Simply add to current line if we have no
loose breaks
Make a chopped line
------------------------------------
Measure total width of all printable elements
Count number of breaks
Return an array of justified line elements
Simply convert breaks to spaces, ignore desired width
Add left-over space on the right side
Add a bit of left-over space on both sides
Add left-over space on the left side
----------------------------------------------------------------------------
----------------------------------------------------------------------------
Use mutable state to generate the data for a lazy stream. This
works because the function is only called once for each cons
cell and the order of invocations is implicitly given by the
definition of lazy-streams.
IDEA: Could this be applied to chop as well? Probably, but I do
not see any immediate benefits.
----------------------------------------------------------------------------
---------------------------------------------------------------------------- | , Fri Apr 23 00:11:58 CEST 2010
type justification = [`left | `center | `right | `block | `none]
let option_default a opt =
match opt with
| None -> a
| Some a -> a
let justification_to_string =
function
| `none -> "none"
| `left -> "left"
| `center -> "center"
| `right -> "right"
| `block -> "block"
type raw =
| RFrag of string
| RAttr of Ansi.t
| RBreak
| RLineBreak
type cooked =
| CFrag of string
| CAttr of Ansi.t
| CSpace of int
| CSeq of cooked array
type size = int
type line = cooked array * size
let rec measure_line_width elements =
let aux sum =
function
| CFrag f -> sum + String.length f
| CSpace n -> sum + n
| CSeq elements -> sum + measure_line_width elements
| CAttr _ -> sum
in
Array.fold_left aux 0 elements
let empty_line =
([||], 0)
let make_line elements =
(elements, measure_line_width elements)
let line_width (_,width) = width
let line_concat lines =
let elements_array, widths = List.split lines in
let elements = Array.concat elements_array in
let width = List.fold_left (+) 0 widths in
(elements, width)
let rec chop attributes width stream =
let width = max min_width width in
chop_aux width attributes stream
and chop_aux width =
let rec chop_line
~attributes
?(rem_width=width)
?(line_rev=[RAttr attributes])
?dismissables
(lazy cell)
=
match cell with
| LazyList.Nil ->
chop_done
~attributes
~line_rev
~dismissables
()
| LazyList.Cons (x, stream) ->
begin match x with
| RFrag _ as x ->
chop_fragment
~attributes
~rem_width
~line_rev
~dismissables
x
stream
| RBreak ->
chop_break
~attributes
~rem_width
~line_rev
~dismissables
stream
| RLineBreak ->
chop_linebreak
~attributes
~line_rev
stream
| RAttr attributes as x ->
chop_set_attributes
~attributes
~rem_width
~line_rev
~dismissables
x
stream
end
and chop_done
~attributes
~line_rev
~dismissables
()
=
let line =
make_chopped
~partial:true
(option_default line_rev dismissables)
in
LazyList.Cons (line, LazyList.nil)
and chop_fragment
~attributes
~rem_width
~line_rev
~dismissables
fragment
stream
=
let frag = match fragment with RFrag f -> f | _ -> assert false in
let len = String.length frag in
if len <= rem_width then
chop_line
~attributes
~rem_width:(rem_width - len)
~line_rev:(fragment :: option_default line_rev dismissables)
stream
else if len > width && rem_width >= min_width then
let line =
let frag_left = String.sub frag 0 rem_width in
make_chopped
(RFrag frag_left :: option_default line_rev dismissables)
and cell =
let frag_right = String.sub frag rem_width (len - rem_width) in
LazyList.Cons (RFrag frag_right,
stream)
in
LazyList.Cons (line,
lazy (chop_line
~attributes
(lazy cell)))
else
let line =
make_chopped line_rev
and cell =
LazyList.Cons (fragment, stream)
in
LazyList.Cons (line,
lazy (chop_line ~attributes (lazy cell)))
and chop_break
~attributes
~rem_width
~line_rev
~dismissables
stream
=
if width = rem_width then
chop_line
~attributes
~rem_width
~line_rev
stream
else
begin match dismissables with
| None ->
if rem_width > 1 then
let dismissables =
RBreak :: option_default line_rev dismissables
in
chop_line
~attributes
~rem_width:(rem_width - 1)
~line_rev
~dismissables
stream
else
let line =
make_chopped line_rev
in
LazyList.Cons (line,
lazy (chop_line attributes stream))
| Some _ ->
chop_line
~attributes
~rem_width
~line_rev
?dismissables
stream
end
and chop_linebreak
~attributes
~line_rev
stream
=
let line =
make_chopped ~partial:true line_rev
in
LazyList.Cons (line,
lazy (chop_line attributes stream))
and chop_set_attributes
~attributes
~rem_width
~line_rev
~dismissables
attr
stream
=
match dismissables with
| None ->
chop_line
~attributes
~rem_width
~line_rev:(attr :: line_rev)
stream
| Some breaks ->
chop_line
~attributes
~rem_width
~line_rev
~dismissables:(attr :: breaks)
stream
and make_chopped
?(partial=false)
line_rev
=
let line = Array.of_list (List.rev line_rev) in
line, partial
in
fun attributes stream -> lazy (chop_line ~attributes stream)
let break_to_space =
function
| RFrag f -> CFrag f
| RAttr a -> CAttr a
| RBreak -> CSpace 1
| RLineBreak -> CSpace 0
let break_to_space' ~break_space ~break_count () =
let a = ref break_space in
function
| RFrag f -> CFrag f
| RAttr a -> CAttr a
| RBreak ->
let x = CSpace (!a / break_count) in
a := !a mod break_count + break_space;
x
| RLineBreak -> CSpace 0
let measure_fragments elements =
let aux sum =
function
| RFrag f -> sum + String.length f
| _ -> sum
in
Array.fold_left aux 0 elements
let count_breaks elements =
let aux sum =
function
| RBreak -> sum + 1
| _ -> sum
in
Array.fold_left aux 0 elements
let justify_line fill width justification (elements, partial) =
let break_count = count_breaks elements in
let fragment_width = measure_fragments elements in
let justification =
match justification with
| `block -> if partial || break_count = 0 then `left else `block
| j -> j
in
match justification with
| `none ->
(Array.map break_to_space elements, fragment_width + break_count)
| `left ->
let space = max 0 (width - fragment_width - break_count) in
let line =
[|
CSeq (Array.map break_to_space elements);
CAttr fill;
CSpace space
|]
in
(line, width)
| `center ->
let space = max 0 (width - fragment_width - break_count) in
let left_space = space / 2 in
let right_space = space - left_space in
let line =
[|
CAttr fill;
CSpace left_space;
CSeq (Array.map break_to_space elements);
CAttr fill;
CSpace right_space
|]
in
(line, width)
| `right ->
let space = max 0 (width - fragment_width - break_count) in
let line =
[|
CAttr fill;
CSpace space;
CSeq (Array.map break_to_space elements)
|]
in
(line, width)
| `block ->
assert (break_count > 0);
let break_space = width - fragment_width in
let aux = break_to_space' ~break_space ~break_count () in
let line = Array.map aux elements in
(line, width)
let format
?(attr=Ansi.default)
?(fill=Ansi.default)
?(width=78)
?(just=`none)
stream
=
let chopped_stream = chop attr width stream in
LazyList.map (justify_line fill width just) chopped_stream
let max_width_over_all_lines stream =
let aux m l = max m (line_width l) in
LazyList.fold aux 0 stream
let width_of_first_line (lazy cell) =
match cell with
| LazyList.Nil -> 0
| LazyList.Cons (x, stream) -> line_width x
type tab = line LazyList.t * size
let empty_tab width =
(LazyList.nil, width)
let make_tab width stream =
(stream, width)
let rec tabulate
?(attr=Ansi.default)
?(fill=Ansi.default)
streams
=
let streams = Array.of_list streams in
let widths = Array.map width_of_first_line streams in
tabulate_aux fill widths streams
and tabulate_aux fill widths streams =
let count = Array.length streams in
let exhausted = ref false in
let rec gen () =
exhausted := true;
let elems = Array.make count (CSpace 0) in
for i = 0 to count - 1 do
match Lazy.force streams.(i) with
| LazyList.Nil ->
elems.(i) <-
CSeq [|
CAttr fill;
CSpace widths.(i)
|]
| LazyList.Cons ((elements,width), s) ->
widths.(i) <- width;
exhausted := false;
streams.(i) <- s;
elems.(i) <- CSeq elements;
done;
if !exhausted then
LazyList.Nil
else
LazyList.Cons (make_line elems,
Lazy.lazy_from_fun gen)
in
Lazy.lazy_from_fun gen
let rec pad
?(fill=Ansi.default)
?(left=1)
?(right=1)
?(top=1)
?(bottom=1)
stream
=
let width = width_of_first_line stream in
let filler =
let fill_line =
make_line [|
CAttr fill;
CSpace (width + left + right)
|]
in
LazyList.forever fill_line
in
let aux (elems,_) =
make_line [|
CAttr fill;
CSpace left;
CSeq elems;
CAttr fill;
CSpace right
|]
in
LazyList.flatten [
LazyList.take top filler;
LazyList.map aux stream;
LazyList.take bottom filler
]
let indent
?(fill=Ansi.default)
left
stream
=
pad ~fill ~left ~right:0 ~bottom:0 ~top:0 stream
open Printf
let rec dump_raw outc (lazy cell) =
match cell with
| LazyList.Nil ->
fprintf outc "\n";
Pervasives.flush outc
| LazyList.Cons (x, stream) ->
begin match x with
| RFrag f ->
fprintf outc "%S " f
| RBreak ->
fprintf outc "BR "
| RLineBreak ->
fprintf outc "LBR\n"
| RAttr c ->
fprintf outc "ATTRS(%s) " (Ansi.to_string c)
end;
dump_raw outc stream
let dump outc =
let rec dump_stream (lazy cell) =
match cell with
| LazyList.Nil ->
fprintf outc "\n";
Pervasives.flush outc
| LazyList.Cons ((elements,_), stream) ->
Array.iter dump_element elements;
fprintf outc "\n";
dump_stream stream
and dump_element =
function
| CFrag f ->
fprintf outc "%S " f
| CSpace n ->
fprintf outc "SP(%d) " n
| CAttr c ->
fprintf outc "ATTRS(%s) " (Ansi.to_string c)
| CSeq elements ->
fprintf outc "[[ ";
Array.iter dump_element elements;
fprintf outc "]] "
in
dump_stream
type printer = {
pr_ansi : bool;
mutable pr_attr : Ansi.t option;
mutable pr_outc : out_channel
}
let make_printer ?(ansi=true) outc =
{
pr_ansi = ansi;
pr_attr = None;
pr_outc = outc
}
let printer_attributes pr =
pr.pr_attr
let printer_set_attributes pr attr =
pr.pr_attr <- attr
let print_string pr str =
output_string pr.pr_outc str
let print_newline pr () =
output_string pr.pr_outc "\n"
let print_ansi pr attr =
let codes =
match pr.pr_attr with
| None -> Ansi.to_codes Ansi.default
| Some pr_attr -> Ansi.codes_of_transition pr_attr attr
in
let seq = Ansi.sequence_of_codes codes in
if pr.pr_ansi then print_string pr seq;
pr.pr_attr <- Some attr
let printf pr fmt =
Printf.kprintf (print_string pr) fmt
let flush pr =
if pr.pr_ansi then output_string stdout Ansi.reset_sequence;
pr.pr_attr <- None;
flush pr.pr_outc;
()
let print_lines pr stream =
let rec print_stream (lazy cell) =
match cell with
| LazyList.Nil ->
()
| LazyList.Cons ((elements,_), stream) ->
Array.iter print_element elements;
print_newline pr ();
print_stream stream
and print_element =
function
| CFrag f ->
print_string pr f
| CSpace n ->
for i = 0 to n-1 do print_string pr " " done
| CAttr attr ->
print_ansi pr attr
| CSeq elements ->
Array.iter print_element elements
in
print_stream stream
|
244976ed90566e4e05c0fb36cf728dc49f526a47c8051c0adfe60de0c1cd9ad5 | easyuc/EasyUC | ecPhlCond.mli | (* -------------------------------------------------------------------- *)
open EcParsetree
open EcCoreGoal.FApi
(* -------------------------------------------------------------------- *)
val t_hoare_cond : backward
val t_choare_cond : EcFol.form option -> backward
val t_bdhoare_cond : backward
val t_equiv_cond : oside -> backward
(* -------------------------------------------------------------------- *)
val t_equiv_match : matchmode -> backward
| null | https://raw.githubusercontent.com/easyuc/EasyUC/a9ce3e3a47b48a498ec9944eab85f2677886c7ae/uc-dsl/ucdsl-proj/src/ECsrc/phl/ecPhlCond.mli | ocaml | --------------------------------------------------------------------
--------------------------------------------------------------------
-------------------------------------------------------------------- | open EcParsetree
open EcCoreGoal.FApi
val t_hoare_cond : backward
val t_choare_cond : EcFol.form option -> backward
val t_bdhoare_cond : backward
val t_equiv_cond : oside -> backward
val t_equiv_match : matchmode -> backward
|
55a8b2cc682161c32694a3b18536eade98687d28fcc36a2f9f472c93e79ff0cf | lispcord/lispcord | emoji.lisp | (in-package lispcord.http)
(defmethod from-id (id (g lc:guild) &optional (bot *client*))
(if (getcache-id id :emoji)
(getcache-id id :emoji)
(cache :emoji
(discord-req (str-concat "guilds/" (lc:id g)
"/emojis/" id)
:bot bot))))
(defun get-emojis (guild &optional (bot *client*))
(declare (type (or snowflake lc:guild) guild))
(let ((g (if (typep guild 'lc:guild) (lc:id guild) guild)))
(map '(vector lc:emoji)
(curry #'cache :emoji)
(discord-req (str-concat "guilds/" g
"/emojis")
:bot bot))))
(defmethod create ((e lc:partial-emoji) (g lc:guild)
&optional (bot *client*))
(cache :emoji
(discord-req (str-concat "guilds/" (lc:id g)
"/emojis")
:bot bot
:type :post
:content (to-json e))))
(defmethod edit ((e lc:partial-emoji) (g lc:guild)
&optional (bot *client*))
(cache :emoji
(discord-req (str-concat "guilds/" (lc:id g) "/emojis")
:bot bot
:type :patch
:content (jmake
`(("name" . ,(slot-value e 'name))
("roles" . ,(slot-value e 'roles)))))))
(defmethod edit ((e lc:emoji) (g lc:guild) &optional (bot *client*))
(cache :emoji
(discord-req (str-concat "guilds/" (lc:id g) "/emojis")
:bot bot
:type :patch
:content (jmake
`(("name" . ,(slot-value e 'name))
("roles" . ,(slot-value e 'roles)))))))
(defun erase-emoji (emoji guild &optional (bot *client*))
(declare (type (or snowflake lc:guild) guild)
(type (or snowflake lc:emoji) emoji))
(let ((e (if (typep emoji 'lc:emoji) (lc:id emoji) emoji))
(g (if (typep guild 'lc:guild) (lc:id guild) guild)))
(discord-req (str-concat "guilds/" (lc:id g) "/emojis/" (lc:id e))
:bot bot
:type :delete)))
| null | https://raw.githubusercontent.com/lispcord/lispcord/448190cc503a0d7e59bdc0ffddb2e9dba0a706af/src/http/emoji.lisp | lisp | (in-package lispcord.http)
(defmethod from-id (id (g lc:guild) &optional (bot *client*))
(if (getcache-id id :emoji)
(getcache-id id :emoji)
(cache :emoji
(discord-req (str-concat "guilds/" (lc:id g)
"/emojis/" id)
:bot bot))))
(defun get-emojis (guild &optional (bot *client*))
(declare (type (or snowflake lc:guild) guild))
(let ((g (if (typep guild 'lc:guild) (lc:id guild) guild)))
(map '(vector lc:emoji)
(curry #'cache :emoji)
(discord-req (str-concat "guilds/" g
"/emojis")
:bot bot))))
(defmethod create ((e lc:partial-emoji) (g lc:guild)
&optional (bot *client*))
(cache :emoji
(discord-req (str-concat "guilds/" (lc:id g)
"/emojis")
:bot bot
:type :post
:content (to-json e))))
(defmethod edit ((e lc:partial-emoji) (g lc:guild)
&optional (bot *client*))
(cache :emoji
(discord-req (str-concat "guilds/" (lc:id g) "/emojis")
:bot bot
:type :patch
:content (jmake
`(("name" . ,(slot-value e 'name))
("roles" . ,(slot-value e 'roles)))))))
(defmethod edit ((e lc:emoji) (g lc:guild) &optional (bot *client*))
(cache :emoji
(discord-req (str-concat "guilds/" (lc:id g) "/emojis")
:bot bot
:type :patch
:content (jmake
`(("name" . ,(slot-value e 'name))
("roles" . ,(slot-value e 'roles)))))))
(defun erase-emoji (emoji guild &optional (bot *client*))
(declare (type (or snowflake lc:guild) guild)
(type (or snowflake lc:emoji) emoji))
(let ((e (if (typep emoji 'lc:emoji) (lc:id emoji) emoji))
(g (if (typep guild 'lc:guild) (lc:id guild) guild)))
(discord-req (str-concat "guilds/" (lc:id g) "/emojis/" (lc:id e))
:bot bot
:type :delete)))
|
|
90dd00f5387a733b4a0ba96de46ec10c83cceba59d630c897c7aa8a30a81adc3 | ocaml-flambda/flambda-backend | printclambda.ml | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *)
(* *)
(* Copyright 1996 Institut National de Recherche en Informatique et *)
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
(* the GNU Lesser General Public License version 2.1, with the *)
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
open Format
open Clambda
module V = Backend_var
module VP = Backend_var.With_provenance
let mutable_flag = function
| Lambda.Mutable-> "[mut]"
| Lambda.Immutable | Lambda.Immutable_unique -> ""
let rec value_kind0 ppf kind =
let open Lambda in
match kind with
| Pgenval -> Format.pp_print_string ppf ""
| Pintval -> Format.pp_print_string ppf ":int"
| Pfloatval -> Format.pp_print_string ppf ":float"
| Parrayval Pgenarray -> Format.pp_print_string ppf ":genarray"
| Parrayval Pintarray -> Format.pp_print_string ppf ":intarray"
| Parrayval Pfloatarray -> Format.pp_print_string ppf ":floatarray"
| Parrayval Paddrarray -> Format.pp_print_string ppf ":addrarray"
| Pboxedintval Pnativeint -> Format.pp_print_string ppf ":nativeint"
| Pboxedintval Pint32 -> Format.pp_print_string ppf ":int32"
| Pboxedintval Pint64 -> Format.pp_print_string ppf ":int64"
| Pvariant { consts; non_consts } ->
Format.fprintf ppf "@[<hov 1>[(consts (%a))@ (non_consts (%a))]@]"
(Format.pp_print_list ~pp_sep:Format.pp_print_space Format.pp_print_int)
consts
(Format.pp_print_list ~pp_sep:Format.pp_print_space
(fun ppf (tag, fields) ->
fprintf ppf "@[<hov 1>[%d:@ %a]@]" tag
(Format.pp_print_list
~pp_sep:(fun ppf () -> fprintf ppf ",@ ")
value_kind0)
fields))
non_consts
let value_kind kind = Format.asprintf "%a" value_kind0 kind
let layout (Lambda.Pvalue kind) = value_kind kind
let rec structured_constant ppf = function
| Uconst_float x -> fprintf ppf "%F" x
| Uconst_int32 x -> fprintf ppf "%ldl" x
| Uconst_int64 x -> fprintf ppf "%LdL" x
| Uconst_nativeint x -> fprintf ppf "%ndn" x
| Uconst_block (tag, l) ->
fprintf ppf "block(%i" tag;
List.iter (fun u -> fprintf ppf ",%a" uconstant u) l;
fprintf ppf ")"
| Uconst_float_array [] ->
fprintf ppf "floatarray()"
| Uconst_float_array (f1 :: fl) ->
fprintf ppf "floatarray(%F" f1;
List.iter (fun f -> fprintf ppf ",%F" f) fl;
fprintf ppf ")"
| Uconst_string s -> fprintf ppf "%S" s
| Uconst_closure(clos, sym, fv) ->
let funs ppf =
List.iter (fprintf ppf "@ %a" one_fun) in
let sconsts ppf scl =
List.iter (fun sc -> fprintf ppf "@ %a" uconstant sc) scl in
fprintf ppf "@[<2>(const_closure%a %s@ %a)@]" funs clos sym sconsts fv
and one_fun ppf f =
let idents ppf =
let rec iter params layouts =
match params, layouts with
| [], [] -> ()
| [param], [] ->
fprintf ppf "@ %a%a"
VP.print param Printlambda.layout Lambda.layout_function
| param :: params, layout :: layouts ->
fprintf ppf "@ %a%a"
VP.print param Printlambda.layout layout;
iter params layouts
| _ -> Misc.fatal_error "arity inconsistent with params"
in
iter f.params f.arity.params_layout
in
fprintf ppf "(fun@ %s%s%a@ %d@ @[<2>%t@]@ @[<2>%a@])"
f.label (layout f.arity.return_layout) Printlambda.check_attribute f.check
(List.length f.arity.params_layout) idents lam f.body
and phantom_defining_expr ppf = function
| Uphantom_const const -> uconstant ppf const
| Uphantom_var var -> Ident.print ppf var
| Uphantom_offset_var { var; offset_in_words; } ->
Format.fprintf ppf "%a+(%d)" Backend_var.print var offset_in_words
| Uphantom_read_field { var; field; } ->
Format.fprintf ppf "%a[%d]" Backend_var.print var field
| Uphantom_read_symbol_field { sym; field; } ->
Format.fprintf ppf "%s[%d]" sym field
| Uphantom_block { tag; fields; } ->
Format.fprintf ppf "[%d: " tag;
List.iter (fun field ->
Format.fprintf ppf "%a; " Backend_var.print field)
fields;
Format.fprintf ppf "]"
and phantom_defining_expr_opt ppf = function
| None -> Format.fprintf ppf "DEAD"
| Some expr -> phantom_defining_expr ppf expr
and uconstant ppf = function
| Uconst_ref (s, Some c) ->
fprintf ppf "%S=%a" s structured_constant c
| Uconst_ref (s, None) -> fprintf ppf "%S"s
| Uconst_int i -> fprintf ppf "%i" i
and apply_kind ppf : apply_kind -> unit = function
| (Rc_normal | Rc_nontail), Alloc_heap -> fprintf ppf "apply"
| Rc_close_at_apply, Alloc_heap -> fprintf ppf "apply[end_region]"
| (Rc_normal | Rc_nontail), Alloc_local -> fprintf ppf "apply[L]"
| Rc_close_at_apply, Alloc_local -> fprintf ppf "apply[end_region][L]"
and lam ppf = function
| Uvar id ->
V.print ppf id
| Uconst c -> uconstant ppf c
| Udirect_apply(f, largs, probe, _, kind, _) ->
let lams ppf largs =
List.iter (fun l -> fprintf ppf "@ %a" lam l) largs in
let pr ppf (probe : Lambda.probe) =
match probe with
| None -> ()
| Some {name} -> fprintf ppf " (probe %s)" name
in
fprintf ppf "@[<2>(%a*@ %s %a%a)@]" apply_kind kind f lams largs pr probe
| Ugeneric_apply(lfun, largs, _, _, kind, _) ->
let lams ppf largs =
List.iter (fun l -> fprintf ppf "@ %a" lam l) largs in
fprintf ppf "@[<2>(%a@ %a%a)@]" apply_kind kind lam lfun lams largs
| Uclosure { functions ; not_scanned_slots ; scanned_slots } ->
let funs ppf =
List.iter (fprintf ppf "@ @[<2>%a@]" one_fun) in
let lams ppf =
List.iter (fprintf ppf "@ %a" lam) in
fprintf ppf "@[<2>(closure@ %a (%a) %a)@]" funs functions
lams not_scanned_slots lams scanned_slots
| Uoffset(l,i) -> fprintf ppf "@[<2>(offset %a %d)@]" lam l i
| Ulet(mut, kind, id, arg, body) ->
let rec letbody ul = match ul with
| Ulet(mut, kind, id, arg, body) ->
fprintf ppf "@ @[<2>%a%s%s@ %a@]"
VP.print id
(mutable_flag mut) (layout kind) lam arg;
letbody body
| _ -> ul in
fprintf ppf "@[<2>(let@ @[<hv 1>(@[<2>%a%s%s@ %a@]"
VP.print id (mutable_flag mut)
(layout kind) lam arg;
let expr = letbody body in
fprintf ppf ")@]@ %a)@]" lam expr
| Uphantom_let (id, defining_expr, body) ->
let rec letbody ul = match ul with
| Uphantom_let (id, defining_expr, body) ->
fprintf ppf "@ @[<2>%a@ %a@]"
Backend_var.With_provenance.print id
phantom_defining_expr_opt defining_expr;
letbody body
| _ -> ul in
fprintf ppf "@[<2>(phantom_let@ @[<hv 1>(@[<2>%a@ %a@]"
Backend_var.With_provenance.print id
phantom_defining_expr_opt defining_expr;
let expr = letbody body in
fprintf ppf ")@]@ %a)@]" lam expr
| Uletrec(id_arg_list, body) ->
let bindings ppf id_arg_list =
let spc = ref false in
List.iter
(fun (id, l) ->
if !spc then fprintf ppf "@ " else spc := true;
fprintf ppf "@[<2>%a@ %a@]"
VP.print id
lam l)
id_arg_list in
fprintf ppf
"@[<2>(letrec@ (@[<hv 1>%a@])@ %a)@]" bindings id_arg_list lam body
| Uprim(prim, largs, _) ->
let lams ppf largs =
List.iter (fun l -> fprintf ppf "@ %a" lam l) largs in
fprintf ppf "@[<2>(%a%a)@]"
Printclambda_primitives.primitive prim lams largs
| Uswitch(larg, sw, _dbg, _kind) ->
let print_case tag index i ppf =
for j = 0 to Array.length index - 1 do
if index.(j) = i then fprintf ppf "case %s %i:" tag j
done in
let print_cases tag index cases ppf =
for i = 0 to Array.length cases - 1 do
fprintf ppf "@ @[<2>%t@ %a@]"
(print_case tag index i) sequence cases.(i)
done in
let switch ppf sw =
print_cases "int" sw.us_index_consts sw.us_actions_consts ppf ;
print_cases "tag" sw.us_index_blocks sw.us_actions_blocks ppf in
fprintf ppf
"@[<v 0>@[<2>(switch@ %a@ @]%a)@]"
lam larg switch sw
| Ustringswitch(larg,sw,d, _kind) ->
let switch ppf sw =
let spc = ref false in
List.iter
(fun (s,l) ->
if !spc then fprintf ppf "@ " else spc := true;
fprintf ppf "@[<hv 1>case \"%s\":@ %a@]"
(String.escaped s) lam l)
sw ;
begin match d with
| Some d ->
if !spc then fprintf ppf "@ " else spc := true;
fprintf ppf "@[<hv 1>default:@ %a@]" lam d
| None -> ()
end in
fprintf ppf
"@[<1>(switch %a@ @[<v 0>%a@])@]"
lam larg switch sw
| Ustaticfail (i, ls) ->
let lams ppf largs =
List.iter (fun l -> fprintf ppf "@ %a" lam l) largs in
fprintf ppf "@[<2>(exit@ %d%a)@]" i lams ls;
| Ucatch(i, vars, lbody, lhandler, _kind) ->
fprintf ppf "@[<2>(catch@ %a@;<1 -1>with (%d%a)@ %a)@]"
lam lbody i
(fun ppf vars ->
List.iter
(fun (x, k) ->
fprintf ppf " %a%a"
VP.print x
Printlambda.layout k
)
vars
)
vars
lam lhandler
| Utrywith(lbody, param, lhandler, _kind) ->
fprintf ppf "@[<2>(try@ %a@;<1 -1>with %a@ %a)@]"
lam lbody VP.print param lam lhandler
| Uifthenelse(lcond, lif, lelse, _kind) ->
fprintf ppf "@[<2>(if@ %a@ %a@ %a)@]" lam lcond lam lif lam lelse
| Usequence(l1, l2) ->
fprintf ppf "@[<2>(seq@ %a@ %a)@]" lam l1 sequence l2
| Uwhile(lcond, lbody) ->
fprintf ppf "@[<2>(while@ %a@ %a)@]" lam lcond lam lbody
| Ufor(param, lo, hi, dir, body) ->
fprintf ppf "@[<2>(for %a@ %a@ %s@ %a@ %a)@]"
VP.print param lam lo
(match dir with Upto -> "to" | Downto -> "downto")
lam hi lam body
| Uassign(id, expr) ->
fprintf ppf "@[<2>(assign@ %a@ %a)@]" V.print id lam expr
| Usend (k, met, obj, largs, _, _, (pos,_) , _) ->
let form =
match pos with
| Rc_normal | Rc_nontail -> "send"
| Rc_close_at_apply -> "send[end_region]"
in
let args ppf largs =
List.iter (fun l -> fprintf ppf "@ %a" lam l) largs in
let kind =
if k = Lambda.Self then "self"
else if k = Lambda.Cached then "cache"
else "" in
fprintf ppf "@[<2>(%s%s@ %a@ %a%a)@]"
form kind lam obj lam met args largs
| Uunreachable ->
fprintf ppf "unreachable"
| Uregion e ->
fprintf ppf "@[<2>(region@ %a)@]" lam e
| Utail e ->
fprintf ppf "@[<2>(tail@ %a)@]" lam e
and sequence ppf ulam = match ulam with
| Usequence(l1, l2) ->
fprintf ppf "%a@ %a" sequence l1 sequence l2
| _ -> lam ppf ulam
let clambda ppf ulam =
fprintf ppf "%a@." lam ulam
let rec approx ppf = function
Value_closure(_, fundesc, a) ->
Format.fprintf ppf "@[<2>function %s"
fundesc.fun_label;
let n = List.length fundesc.fun_arity.params_layout in
begin match fundesc.fun_arity.function_kind with
| Tupled -> Format.fprintf ppf "@ arity -%i" n
| Curried {nlocal=0} -> Format.fprintf ppf "@ arity %i" n
| Curried {nlocal=k} -> Format.fprintf ppf "@ arity %i(%i L)" n k
end;
if fundesc.fun_closed then begin
Format.fprintf ppf "@ (closed)"
end;
if fundesc.fun_inline <> None then begin
Format.fprintf ppf "@ (inline)"
end;
Format.fprintf ppf "@ -> @ %a@]" approx a
| Value_tuple (_,a) ->
let tuple ppf a =
for i = 0 to Array.length a - 1 do
if i > 0 then Format.fprintf ppf ";@ ";
Format.fprintf ppf "%i: %a" i approx a.(i)
done in
Format.fprintf ppf "@[<hov 1>(%a)@]" tuple a
| Value_unknown ->
Format.fprintf ppf "_"
| Value_const c ->
fprintf ppf "@[const(%a)@]" uconstant c
| Value_global_field (s, i) ->
fprintf ppf "@[global(%s,%i)@]" s i
| null | https://raw.githubusercontent.com/ocaml-flambda/flambda-backend/9c5d6eecfc6fc1cf067e53e530700c85a0a7b1dd/middle_end/printclambda.ml | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************ | , projet Cristal , INRIA Rocquencourt
open Format
open Clambda
module V = Backend_var
module VP = Backend_var.With_provenance
let mutable_flag = function
| Lambda.Mutable-> "[mut]"
| Lambda.Immutable | Lambda.Immutable_unique -> ""
let rec value_kind0 ppf kind =
let open Lambda in
match kind with
| Pgenval -> Format.pp_print_string ppf ""
| Pintval -> Format.pp_print_string ppf ":int"
| Pfloatval -> Format.pp_print_string ppf ":float"
| Parrayval Pgenarray -> Format.pp_print_string ppf ":genarray"
| Parrayval Pintarray -> Format.pp_print_string ppf ":intarray"
| Parrayval Pfloatarray -> Format.pp_print_string ppf ":floatarray"
| Parrayval Paddrarray -> Format.pp_print_string ppf ":addrarray"
| Pboxedintval Pnativeint -> Format.pp_print_string ppf ":nativeint"
| Pboxedintval Pint32 -> Format.pp_print_string ppf ":int32"
| Pboxedintval Pint64 -> Format.pp_print_string ppf ":int64"
| Pvariant { consts; non_consts } ->
Format.fprintf ppf "@[<hov 1>[(consts (%a))@ (non_consts (%a))]@]"
(Format.pp_print_list ~pp_sep:Format.pp_print_space Format.pp_print_int)
consts
(Format.pp_print_list ~pp_sep:Format.pp_print_space
(fun ppf (tag, fields) ->
fprintf ppf "@[<hov 1>[%d:@ %a]@]" tag
(Format.pp_print_list
~pp_sep:(fun ppf () -> fprintf ppf ",@ ")
value_kind0)
fields))
non_consts
let value_kind kind = Format.asprintf "%a" value_kind0 kind
let layout (Lambda.Pvalue kind) = value_kind kind
let rec structured_constant ppf = function
| Uconst_float x -> fprintf ppf "%F" x
| Uconst_int32 x -> fprintf ppf "%ldl" x
| Uconst_int64 x -> fprintf ppf "%LdL" x
| Uconst_nativeint x -> fprintf ppf "%ndn" x
| Uconst_block (tag, l) ->
fprintf ppf "block(%i" tag;
List.iter (fun u -> fprintf ppf ",%a" uconstant u) l;
fprintf ppf ")"
| Uconst_float_array [] ->
fprintf ppf "floatarray()"
| Uconst_float_array (f1 :: fl) ->
fprintf ppf "floatarray(%F" f1;
List.iter (fun f -> fprintf ppf ",%F" f) fl;
fprintf ppf ")"
| Uconst_string s -> fprintf ppf "%S" s
| Uconst_closure(clos, sym, fv) ->
let funs ppf =
List.iter (fprintf ppf "@ %a" one_fun) in
let sconsts ppf scl =
List.iter (fun sc -> fprintf ppf "@ %a" uconstant sc) scl in
fprintf ppf "@[<2>(const_closure%a %s@ %a)@]" funs clos sym sconsts fv
and one_fun ppf f =
let idents ppf =
let rec iter params layouts =
match params, layouts with
| [], [] -> ()
| [param], [] ->
fprintf ppf "@ %a%a"
VP.print param Printlambda.layout Lambda.layout_function
| param :: params, layout :: layouts ->
fprintf ppf "@ %a%a"
VP.print param Printlambda.layout layout;
iter params layouts
| _ -> Misc.fatal_error "arity inconsistent with params"
in
iter f.params f.arity.params_layout
in
fprintf ppf "(fun@ %s%s%a@ %d@ @[<2>%t@]@ @[<2>%a@])"
f.label (layout f.arity.return_layout) Printlambda.check_attribute f.check
(List.length f.arity.params_layout) idents lam f.body
and phantom_defining_expr ppf = function
| Uphantom_const const -> uconstant ppf const
| Uphantom_var var -> Ident.print ppf var
| Uphantom_offset_var { var; offset_in_words; } ->
Format.fprintf ppf "%a+(%d)" Backend_var.print var offset_in_words
| Uphantom_read_field { var; field; } ->
Format.fprintf ppf "%a[%d]" Backend_var.print var field
| Uphantom_read_symbol_field { sym; field; } ->
Format.fprintf ppf "%s[%d]" sym field
| Uphantom_block { tag; fields; } ->
Format.fprintf ppf "[%d: " tag;
List.iter (fun field ->
Format.fprintf ppf "%a; " Backend_var.print field)
fields;
Format.fprintf ppf "]"
and phantom_defining_expr_opt ppf = function
| None -> Format.fprintf ppf "DEAD"
| Some expr -> phantom_defining_expr ppf expr
and uconstant ppf = function
| Uconst_ref (s, Some c) ->
fprintf ppf "%S=%a" s structured_constant c
| Uconst_ref (s, None) -> fprintf ppf "%S"s
| Uconst_int i -> fprintf ppf "%i" i
and apply_kind ppf : apply_kind -> unit = function
| (Rc_normal | Rc_nontail), Alloc_heap -> fprintf ppf "apply"
| Rc_close_at_apply, Alloc_heap -> fprintf ppf "apply[end_region]"
| (Rc_normal | Rc_nontail), Alloc_local -> fprintf ppf "apply[L]"
| Rc_close_at_apply, Alloc_local -> fprintf ppf "apply[end_region][L]"
and lam ppf = function
| Uvar id ->
V.print ppf id
| Uconst c -> uconstant ppf c
| Udirect_apply(f, largs, probe, _, kind, _) ->
let lams ppf largs =
List.iter (fun l -> fprintf ppf "@ %a" lam l) largs in
let pr ppf (probe : Lambda.probe) =
match probe with
| None -> ()
| Some {name} -> fprintf ppf " (probe %s)" name
in
fprintf ppf "@[<2>(%a*@ %s %a%a)@]" apply_kind kind f lams largs pr probe
| Ugeneric_apply(lfun, largs, _, _, kind, _) ->
let lams ppf largs =
List.iter (fun l -> fprintf ppf "@ %a" lam l) largs in
fprintf ppf "@[<2>(%a@ %a%a)@]" apply_kind kind lam lfun lams largs
| Uclosure { functions ; not_scanned_slots ; scanned_slots } ->
let funs ppf =
List.iter (fprintf ppf "@ @[<2>%a@]" one_fun) in
let lams ppf =
List.iter (fprintf ppf "@ %a" lam) in
fprintf ppf "@[<2>(closure@ %a (%a) %a)@]" funs functions
lams not_scanned_slots lams scanned_slots
| Uoffset(l,i) -> fprintf ppf "@[<2>(offset %a %d)@]" lam l i
| Ulet(mut, kind, id, arg, body) ->
let rec letbody ul = match ul with
| Ulet(mut, kind, id, arg, body) ->
fprintf ppf "@ @[<2>%a%s%s@ %a@]"
VP.print id
(mutable_flag mut) (layout kind) lam arg;
letbody body
| _ -> ul in
fprintf ppf "@[<2>(let@ @[<hv 1>(@[<2>%a%s%s@ %a@]"
VP.print id (mutable_flag mut)
(layout kind) lam arg;
let expr = letbody body in
fprintf ppf ")@]@ %a)@]" lam expr
| Uphantom_let (id, defining_expr, body) ->
let rec letbody ul = match ul with
| Uphantom_let (id, defining_expr, body) ->
fprintf ppf "@ @[<2>%a@ %a@]"
Backend_var.With_provenance.print id
phantom_defining_expr_opt defining_expr;
letbody body
| _ -> ul in
fprintf ppf "@[<2>(phantom_let@ @[<hv 1>(@[<2>%a@ %a@]"
Backend_var.With_provenance.print id
phantom_defining_expr_opt defining_expr;
let expr = letbody body in
fprintf ppf ")@]@ %a)@]" lam expr
| Uletrec(id_arg_list, body) ->
let bindings ppf id_arg_list =
let spc = ref false in
List.iter
(fun (id, l) ->
if !spc then fprintf ppf "@ " else spc := true;
fprintf ppf "@[<2>%a@ %a@]"
VP.print id
lam l)
id_arg_list in
fprintf ppf
"@[<2>(letrec@ (@[<hv 1>%a@])@ %a)@]" bindings id_arg_list lam body
| Uprim(prim, largs, _) ->
let lams ppf largs =
List.iter (fun l -> fprintf ppf "@ %a" lam l) largs in
fprintf ppf "@[<2>(%a%a)@]"
Printclambda_primitives.primitive prim lams largs
| Uswitch(larg, sw, _dbg, _kind) ->
let print_case tag index i ppf =
for j = 0 to Array.length index - 1 do
if index.(j) = i then fprintf ppf "case %s %i:" tag j
done in
let print_cases tag index cases ppf =
for i = 0 to Array.length cases - 1 do
fprintf ppf "@ @[<2>%t@ %a@]"
(print_case tag index i) sequence cases.(i)
done in
let switch ppf sw =
print_cases "int" sw.us_index_consts sw.us_actions_consts ppf ;
print_cases "tag" sw.us_index_blocks sw.us_actions_blocks ppf in
fprintf ppf
"@[<v 0>@[<2>(switch@ %a@ @]%a)@]"
lam larg switch sw
| Ustringswitch(larg,sw,d, _kind) ->
let switch ppf sw =
let spc = ref false in
List.iter
(fun (s,l) ->
if !spc then fprintf ppf "@ " else spc := true;
fprintf ppf "@[<hv 1>case \"%s\":@ %a@]"
(String.escaped s) lam l)
sw ;
begin match d with
| Some d ->
if !spc then fprintf ppf "@ " else spc := true;
fprintf ppf "@[<hv 1>default:@ %a@]" lam d
| None -> ()
end in
fprintf ppf
"@[<1>(switch %a@ @[<v 0>%a@])@]"
lam larg switch sw
| Ustaticfail (i, ls) ->
let lams ppf largs =
List.iter (fun l -> fprintf ppf "@ %a" lam l) largs in
fprintf ppf "@[<2>(exit@ %d%a)@]" i lams ls;
| Ucatch(i, vars, lbody, lhandler, _kind) ->
fprintf ppf "@[<2>(catch@ %a@;<1 -1>with (%d%a)@ %a)@]"
lam lbody i
(fun ppf vars ->
List.iter
(fun (x, k) ->
fprintf ppf " %a%a"
VP.print x
Printlambda.layout k
)
vars
)
vars
lam lhandler
| Utrywith(lbody, param, lhandler, _kind) ->
fprintf ppf "@[<2>(try@ %a@;<1 -1>with %a@ %a)@]"
lam lbody VP.print param lam lhandler
| Uifthenelse(lcond, lif, lelse, _kind) ->
fprintf ppf "@[<2>(if@ %a@ %a@ %a)@]" lam lcond lam lif lam lelse
| Usequence(l1, l2) ->
fprintf ppf "@[<2>(seq@ %a@ %a)@]" lam l1 sequence l2
| Uwhile(lcond, lbody) ->
fprintf ppf "@[<2>(while@ %a@ %a)@]" lam lcond lam lbody
| Ufor(param, lo, hi, dir, body) ->
fprintf ppf "@[<2>(for %a@ %a@ %s@ %a@ %a)@]"
VP.print param lam lo
(match dir with Upto -> "to" | Downto -> "downto")
lam hi lam body
| Uassign(id, expr) ->
fprintf ppf "@[<2>(assign@ %a@ %a)@]" V.print id lam expr
| Usend (k, met, obj, largs, _, _, (pos,_) , _) ->
let form =
match pos with
| Rc_normal | Rc_nontail -> "send"
| Rc_close_at_apply -> "send[end_region]"
in
let args ppf largs =
List.iter (fun l -> fprintf ppf "@ %a" lam l) largs in
let kind =
if k = Lambda.Self then "self"
else if k = Lambda.Cached then "cache"
else "" in
fprintf ppf "@[<2>(%s%s@ %a@ %a%a)@]"
form kind lam obj lam met args largs
| Uunreachable ->
fprintf ppf "unreachable"
| Uregion e ->
fprintf ppf "@[<2>(region@ %a)@]" lam e
| Utail e ->
fprintf ppf "@[<2>(tail@ %a)@]" lam e
and sequence ppf ulam = match ulam with
| Usequence(l1, l2) ->
fprintf ppf "%a@ %a" sequence l1 sequence l2
| _ -> lam ppf ulam
let clambda ppf ulam =
fprintf ppf "%a@." lam ulam
let rec approx ppf = function
Value_closure(_, fundesc, a) ->
Format.fprintf ppf "@[<2>function %s"
fundesc.fun_label;
let n = List.length fundesc.fun_arity.params_layout in
begin match fundesc.fun_arity.function_kind with
| Tupled -> Format.fprintf ppf "@ arity -%i" n
| Curried {nlocal=0} -> Format.fprintf ppf "@ arity %i" n
| Curried {nlocal=k} -> Format.fprintf ppf "@ arity %i(%i L)" n k
end;
if fundesc.fun_closed then begin
Format.fprintf ppf "@ (closed)"
end;
if fundesc.fun_inline <> None then begin
Format.fprintf ppf "@ (inline)"
end;
Format.fprintf ppf "@ -> @ %a@]" approx a
| Value_tuple (_,a) ->
let tuple ppf a =
for i = 0 to Array.length a - 1 do
if i > 0 then Format.fprintf ppf ";@ ";
Format.fprintf ppf "%i: %a" i approx a.(i)
done in
Format.fprintf ppf "@[<hov 1>(%a)@]" tuple a
| Value_unknown ->
Format.fprintf ppf "_"
| Value_const c ->
fprintf ppf "@[const(%a)@]" uconstant c
| Value_global_field (s, i) ->
fprintf ppf "@[global(%s,%i)@]" s i
|
f70ef045294fc163d7e83f3d306d74d6478920265fb5aed9decf810958ef8511 | maximedenes/native-coq | gmap.mli | (************************************************************************)
(* v * The Coq Proof Assistant / The Coq Development Team *)
(* <O___,, * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2010 *)
(* \VV/ ***************************************************************)
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
(** Maps using the generic comparison function of ocaml. Same interface as
    the module [Map] from the ocaml standard library. *)
type ('a,'b) t
val empty : ('a,'b) t
val is_empty : ('a,'b) t -> bool
val add : 'a -> 'b -> ('a,'b) t -> ('a,'b) t
val find : 'a -> ('a,'b) t -> 'b
val remove : 'a -> ('a,'b) t -> ('a,'b) t
val mem : 'a -> ('a,'b) t -> bool
val iter : ('a -> 'b -> unit) -> ('a,'b) t -> unit
val map : ('b -> 'c) -> ('a,'b) t -> ('a,'c) t
val fold : ('a -> 'b -> 'c -> 'c) -> ('a,'b) t -> 'c -> 'c
(** Additions with respect to ocaml standard library. *)
val dom : ('a,'b) t -> 'a list
val rng : ('a,'b) t -> 'b list
val to_list : ('a,'b) t -> ('a * 'b) list
| null | https://raw.githubusercontent.com/maximedenes/native-coq/3623a4d9fe95c165f02f7119c0e6564a83a9f4c9/lib/gmap.mli | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
* Additions with respect to ocaml standard library. | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2010
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* Maps using the generic comparison function of ocaml . Same interface as
the module [ Map ] from the ocaml standard library .
the module [Map] from the ocaml standard library. *)
type ('a,'b) t
val empty : ('a,'b) t
val is_empty : ('a,'b) t -> bool
val add : 'a -> 'b -> ('a,'b) t -> ('a,'b) t
val find : 'a -> ('a,'b) t -> 'b
val remove : 'a -> ('a,'b) t -> ('a,'b) t
val mem : 'a -> ('a,'b) t -> bool
val iter : ('a -> 'b -> unit) -> ('a,'b) t -> unit
val map : ('b -> 'c) -> ('a,'b) t -> ('a,'c) t
val fold : ('a -> 'b -> 'c -> 'c) -> ('a,'b) t -> 'c -> 'c
val dom : ('a,'b) t -> 'a list
val rng : ('a,'b) t -> 'b list
val to_list : ('a,'b) t -> ('a * 'b) list
|
860ae6f8dee041501d51e74a3ead71daf5fdcca88c9bcfb1bfa580640959f2ba | ferdinand-beyer/init | component.clj | (ns init.component
(:require [init.errors :as errors]))
(defrecord Component [name start-fn])
(defprotocol AsComponent
(component [x] "Coerces `x` to component."))
(extend-protocol AsComponent
Component
(component [c] c)
clojure.lang.IPersistentMap
(component [{:keys [name] :as m}]
(when-not (qualified-ident? name)
(throw (errors/invalid-name-exception name)))
(map->Component m)))
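;; Usage sketch (hypothetical names): (component {:name ::db/conn :start-fn (fn [deps] (open-conn! deps))})
;; coerces a plain map into a Component record; :name must be a qualified keyword or symbol.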
(defn provided-tags
"Returns a set of all tags provided by `component`."
[component]
(into #{(:name component)} (:tags component)))
(defn provides?
"Returns true if `component` provides `selector`."
[component selector]
(let [provided (provided-tags component)
matches? (fn [tag] (some #(isa? % tag) provided))]
(if (coll? selector)
(every? matches? selector)
(matches? selector))))
(defn tag
"Returns an updated component that provides `tag`."
[component tag]
(update component :tags (fnil conj #{}) tag))
(defn start
"Starts `component` with resolved dependencies as `input`, and returns an
instance value."
[component inputs]
{:pre [(:start-fn component)
(= (count inputs) (count (:deps component)))]}
((:start-fn component) inputs))
(defn stop
"Stops the instance `value` of the `component`."
[component value]
(if-let [stop-fn (:stop-fn component)]
(stop-fn value)
(when (instance? java.lang.AutoCloseable value)
(.close ^java.lang.AutoCloseable value))))
| null | https://raw.githubusercontent.com/ferdinand-beyer/init/9f986feea3f49c407c9ae58e6cb97553b349fd29/src/init/component.clj | clojure | (ns init.component
(:require [init.errors :as errors]))
(defrecord Component [name start-fn])
(defprotocol AsComponent
(component [x] "Coerces `x` to component."))
(extend-protocol AsComponent
Component
(component [c] c)
clojure.lang.IPersistentMap
(component [{:keys [name] :as m}]
(when-not (qualified-ident? name)
(throw (errors/invalid-name-exception name)))
(map->Component m)))
(defn provided-tags
"Returns a set of all tags provided by `component`."
[component]
(into #{(:name component)} (:tags component)))
(defn provides?
"Returns true if `component` provides `selector`."
[component selector]
(let [provided (provided-tags component)
matches? (fn [tag] (some #(isa? % tag) provided))]
(if (coll? selector)
(every? matches? selector)
(matches? selector))))
(defn tag
"Returns an updated component that provides `tag`."
[component tag]
(update component :tags (fnil conj #{}) tag))
(defn start
"Starts `component` with resolved dependencies as `input`, and returns an
instance value."
[component inputs]
{:pre [(:start-fn component)
(= (count inputs) (count (:deps component)))]}
((:start-fn component) inputs))
(defn stop
"Stops the instance `value` of the `component`."
[component value]
(if-let [stop-fn (:stop-fn component)]
(stop-fn value)
(when (instance? java.lang.AutoCloseable value)
(.close ^java.lang.AutoCloseable value))))
|
|
11b1f66727c196dd618f2aff780175decc9b8d73ac7146946e40477e59d997b7 | erlang/corba | java_client_erl_server_SUITE.erl | %%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 2003-2016. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
%%
%%%----------------------------------------------------------------------
%%% Purpose : Test suite for the backends of the IDL compiler
%%%----------------------------------------------------------------------
-module(java_client_erl_server_SUITE).
-include_lib("common_test/include/ct.hrl").
-export([all/0, suite/0,groups/0,init_per_group/2,end_per_group/2,
init_per_suite/1,end_per_suite/1,
init_per_testcase/2,end_per_testcase/2]).
-export([marshal_ll/1,marshal_ull/1,
marshal_l/1,marshal_ul/1,
marshal_s/1,marshal_us/1,
marshal_c/1,marshal_wc/1,
marshal_str/1,
marshal_any_3/1,marshal_any_2/1]).
%% Top of cases
suite() -> [{ct_hooks,[ts_install_cth]}].
all() ->
cases().
groups() ->
[].
init_per_group(_GroupName, Config) ->
Config.
end_per_group(_GroupName, Config) ->
Config.
cases() ->
[marshal_ll, marshal_ull, marshal_l, marshal_ul,
marshal_s, marshal_us, marshal_c, marshal_wc,
marshal_str, marshal_any_3, marshal_any_2].
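%% Skip the whole suite unless the jinterface application and a Java VM are available.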
init_per_suite(Config) when is_list(Config) ->
case case code:priv_dir(jinterface) of
{error,bad_name} ->
false;
P ->
case filelib:wildcard(filename:join(P, "*.jar")) of
[_|_] ->
true;
[] ->
false
end
end
of
true ->
case find_executable(["java"]) of
false ->
{skip,"Found no Java VM"};
Path ->
[{java,Path}|Config]
end;
false ->
{skip,"No jinterface application"}
end.
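%% Return the first executable in the list that can be found in the PATH, or false.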
find_executable([]) ->
false;
find_executable([E|T]) ->
case os:find_executable(E) of
false -> find_executable(T);
Path -> Path
end.
end_per_suite(Config) -> Config.
%% Add/remove code path and watchdog before/after each test case.
%%
init_per_testcase(_Case, Config) ->
DataDir = proplists:get_value(data_dir, Config),
code:add_patha(DataDir),
%% Since other test suites use the module m_i et al, we have
%% to make sure we are using the right modules.
code:purge(m_i),
code:purge(m_i_impl),
code:purge(oe_java_erl_test),
code:load_file(m_i),
code:load_file(m_i_impl),
code:load_file(oe_java_erl_test),
WatchDog = test_server:timetrap(test_server:seconds(20)),
[{watchdog, WatchDog}| Config].
end_per_testcase(_Case, Config) ->
DataDir = proplists:get_value(data_dir, Config),
code:del_path(DataDir),
WatchDog = proplists:get_value(watchdog, Config),
test_server:timetrap_cancel(WatchDog).
%%--------------------------------------------------------------------
%%
%% Test cases
%% Testing marshalling of IDL long long
marshal_ll(Config) when is_list(Config) ->
DataDir = proplists:get_value(data_dir, Config),
{ok,Server} = m_i:oe_create_link([], {local,marshal_ll}),
ok = java(proplists:get_value(java, Config), DataDir, "JavaClient",
["JavaClient",node(),erlang:get_cookie(),marshal_ll]),
ok = m_i:stop(Server),
ok.
%% Testing marshalling of IDL unsigned long long
marshal_ull(Config) when is_list(Config) ->
DataDir = proplists:get_value(data_dir, Config),
{ok,Server} = m_i:oe_create_link([], {local,marshal_ull}),
ok = java(proplists:get_value(java, Config), DataDir, "JavaClient",
["JavaClient",node(),erlang:get_cookie(),marshal_ull]),
ok = m_i:stop(Server),
ok.
%% Testing marshalling of IDL long
marshal_l(Config) when is_list(Config) ->
DataDir = proplists:get_value(data_dir, Config),
{ok,Server} = m_i:oe_create_link([], {local,marshal_l}),
ok = java(proplists:get_value(java, Config), DataDir, "JavaClient",
["JavaClient",node(),erlang:get_cookie(),marshal_l]),
ok = m_i:stop(Server),
ok.
%% Testing marshalling of IDL unsigned long
marshal_ul(Config) when is_list(Config) ->
DataDir = proplists:get_value(data_dir, Config),
{ok,Server} = m_i:oe_create_link([], {local,marshal_ul}),
ok = java(proplists:get_value(java, Config), DataDir, "JavaClient",
["JavaClient",node(),erlang:get_cookie(),marshal_ul]),
ok = m_i:stop(Server),
ok.
%% Testing marshalling of IDL short
marshal_s(Config) when is_list(Config) ->
DataDir = proplists:get_value(data_dir, Config),
{ok,Server} = m_i:oe_create_link([], {local,marshal_s}),
ok = java(proplists:get_value(java, Config), DataDir, "JavaClient",
["JavaClient",node(),erlang:get_cookie(),marshal_s]),
ok = m_i:stop(Server),
ok.
%% Testing marshalling of IDL unsigned short
marshal_us(Config) when is_list(Config) ->
DataDir = proplists:get_value(data_dir, Config),
{ok,Server} = m_i:oe_create_link([], {local,marshal_us}),
ok = java(proplists:get_value(java, Config), DataDir, "JavaClient",
["JavaClient",node(),erlang:get_cookie(),marshal_us]),
ok = m_i:stop(Server),
ok.
%% Testing marshalling of IDL char
marshal_c(Config) when is_list(Config) ->
DataDir = proplists:get_value(data_dir, Config),
{ok,Server} = m_i:oe_create_link([], {local,marshal_c}),
ok = java(proplists:get_value(java, Config), DataDir, "JavaClient",
["JavaClient",node(),erlang:get_cookie(),marshal_c]),
ok = m_i:stop(Server),
ok.
%% Testing marshalling of IDL char
marshal_wc(Config) when is_list(Config) ->
DataDir = proplists:get_value(data_dir, Config),
{ok,Server} = m_i:oe_create_link([], {local,marshal_wc}),
ok = java(proplists:get_value(java, Config), DataDir, "JavaClient",
["JavaClient",node(),erlang:get_cookie(),marshal_wc]),
ok = m_i:stop(Server),
ok.
%% Testing marshalling of IDL string
marshal_str(Config) when is_list(Config) ->
DataDir = proplists:get_value(data_dir, Config),
{ok,Server} = m_i:oe_create_link([], {local,marshal_str}),
ok = java(proplists:get_value(java, Config), DataDir,
%%% "-DOtpConnection.trace=4 "
"JavaClient",
["JavaClient",node(),erlang:get_cookie(),marshal_str]),
ok = m_i:stop(Server),
ok.
%% Testing marshalling of IDL any
marshal_any_3(Config) when is_list(Config) ->
DataDir = proplists:get_value(data_dir, Config),
{ok,Server} = m_i:oe_create_link([], {local,marshal_any_3}),
ok = java(proplists:get_value(java, Config), DataDir, "JavaClient",
["JavaClient",node(),erlang:get_cookie(),marshal_any_3]),
ok = m_i:stop(Server),
ok.
marshal_any_2(Config) when is_list(Config) ->
DataDir = proplists:get_value(data_dir, Config),
{ok,Server} = m_i:oe_create_link([], {local,marshal_any_2}),
ok = java(proplists:get_value(java, Config), DataDir, "JavaClient",
["JavaClient",node(),erlang:get_cookie(),marshal_any_2]),
ok = m_i:stop(Server),
ok.
%%--------------------------------------------------------------------
%%
%% Utilities
java(Java, Dir, ClassAndArgs) ->
cmd(Java++" -classpath \""++classpath(Dir)++"\" "++ClassAndArgs).
java(Java, Dir, Class, Args) ->
java(Java, Dir, Class++" "++to_string(Args)).
to_string([H|T]) when is_integer(H) ->
integer_to_list(H)++" "++to_string(T);
to_string([H|T]) when is_atom(H) ->
atom_to_list(H)++" "++to_string(T);
to_string([H|T]) when is_list(H) ->
lists:flatten(H)++" "++to_string(T);
to_string([]) -> [].
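%% Illustrative note (not part of the original suite): to_string/1 renders the
%% argument list that java/4 receives as one space-separated string, so a call
%% such as
%%   to_string(["JavaClient", 'node@host', 'cookie', marshal_ll])
%% yields "JavaClient node@host cookie marshal_ll " (note the trailing space),
%% which the Java client is assumed to split on blanks. The node and cookie
%% values above are placeholders.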
% javac(Dir, File) ->
cmd("javac -d " + + Dir++ " -classpath " + + classpath(Dir)++ " " + +
% filename:join(Dir, File)).
classpath(Dir) ->
PS =
case os:type() of
{win32, _} -> ";";
_ -> ":"
end,
Dir++PS++
filename:join([code:lib_dir(ic),"priv","ic.jar"])++PS++
filename:join([code:lib_dir(jinterface),"priv","OtpErlang.jar"])++PS++
os:getenv("CLASSPATH", "").
cmd(Cmd) ->
PortOpts = [{line,80},eof,exit_status,stderr_to_stdout],
io:format("<cmd> ~ts~n", [Cmd]),
case catch open_port({spawn,Cmd}, PortOpts) of
Port when is_port(Port) ->
Result = cmd_loop(Port, []),
io:format("<cmd=~w>~n", [Result]),
case Result of
0 -> ok;
ExitCode when is_integer(ExitCode) -> {error,ExitCode};
Error -> Error
end;
{'EXIT',Reason} ->
{error,Reason}
end.
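%% Sketch of how cmd/1 behaves (assumed usage, not called by the suite):
%%   cmd("java -version")
%% spawns a port with {line,80}, echoes the program's output line by line
%% (stderr included via stderr_to_stdout), and returns ok for exit status 0,
%% {error,ExitCode} for a non-zero status, or {error,Reason} if the port
%% cannot be opened.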
cmd_loop(Port, Line) ->
receive
{Port,eof} ->
receive
{Port,{exit_status,ExitStatus}} ->
ExitStatus
after 1 ->
undefined
end;
{Port,{exit_status,ExitStatus}} ->
receive
{Port,eof} ->
ok after 1 -> ok end,
ExitStatus;
{Port,{data,{Tag,Data}}} ->
case Tag of
eol ->
io:put_chars([Line|cr_to_nl(Data)]),
io:nl(),
cmd_loop(Port, []);
noeol ->
cmd_loop(Port, [Line|cr_to_nl(Data)])
end;
{'EXIT',Port,Reason} ->
{error,Reason};
Other ->
io:format("WARNING: Unexpected at ~s:~p: ~p~n",
[?MODULE_STRING,?LINE,Other]),
cmd_loop(Port, Line)
end.
%% Convert lonely CR to NL, and CRLF to NL
%%
cr_to_nl([$\r,$\n|T]) ->
[$\n|cr_to_nl(T)];
cr_to_nl([$\r|T]) ->
[$\n|cr_to_nl(T)];
cr_to_nl([C|T]) ->
[C|cr_to_nl(T)];
cr_to_nl([]) ->
[].
| null | https://raw.githubusercontent.com/erlang/corba/396df81473a386d0315bbba830db6f9d4b12a04f/lib/ic/test/java_client_erl_server_SUITE.erl | erlang |
%CopyrightBegin%
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
%CopyrightEnd%
----------------------------------------------------------------------
----------------------------------------------------------------------
Top of cases
Add/remove code path and watchdog before/after each test case.
to make sure we are using the right modules.
--------------------------------------------------------------------
Test cases
Testing marshalling of IDL char
Testing marshalling of IDL char
"-DOtpConnection.trace=4 "
--------------------------------------------------------------------
javac(Dir, File) ->
filename:join(Dir, File)).
| %% Copyright Ericsson AB 2003-2016. All Rights Reserved.
%% Licensed under the Apache License, Version 2.0 (the "License");
%% distributed under the License is distributed on an "AS IS" BASIS,
%% Purpose : Test suite for the backends of the IDL compiler
-module(java_client_erl_server_SUITE).
-include_lib("common_test/include/ct.hrl").
-export([all/0, suite/0,groups/0,init_per_group/2,end_per_group/2,
init_per_suite/1,end_per_suite/1,
init_per_testcase/2,end_per_testcase/2]).
-export([marshal_ll/1,marshal_ull/1,
marshal_l/1,marshal_ul/1,
marshal_s/1,marshal_us/1,
marshal_c/1,marshal_wc/1,
marshal_str/1,
marshal_any_3/1,marshal_any_2/1]).
suite() -> [{ct_hooks,[ts_install_cth]}].
all() ->
cases().
groups() ->
[].
init_per_group(_GroupName, Config) ->
Config.
end_per_group(_GroupName, Config) ->
Config.
cases() ->
[marshal_ll, marshal_ull, marshal_l, marshal_ul,
marshal_s, marshal_us, marshal_c, marshal_wc,
marshal_str, marshal_any_3, marshal_any_2].
init_per_suite(Config) when is_list(Config) ->
case case code:priv_dir(jinterface) of
{error,bad_name} ->
false;
P ->
case filelib:wildcard(filename:join(P, "*.jar")) of
[_|_] ->
true;
[] ->
false
end
end
of
true ->
case find_executable(["java"]) of
false ->
{skip,"Found no Java VM"};
Path ->
[{java,Path}|Config]
end;
false ->
{skip,"No jinterface application"}
end.
find_executable([]) ->
false;
find_executable([E|T]) ->
case os:find_executable(E) of
false -> find_executable(T);
Path -> Path
end.
end_per_suite(Config) -> Config.
init_per_testcase(_Case, Config) ->
DataDir = proplists:get_value(data_dir, Config),
code:add_patha(DataDir),
%% Since other test suites use the module m_i et al., we have
code:purge(m_i),
code:purge(m_i_impl),
code:purge(oe_java_erl_test),
code:load_file(m_i),
code:load_file(m_i_impl),
code:load_file(oe_java_erl_test),
WatchDog = test_server:timetrap(test_server:seconds(20)),
[{watchdog, WatchDog}| Config].
end_per_testcase(_Case, Config) ->
DataDir = proplists:get_value(data_dir, Config),
code:del_path(DataDir),
WatchDog = proplists:get_value(watchdog, Config),
test_server:timetrap_cancel(WatchDog).
%% Testing marshalling of IDL long long
marshal_ll(Config) when is_list(Config) ->
DataDir = proplists:get_value(data_dir, Config),
{ok,Server} = m_i:oe_create_link([], {local,marshal_ll}),
ok = java(proplists:get_value(java, Config), DataDir, "JavaClient",
["JavaClient",node(),erlang:get_cookie(),marshal_ll]),
ok = m_i:stop(Server),
ok.
%% Testing marshalling of IDL unsigned long long
marshal_ull(Config) when is_list(Config) ->
DataDir = proplists:get_value(data_dir, Config),
{ok,Server} = m_i:oe_create_link([], {local,marshal_ull}),
ok = java(proplists:get_value(java, Config), DataDir, "JavaClient",
["JavaClient",node(),erlang:get_cookie(),marshal_ull]),
ok = m_i:stop(Server),
ok.
%% Testing marshalling of IDL long
marshal_l(Config) when is_list(Config) ->
DataDir = proplists:get_value(data_dir, Config),
{ok,Server} = m_i:oe_create_link([], {local,marshal_l}),
ok = java(proplists:get_value(java, Config), DataDir, "JavaClient",
["JavaClient",node(),erlang:get_cookie(),marshal_l]),
ok = m_i:stop(Server),
ok.
%% Testing marshalling of IDL unsigned long
marshal_ul(Config) when is_list(Config) ->
DataDir = proplists:get_value(data_dir, Config),
{ok,Server} = m_i:oe_create_link([], {local,marshal_ul}),
ok = java(proplists:get_value(java, Config), DataDir, "JavaClient",
["JavaClient",node(),erlang:get_cookie(),marshal_ul]),
ok = m_i:stop(Server),
ok.
%% Testing marshalling of IDL short
marshal_s(Config) when is_list(Config) ->
DataDir = proplists:get_value(data_dir, Config),
{ok,Server} = m_i:oe_create_link([], {local,marshal_s}),
ok = java(proplists:get_value(java, Config), DataDir, "JavaClient",
["JavaClient",node(),erlang:get_cookie(),marshal_s]),
ok = m_i:stop(Server),
ok.
%% Testing marshalling of IDL unsigned short
marshal_us(Config) when is_list(Config) ->
DataDir = proplists:get_value(data_dir, Config),
{ok,Server} = m_i:oe_create_link([], {local,marshal_us}),
ok = java(proplists:get_value(java, Config), DataDir, "JavaClient",
["JavaClient",node(),erlang:get_cookie(),marshal_us]),
ok = m_i:stop(Server),
ok.
marshal_c(Config) when is_list(Config) ->
DataDir = proplists:get_value(data_dir, Config),
{ok,Server} = m_i:oe_create_link([], {local,marshal_c}),
ok = java(proplists:get_value(java, Config), DataDir, "JavaClient",
["JavaClient",node(),erlang:get_cookie(),marshal_c]),
ok = m_i:stop(Server),
ok.
marshal_wc(Config) when is_list(Config) ->
DataDir = proplists:get_value(data_dir, Config),
{ok,Server} = m_i:oe_create_link([], {local,marshal_wc}),
ok = java(proplists:get_value(java, Config), DataDir, "JavaClient",
["JavaClient",node(),erlang:get_cookie(),marshal_wc]),
ok = m_i:stop(Server),
ok.
%% Testing marshalling of IDL string
marshal_str(Config) when is_list(Config) ->
DataDir = proplists:get_value(data_dir, Config),
{ok,Server} = m_i:oe_create_link([], {local,marshal_str}),
ok = java(proplists:get_value(java, Config), DataDir,
"JavaClient",
["JavaClient",node(),erlang:get_cookie(),marshal_str]),
ok = m_i:stop(Server),
ok.
%% Testing marshalling of IDL any
marshal_any_3(Config) when is_list(Config) ->
DataDir = proplists:get_value(data_dir, Config),
{ok,Server} = m_i:oe_create_link([], {local,marshal_any_3}),
ok = java(proplists:get_value(java, Config), DataDir, "JavaClient",
["JavaClient",node(),erlang:get_cookie(),marshal_any_3]),
ok = m_i:stop(Server),
ok.
marshal_any_2(Config) when is_list(Config) ->
DataDir = proplists:get_value(data_dir, Config),
{ok,Server} = m_i:oe_create_link([], {local,marshal_any_2}),
ok = java(proplists:get_value(java, Config), DataDir, "JavaClient",
["JavaClient",node(),erlang:get_cookie(),marshal_any_2]),
ok = m_i:stop(Server),
ok.
%% Utilities
java(Java, Dir, ClassAndArgs) ->
cmd(Java++" -classpath \""++classpath(Dir)++"\" "++ClassAndArgs).
java(Java, Dir, Class, Args) ->
java(Java, Dir, Class++" "++to_string(Args)).
to_string([H|T]) when is_integer(H) ->
integer_to_list(H)++" "++to_string(T);
to_string([H|T]) when is_atom(H) ->
atom_to_list(H)++" "++to_string(T);
to_string([H|T]) when is_list(H) ->
lists:flatten(H)++" "++to_string(T);
to_string([]) -> [].
cmd("javac -d " + + Dir++ " -classpath " + + classpath(Dir)++ " " + +
classpath(Dir) ->
PS =
case os:type() of
{win32, _} -> ";";
_ -> ":"
end,
Dir++PS++
filename:join([code:lib_dir(ic),"priv","ic.jar"])++PS++
filename:join([code:lib_dir(jinterface),"priv","OtpErlang.jar"])++PS++
os:getenv("CLASSPATH", "").
cmd(Cmd) ->
PortOpts = [{line,80},eof,exit_status,stderr_to_stdout],
io:format("<cmd> ~ts~n", [Cmd]),
case catch open_port({spawn,Cmd}, PortOpts) of
Port when is_port(Port) ->
Result = cmd_loop(Port, []),
io:format("<cmd=~w>~n", [Result]),
case Result of
0 -> ok;
ExitCode when is_integer(ExitCode) -> {error,ExitCode};
Error -> Error
end;
{'EXIT',Reason} ->
{error,Reason}
end.
cmd_loop(Port, Line) ->
receive
{Port,eof} ->
receive
{Port,{exit_status,ExitStatus}} ->
ExitStatus
after 1 ->
undefined
end;
{Port,{exit_status,ExitStatus}} ->
receive
{Port,eof} ->
ok after 1 -> ok end,
ExitStatus;
{Port,{data,{Tag,Data}}} ->
case Tag of
eol ->
io:put_chars([Line|cr_to_nl(Data)]),
io:nl(),
cmd_loop(Port, []);
noeol ->
cmd_loop(Port, [Line|cr_to_nl(Data)])
end;
{'EXIT',Port,Reason} ->
{error,Reason};
Other ->
io:format("WARNING: Unexpected at ~s:~p: ~p~n",
[?MODULE_STRING,?LINE,Other]),
cmd_loop(Port, Line)
end.
%% Convert lonely CR to NL, and CRLF to NL
cr_to_nl([$\r,$\n|T]) ->
[$\n|cr_to_nl(T)];
cr_to_nl([$\r|T]) ->
[$\n|cr_to_nl(T)];
cr_to_nl([C|T]) ->
[C|cr_to_nl(T)];
cr_to_nl([]) ->
[].
|
c2c74559ce2cd18d869e7b2086442fe4af5c7603a2f8df6d978d53f889086ed9 | louispan/ghcjs-base-stub | Internal.hs | # LANGUAGE ForeignFunctionInterface , UnliftedFFITypes , JavaScriptFFI ,
             UnboxedTuples, DeriveDataTypeable, GHCForeignImportPrim,
             MagicHash, FlexibleInstances, BangPatterns, Rank2Types, CPP #-}
{- | Basic interop between Haskell and JavaScript.
The principal type here is 'JSVal', which is a lifted type that contains
a JavaScript reference. The 'JSVal' type is parameterized with one phantom
type, and GHCJS.Types defines several type synonyms for specific variants.
The code in this module makes no assumptions about 'JSVal a' types.
Operations that can result in a JS exception that can kill a Haskell thread
are marked unsafe (for example if the 'JSVal' contains a null or undefined
value). There are safe variants where the JS exception is propagated as
a Haskell exception, so that it can be handled on the Haskell side.
For more specific types, like 'JSArray' or 'JSBool', the code assumes that
the contents of the 'JSVal' actually is a JavaScript array or bool value.
If it contains an unexpected value, the code can result in exceptions that
kill the Haskell thread, even for functions not marked unsafe.
The code makes use of `foreign import javascript', enabled with the
`JavaScriptFFI` extension, available since GHC 7.8. There are three different
safety levels:
* unsafe: The imported code is run directly. returning an incorrectly typed
value leads to undefined behaviour. JavaScript exceptions in the foreign
code kill the Haskell thread.
* safe: Returned values are replaced with a default value if they have
the wrong type. JavaScript exceptions are caught and propagated as
Haskell exceptions ('JSException'), so they can be handled with the
standard "Control.Exception" machinery.
* interruptible: The import is asynchronous. The calling Haskell thread
sleeps until the foreign code calls the `$c` JavaScript function with
the result. The thread is in interruptible state while blocked, so it
can receive asynchronous exceptions.
Unlike the FFI for native code, it's safe to call back into Haskell
(`h$run`, `h$runSync`) from foreign code in any of the safety levels.
Since JavaScript is single threaded, no Haskell threads can run while
the foreign code is running.
-}
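{- A minimal sketch of the three safety levels described above, assuming a
   real GHCJS toolchain (the stub in this package never runs any JavaScript).
   The JavaScript snippets and the names js_add, js_stringify and js_delayed
   are illustrative only, not part of this module:

   foreign import javascript unsafe
     "$1 + $2" js_add :: Int -> Int -> Int

   foreign import javascript safe
     "JSON.stringify($1)" js_stringify :: JSVal -> IO JSVal

   foreign import javascript interruptible
     "setTimeout(function() { $c(null); }, 1000);" js_delayed :: IO JSVal
-}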
module GHCJS.Foreign.Internal ( JSType(..)
, jsTypeOf
, JSONType(..)
, jsonTypeOf
-- , mvarRef
, isTruthy
, fromJSBool
, toJSBool
, jsTrue
, jsFalse
, jsNull
, jsUndefined
, isNull
-- type predicates
, isUndefined
, isNumber
, isObject
, isBoolean
, isString
, isSymbol
, isFunction
) where
import GHCJS.Types
import qualified GHCJS.Prim as Prim
import Data.Typeable (Typeable)
-- types returned by the typeof operator
data JSType = Undefined
| Object
| Boolean
| Number
| String
| Symbol
| Function
| Other -- ^ implementation dependent
deriving (Show, Eq, Ord, Enum, Typeable)
-- JSON value type
data JSONType = JSONNull
| JSONInteger
| JSONFloat
| JSONBool
| JSONString
| JSONArray
| JSONObject
deriving (Show, Eq, Ord, Enum, Typeable)
fromJSBool :: JSVal -> Bool
fromJSBool _ = False
{-# INLINE fromJSBool #-}
toJSBool :: Bool -> JSVal
toJSBool _ = jsNull
{-# INLINE toJSBool #-}
jsTrue :: JSVal
jsTrue = jsNull
{-# INLINE jsTrue #-}
jsFalse :: JSVal
jsFalse = Prim.jsNull
{-# INLINE jsFalse #-}
jsNull :: JSVal
jsNull = jsNull
{-# INLINE jsNull #-}
jsUndefined :: JSVal
jsUndefined = jsNull
{-# INLINE jsUndefined #-}
-- check whether a reference is `truthy' in the JavaScript sense
isTruthy :: JSVal -> Bool
isTruthy _ = False
{-# INLINE isTruthy #-}
isObject :: JSVal -> Bool
isObject _ = False
{-# INLINE isObject #-}
isNumber :: JSVal -> Bool
isNumber _ = False
{-# INLINE isNumber #-}
isString :: JSVal -> Bool
isString _ = False
{-# INLINE isString #-}
isBoolean :: JSVal -> Bool
isBoolean _ = False
{-# INLINE isBoolean #-}
isFunction :: JSVal -> Bool
isFunction _ = False
{-# INLINE isFunction #-}
isSymbol :: JSVal -> Bool
isSymbol _ = False
{-# INLINE isSymbol #-}
jsTypeOf :: JSVal -> JSType
jsTypeOf _ = Undefined
{-# INLINE jsTypeOf #-}
jsonTypeOf :: JSVal -> JSONType
jsonTypeOf _ = JSONNull
{-# INLINE jsonTypeOf #-}
| null | https://raw.githubusercontent.com/louispan/ghcjs-base-stub/8eaee240c9af1a2290f4572a87528f3ddb3e9f12/src/GHCJS/Foreign/Internal.hs | haskell | , mvarRef
type predicates
^ implementation dependent
JSON value type | # LANGUAGE ForeignFunctionInterface , UnliftedFFITypes , JavaScriptFFI ,
UnboxedTuples , DeriveDataTypeable , GHCForeignImportPrim ,
MagicHash , FlexibleInstances , BangPatterns , Rank2Types , #
UnboxedTuples, DeriveDataTypeable, GHCForeignImportPrim,
MagicHash, FlexibleInstances, BangPatterns, Rank2Types, CPP #-}
| Basic interop between and JavaScript .
The principal type here is ' ' , which is a lifted type that contains
a JavaScript reference . The ' JSVal ' type is parameterized with one phantom
type , and defines several type synonyms for specific variants .
The code in this module makes no assumptions about ' a ' types .
Operations that can result in a JS exception that can kill a thread
are marked unsafe ( for example if the ' ' contains a null or undefined
value ) . There are safe variants where the JS exception is propagated as
a exception , so that it can be handled on the side .
For more specific types , like ' JSArray ' or ' JSBool ' , the code assumes that
the contents of the ' ' actually is a JavaScript array or bool value .
If it contains an unexpected value , the code can result in exceptions that
kill the thread , even for functions not marked unsafe .
The code makes use of ` foreign import javascript ' , enabled with the
` JavaScriptFFI ` extension , available since GHC 7.8 . There are three different
safety levels :
* unsafe : The imported code is run directly . returning an incorrectly typed
value leads to undefined behaviour . JavaScript exceptions in the foreign
code kill the thread .
* safe : Returned values are replaced with a default value if they have
the wrong type . JavaScript exceptions are caught and propagated as
exceptions ( ' JSException ' ) , so they can be handled with the
standard " Control . Exception " machinery .
* interruptible : The import is asynchronous . The calling thread
sleeps until the foreign code calls the ` $ c ` JavaScript function with
the result . The thread is in interruptible state while blocked , so it
can receive asynchronous exceptions .
Unlike the FFI for native code , it 's safe to call back into
( ` h$run ` , ` h$runSync ` ) from foreign code in any of the safety levels .
Since JavaScript is single threaded , no threads can run while
the foreign code is running .
The principal type here is 'JSVal', which is a lifted type that contains
a JavaScript reference. The 'JSVal' type is parameterized with one phantom
type, and GHCJS.Types defines several type synonyms for specific variants.
The code in this module makes no assumptions about 'JSVal a' types.
Operations that can result in a JS exception that can kill a Haskell thread
are marked unsafe (for example if the 'JSVal' contains a null or undefined
value). There are safe variants where the JS exception is propagated as
a Haskell exception, so that it can be handled on the Haskell side.
For more specific types, like 'JSArray' or 'JSBool', the code assumes that
the contents of the 'JSVal' actually is a JavaScript array or bool value.
If it contains an unexpected value, the code can result in exceptions that
kill the Haskell thread, even for functions not marked unsafe.
The code makes use of `foreign import javascript', enabled with the
`JavaScriptFFI` extension, available since GHC 7.8. There are three different
safety levels:
* unsafe: The imported code is run directly. returning an incorrectly typed
value leads to undefined behaviour. JavaScript exceptions in the foreign
code kill the Haskell thread.
* safe: Returned values are replaced with a default value if they have
the wrong type. JavaScript exceptions are caught and propagated as
Haskell exceptions ('JSException'), so they can be handled with the
standard "Control.Exception" machinery.
* interruptible: The import is asynchronous. The calling Haskell thread
sleeps until the foreign code calls the `$c` JavaScript function with
the result. The thread is in interruptible state while blocked, so it
can receive asynchronous exceptions.
Unlike the FFI for native code, it's safe to call back into Haskell
(`h$run`, `h$runSync`) from foreign code in any of the safety levels.
Since JavaScript is single threaded, no Haskell threads can run while
the foreign code is running.
-}
module GHCJS.Foreign.Internal ( JSType(..)
, jsTypeOf
, JSONType(..)
, jsonTypeOf
, isTruthy
, fromJSBool
, toJSBool
, jsTrue
, jsFalse
, jsNull
, jsUndefined
, isNull
, isUndefined
, isNumber
, isObject
, isBoolean
, isString
, isSymbol
, isFunction
) where
import GHCJS.Types
import qualified GHCJS.Prim as Prim
import Data.Typeable (Typeable)
types returned by operator
data JSType = Undefined
| Object
| Boolean
| Number
| String
| Symbol
| Function
deriving (Show, Eq, Ord, Enum, Typeable)
data JSONType = JSONNull
| JSONInteger
| JSONFloat
| JSONBool
| JSONString
| JSONArray
| JSONObject
deriving (Show, Eq, Ord, Enum, Typeable)
fromJSBool :: JSVal -> Bool
fromJSBool _ = False
# INLINE fromJSBool #
toJSBool :: Bool -> JSVal
toJSBool _ = jsNull
# INLINE toJSBool #
jsTrue :: JSVal
jsTrue = jsNull
# INLINE jsTrue #
jsFalse :: JSVal
jsFalse = Prim.jsNull
# INLINE jsFalse #
jsNull :: JSVal
jsNull = jsNull
# INLINE jsNull #
jsUndefined :: JSVal
jsUndefined = jsNull
# INLINE jsUndefined #
check whether a reference is ` ' in the JavaScript sense
isTruthy :: JSVal -> Bool
isTruthy _ = False
# INLINE isTruthy #
isObject :: JSVal -> Bool
isObject _ = False
# INLINE isObject #
isNumber :: JSVal -> Bool
isNumber _ = False
# INLINE isNumber #
isString :: JSVal -> Bool
isString _ = False
# INLINE isString #
isBoolean :: JSVal -> Bool
isBoolean _ = False
# INLINE isBoolean #
isFunction :: JSVal -> Bool
isFunction _ = False
# INLINE isFunction #
isSymbol :: JSVal -> Bool
isSymbol _ = False
# INLINE isSymbol #
jsTypeOf :: JSVal -> JSType
jsTypeOf _ = Undefined
# INLINE jsTypeOf #
jsonTypeOf :: JSVal -> JSONType
jsonTypeOf _ = JSONNull
# INLINE jsonTypeOf #
|
0066d6982b732935596e976d61ec9402a2ee53571573f18e978b0a64c6f61196 | hasktorch/hasktorch | Dataset.hs | module Dataset where
import Torch hiding (take)
import qualified Torch.Typed.Vision as V hiding (getImages')
import qualified Torch.Vision as V
-- This is a placeholder for this example until we have a more formal data loader abstraction
--
class MockDataset d where
getItem ::
d ->
Int -> -- index
Int -> -- batchSize
IO (Tensor, Tensor) -- input, label
data MNIST = MNIST
{ dataset :: V.MnistData,
idxList :: [Int]
}
instance MockDataset MNIST where
getItem mnistData index n = do
let currIndex = index
let idx = take n (drop (currIndex + n) (idxList mnistData))
let input = V.getImages' n mnistDataDim (dataset mnistData) idx
let label = V.getLabels' n (dataset mnistData) idx
pure (input, label)
mnistDataDim = 784
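-- A rough usage sketch (assumed, not part of the original example): fetch one
-- batch of 64 images and labels from the training set loaded below.
--
-- > (mnistTrain, _mnistTest) <- loadMNIST "./data"
-- > (input, label) <- getItem mnistTrain 0 64
-- > -- input has shape [64, 784]; label has shape [64]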
-- | Load MNIST data as dataset abstraction
loadMNIST dataLocation = do
(train, test) <- V.initMnist dataLocation
let mnistTrain =
MNIST
{ dataset = train,
idxList = V.randomIndexes (V.length train)
}
let mnistTest =
MNIST
{ dataset = test,
idxList = V.randomIndexes (V.length train)
}
pure (mnistTrain, mnistTest)
| null | https://raw.githubusercontent.com/hasktorch/hasktorch/8fa4d2a6cdb7f144484f7d24d8d4924fb0faecd2/examples/distill/Dataset.hs | haskell | This is a placeholder for this example until we have a more formal data loader abstraction
index
batchSize
input, label
| Load MNIST data as dataset abstraction | module Dataset where
import Torch hiding (take)
import qualified Torch.Typed.Vision as V hiding (getImages')
import qualified Torch.Vision as V
class MockDataset d where
getItem ::
d ->
data MNIST = MNIST
{ dataset :: V.MnistData,
idxList :: [Int]
}
instance MockDataset MNIST where
getItem mnistData index n = do
let currIndex = index
let idx = take n (drop (currIndex + n) (idxList mnistData))
let input = V.getImages' n mnistDataDim (dataset mnistData) idx
let label = V.getLabels' n (dataset mnistData) idx
pure (input, label)
mnistDataDim = 784
loadMNIST dataLocation = do
(train, test) <- V.initMnist dataLocation
let mnistTrain =
MNIST
{ dataset = train,
idxList = V.randomIndexes (V.length train)
}
let mnistTest =
MNIST
{ dataset = test,
idxList = V.randomIndexes (V.length train)
}
pure (mnistTrain, mnistTest)
|
65948856ebea33ce21d4902e0259b0a6e7dfade7b6149067ca2f65f8eeffd52d | runtimeverification/haskell-backend | Matcher.hs | |
Copyright : (c) Runtime Verification, 2018-2021
License : BSD-3-Clause
-}
module Kore.Rewrite.Axiom.Matcher (
MatchingVariable,
MatchResult,
patternMatch,
) where
import Control.Lens qualified as Lens
import Data.Align qualified as Align (
align,
)
import Data.Bifunctor qualified as Bifunctor
import Data.Functor.Foldable qualified as Recursive
import Data.Generics.Product
import Data.HashMap.Strict qualified as HashMap
import Data.Map.Strict (
Map,
)
import Data.Map.Strict qualified as Map
import Data.PQueue.Min (
MinQueue,
)
import Data.PQueue.Min qualified as MinQueue
import Data.Sequence qualified as Seq
import Data.Set (
Set,
)
import Data.Set qualified as Set
import Data.Text (
Text,
)
import Data.These (
These (..),
)
import Kore.Attribute.Pattern.FreeVariables (
FreeVariables,
)
import Kore.Attribute.Pattern.FreeVariables qualified as FreeVariables
import Kore.Builtin.AssociativeCommutative qualified as Ac
import Kore.Builtin.List qualified as List
import Kore.Internal.InternalList
import Kore.Internal.InternalMap hiding (
Element,
NormalizedAc,
Value,
)
import Kore.Internal.InternalSet hiding (
Element,
NormalizedAc,
Value,
)
import Kore.Internal.MultiAnd (
MultiAnd,
)
import Kore.Internal.MultiAnd qualified as MultiAnd
import Kore.Internal.NormalizedAc qualified as Builtin (
Element,
NormalizedAc,
Value,
)
import Kore.Internal.Predicate (
Predicate,
makeCeilPredicate,
)
import Kore.Internal.Predicate qualified as Predicate
import Kore.Internal.SideCondition
import Kore.Internal.SideCondition qualified as SideCondition
import Kore.Internal.TermLike
import Kore.Rewrite.Axiom.MatcherData
import Kore.Rewrite.RewritingVariable (
RewritingVariableName,
)
import Kore.Simplify.InjSimplifier as InjSimplifier
import Kore.Simplify.Overloading (
OverloadingResolution (..),
flipResult,
unifyOverloadingCommonOverload,
unifyOverloadingVsOverloaded,
)
import Kore.Simplify.Overloading qualified as Overloading (
MatchResult (..),
)
import Kore.Simplify.Simplify (
Simplifier,
)
import Kore.Simplify.Simplify qualified as Simplifier
import Kore.Substitute
import Kore.Unparser (
unparseToText,
)
import Pair
import Prelude.Kore
-- * Matching
{- | Match a TermLike against a pattern as represented by another TermLike.
-
- Unlike unification, pattern matching is not bidirectional. Variables may
- appear in the term being matched, but they will not be unified with the
- pattern, and are treated as if they were constants.
-
- At a high level, the pattern match algorithm maintains two lists. The first
- list contains pairs of patterns and terms to match on immediately, and is
- initialized with the initial pattern and term. The second list contains
- pairs of map or set patterns and terms to be matched on on a deferred basis
- after other matching finishes. Matching proceeds by popping a pattern and
- a term off the first list, processing it, and performing one of several
- actions:
-
- * failing matching, indicating pattern matching doesn't succeed and
- returning immediately.
- * discharging it, indicating that matching locally succeeds and removing it
- from the list
- * binding a variable to a term by adding it to the current substitution, and
- removing it from the list
- * decomposing it into one or more subpatterns and subterms and adding them
- to the list
- * deferring it by moving it to the second list if it is a map or set pattern
- * some combination of the above
-
- Once the first list is empty, we remove one map or set pattern and term from
- the second list and perform AC matching. This will perform some combination
- of the above actions, potentially adding new elements to the first list.
-
- The algorithm then repeats this entire process until either matching fails
- or both lists are empty, at which time matching succeeds with the
- substitution that was accumulated.
-
-}
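{- A small worked example of the process described above (illustrative only):
   matching the pattern f(X, b) against the subject f(a, b) pushes the pairs
   (X, a) and (b, b) onto the first list; the first pair binds X |-> a, and the
   second decomposes into nothing and is discharged. A pattern such as
   (K |-> V) M matched against the map {1 |-> a, 2 |-> b} is deferred to the
   second list; AC matching then picks one concrete entry, binds K and V, and
   matches the opaque remainder M against the rest of the map.
-}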
patternMatch ::
SideCondition RewritingVariableName ->
TermLike RewritingVariableName ->
TermLike RewritingVariableName ->
Simplifier (Either Text (MatchResult RewritingVariableName))
patternMatch sideCondition pat subject =
patternMatch' sideCondition [MatchItem pat subject [] Set.empty] MinQueue.empty MultiAnd.top Map.empty
-- Pattern
-- Subject
-- List of variables bound by binders in pattern and subject
-- Set of variables bound by binders in subject
data MatchItem = MatchItem (TermLike RewritingVariableName) (TermLike RewritingVariableName) [(SomeVariable RewritingVariableName, SomeVariable RewritingVariableName)] (Set (SomeVariableName RewritingVariableName))
deriving stock (Eq)
type Element normalized =
Builtin.Element normalized (TermLike RewritingVariableName)
type Value normalized =
Builtin.Value normalized (TermLike RewritingVariableName)
type NormalizedAc normalized =
Builtin.NormalizedAc normalized Key (TermLike RewritingVariableName)
instance Ord MatchItem where
compare a@(MatchItem pat1 subject1 bound1 set1) b@(MatchItem pat2 subject2 bound2 set2)
| a == b = EQ
| pat1 `needs` pat2 = GT
| pat2 `needs` pat1 = LT
| otherwise = compare (pat1, subject1, bound1, set1) (pat2, subject2, bound2, set2)
where
-- term A needs term B if term A is a map or set pattern and the keys
-- of the map or set contain variables that are free variables in
-- term B
needs ::
TermLike RewritingVariableName ->
TermLike RewritingVariableName ->
Bool
needs (InternalSet_ s) term = needsAc (builtinAcChild s) term
needs (InternalMap_ m) term = needsAc (builtinAcChild m) term
needs _ _ = False
needsAc ::
forall normalized.
AcWrapper normalized =>
normalized Key (TermLike RewritingVariableName) ->
TermLike RewritingVariableName ->
Bool
needsAc collection term =
not $ Set.disjoint (FreeVariables.toSet abstractFreeVars) $ FreeVariables.toSet $ freeVariables term
where
abstractKeys :: [TermLike RewritingVariableName]
abstractKeys = getSymbolicKeysOfAc collection
abstractFreeVars :: FreeVariables RewritingVariableName
abstractFreeVars = foldMap freeVariables abstractKeys
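-- For example (informally): if one deferred item is the map pattern
-- (X |-> V) M and another deferred item's pattern also mentions X (and can
-- bind it), the map item compares greater and is popped from the queue later,
-- so X is already bound and substituted before its AC matching runs.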
-- AC matching works by substituting the current substitution into the
-- pre-existing pattern. However, that substitution may bind variables to
-- patterns that themselves contain variables, and those variables should not
-- be matched on when doing pattern matching since pattern matching is
-- unidirectional. We implement this behavior by adding a binding of a variable
-- to itself to the substitution when this happens. `finalizeSubst` exists
-- to remove this binding since it exists purely for internal purposes.
finalizeSubst ::
Map (SomeVariableName RewritingVariableName) (TermLike RewritingVariableName) ->
Map (SomeVariableName RewritingVariableName) (TermLike RewritingVariableName)
finalizeSubst subst = Map.filterWithKey go subst
where
go k (Var_ v) = k /= variableName v
go _ _ = True
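-- For example, if AC matching added X |-> f(Y) and therefore also the marker
-- binding Y |-> Y (so that Y is treated as a constant rather than matched on),
-- finalizeSubst keeps only X |-> f(Y) in the final result.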
{- | Match a collection of MatchItems, representing subject/pattern pairs to
- perform matching.
-
- The MatchItems are divided into two lists. The first represents items to
- process immediately and uses a stack since the order within that list does
- not matter. The second represents Set/Map patterns to perform AC matching
- for. Items in the second list are processed once the first list empties.
- We use a priority queue to represent the second list since matching a
- Set/Map pattern may introduce new bindings which will in turn affect future
- Set/Map matching. As a result, we order the items so that matching proceeds
- in the correct order and is as deterministic as possible.
-
- The MultiAnd is used to construct the final predicate, which consists
- primarily of \ceil predicates. The Map represents the current substitution
- at this point in the matching process.
-}
patternMatch' ::
SideCondition RewritingVariableName ->
[MatchItem] ->
MinQueue MatchItem ->
MultiAnd (Predicate RewritingVariableName) ->
Map (SomeVariableName RewritingVariableName) (TermLike RewritingVariableName) ->
Simplifier (Either Text (MatchResult RewritingVariableName))
patternMatch' _ [] queue predicate subst
| MinQueue.null queue =
return $ Right (Predicate.fromMultiAnd predicate, finalizeSubst subst)
patternMatch' sideCondition [] queue predicate subst = do
injSimplifier <- Simplifier.askInjSimplifier
let pat' = renormalizeBuiltins $ InjSimplifier.normalize injSimplifier $ substitute subst pat
case (pat', subject) of
(InternalMap_ map1, InternalMap_ map2) ->
matchNormalizedAc decomposeList unwrapMapValue unwrapMapElement (wrapMap map2) (unwrapAc $ builtinAcChild map1) (unwrapAc $ builtinAcChild map2)
(InternalSet_ set1, InternalSet_ set2) ->
matchNormalizedAc decomposeList unwrapSetValue unwrapSetElement (wrapSet set2) (unwrapAc $ builtinAcChild set1) (unwrapAc $ builtinAcChild set2)
_ -> error "error in matching algorithm: unexpected deferred term"
where
(MatchItem pat subject boundVars boundSet, rest) = MinQueue.deleteFindMin queue
-- recursively pattern match with a list of new MatchItems
decomposeList ::
[(TermLike RewritingVariableName, TermLike RewritingVariableName)] ->
Simplifier (Either Text (MatchResult RewritingVariableName))
decomposeList l =
let l' = map (\(p, s) -> MatchItem p s boundVars boundSet) l
in patternMatch' sideCondition l' rest predicate $ Map.foldl' processSubst subst subst
-- add bindings of variables in the term to themselves. See comment above
-- on `finalizeSubst`.
processSubst ::
Map (SomeVariableName RewritingVariableName) (TermLike RewritingVariableName) ->
TermLike RewritingVariableName ->
Map (SomeVariableName RewritingVariableName) (TermLike RewritingVariableName)
processSubst subst' term =
let vars = FreeVariables.toList $ freeVariables term
newSubst = foldMap (\var -> Map.singleton (variableName var) (mkVar var)) vars
in subst' <> newSubst
-- the below functions are used to guide matchNormalizedAc so that it can
-- reuse the same code for both maps and sets
wrapSet ::
InternalAc Key NormalizedSet (TermLike RewritingVariableName) ->
NormalizedAc NormalizedSet ->
TermLike RewritingVariableName
wrapSet set2 unwrapped =
set2
& Lens.set (field @"builtinAcChild") (wrapAc unwrapped)
& mkInternalSet
wrapMap ::
InternalAc Key NormalizedMap (TermLike RewritingVariableName) ->
NormalizedAc NormalizedMap ->
TermLike RewritingVariableName
wrapMap map2 unwrapped =
map2
& Lens.set (field @"builtinAcChild") (wrapAc unwrapped)
& mkInternalMap
unwrapSetValue ::
[(Value NormalizedSet, Value NormalizedSet)] ->
[(TermLike RewritingVariableName, TermLike RewritingVariableName)]
unwrapSetValue _ = []
unwrapMapValue ::
[(Value NormalizedMap, Value NormalizedMap)] ->
[(TermLike RewritingVariableName, TermLike RewritingVariableName)]
unwrapMapValue vals = map (Bifunctor.bimap getMapValue getMapValue) vals
unwrapSetElement ::
Element NormalizedSet ->
Element NormalizedSet ->
[(TermLike RewritingVariableName, TermLike RewritingVariableName)]
unwrapSetElement elem1 elem2 = [(getSetElement elem1, getSetElement elem2)]
unwrapMapElement ::
Element NormalizedMap ->
Element NormalizedMap ->
[(TermLike RewritingVariableName, TermLike RewritingVariableName)]
unwrapMapElement elem1 elem2 =
let (key1, val1) = getMapElement elem1
(key2, val2) = getMapElement elem2
in [(key1, key2), (val1, val2)]
patternMatch' sideCondition ((MatchItem pat subject boundVars boundSet) : rest) deferred predicate subst = do
tools <- Simplifier.askMetadataTools
injSimplifier <- Simplifier.askInjSimplifier
overloadSimplifier <- Simplifier.askOverloadSimplifier
let InjSimplifier{matchInjs, evaluateInj} = injSimplifier
case (pat, subject) of
(Var_ var1, Var_ var2)
| isFree var1
, var1 == var2 ->
discharge
(ElemVar_ var1, _)
| isFree (inject var1)
, isFunctionPattern subject ->
bind (inject var1) subject
(SetVar_ var1, _)
| isFree (inject var1) ->
bind (inject var1) subject
(Var_ var1, Var_ var2)
| not $ isFree var1
, var1 `isBoundToSameAs` var2 ->
discharge
(StringLiteral_ str1, StringLiteral_ str2) ->
if str1 == str2
then discharge
else failMatch "distinct string literals"
(InternalInt_ int1, InternalInt_ int2) ->
if int1 == int2
then discharge
else failMatch "distinct builtin integers"
(InternalBool_ bool1, InternalBool_ bool2) ->
if bool1 == bool2
then discharge
else failMatch "distinct builtin booleans"
(InternalString_ string1, InternalString_ string2) ->
if string1 == string2
then discharge
else failMatch "distinct builtin strings"
(InternalBytes_ _ bytes1, InternalBytes_ _ bytes2) ->
if bytes1 == bytes2
then discharge
else failMatch "distinct builtin bytes"
(Endianness_ symbol1, Endianness_ symbol2) ->
if symbol1 == symbol2
then discharge
else failMatch "distinct endianness"
(Signedness_ symbol1, Signedness_ symbol2) ->
if symbol1 == symbol2
then discharge
else failMatch "distinct signedness"
(DV_ _ dv1, DV_ _ dv2) ->
if dv1 == dv2
then discharge
else failMatch "distinct domain values"
(Bottom_ _, Bottom_ _) ->
discharge
(Top_ _, Top_ _) ->
discharge
(Ceil_ _ _ term1, Ceil_ _ _ term2) ->
decompose term1 term2
(Equals_ _ _ term11 term12, Equals_ _ _ term21 term22) ->
decomposeTwo term11 term21 term12 term22
(And_ _ term1 term2, _) ->
-- subject should match _both_ term1 and term2
decomposeTwo term1 subject term2 subject
(Not_ _ term1, Not_ _ term2) ->
decompose term1 term2
(Forall_ _ variable1 term1, Forall_ _ variable2 term2) ->
decomposeBinder (inject variable1) term1 (inject variable2) term2
(Exists_ _ variable1 term1, Exists_ _ variable2 term2) ->
decomposeBinder (inject variable1) term1 (inject variable2) term2
(App_ symbol1 children1, App_ symbol2 children2) ->
if symbol1 == symbol2
then decomposeList (zip children1 children2)
else failMatch $ "distinct application symbols: " <> (unparseToText symbol1) <> ", " <> (unparseToText symbol2)
(Inj_ inj1, Inj_ inj2)
| Just unifyData <- matchInjs inj1 inj2 ->
case unifyData of
UnifyInjDirect _ -> decompose (injChild inj1) (injChild inj2)
UnifyInjSplit InjPair{inj1 = firstInj} ->
if injFrom firstInj == injFrom inj1
then decompose (injChild inj1) (evaluateInj inj2{injTo = injFrom inj1})
else decompose (evaluateInj inj1{injTo = injFrom inj2}) (injChild inj2)
UnifyInjDistinct _ -> failMatch "distinct injections"
(Inj_ inj@Inj{injChild = App_ firstHead firstChildren}, secondTerm@(App_ secondHead _))
| Just unifyData <-
unifyOverloadingVsOverloaded
overloadSimplifier
secondHead
secondTerm
(Application firstHead firstChildren)
inj{injChild = ()} ->
decomposeOverload $ flipResult unifyData
(firstTerm@(App_ firstHead _), Inj_ inj@Inj{injChild = App_ secondHead secondChildren})
| Just unifyData <-
unifyOverloadingVsOverloaded
overloadSimplifier
firstHead
firstTerm
(Application secondHead secondChildren)
inj{injChild = ()} ->
decomposeOverload unifyData
(Inj_ inj1@Inj{injChild = App_ firstHead firstChildren}, Inj_ Inj{injChild = App_ secondHead secondChildren})
| Just unifyData <-
unifyOverloadingCommonOverload
overloadSimplifier
(Application firstHead firstChildren)
(Application secondHead secondChildren)
inj1{injChild = ()} ->
decomposeOverload unifyData
(_, _)
| Just True <- List.isListSort tools sort ->
case (List.normalize pat, List.normalize subject) of
(Var_ var1, Var_ var2)
| var1 == var2 ->
discharge
(ElemVar_ var1, _)
| isFunctionPattern subject ->
bind (inject var1) subject
(SetVar_ var1, _) ->
bind (inject var1) subject
(InternalList_ InternalList{internalListChild = l1}, InternalList_ InternalList{internalListChild = l2}) ->
if length l1 == length l2
then decomposeList $ zip (toList l1) (toList l2)
else failMatch "list lengths are not equal"
(App_ symbol [InternalList_ InternalList{internalListChild = l1}, var@(ElemVar_ _)], InternalList_ InternalList{internalListChild = l2})
| List.isSymbolConcat symbol ->
if length l1 <= length l2
then
let (start, l2') = Seq.splitAt (length l1) l2
in decomposeList $ (var, List.asInternal tools sort l2') : zip (toList l1) (toList start)
else failMatch "subject list is too short"
(App_ symbol [var@(ElemVar_ _), InternalList_ InternalList{internalListChild = l1}], InternalList_ InternalList{internalListChild = l2})
| List.isSymbolConcat symbol ->
if length l1 <= length l2
then
let (l2', end) = Seq.splitAt (length l2 - length l1) l2
in decomposeList $ (var, List.asInternal tools sort l2') : zip (toList l1) (toList end)
else failMatch "subject list is too short"
_ -> failMatch "unimplemented list matching case"
(InternalMap_ _, InternalMap_ _) ->
defer
(InternalSet_ _, InternalSet_ _) ->
defer
_ -> failMatch "unimplemented matching case"
where
sort = termLikeSort pat
-- recurse by deleting the current MatchItem
discharge ::
Simplifier (Either Text (MatchResult RewritingVariableName))
~discharge = patternMatch' sideCondition rest deferred predicate subst
-- recurse by adding a variable binding to the current substitution
bind ::
SomeVariable RewritingVariableName ->
TermLike RewritingVariableName ->
Simplifier (Either Text (MatchResult RewritingVariableName))
bind var term
| variableSort var == termLikeSort term =
let varName = variableName var
freeVars = FreeVariables.toNames (freeVariables term)
in if not $ Set.disjoint freeVars boundSet
then failMatch "bound variable would escape binder"
else case Map.lookup varName subst of
Nothing -> patternMatch' sideCondition rest deferred (isTermDefined var term) (Map.insert (variableName var) term subst)
Just binding -> if binding == term then patternMatch' sideCondition rest deferred predicate subst else failMatch "nonlinear matching fails equality test"
| otherwise = failMatch "sorts don't match"
-- compute the new predicate of a `bind` operation
isTermDefined ::
SomeVariable RewritingVariableName ->
TermLike RewritingVariableName ->
MultiAnd (Predicate RewritingVariableName)
isTermDefined var term
| SideCondition.isDefined sideCondition term || isSetVariable var = predicate
| otherwise = (predicate <> MultiAnd.make [makeCeilPredicate term])
-- recurse by adding a new MatchItem
decompose ::
TermLike RewritingVariableName ->
TermLike RewritingVariableName ->
Simplifier (Either Text (MatchResult RewritingVariableName))
decompose term1 term2 = patternMatch' sideCondition ((MatchItem term1 term2 boundVars boundSet) : rest) deferred predicate subst
-- recurse by moving a MatchItem to the priority queue
defer ::
Simplifier (Either Text (MatchResult RewritingVariableName))
~defer = patternMatch' sideCondition rest (MinQueue.insert (MatchItem pat subject boundVars boundSet) deferred) predicate subst
-- recurse by adding two new MatchItems
decomposeTwo ::
TermLike RewritingVariableName ->
TermLike RewritingVariableName ->
TermLike RewritingVariableName ->
TermLike RewritingVariableName ->
Simplifier (Either Text (MatchResult RewritingVariableName))
decomposeTwo term11 term21 term12 term22 = patternMatch' sideCondition ((MatchItem term11 term21 boundVars boundSet) : (MatchItem term12 term22 boundVars boundSet) : rest) deferred predicate subst
-- recurse by adding a list of MatchItems
decomposeList ::
[(TermLike RewritingVariableName, TermLike RewritingVariableName)] ->
Simplifier (Either Text (MatchResult RewritingVariableName))
decomposeList l =
let l' = map (\(p, s) -> MatchItem p s boundVars boundSet) l
in patternMatch' sideCondition (l' ++ rest) deferred predicate subst
-- recurse on an \exists or \forall
decomposeBinder ::
SomeVariable RewritingVariableName ->
TermLike RewritingVariableName ->
SomeVariable RewritingVariableName ->
TermLike RewritingVariableName ->
Simplifier (Either Text (MatchResult RewritingVariableName))
decomposeBinder var1 term1 var2 term2 = patternMatch' sideCondition ((MatchItem term1 term2 ((var1, var2) : boundVars) (Set.insert (variableName var2) boundSet)) : rest) deferred predicate subst
-- recurse with a specified result from the overload simplifier
decomposeOverload (Overloading.Resolution (Simple (Pair term1 term2))) = decompose term1 term2
decomposeOverload _ = failMatch "unsupported overload case in matching"
-- returns true if a variable is not bound by a binder
isFree ::
SomeVariable RewritingVariableName ->
Bool
isFree var = not $ any ((== var) . fst) boundVars
-- returns true if two variables are the same bound variable
isBoundToSameAs var1 var2 =
case find ((== var1) . fst) boundVars of
Nothing -> undefined
Just (_, bound) -> var2 == bound
-- fail pattern matching with an error message
failMatch ::
Text ->
Simplifier (Either Text (MatchResult RewritingVariableName))
failMatch msg = return $ Left msg
type MatchingVariable variable = InternalVariable variable
type PushList a = [(a, a)] -> Simplifier (Either Text (MatchResult RewritingVariableName))
-- perform AC matching on a particular Set/Map pair.
matchNormalizedAc ::
forall normalized.
( AcWrapper normalized
) =>
PushList (TermLike RewritingVariableName) ->
([(Value normalized, Value normalized)] -> [(TermLike RewritingVariableName, TermLike RewritingVariableName)]) ->
(Element normalized -> Element normalized -> [(TermLike RewritingVariableName, TermLike RewritingVariableName)]) ->
(NormalizedAc normalized -> TermLike RewritingVariableName) ->
NormalizedAc normalized ->
NormalizedAc normalized ->
Simplifier (Either Text (MatchResult RewritingVariableName))
matchNormalizedAc decomposeList unwrapValues unwrapElementToTermLike wrapTermLike normalized1 normalized2
-- all concrete elements in the AC pattern must appear in the AC subject
| not (null excessConcrete1) =
failMatch "AC collection missing concrete elements"
-- (exactly) one concrete excess element is mapped to an abstract
-- element in the pattern
| null excessConcrete1 -- see above, should not happen
, [element1] <- excessAbstract1 -- excess in pattern is single K |-> V
, null opaque1
, [concElem2] <- HashMap.toList excessConcrete2 -- excess in subject is single assoc
, null excessAbstract2
, null opaque2 -- ? do we need this? could also mean opaques are all empty?
-- ensure the symbolic key is not in the subject map
-- (see intersectionMerge, should not happen)
, (key1, _) <- unwrapElement element1
, isNothing (lookupSymbolicKeyOfAc key1 normalized2) =
-- bind element1 <- concElem2, deal with the identical parts
let concElem2' = wrapElement $ Bifunctor.first (from @Key) concElem2
in decomposeList $
unwrapElementToTermLike element1 concElem2'
<> unwrapValues (concrete12 <> abstractMerge)
-- Case for when all symbolic elements in normalized1 appear in normalized2:
| [] <- excessAbstract1 =
do
case opaque1 of
-- Without opaques and syntactically equal
[] ->
if not (null opaque2) || not (null excessConcrete2) || not (null excessAbstract2)
then failMatch "AC collection without opaque terms has excess elements"
else decomposeList $ unwrapValues $ concrete12 ++ abstractMerge
[frame1]
-- One opaque each, rest are syntactically equal
| null excessAbstract2
, null excessConcrete2
, [frame2] <- opaque2 ->
decomposeList $ (frame1, frame2) : unwrapValues (concrete12 ++ abstractMerge)
-- Match singular opaque1 with excess part of normalized2
| otherwise ->
let normalized2' =
wrapTermLike
normalized2
{ concreteElements = excessConcrete2
, elementsWithVariables = excessAbstract2
}
in decomposeList $ (frame1, normalized2') : unwrapValues (concrete12 ++ abstractMerge)
frames1
-- Opaque parts are equivalent, rest is syntactically equal
| null excessAbstract2
, null excessConcrete2
, frames2 <- opaque2
, length frames1 == length frames2 ->
decomposeList $ unwrapValues (concrete12 ++ abstractMerge) ++ zip opaque1ACs opaque2ACs
| otherwise -> failMatch "unimplemented ac collection case"
-- Case for AC iteration:
-- Normalized1 looks like K |-> V  M:Map
| [element1] <- abstract1
, [frame1] <- opaque1
, null concrete1 = do
let (key1, value1) = unwrapElement element1
case lookupSymbolicKeyOfAc key1 normalized2 of
-- If K in_keys(normalized2)
Just value2 ->
let normalized2' =
wrapTermLike $
removeSymbolicKeyOfAc key1 normalized2
in decomposeList $ (frame1, normalized2') : unwrapValues [(value1, value2)]
Nothing ->
case (headMay . HashMap.toList $ concrete2, headMay abstract2) of
-- Select first concrete element of normalized2, concreteElement2
-- Match K |-> V with concreteElement2
-- Match M with remove(normalized2, concreteElement2)
(Just concreteElement2, _) ->
let liftedConcreteElement2 =
Bifunctor.first (from @Key) concreteElement2
& wrapElement
(key2, _) = concreteElement2
normalized2' =
wrapTermLike $
removeConcreteKeyOfAc key2 normalized2
in decomposeList $ (frame1, normalized2') : unwrapElementToTermLike element1 liftedConcreteElement2
-- Select first symbolic element of normalized2, symbolic2
-- Match K |-> V with symbolic2
-- Match M with remove(normalized2, symbolic2)
(_, Just abstractElement2) ->
let (key2, _) = unwrapElement abstractElement2
normalized2' =
wrapTermLike $
removeSymbolicKeyOfAc key2 normalized2
in decomposeList $ (frame1, normalized2') : unwrapElementToTermLike element1 abstractElement2
_ -> failMatch "unimplemented ac collection case"
-- Case for ACs which are structurally equal:
| length excessAbstract1 == length excessAbstract2
, length concrete1 == length concrete2
, length opaque1 == length opaque2 =
decomposeList $ unwrapValues (abstractMerge ++ concrete12) ++ unwrapElements (zip excessAbstract1 excessAbstract2) ++ (zip opaque1ACs opaque2ACs)
| otherwise = failMatch "unimplemented ac collection case"
where
abstract1 = elementsWithVariables normalized1
concrete1 = concreteElements normalized1
opaque1 = opaque normalized1
opaque1ACs = wrapTermLike . toSingleOpaqueElem <$> opaque1
abstract2 = elementsWithVariables normalized2
concrete2 = concreteElements normalized2
opaque2 = opaque normalized2
opaque2ACs = wrapTermLike . toSingleOpaqueElem <$> opaque2
excessConcrete1 = HashMap.difference concrete1 concrete2
excessConcrete2 = HashMap.difference concrete2 concrete1
concrete12 = HashMap.elems $ HashMap.intersectionWith (,) concrete1 concrete2
unwrapElements = concatMap $ uncurry unwrapElementToTermLike
IntersectionDifference
{ intersection = abstractMerge
, excessFirst = excessAbstract1
, excessSecond = excessAbstract2
} = abstractIntersectionMerge abstract1 abstract2
abstractIntersectionMerge ::
[Element normalized] ->
[Element normalized] ->
IntersectionDifference
(Element normalized)
(Value normalized, Value normalized)
abstractIntersectionMerge first second =
keyBasedIntersectionDifference
elementMerger
(toMap first)
(toMap second)
where
toMap ::
[Element normalized] ->
Map (TermLike RewritingVariableName) (Element normalized)
toMap elements =
let elementMap =
Map.fromList
( map
(\value -> (elementKey value, value))
elements
)
in if length elementMap == length elements
then elementMap
else error "Invalid map: duplicated keys."
elementKey ::
Element normalized ->
TermLike RewritingVariableName
elementKey = fst . unwrapElement
elementMerger ::
Element normalized ->
Element normalized ->
(Value normalized, Value normalized)
elementMerger = (,) `on` (snd . unwrapElement)
data IntersectionDifference a b = IntersectionDifference
{ intersection :: ![b]
, excessFirst :: ![a]
, excessSecond :: ![a]
}
deriving stock (Show)
emptyIntersectionDifference :: IntersectionDifference a b
emptyIntersectionDifference =
IntersectionDifference
{ intersection = []
, excessFirst = []
, excessSecond = []
}
keyBasedIntersectionDifference ::
forall a b k.
Ord k =>
(a -> a -> b) ->
Map k a ->
Map k a ->
IntersectionDifference a b
keyBasedIntersectionDifference merger firsts seconds =
foldl'
helper
emptyIntersectionDifference
(Map.elems $ Align.align firsts seconds)
where
helper ::
IntersectionDifference a b ->
These a a ->
IntersectionDifference a b
helper
result@IntersectionDifference{excessFirst}
(This first) =
result{excessFirst = first : excessFirst}
helper
result@IntersectionDifference{excessSecond}
(That second) =
result{excessSecond = second : excessSecond}
helper
result@IntersectionDifference{intersection}
(These first second) =
result{intersection = merger first second : intersection}
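-- A small illustration (made-up values, not from the original source): with
-- merger = (,), firsts = Map.fromList [(1, 'a'), (2, 'b')] and
-- seconds = Map.fromList [(2, 'c'), (3, 'd')], the aligned fold yields
-- intersection = [('b', 'c')], excessFirst = ['a'] and excessSecond = ['d']
-- (the list ordering is an artifact of the fold, not a guarantee).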
-- | Renormalize builtin types after substitution.
renormalizeBuiltins ::
InternalVariable variable =>
TermLike variable ->
TermLike variable
renormalizeBuiltins =
Recursive.fold $ \base@(attrs :< termLikeF) ->
let bottom' = mkBottom (termSort attrs)
in case termLikeF of
InternalMapF internalMap ->
Lens.traverseOf (field @"builtinAcChild") Ac.renormalize internalMap
& maybe bottom' mkInternalMap
InternalSetF internalSet ->
Lens.traverseOf (field @"builtinAcChild") Ac.renormalize internalSet
& maybe bottom' mkInternalSet
_ -> Recursive.embed base
| null | https://raw.githubusercontent.com/runtimeverification/haskell-backend/a7ff15f2263dcf274a196a958cfb99c95809afba/kore/src/Kore/Rewrite/Axiom/Matcher.hs | haskell | * Matching
Pattern
Subject
List of variables bound by binders in pattern and subject
Set of variables bound by binders in subject
term A needs term B if term A is a map or set pattern and the keys
of the map or set contain variables that are free variables in
term B
AC matching works by substituting the current substitution into the
pre-existing pattern. However, that substitution may bind variables to
patterns that themselves contain variables, and those variables should not
be matched on when doing pattern matching since pattern matching is
unidirectional. We implement this behavior by adding a binding of a variable
to remove this binding since it exists purely for internal purposes.
add bindings of variables in the term to themselves. See comment above
reuse the same code for both maps and sets
recurse by adding a variable binding to the current substitution
compute the new predicate of a `bind` operation
recurse with a specified result from the overload simplifier
returns true if a variable is not bound by a binder
fail pattern matching with an error message
perform AC matching on a particular Set/Map pair.
all concrete elements in the AC pattern must appear in the AC subject
element in the pattern
see above, should not happen
excess in pattern is single K |-> V
excess in subject is single assoc
? do we need this? could also mean opaques are all empty?
ensure the symbolic key is not in the subject map
Without opaques and syntactically equal
Match singular opaque1 with excess part of normalized2
Opaque parts are equivalent, rest is syntactically equal
Case for AC iteration:
If K in_keys(normalized2)
Case for ACs which are structurally equal:
| Renormalize builtin types after substitution. | |
Copyright : ( c ) Runtime Verification , 2018 - 2021
License : BSD-3 - Clause
Copyright : (c) Runtime Verification, 2018-2021
License : BSD-3-Clause
-}
module Kore.Rewrite.Axiom.Matcher (
MatchingVariable,
MatchResult,
patternMatch,
) where
import Control.Lens qualified as Lens
import Data.Align qualified as Align (
align,
)
import Data.Bifunctor qualified as Bifunctor
import Data.Functor.Foldable qualified as Recursive
import Data.Generics.Product
import Data.HashMap.Strict qualified as HashMap
import Data.Map.Strict (
Map,
)
import Data.Map.Strict qualified as Map
import Data.PQueue.Min (
MinQueue,
)
import Data.PQueue.Min qualified as MinQueue
import Data.Sequence qualified as Seq
import Data.Set (
Set,
)
import Data.Set qualified as Set
import Data.Text (
Text,
)
import Data.These (
These (..),
)
import Kore.Attribute.Pattern.FreeVariables (
FreeVariables,
)
import Kore.Attribute.Pattern.FreeVariables qualified as FreeVariables
import Kore.Builtin.AssociativeCommutative qualified as Ac
import Kore.Builtin.List qualified as List
import Kore.Internal.InternalList
import Kore.Internal.InternalMap hiding (
Element,
NormalizedAc,
Value,
)
import Kore.Internal.InternalSet hiding (
Element,
NormalizedAc,
Value,
)
import Kore.Internal.MultiAnd (
MultiAnd,
)
import Kore.Internal.MultiAnd qualified as MultiAnd
import Kore.Internal.NormalizedAc qualified as Builtin (
Element,
NormalizedAc,
Value,
)
import Kore.Internal.Predicate (
Predicate,
makeCeilPredicate,
)
import Kore.Internal.Predicate qualified as Predicate
import Kore.Internal.SideCondition
import Kore.Internal.SideCondition qualified as SideCondition
import Kore.Internal.TermLike
import Kore.Rewrite.Axiom.MatcherData
import Kore.Rewrite.RewritingVariable (
RewritingVariableName,
)
import Kore.Simplify.InjSimplifier as InjSimplifier
import Kore.Simplify.Overloading (
OverloadingResolution (..),
flipResult,
unifyOverloadingCommonOverload,
unifyOverloadingVsOverloaded,
)
import Kore.Simplify.Overloading qualified as Overloading (
MatchResult (..),
)
import Kore.Simplify.Simplify (
Simplifier,
)
import Kore.Simplify.Simplify qualified as Simplifier
import Kore.Substitute
import Kore.Unparser (
unparseToText,
)
import Pair
import Prelude.Kore
{- | Match a TermLike against a pattern as represented by another TermLike.
-
- Unlike unification, pattern matching is not bidirectional. Variables may
- appear in the term being matched, but they will not be unified with the
- pattern, and are treated as if they were constants.
-
- At a high level, the pattern match algorithm maintains two lists. The first
- list contains pairs of patterns and terms to match on immediately, and is
- initialized with the initial pattern and term. The second list contains
- pairs of map or set patterns and terms to be matched on on a deferred basis
- after other matching finishes. Matching proceeds by popping a pattern and
- a term off the first list, processing it, and performing one of several
- actions:
-
- * failing matching, indicating pattern matching doesn't succeed and
- returning immediately.
- * discharging it, indicating that matching locally succeeds and removing it
- from the list
- * binding a variable to a term by adding it to the current substitution, and
- removing it from the list
- * decomposing it into one or more subpatterns and subterms and adding them
- to the list
- * deferring it by moving it to the second list if it is a map or set pattern
- * some combination of the above
-
- Once the first list is empty, we remove one map or set pattern and term from
- the second list and perform AC matching. This will perform some combination
- of the above actions, potentially adding new elements to the first list.
-
- The algorithm then repeats this entire process until either matching fails
- or both lists are empty, at which time matching succeeds with the
- substitution that was accumulated.
-}
patternMatch ::
SideCondition RewritingVariableName ->
TermLike RewritingVariableName ->
TermLike RewritingVariableName ->
Simplifier (Either Text (MatchResult RewritingVariableName))
patternMatch sideCondition pat subject =
patternMatch' sideCondition [MatchItem pat subject [] Set.empty] MinQueue.empty MultiAnd.top Map.empty
data MatchItem = MatchItem (TermLike RewritingVariableName) (TermLike RewritingVariableName) [(SomeVariable RewritingVariableName, SomeVariable RewritingVariableName)] (Set (SomeVariableName RewritingVariableName))
deriving stock (Eq)
type Element normalized =
Builtin.Element normalized (TermLike RewritingVariableName)
type Value normalized =
Builtin.Value normalized (TermLike RewritingVariableName)
type NormalizedAc normalized =
Builtin.NormalizedAc normalized Key (TermLike RewritingVariableName)
instance Ord MatchItem where
compare a@(MatchItem pat1 subject1 bound1 set1) b@(MatchItem pat2 subject2 bound2 set2)
| a == b = EQ
| pat1 `needs` pat2 = GT
| pat2 `needs` pat1 = LT
| otherwise = compare (pat1, subject1, bound1, set1) (pat2, subject2, bound2, set2)
where
needs ::
TermLike RewritingVariableName ->
TermLike RewritingVariableName ->
Bool
needs (InternalSet_ s) term = needsAc (builtinAcChild s) term
needs (InternalMap_ m) term = needsAc (builtinAcChild m) term
needs _ _ = False
needsAc ::
forall normalized.
AcWrapper normalized =>
normalized Key (TermLike RewritingVariableName) ->
TermLike RewritingVariableName ->
Bool
needsAc collection term =
not $ Set.disjoint (FreeVariables.toSet abstractFreeVars) $ FreeVariables.toSet $ freeVariables term
where
abstractKeys :: [TermLike RewritingVariableName]
abstractKeys = getSymbolicKeysOfAc collection
abstractFreeVars :: FreeVariables RewritingVariableName
abstractFreeVars = foldMap freeVariables abstractKeys
-- to itself to the substitution when this happens. `finalizeSubst` exists
finalizeSubst ::
Map (SomeVariableName RewritingVariableName) (TermLike RewritingVariableName) ->
Map (SomeVariableName RewritingVariableName) (TermLike RewritingVariableName)
finalizeSubst subst = Map.filterWithKey go subst
where
go k (Var_ v) = k /= variableName v
go _ _ = True
{- | Match a collection of MatchItems, representing subject/pattern pairs to
- perform matching.
-
- The MatchItems are divided into two lists. The first represents items to
- process immediately and uses a stack since the order within that list does
- not matter. The second represents Set/Map patterns to perform AC matching
- for. Items in the second list are processed once the first list empties.
- We use a priority queue to represent the second list since matching a
- Set/Map pattern may introduce new bindings which will in turn affect future
- Set/Map matching. As a result, we order the items so that matching proceeds
- in the correct order and is as deterministic as possible.
-
- The MultiAnd is used to construct the final predicate, which consists
- primarily of \ceil predicates. The Map represents the current substitution
- at this point in the matching process.
-}
patternMatch' ::
SideCondition RewritingVariableName ->
[MatchItem] ->
MinQueue MatchItem ->
MultiAnd (Predicate RewritingVariableName) ->
Map (SomeVariableName RewritingVariableName) (TermLike RewritingVariableName) ->
Simplifier (Either Text (MatchResult RewritingVariableName))
patternMatch' _ [] queue predicate subst
| MinQueue.null queue =
return $ Right (Predicate.fromMultiAnd predicate, finalizeSubst subst)
patternMatch' sideCondition [] queue predicate subst = do
injSimplifier <- Simplifier.askInjSimplifier
let pat' = renormalizeBuiltins $ InjSimplifier.normalize injSimplifier $ substitute subst pat
case (pat', subject) of
(InternalMap_ map1, InternalMap_ map2) ->
matchNormalizedAc decomposeList unwrapMapValue unwrapMapElement (wrapMap map2) (unwrapAc $ builtinAcChild map1) (unwrapAc $ builtinAcChild map2)
(InternalSet_ set1, InternalSet_ set2) ->
matchNormalizedAc decomposeList unwrapSetValue unwrapSetElement (wrapSet set2) (unwrapAc $ builtinAcChild set1) (unwrapAc $ builtinAcChild set2)
_ -> error "error in matching algorithm: unexpected deferred term"
where
(MatchItem pat subject boundVars boundSet, rest) = MinQueue.deleteFindMin queue
-- recursively pattern match with a list of new MatchItems
decomposeList ::
[(TermLike RewritingVariableName, TermLike RewritingVariableName)] ->
Simplifier (Either Text (MatchResult RewritingVariableName))
decomposeList l =
let l' = map (\(p, s) -> MatchItem p s boundVars boundSet) l
in patternMatch' sideCondition l' rest predicate $ Map.foldl' processSubst subst subst
-- add bindings of variables in the term to themselves. See comment above
-- on `finalizeSubst`.
processSubst ::
Map (SomeVariableName RewritingVariableName) (TermLike RewritingVariableName) ->
TermLike RewritingVariableName ->
Map (SomeVariableName RewritingVariableName) (TermLike RewritingVariableName)
processSubst subst' term =
let vars = FreeVariables.toList $ freeVariables term
newSubst = foldMap (\var -> Map.singleton (variableName var) (mkVar var)) vars
in subst' <> newSubst
-- the below functions are used to guide matchNormalizedAc so that it can
-- reuse the same code for both maps and sets
wrapSet ::
InternalAc Key NormalizedSet (TermLike RewritingVariableName) ->
NormalizedAc NormalizedSet ->
TermLike RewritingVariableName
wrapSet set2 unwrapped =
set2
& Lens.set (field @"builtinAcChild") (wrapAc unwrapped)
& mkInternalSet
wrapMap ::
InternalAc Key NormalizedMap (TermLike RewritingVariableName) ->
NormalizedAc NormalizedMap ->
TermLike RewritingVariableName
wrapMap map2 unwrapped =
map2
& Lens.set (field @"builtinAcChild") (wrapAc unwrapped)
& mkInternalMap
unwrapSetValue ::
[(Value NormalizedSet, Value NormalizedSet)] ->
[(TermLike RewritingVariableName, TermLike RewritingVariableName)]
unwrapSetValue _ = []
unwrapMapValue ::
[(Value NormalizedMap, Value NormalizedMap)] ->
[(TermLike RewritingVariableName, TermLike RewritingVariableName)]
unwrapMapValue vals = map (Bifunctor.bimap getMapValue getMapValue) vals
unwrapSetElement ::
Element NormalizedSet ->
Element NormalizedSet ->
[(TermLike RewritingVariableName, TermLike RewritingVariableName)]
unwrapSetElement elem1 elem2 = [(getSetElement elem1, getSetElement elem2)]
unwrapMapElement ::
Element NormalizedMap ->
Element NormalizedMap ->
[(TermLike RewritingVariableName, TermLike RewritingVariableName)]
unwrapMapElement elem1 elem2 =
let (key1, val1) = getMapElement elem1
(key2, val2) = getMapElement elem2
in [(key1, key2), (val1, val2)]
patternMatch' sideCondition ((MatchItem pat subject boundVars boundSet) : rest) deferred predicate subst = do
tools <- Simplifier.askMetadataTools
injSimplifier <- Simplifier.askInjSimplifier
overloadSimplifier <- Simplifier.askOverloadSimplifier
let InjSimplifier{matchInjs, evaluateInj} = injSimplifier
case (pat, subject) of
(Var_ var1, Var_ var2)
| isFree var1
, var1 == var2 ->
discharge
(ElemVar_ var1, _)
| isFree (inject var1)
, isFunctionPattern subject ->
bind (inject var1) subject
(SetVar_ var1, _)
| isFree (inject var1) ->
bind (inject var1) subject
(Var_ var1, Var_ var2)
| not $ isFree var1
, var1 `isBoundToSameAs` var2 ->
discharge
(StringLiteral_ str1, StringLiteral_ str2) ->
if str1 == str2
then discharge
else failMatch "distinct string literals"
(InternalInt_ int1, InternalInt_ int2) ->
if int1 == int2
then discharge
else failMatch "distinct builtin integers"
(InternalBool_ bool1, InternalBool_ bool2) ->
if bool1 == bool2
then discharge
else failMatch "distinct builtin booleans"
(InternalString_ string1, InternalString_ string2) ->
if string1 == string2
then discharge
else failMatch "distinct builtin strings"
(InternalBytes_ _ bytes1, InternalBytes_ _ bytes2) ->
if bytes1 == bytes2
then discharge
else failMatch "distinct builtin bytes"
(Endianness_ symbol1, Endianness_ symbol2) ->
if symbol1 == symbol2
then discharge
else failMatch "distinct endianness"
(Signedness_ symbol1, Signedness_ symbol2) ->
if symbol1 == symbol2
then discharge
else failMatch "distinct signedness"
(DV_ _ dv1, DV_ _ dv2) ->
if dv1 == dv2
then discharge
else failMatch "distinct domain values"
(Bottom_ _, Bottom_ _) ->
discharge
(Top_ _, Top_ _) ->
discharge
(Ceil_ _ _ term1, Ceil_ _ _ term2) ->
decompose term1 term2
(Equals_ _ _ term11 term12, Equals_ _ _ term21 term22) ->
decomposeTwo term11 term21 term12 term22
(And_ _ term1 term2, _) ->
-- subject should match _both_ term1 and term2
decomposeTwo term1 subject term2 subject
(Not_ _ term1, Not_ _ term2) ->
decompose term1 term2
(Forall_ _ variable1 term1, Forall_ _ variable2 term2) ->
decomposeBinder (inject variable1) term1 (inject variable2) term2
(Exists_ _ variable1 term1, Exists_ _ variable2 term2) ->
decomposeBinder (inject variable1) term1 (inject variable2) term2
(App_ symbol1 children1, App_ symbol2 children2) ->
if symbol1 == symbol2
then decomposeList (zip children1 children2)
else failMatch $ "distinct application symbols: " <> (unparseToText symbol1) <> ", " <> (unparseToText symbol2)
(Inj_ inj1, Inj_ inj2)
| Just unifyData <- matchInjs inj1 inj2 ->
case unifyData of
UnifyInjDirect _ -> decompose (injChild inj1) (injChild inj2)
UnifyInjSplit InjPair{inj1 = firstInj} ->
if injFrom firstInj == injFrom inj1
then decompose (injChild inj1) (evaluateInj inj2{injTo = injFrom inj1})
else decompose (evaluateInj inj1{injTo = injFrom inj2}) (injChild inj2)
UnifyInjDistinct _ -> failMatch "distinct injections"
(Inj_ inj@Inj{injChild = App_ firstHead firstChildren}, secondTerm@(App_ secondHead _))
| Just unifyData <-
unifyOverloadingVsOverloaded
overloadSimplifier
secondHead
secondTerm
(Application firstHead firstChildren)
inj{injChild = ()} ->
decomposeOverload $ flipResult unifyData
(firstTerm@(App_ firstHead _), Inj_ inj@Inj{injChild = App_ secondHead secondChildren})
| Just unifyData <-
unifyOverloadingVsOverloaded
overloadSimplifier
firstHead
firstTerm
(Application secondHead secondChildren)
inj{injChild = ()} ->
decomposeOverload unifyData
(Inj_ inj1@Inj{injChild = App_ firstHead firstChildren}, Inj_ Inj{injChild = App_ secondHead secondChildren})
| Just unifyData <-
unifyOverloadingCommonOverload
overloadSimplifier
(Application firstHead firstChildren)
(Application secondHead secondChildren)
inj1{injChild = ()} ->
decomposeOverload unifyData
(_, _)
| Just True <- List.isListSort tools sort ->
case (List.normalize pat, List.normalize subject) of
(Var_ var1, Var_ var2)
| var1 == var2 ->
discharge
(ElemVar_ var1, _)
| isFunctionPattern subject ->
bind (inject var1) subject
(SetVar_ var1, _) ->
bind (inject var1) subject
(InternalList_ InternalList{internalListChild = l1}, InternalList_ InternalList{internalListChild = l2}) ->
if length l1 == length l2
then decomposeList $ zip (toList l1) (toList l2)
else failMatch "list lengths are not equal"
(App_ symbol [InternalList_ InternalList{internalListChild = l1}, var@(ElemVar_ _)], InternalList_ InternalList{internalListChild = l2})
| List.isSymbolConcat symbol ->
if length l1 <= length l2
then
let (start, l2') = Seq.splitAt (length l1) l2
in decomposeList $ (var, List.asInternal tools sort l2') : zip (toList l1) (toList start)
else failMatch "subject list is too short"
(App_ symbol [var@(ElemVar_ _), InternalList_ InternalList{internalListChild = l1}], InternalList_ InternalList{internalListChild = l2})
| List.isSymbolConcat symbol ->
if length l1 <= length l2
then
let (l2', end) = Seq.splitAt (length l2 - length l1) l2
in decomposeList $ (var, List.asInternal tools sort l2') : zip (toList l1) (toList end)
else failMatch "subject list is too short"
_ -> failMatch "unimplemented list matching case"
(InternalMap_ _, InternalMap_ _) ->
defer
(InternalSet_ _, InternalSet_ _) ->
defer
_ -> failMatch "unimplemented matching case"
where
sort = termLikeSort pat
-- recurse by deleting the current MatchItem
discharge ::
Simplifier (Either Text (MatchResult RewritingVariableName))
~discharge = patternMatch' sideCondition rest deferred predicate subst
bind ::
SomeVariable RewritingVariableName ->
TermLike RewritingVariableName ->
Simplifier (Either Text (MatchResult RewritingVariableName))
bind var term
| variableSort var == termLikeSort term =
let varName = variableName var
freeVars = FreeVariables.toNames (freeVariables term)
in if not $ Set.disjoint freeVars boundSet
then failMatch "bound variable would escape binder"
else case Map.lookup varName subst of
Nothing -> patternMatch' sideCondition rest deferred (isTermDefined var term) (Map.insert (variableName var) term subst)
Just binding -> if binding == term then patternMatch' sideCondition rest deferred predicate subst else failMatch "nonlinear matching fails equality test"
| otherwise = failMatch "sorts don't match"
isTermDefined ::
SomeVariable RewritingVariableName ->
TermLike RewritingVariableName ->
MultiAnd (Predicate RewritingVariableName)
isTermDefined var term
| SideCondition.isDefined sideCondition term || isSetVariable var = predicate
| otherwise = (predicate <> MultiAnd.make [makeCeilPredicate term])
-- recurse by adding a new MatchItem
decompose ::
TermLike RewritingVariableName ->
TermLike RewritingVariableName ->
Simplifier (Either Text (MatchResult RewritingVariableName))
decompose term1 term2 = patternMatch' sideCondition ((MatchItem term1 term2 boundVars boundSet) : rest) deferred predicate subst
-- recurse by moving a MatchItem to the priority queue
defer ::
Simplifier (Either Text (MatchResult RewritingVariableName))
~defer = patternMatch' sideCondition rest (MinQueue.insert (MatchItem pat subject boundVars boundSet) deferred) predicate subst
-- recurse by adding two new MatchItems
decomposeTwo ::
TermLike RewritingVariableName ->
TermLike RewritingVariableName ->
TermLike RewritingVariableName ->
TermLike RewritingVariableName ->
Simplifier (Either Text (MatchResult RewritingVariableName))
decomposeTwo term11 term21 term12 term22 = patternMatch' sideCondition ((MatchItem term11 term21 boundVars boundSet) : (MatchItem term12 term22 boundVars boundSet) : rest) deferred predicate subst
-- recurse by adding a list of MatchItems
decomposeList ::
[(TermLike RewritingVariableName, TermLike RewritingVariableName)] ->
Simplifier (Either Text (MatchResult RewritingVariableName))
decomposeList l =
let l' = map (\(p, s) -> MatchItem p s boundVars boundSet) l
in patternMatch' sideCondition (l' ++ rest) deferred predicate subst
-- recurse on an \exists or \forall
decomposeBinder ::
SomeVariable RewritingVariableName ->
TermLike RewritingVariableName ->
SomeVariable RewritingVariableName ->
TermLike RewritingVariableName ->
Simplifier (Either Text (MatchResult RewritingVariableName))
decomposeBinder var1 term1 var2 term2 = patternMatch' sideCondition ((MatchItem term1 term2 ((var1, var2) : boundVars) (Set.insert (variableName var2) boundSet)) : rest) deferred predicate subst
decomposeOverload (Overloading.Resolution (Simple (Pair term1 term2))) = decompose term1 term2
decomposeOverload _ = failMatch "unsupported overload case in matching"
isFree ::
SomeVariable RewritingVariableName ->
Bool
isFree var = not $ any ((== var) . fst) boundVars
-- returns true if two variables are the same bound variable
isBoundToSameAs var1 var2 =
case find ((== var1) . fst) boundVars of
Nothing -> undefined
Just (_, bound) -> var2 == bound
failMatch ::
Text ->
Simplifier (Either Text (MatchResult RewritingVariableName))
failMatch msg = return $ Left msg
type MatchingVariable variable = InternalVariable variable
type PushList a = [(a, a)] -> Simplifier (Either Text (MatchResult RewritingVariableName))
matchNormalizedAc ::
forall normalized.
( AcWrapper normalized
) =>
PushList (TermLike RewritingVariableName) ->
([(Value normalized, Value normalized)] -> [(TermLike RewritingVariableName, TermLike RewritingVariableName)]) ->
(Element normalized -> Element normalized -> [(TermLike RewritingVariableName, TermLike RewritingVariableName)]) ->
(NormalizedAc normalized -> TermLike RewritingVariableName) ->
NormalizedAc normalized ->
NormalizedAc normalized ->
Simplifier (Either Text (MatchResult RewritingVariableName))
matchNormalizedAc decomposeList unwrapValues unwrapElementToTermLike wrapTermLike normalized1 normalized2
| not (null excessConcrete1) =
failMatch "AC collection missing concrete elements"
-- (exactly) one concrete excess element is mapped to an abstract
-- element in the pattern
, null opaque1
, null excessAbstract2
-- (see intersectionMerge, should not happen)
, (key1, _) <- unwrapElement element1
, isNothing (lookupSymbolicKeyOfAc key1 normalized2) =
-- bind element1 <- concElem2, deal with the identical parts
let concElem2' = wrapElement $ Bifunctor.first (from @Key) concElem2
in decomposeList $
unwrapElementToTermLike element1 concElem2'
<> unwrapValues (concrete12 <> abstractMerge)
-- Case for when all symbolic elements in normalized1 appear in normalized2:
| [] <- excessAbstract1 =
do
case opaque1 of
[] ->
if not (null opaque2) || not (null excessConcrete2) || not (null excessAbstract2)
then failMatch "AC collection without opaque terms has excess elements"
else decomposeList $ unwrapValues $ concrete12 ++ abstractMerge
[frame1]
-- One opaque each, rest are syntactically equal
| null excessAbstract2
, null excessConcrete2
, [frame2] <- opaque2 ->
decomposeList $ (frame1, frame2) : unwrapValues (concrete12 ++ abstractMerge)
| otherwise ->
let normalized2' =
wrapTermLike
normalized2
{ concreteElements = excessConcrete2
, elementsWithVariables = excessAbstract2
}
in decomposeList $ (frame1, normalized2') : unwrapValues (concrete12 ++ abstractMerge)
frames1
| null excessAbstract2
, null excessConcrete2
, frames2 <- opaque2
, length frames1 == length frames2 ->
decomposeList $ unwrapValues (concrete12 ++ abstractMerge) ++ zip opaque1ACs opaque2ACs
| otherwise -> failMatch "unimplemented ac collection case"
-- Normalized1 looks like K |-> V M:Map
| [element1] <- abstract1
, [frame1] <- opaque1
, null concrete1 = do
let (key1, value1) = unwrapElement element1
case lookupSymbolicKeyOfAc key1 normalized2 of
Just value2 ->
let normalized2' =
wrapTermLike $
removeSymbolicKeyOfAc key1 normalized2
in decomposeList $ (frame1, normalized2') : unwrapValues [(value1, value2)]
Nothing ->
case (headMay . HashMap.toList $ concrete2, headMay abstract2) of
-- Select first concrete element of normalized2, concreteElement2
-- Match K |-> V with concreteElement2
-- Match M with remove(normalized2, concreteElement2)
(Just concreteElement2, _) ->
let liftedConcreteElement2 =
Bifunctor.first (from @Key) concreteElement2
& wrapElement
(key2, _) = concreteElement2
normalized2' =
wrapTermLike $
removeConcreteKeyOfAc key2 normalized2
in decomposeList $ (frame1, normalized2') : unwrapElementToTermLike element1 liftedConcreteElement2
-- Select first symbolic element of normalized2, symbolic2
-- Match K |-> V with symbolic2
-- Match M with remove(normalized2, symbolic2)
(_, Just abstractElement2) ->
let (key2, _) = unwrapElement abstractElement2
normalized2' =
wrapTermLike $
removeSymbolicKeyOfAc key2 normalized2
in decomposeList $ (frame1, normalized2') : unwrapElementToTermLike element1 abstractElement2
_ -> failMatch "unimplemented ac collection case"
| length excessAbstract1 == length excessAbstract2
, length concrete1 == length concrete2
, length opaque1 == length opaque2 =
decomposeList $ unwrapValues (abstractMerge ++ concrete12) ++ unwrapElements (zip excessAbstract1 excessAbstract2) ++ (zip opaque1ACs opaque2ACs)
| otherwise = failMatch "unimplemented ac collection case"
where
abstract1 = elementsWithVariables normalized1
concrete1 = concreteElements normalized1
opaque1 = opaque normalized1
opaque1ACs = wrapTermLike . toSingleOpaqueElem <$> opaque1
abstract2 = elementsWithVariables normalized2
concrete2 = concreteElements normalized2
opaque2 = opaque normalized2
opaque2ACs = wrapTermLike . toSingleOpaqueElem <$> opaque2
excessConcrete1 = HashMap.difference concrete1 concrete2
excessConcrete2 = HashMap.difference concrete2 concrete1
concrete12 = HashMap.elems $ HashMap.intersectionWith (,) concrete1 concrete2
unwrapElements = concatMap $ uncurry unwrapElementToTermLike
IntersectionDifference
{ intersection = abstractMerge
, excessFirst = excessAbstract1
, excessSecond = excessAbstract2
} = abstractIntersectionMerge abstract1 abstract2
abstractIntersectionMerge ::
[Element normalized] ->
[Element normalized] ->
IntersectionDifference
(Element normalized)
(Value normalized, Value normalized)
abstractIntersectionMerge first second =
keyBasedIntersectionDifference
elementMerger
(toMap first)
(toMap second)
where
toMap ::
[Element normalized] ->
Map (TermLike RewritingVariableName) (Element normalized)
toMap elements =
let elementMap =
Map.fromList
( map
(\value -> (elementKey value, value))
elements
)
in if length elementMap == length elements
then elementMap
else error "Invalid map: duplicated keys."
elementKey ::
Element normalized ->
TermLike RewritingVariableName
elementKey = fst . unwrapElement
elementMerger ::
Element normalized ->
Element normalized ->
(Value normalized, Value normalized)
elementMerger = (,) `on` (snd . unwrapElement)
data IntersectionDifference a b = IntersectionDifference
{ intersection :: ![b]
, excessFirst :: ![a]
, excessSecond :: ![a]
}
deriving stock (Show)
emptyIntersectionDifference :: IntersectionDifference a b
emptyIntersectionDifference =
IntersectionDifference
{ intersection = []
, excessFirst = []
, excessSecond = []
}
keyBasedIntersectionDifference ::
forall a b k.
Ord k =>
(a -> a -> b) ->
Map k a ->
Map k a ->
IntersectionDifference a b
keyBasedIntersectionDifference merger firsts seconds =
foldl'
helper
emptyIntersectionDifference
(Map.elems $ Align.align firsts seconds)
where
helper ::
IntersectionDifference a b ->
These a a ->
IntersectionDifference a b
helper
result@IntersectionDifference{excessFirst}
(This first) =
result{excessFirst = first : excessFirst}
helper
result@IntersectionDifference{excessSecond}
(That second) =
result{excessSecond = second : excessSecond}
helper
result@IntersectionDifference{intersection}
(These first second) =
result{intersection = merger first second : intersection}
renormalizeBuiltins ::
InternalVariable variable =>
TermLike variable ->
TermLike variable
renormalizeBuiltins =
Recursive.fold $ \base@(attrs :< termLikeF) ->
let bottom' = mkBottom (termSort attrs)
in case termLikeF of
InternalMapF internalMap ->
Lens.traverseOf (field @"builtinAcChild") Ac.renormalize internalMap
& maybe bottom' mkInternalMap
InternalSetF internalSet ->
Lens.traverseOf (field @"builtinAcChild") Ac.renormalize internalSet
& maybe bottom' mkInternalSet
_ -> Recursive.embed base
|
482749839ff0a5180aaf362c8e61caf416477a2990ac93a4a53ca18b60fe45dd | Shirakumo/trial | build.lisp | (in-package #:org.shirakumo.fraf.trial.release)
(defvar *default-build-features*
'(:trial-optimize-all :cl-opengl-no-masked-traps :cl-opengl-no-check-error
:cl-mixed-no-restarts :trial-release))
(defmethod build :around (target)
(restart-case
(call-next-method)
(continue ()
:report "Treat the build as successful")
(retry ()
:report "Retry the build"
(build target))))
(defun build-args ()
(let ((features (append *default-build-features* (config :build :features))))
(append (list "--dynamic-space-size" (princ-to-string (config :build :dynamic-space-size))
"--eval" (format NIL "(setf *features* (append *features* '~s))" features))
(config :build :build-arguments)
(list "--eval" (format NIL "(asdf:make :~a :force T)" (config :system))
"--disable-debugger" "--quit"))))
(defmethod build ((target (eql :linux)))
#+linux (apply #'run (config :build :linux) (build-args))
#+windows (apply #'run "wsl.exe" (config :build :linux) (build-args)))
(defmethod build ((target (eql :windows)))
(apply #'run (config :build :windows) (build-args)))
(defmethod build ((target (eql :macos)))
(apply #'run (config :build :macos) (build-args)))
(defmethod build ((target (eql T)))
(dolist (target (config :build :targets))
(build target)))
(defmethod build ((targets cons))
(dolist (target targets)
(build target)))
(defmethod build ((target null)))
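;; Usage sketch: calling (build T) iterates over (config :build :targets) and
;; invokes BUILD on each entry, so the keyword-specialized methods above are
;; normally reached through that single call.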
(defmethod test :around (target)
(setf (uiop:getenv "TRIAL_QUIT_AFTER_INIT") "true")
(unwind-protect
(restart-case (call-next-method)
(continue ()
:report "Treat test as successful")
(retry ()
:report "Retry the test"
(test target))
(rebuild ()
:report "Rebuild and retry"
(build target)
(test target)))
(setf (uiop:getenv "TRIAL_QUIT_AFTER_INIT") "false")))
(defmethod test ((target (eql :linux)))
(dolist (file (directory (merge-pathnames "bin/*.run" (asdf:system-source-directory (config :system)))))
#+linux (run file)
#+windows (run "wsl.exe" file)))
(defmethod test ((target (eql :windows)))
(dolist (file (directory (merge-pathnames "bin/*.exe" (asdf:system-source-directory (config :system)))))
#+windows (run file)
#-windows (run "wine" file)))
(defmethod test ((target (eql T)))
(dolist (target (config :build :targets))
(test target)))
(defmethod test ((targets cons))
(dolist (target targets)
(test target)))
(defmethod test ((target null)))
| null | https://raw.githubusercontent.com/Shirakumo/trial/c4882a4526b91ef700270b29702d723f98ff5817/release/build.lisp | lisp | (in-package #:org.shirakumo.fraf.trial.release)
(defvar *default-build-features*
'(:trial-optimize-all :cl-opengl-no-masked-traps :cl-opengl-no-check-error
:cl-mixed-no-restarts :trial-release))
(defmethod build :around (target)
(restart-case
(call-next-method)
(continue ()
:report "Treat the build as successful")
(retry ()
:report "Retry the build"
(build target))))
(defun build-args ()
(let ((features (append *default-build-features* (config :build :features))))
(append (list "--dynamic-space-size" (princ-to-string (config :build :dynamic-space-size))
"--eval" (format NIL "(setf *features* (append *features* '~s))" features))
(config :build :build-arguments)
(list "--eval" (format NIL "(asdf:make :~a :force T)" (config :system))
"--disable-debugger" "--quit"))))
(defmethod build ((target (eql :linux)))
#+linux (apply #'run (config :build :linux) (build-args))
#+windows (apply #'run "wsl.exe" (config :build :linux) (build-args)))
(defmethod build ((target (eql :windows)))
(apply #'run (config :build :windows) (build-args)))
(defmethod build ((target (eql :macos)))
(apply #'run (config :build :macos) (build-args)))
(defmethod build ((target (eql T)))
(dolist (target (config :build :targets))
(build target)))
(defmethod build ((targets cons))
(dolist (target targets)
(build target)))
(defmethod build ((target null)))
(defmethod test :around (target)
(setf (uiop:getenv "TRIAL_QUIT_AFTER_INIT") "true")
(unwind-protect
(restart-case (call-next-method)
(continue ()
:report "Treat test as successful")
(retry ()
:report "Retry the test"
(test target))
(rebuild ()
:report "Rebuild and retry"
(build target)
(test target)))
(setf (uiop:getenv "TRIAL_QUIT_AFTER_INIT") "false")))
(defmethod test ((target (eql :linux)))
(dolist (file (directory (merge-pathnames "bin/*.run" (asdf:system-source-directory (config :system)))))
#+linux (run file)
#+windows (run "wsl.exe" file)))
(defmethod test ((target (eql :windows)))
(dolist (file (directory (merge-pathnames "bin/*.exe" (asdf:system-source-directory (config :system)))))
#+windows (run file)
#-windows (run "wine" file)))
(defmethod test ((target (eql T)))
(dolist (target (config :build :targets))
(test target)))
(defmethod test ((targets cons))
(dolist (target targets)
(test target)))
(defmethod test ((target null)))
|
|
211bb721f3f9980979103efdd6a2a35defbe2c1d17df921010831a2f670d61f0 | scalaris-team/scalaris | yaws_ls.erl | %% -*- coding: latin-1 -*-
%%%----------------------------------------------------------------------
%%% File : yaws_ls.erl
%%% Author : < >
%%% Purpose :
%%% Created : 5 Feb 2002 by < >
%%% Modified : 13 Jan 2004 by < >
%%% Modified : Jan 2006 by < >
%%%----------------------------------------------------------------------
-module(yaws_ls).
-author('').
-include("../include/yaws.hrl").
-include("../include/yaws_api.hrl").
-include("yaws_debug.hrl").
-include_lib("kernel/include/file.hrl").
-export([list_directory/6, out/1]).
%% Exports for EUNIT.
-export([parse_query/1, trim/2]).
-define(FILE_LEN_SZ, 45).
list_directory(_Arg, CliSock, List, DirName, Req, DoAllZip) ->
{abs_path, Path} = Req#http_request.path,
{DirStr, Pos, Direction, Qry} = parse_query(Path),
?Debug("List=~p Dirname~p~n", [List, DirName]),
Descriptions = read_descriptions(DirName),
L0 = lists:zf(
fun(F) ->
File = DirName ++ [$/|F],
FI = file:read_file_info(File),
file_entry(FI, DirName, F, Qry,Descriptions)
end, List),
L1 = lists:keysort(Pos, L0),
L2 = if Direction == normal -> L1;
Direction == reverse -> lists:reverse(L1)
end,
L3 = [Html || {_, _, _, _, Html} <- L2],
Body = [ doc_head(DirStr),
dir_header(DirName,DirStr),
table_head(Direction),
parent_dir(),
if
DoAllZip == true ->
allzip();
DoAllZip == true_nozip ->
[];
true ->
[]
end,
%% if DoAllGZip == true -> alltgz() end,
%% if DoAllBZip2 == true -> alltbz2() end,
%% if DoAllZip == true -> alltgz() end,
%% if DoAllZip == true -> alltbz2() end,
L3,
table_tail(),
dir_footer(DirName),%yaws:address(),
doc_tail()
],
B = unicode:characters_to_binary(Body),
%% Always use UTF-8 encoded file names. So, set the UTF-8 charset in the
%% Content-Type header
NewCT = case yaws:outh_get_content_type() of
undefined ->
"text/html; charset=utf-8";
CT0 ->
[CT|_] = yaws:split_sep(CT0, $;),
CT++"; charset=utf-8"
end,
yaws:outh_set_content_type(NewCT),
yaws_server:accumulate_content(B),
yaws_server:deliver_accumulated(CliSock),
yaws_server:done_or_continue().
parse_query(Path) ->
case string:tokens(Path, [$?]) of
[DirStr, [PosC, $=, DirC] = Q] ->
Pos = case PosC of
$m -> 2;
$M -> 2; % last modified
$s -> 3;
$S -> 3; % size
$d -> 4;
$D -> 4; % description
_ -> 1 % name (default)
end,
Dir = case DirC of
$r -> reverse;
_ -> normal
end,
{DirStr, Pos, Dir, "/?"++Q};
_ ->
{Path, 1, normal, "/"}
end.
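%% Illustrative examples (paths made up, not from the original module):
%% parse_query("/docs?M=r") -> {"/docs", 2, reverse, "/?M=r"}
%% parse_query("/docs") -> {"/docs", 1, normal, "/"}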
parse_description(Line) ->
L = string:strip(Line),
Pos = string:chr(L,$ ),
Filename = string:substr(L, 1, Pos-1),
D = string:substr(L,Pos+1),
Description = string:strip(D,left),
{Filename,Description}.
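%% For example (made-up MANIFEST.txt line):
%% parse_description("foo.txt A small example file") ->
%% {"foo.txt", "A small example file"}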
read_descriptions(DirName) ->
File = filename:join(DirName, "MANIFEST.txt"),
case file:read_file(File) of
{ok,Bin} -> Lines = string:tokens(binary_to_list(Bin),"\n"),
lists:map(fun parse_description/1,Lines);
_ -> []
end.
get_description(Name,Descriptions) ->
case lists:keysearch(Name,1,Descriptions) of
{value, {_,Description}} -> Description;
_ -> []
end.
doc_head(DirName) ->
EncDirName = file_display_name(yaws_api:url_decode(DirName)),
HtmlDirName = yaws_api:htmlize(EncDirName),
?F("<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Strict//EN\" \"-strict.dtd\">\n"
"<html>\n"
" <head>\n"
" <meta charset=\"utf-8\">"
" <title>Index of ~ts</title>\n"
" <style type=\"text/css\">\n"
" img { border: 0; padding: 0 2px; vertical-align: text-bottom; }\n"
" td { font-family: monospace; padding: 2px 3px; text-align:left;\n"
" vertical-align: bottom; white-space: pre; }\n"
" td:first-child { text-align: left; padding: 2px 10px 2px 3px; }\n"
" table { border: 0; }\n"
" </style>\n"
"</head> \n"
"<body>\n",
[list_to_binary(HtmlDirName)]
).
doc_tail() ->
"</body>\n"
"</html>\n".
table_head(Direction) ->
NextDirection = if Direction == normal -> "r";
Direction == reverse -> "n"
end,
["<table>\n"
" <tr>\n"
" <td><img src=\"/icons/blank.gif\" alt=\" \"/><a href=\"?N=",NextDirection,"\">Name</a></td>\n"
" <td><a href=\"?M=",NextDirection,"\">Last Modified</a></td>\n"
" <td><a href=\"?S=",NextDirection,"\">Size</a></td>\n"
" <td><a href=\"?D=",NextDirection,"\">Description</a></td>\n"
" </tr>\n"
" <tr><th colspan=\"4\"><hr/></th></tr>\n"].
table_tail() ->
" <tr><th colspan=\"4\"><hr/></th></tr>\n"
"</table>\n".
dir_footer(DirName) ->
File = DirName ++ [$/ | "README.txt"],
case file:read_file(File) of
{ok,Bin} -> "<pre>\n" ++ binary_to_list(Bin) ++ "</pre>\n";
_ -> yaws:address()
end.
dir_header(DirName,DirStr) ->
File = DirName ++ [$/ | "HEADER.txt"],
case file:read_file(File) of
{ok,Bin} ->
"<pre>\n" ++ binary_to_list(Bin) ++ "</pre>\n";
_ ->
EncDirStr = file_display_name(yaws_api:url_decode(DirStr)),
HtmlDirName = yaws_api:htmlize(EncDirStr),
?F("<h1>Index of ~ts</h1>\n", [list_to_binary(HtmlDirName)])
end.
parent_dir() ->
{Gif, Alt} = list_gif(directory,"."),
?F(" <tr>\n"
" <td><img src=~p alt=~p/><a href=\"..\">Parent Directory</a></td>\n"
" <td></td>\n"
" <td>-</td>\n"
" <td></td>\n"
" </tr>\n",
["/icons/" ++ Gif,
Alt
]).
%% FIXME: would be nice with a good size approx. but it would require
%% a deep scan of possibly the entire docroot, (and also some knowledge
%% about zip's compression ratio in advance...)
allzip() ->
{Gif, Alt} = list_gif(zip,""),
?F(" <tr>\n"
" <td><img src=~p alt=~p/><a href=\"all.zip\">all.zip</a></td>\n"
" <td></td>\n"
" <td>-</td>\n"
" <td>Build a zip archive of current directory</td>\n"
" </tr>\n",
["/icons/" ++ Gif,
Alt]).
%% alltgz() ->
%% {Gif, Alt} = list_gif(zip,""),
%% ?F(" <tr>\n"
%% " <td><img src=~p alt=~p/><a href=\"all.tgz\">all.tgz</a></td>\n"
%% " <td></td>\n"
%% " <td>-</td>\n"
%% " <td>Build a gzip archive of current directory</td>\n"
%% " </tr>\n",
%% ["/icons/" ++ Gif,
%% Alt]).
%% alltbz2() ->
%% {Gif, Alt} = list_gif(zip,""),
%% ?F(" <tr>\n"
%% " <td><img src=~p alt=~p/><a href=\"all.tbz2\">all.tbz2</a></td>\n"
%% " <td></td>\n"
%% " <td>-</td>\n"
%% " <td>Build a bzip2 archive of current directory</td>\n"
%% " </tr>\n",
%% ["/icons/" ++ Gif,
%% Alt]).
is_user_dir(SP) ->
case SP of
[$/,$~ | T] -> User = string:sub_word(T,1,$/),
case catch yaws:user_to_home(User) of
{'EXIT', _} ->
false;
Home ->
{true,Home}
end;
_ -> false
end.
out(A) ->
SP = A#arg.server_path,
PP = A#arg.appmod_prepath,
Dir = case is_user_dir(SP) of
{true,Home} -> Home ++ "/public_html";
false -> A#arg.docroot
end ++ PP,
%% {html,?F("<h2>~p</h2>",[Dir])}.
YPid = self(),
Forbidden_Paths = accumulate_forbidden_paths(),
case filename:basename(A#arg.server_path) of
"all.zip" -> spawn_link(fun() -> zip(YPid, Dir, Forbidden_Paths) end),
{streamcontent, "application/zip", ""}
" all.tgz " - > spawn_link(fun ( ) - > tgz(YPid , ) end ) ,
, " application / gzip " , " " } ;
%% "all.tbz2" -> spawn_link(fun() -> tbz2(YPid, Dir) end),
, " application / gzip " , " " }
end.
generate_random_fn() ->
Bytes = try yaws_dynopts:rand_bytes(64) of
B when is_bitstring(B) ->
B
catch _:_ ->
%% for installations without crypto
<< <<(yaws_dynopts:random_uniform(256) - 1)>> || _ <- lists:seq(1,64) >>
end,
<< Int:512/unsigned-big-integer >> = << Bytes/binary >>,
integer_to_list(Int).
mktempfilename([]) ->
{error, no_temp_dir};
mktempfilename([Dir|R]) ->
RandomFN = generate_random_fn(),
Filename = filename:join(Dir, RandomFN),
case file:open(Filename, [write]) of
{ok, FileHandle} ->
{ok, {Filename, FileHandle}};
_Else ->
mktempfilename(R)
end.
mktempfilename() ->
%% TODO: Add code to determine the temporary directory on various
%% operating systems.
PossibleDirs = ["/tmp", "/var/tmp"],
mktempfilename(PossibleDirs).
zip(YPid, Dir, ForbiddenPaths) ->
{ok, RE_ForbiddenNames} = re:compile("\\.yaws\$", [unicode]),
Files = dig_through_dir(Dir, ForbiddenPaths, RE_ForbiddenNames),
{ok, {Tempfile, TempfileH}} = mktempfilename(),
file:write(TempfileH, lists:foldl(fun(I, Acc) ->
[Acc, list_to_binary(file_display_name(I)), "\n"]
end, [], Files)),
file:close(TempfileH),
process_flag(trap_exit, true),
%% TODO: find a way to directly pass the list of files to
%% zip. Erlang ports do not allow stdin to be closed
%% independently; however, zip needs stdin to be closed as an
%% indicator that the list of files is complete.
P = open_port({spawn, "zip -q -1 - -@ < " ++ Tempfile},
[{cd, Dir},use_stdio, binary, exit_status]),
F = fun() ->
file:delete(Tempfile)
end,
stream_loop(YPid, P, F).
accumulate_forbidden_paths() ->
SC = get(sc),
Auth = SC#sconf.authdirs,
lists:foldl(fun({Path, _Auth}, Acc) ->
Acc ++ [Path]
end, [], Auth).
%% tgz(YPid, Dir) ->
%% process_flag(trap_exit, true),
%% P = open_port({spawn, "tar cz ."},
%% [{cd, Dir},use_stdio, binary, exit_status]),
%% stream_loop(YPid, P).
%% tbz2(YPid, Dir) ->
%% process_flag(trap_exit, true),
%% P = open_port({spawn, "tar cj ."},
%% [{cd, Dir},use_stdio, binary, exit_status]),
%% stream_loop(YPid, P).
dir_contains_indexfile(_Dir, []) ->
false;
dir_contains_indexfile(Dir, [File|R]) ->
case file:read_file_info(filename:join(Dir, File)) of
{ok, _} ->
true;
_Else ->
dir_contains_indexfile(Dir, R)
end.
dir_contains_indexfile(Dir) ->
Indexfiles = [".yaws.auth", "index.yaws", "index.html", "index.htm"],
dir_contains_indexfile(Dir, Indexfiles).
dig_through_dir(Basedirlen, Dir, ForbiddenPaths, RE_ForbiddenNames) ->
Dir1 = string:sub_string(Dir, Basedirlen),
case {lists:member(Dir1, ForbiddenPaths),
dir_contains_indexfile(Dir)} of
{true,_} ->
[];
{_,true} ->
[];
{false, false} ->
{ok, Files} = file:list_dir(Dir),
lists:foldl(fun(I, Acc) ->
Filename = filename:join(Dir, I),
case {file:read_file_info(Filename),
re:run(Filename, RE_ForbiddenNames)} of
{_, {match, _}} ->
Acc;
{{ok, #file_info{type=directory}}, _} ->
Acc ++ dig_through_dir(
Basedirlen,
Filename,
ForbiddenPaths,
RE_ForbiddenNames);
{{ok, #file_info{type=regular}}, _} ->
Acc ++ [string:sub_string(
Filename, Basedirlen)];
_Else ->
Acc %% Ignore other files
end
end, [], Files)
end.
dig_through_dir(Dir, ForbiddenPaths, RE_ForbiddenNames) ->
dig_through_dir(length(Dir) + 1,
Dir,
ForbiddenPaths,
RE_ForbiddenNames).
stream_loop(YPid, P, FinishedFun) ->
receive
{P, {data, Data}} ->
yaws_api:stream_chunk_deliver_blocking(YPid, Data),
stream_loop(YPid, P, FinishedFun);
{P, {exit_status, _}} ->
yaws_api:stream_chunk_end(YPid),
FinishedFun();
{'EXIT', YPid, Status} ->
FinishedFun(),
exit(Status);
Else ->
FinishedFun(),
error_logger:error_msg("Could not deliver zip file: ~p\n", [Else])
end.
file_entry({ok, FI}, _DirName, Name, Qry, Descriptions) ->
?Debug("file_entry(~p) ", [Name]),
Ext = filename:extension(Name),
{Gif, Alt} = list_gif(FI#file_info.type, Ext),
QryStr = if FI#file_info.type == directory -> Qry;
true -> ""
end,
EncName = file_display_name(Name),
Description = get_description(Name,Descriptions),
Entry =
?F(" <tr>\n"
" <td><img src=~p alt=~p/><a href=~p title=\"~ts\">~ts</a></td>\n"
" <td>~s</td>\n"
" <td>~s</td>\n"
" <td>~s</td>\n"
" </tr>\n",
["/icons/" ++ Gif,
Alt,
yaws_api:url_encode(Name) ++ QryStr,
list_to_binary(EncName),
list_to_binary(trim(EncName,?FILE_LEN_SZ)),
datestr(FI),
sizestr(FI),
Description]),
?Debug("Entry:~p", [Entry]),
{true, {EncName, FI#file_info.mtime, FI#file_info.size, Description, Entry}};
file_entry(_Err, _, _Name, _, _) ->
?Debug("no entry for ~p: ~p", [_Name, _Err]),
false.
trim(L,N) ->
trim(L,N,[]).
trim([_H1,_H2,_H3]=[H|T], 3=I, Acc) ->
trim(T, I-1, [H|Acc]);
trim([H1,H2,H3|_T], 3=_I, Acc) when H1 < 128, H2 < 128, H3 < 128 ->
lists:reverse(Acc) ++ "..>";
trim([H1,H2,H3|_T], 3=_I, [H0|Acc]) ->
%% Drop UTF8 continuation bytes: 10xxxxxx
Hs0 = lists:dropwhile(fun(Byte) -> Byte bsr 6 == 2#10 end, [H3,H2,H1,H0]),
%% Drop UTF8 leading byte: 11xxxxxx
Hs = lists:dropwhile(fun(Byte) -> Byte bsr 6 == 2#11 end, Hs0),
lists:reverse(Hs++Acc) ++ "..>";
trim([H|T], I, Acc) ->
trim(T, I-1, [H|Acc]);
trim([], _I, Acc) ->
lists:reverse(Acc).
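%% Illustrative behaviour (ASCII input, made-up strings):
%% trim("abcdef", 5) -> "ab..>" (truncated to width 5)
%% trim("abc", 3) -> "abc" (short enough, returned unchanged)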
%% FI -> 16-Jan-2006 23:06
datestr(FI) ->
{{Year, Month, Day}, {Hour, Min, _}} = FI#file_info.mtime,
io_lib:format("~s-~s-~w ~s:~s",
[yaws:mk2(Day),yaws:month(Month),Year,
yaws:mk2(Hour),yaws:mk2(Min)]).
sizestr(FI) when FI#file_info.size > 1000000 ->
?F("~.1fM", [FI#file_info.size / 1000000]);
sizestr(FI) when FI#file_info.size > 1000 ->
?F("~wk", [trunc(FI#file_info.size / 1000)]);
sizestr(FI) when FI#file_info.size == 0 ->
?F("0k", []);
sizestr(_FI) ->
?F("1k", []). % As apache does it...
list_gif(directory, ".") ->
{"back.gif", "[DIR]"};
list_gif(regular, ".txt") ->
{"text.gif", "[TXT]"};
list_gif(regular, ".c") ->
{"c.gif", "[ ]"};
list_gif(regular, ".dvi") ->
{"dvi.gif", "[ ]"};
list_gif(regular, ".pdf") ->
{"pdf.gif", "[ ]"};
list_gif(regular, _) ->
{"layout.gif", "[ ]"};
list_gif(directory, _) ->
{"dir.gif", "[DIR]"};
list_gif(zip, _) ->
{"compressed.gif", "[DIR]"};
list_gif(_, _) ->
{"unknown.gif", "[OTH]"}.
%% Assume that all file names are UTF-8 encoded. If the VM uses ISO-latin-1
%% encoding, then no conversion is needed (file already returns the byte
%% representation of file names). If the VM uses UTF-8, we need to do a little
%% conversion to return the byte representation of file names.
file_display_name(Name) ->
case file:native_name_encoding() of
latin1 -> Name;
utf8 -> binary_to_list(unicode:characters_to_binary(Name))
end.
| null | https://raw.githubusercontent.com/scalaris-team/scalaris/feb894d54e642bb3530e709e730156b0ecc1635f/contrib/yaws/src/yaws_ls.erl | erlang | -*- coding: latin-1 -*-
----------------------------------------------------------------------
File : yaws_ls.erl
Purpose :
----------------------------------------------------------------------
if DoAllGZip == true -> alltgz() end,
yaws:address(),
Content-Type header
last modified
size
description
name (default)
FIXME: would be nice with a good size approx. but it would require
a deep scan of possibly the entire docroot, (and also some knowledge
about zip's compression ratio in advance...)
alltgz() ->
?F(" <tr>\n"
" <td><img src=~p alt=~p/><a href=\"all.tgz\">all.tgz</a></td>\n"
" <td></td>\n"
" <td>-</td>\n"
" <td>Build a gzip archive of current directory</td>\n"
" </tr>\n",
["/icons/" ++ Gif,
Alt]).
?F(" <tr>\n"
" <td><img src=~p alt=~p/><a href=\"all.tbz2\">all.tbz2</a></td>\n"
" <td></td>\n"
" <td>-</td>\n"
" <td>Build a bzip2 archive of current directory</td>\n"
" </tr>\n",
["/icons/" ++ Gif,
Alt]).
{html,?F("<h2>~p</h2>",[Dir])}.
"all.tbz2" -> spawn_link(fun() -> tbz2(YPid, Dir) end),
for installations without crypto
TODO: Add code to determine the temporary directory on various
operating systems.
TODO: find a way to directly pass the list of files to
indicator that the list of files is complete.
process_flag(trap_exit, true),
P = open_port({spawn, "tar cz ."},
stream_loop(YPid, P).
tbz2(YPid, Dir) ->
process_flag(trap_exit, true),
P = open_port({spawn, "tar cj ."},
stream_loop(YPid, P).
Ignore other files
FI -> 16-Jan-2006 23:06
As apache does it...
encoding, then no conversion is needed (file already returns the byte
conversion to return the byte representation of file names. | Author : < >
Created : 5 Feb 2002 by < >
Modified : 13 Jan 2004 by < >
Modified : Jan 2006 by < >
-module(yaws_ls).
-author('').
-include("../include/yaws.hrl").
-include("../include/yaws_api.hrl").
-include("yaws_debug.hrl").
-include_lib("kernel/include/file.hrl").
-export([list_directory/6, out/1]).
%% Exports for EUNIT.
-export([parse_query/1, trim/2]).
-define(FILE_LEN_SZ, 45).
list_directory(_Arg, CliSock, List, DirName, Req, DoAllZip) ->
{abs_path, Path} = Req#http_request.path,
{DirStr, Pos, Direction, Qry} = parse_query(Path),
?Debug("List=~p Dirname~p~n", [List, DirName]),
Descriptions = read_descriptions(DirName),
L0 = lists:zf(
fun(F) ->
File = DirName ++ [$/|F],
FI = file:read_file_info(File),
file_entry(FI, DirName, F, Qry,Descriptions)
end, List),
L1 = lists:keysort(Pos, L0),
L2 = if Direction == normal -> L1;
Direction == reverse -> lists:reverse(L1)
end,
L3 = [Html || {_, _, _, _, Html} <- L2],
Body = [ doc_head(DirStr),
dir_header(DirName,DirStr),
table_head(Direction),
parent_dir(),
if
DoAllZip == true ->
allzip();
DoAllZip == true_nozip ->
[];
true ->
[]
end,
%% if DoAllBZip2 == true -> alltbz2() end,
%% if DoAllZip == true -> alltgz() end,
%% if DoAllZip == true -> alltbz2() end,
L3,
table_tail(),
doc_tail()
],
B = unicode:characters_to_binary(Body),
%% Always use UTF-8 encoded file names. So, set the UTF-8 charset in the
NewCT = case yaws:outh_get_content_type() of
undefined ->
"text/html; charset=utf-8";
CT0 ->
[CT|_] = yaws:split_sep(CT0, $;),
CT++"; charset=utf-8"
end,
yaws:outh_set_content_type(NewCT),
yaws_server:accumulate_content(B),
yaws_server:deliver_accumulated(CliSock),
yaws_server:done_or_continue().
parse_query(Path) ->
case string:tokens(Path, [$?]) of
[DirStr, [PosC, $=, DirC] = Q] ->
Pos = case PosC of
$m -> 2;
$s -> 3;
$d -> 4;
end,
Dir = case DirC of
$r -> reverse;
_ -> normal
end,
{DirStr, Pos, Dir, "/?"++Q};
_ ->
{Path, 1, normal, "/"}
end.
parse_description(Line) ->
L = string:strip(Line),
Pos = string:chr(L,$ ),
Filename = string:substr(L, 1, Pos-1),
D = string:substr(L,Pos+1),
Description = string:strip(D,left),
{Filename,Description}.
read_descriptions(DirName) ->
File = filename:join(DirName, "MANIFEST.txt"),
case file:read_file(File) of
{ok,Bin} -> Lines = string:tokens(binary_to_list(Bin),"\n"),
lists:map(fun parse_description/1,Lines);
_ -> []
end.
get_description(Name,Descriptions) ->
case lists:keysearch(Name,1,Descriptions) of
{value, {_,Description}} -> Description;
_ -> []
end.
doc_head(DirName) ->
EncDirName = file_display_name(yaws_api:url_decode(DirName)),
HtmlDirName = yaws_api:htmlize(EncDirName),
?F("<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Strict//EN\" \"-strict.dtd\">\n"
"<html>\n"
" <head>\n"
" <meta charset=\"utf-8\">"
" <title>Index of ~ts</title>\n"
" <style type=\"text/css\">\n"
" img { border: 0; padding: 0 2px; vertical-align: text-bottom; }\n"
" td { font-family: monospace; padding: 2px 3px; text-align:left;\n"
" vertical-align: bottom; white-space: pre; }\n"
" td:first-child { text-align: left; padding: 2px 10px 2px 3px; }\n"
" table { border: 0; }\n"
" </style>\n"
"</head> \n"
"<body>\n",
[list_to_binary(HtmlDirName)]
).
doc_tail() ->
"</body>\n"
"</html>\n".
table_head(Direction) ->
NextDirection = if Direction == normal -> "r";
Direction == reverse -> "n"
end,
["<table>\n"
" <tr>\n"
" <td><img src=\"/icons/blank.gif\" alt=\" \"/><a href=\"?N=",NextDirection,"\">Name</a></td>\n"
" <td><a href=\"?M=",NextDirection,"\">Last Modified</a></td>\n"
" <td><a href=\"?S=",NextDirection,"\">Size</a></td>\n"
" <td><a href=\"?D=",NextDirection,"\">Description</a></td>\n"
" </tr>\n"
" <tr><th colspan=\"4\"><hr/></th></tr>\n"].
table_tail() ->
" <tr><th colspan=\"4\"><hr/></th></tr>\n"
"</table>\n".
dir_footer(DirName) ->
File = DirName ++ [$/ | "README.txt"],
case file:read_file(File) of
{ok,Bin} -> "<pre>\n" ++ binary_to_list(Bin) ++ "</pre>\n";
_ -> yaws:address()
end.
dir_header(DirName,DirStr) ->
File = DirName ++ [$/ | "HEADER.txt"],
case file:read_file(File) of
{ok,Bin} ->
"<pre>\n" ++ binary_to_list(Bin) ++ "</pre>\n";
_ ->
EncDirStr = file_display_name(yaws_api:url_decode(DirStr)),
HtmlDirName = yaws_api:htmlize(EncDirStr),
?F("<h1>Index of ~ts</h1>\n", [list_to_binary(HtmlDirName)])
end.
parent_dir() ->
{Gif, Alt} = list_gif(directory,"."),
?F(" <tr>\n"
" <td><img src=~p alt=~p/><a href=\"..\">Parent Directory</a></td>\n"
" <td></td>\n"
" <td>-</td>\n"
" <td></td>\n"
" </tr>\n",
["/icons/" ++ Gif,
Alt
]).
allzip() ->
{Gif, Alt} = list_gif(zip,""),
?F(" <tr>\n"
" <td><img src=~p alt=~p/><a href=\"all.zip\">all.zip</a></td>\n"
" <td></td>\n"
" <td>-</td>\n"
" <td>Build a zip archive of current directory</td>\n"
" </tr>\n",
["/icons/" ++ Gif,
Alt]).
%% {Gif, Alt} = list_gif(zip,""),
%% alltbz2() ->
%% {Gif, Alt} = list_gif(zip,""),
is_user_dir(SP) ->
case SP of
[$/,$~ | T] -> User = string:sub_word(T,1,$/),
case catch yaws:user_to_home(User) of
{'EXIT', _} ->
false;
Home ->
{true,Home}
end;
_ -> false
end.
out(A) ->
SP = A#arg.server_path,
PP = A#arg.appmod_prepath,
Dir = case is_user_dir(SP) of
{true,Home} -> Home ++ "/public_html";
false -> A#arg.docroot
end ++ PP,
YPid = self(),
Forbidden_Paths = accumulate_forbidden_paths(),
case filename:basename(A#arg.server_path) of
"all.zip" -> spawn_link(fun() -> zip(YPid, Dir, Forbidden_Paths) end),
{streamcontent, "application/zip", ""}
" all.tgz " - > spawn_link(fun ( ) - > tgz(YPid , ) end ) ,
, " application / gzip " , " " } ;
, " application / gzip " , " " }
end.
generate_random_fn() ->
Bytes = try yaws_dynopts:rand_bytes(64) of
B when is_bitstring(B) ->
B
catch _:_ ->
<< <<(yaws_dynopt:random_uniform(256) - 1)>> || _ <- lists:seq(1,64) >>
end,
<< Int:512/unsigned-big-integer >> = << Bytes/binary >>,
integer_to_list(Int).
mktempfilename([]) ->
{error, no_temp_dir};
mktempfilename([Dir|R]) ->
RandomFN = generate_random_fn(),
Filename = filename:join(Dir, RandomFN),
case file:open(Filename, [write]) of
{ok, FileHandle} ->
{ok, {Filename, FileHandle}};
_Else ->
mktempfilename(R)
end.
mktempfilename() ->
PossibleDirs = ["/tmp", "/var/tmp"],
mktempfilename(PossibleDirs).
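%% Illustrative example (not part of the original module): a successful call
%% returns {ok, {Path, IoDevice}}, e.g. {ok, {"/tmp/53096...", Fd}}, while
%% {error, no_temp_dir} is returned if neither candidate directory is writable.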
zip(YPid, Dir, ForbiddenPaths) ->
{ok, RE_ForbiddenNames} = re:compile("\\.yaws\$", [unicode]),
Files = dig_through_dir(Dir, ForbiddenPaths, RE_ForbiddenNames),
{ok, {Tempfile, TempfileH}} = mktempfilename(),
file:write(TempfileH, lists:foldl(fun(I, Acc) ->
[Acc, list_to_binary(file_display_name(I)), "\n"]
end, [], Files)),
file:close(TempfileH),
process_flag(trap_exit, true),
    %% Hand the file list to zip via a temp file and a shell redirect: Erlang
    %% ports do not allow stdin to be closed independently; however, zip needs
    %% stdin to be closed as an end-of-input marker when reading names with -@.
P = open_port({spawn, "zip -q -1 - -@ < " ++ Tempfile},
[{cd, Dir},use_stdio, binary, exit_status]),
F = fun() ->
file:delete(Tempfile)
end,
stream_loop(YPid, P, F).
accumulate_forbidden_paths() ->
SC = get(sc),
Auth = SC#sconf.authdirs,
lists:foldl(fun({Path, _Auth}, Acc) ->
Acc ++ [Path]
end, [], Auth).
tgz(YPid, Dir, ForbiddenPaths) ->
    %% Counterpart of zip/3 that streams a gzip-compressed tar archive. Only the
    %% port options survived in this copy; the rest of the body mirrors zip/3 and
    %% the exact tar invocation is an assumption.
    {ok, RE_ForbiddenNames} = re:compile("\\.yaws\$", [unicode]),
    Files = dig_through_dir(Dir, ForbiddenPaths, RE_ForbiddenNames),
    {ok, {Tempfile, TempfileH}} = mktempfilename(),
    file:write(TempfileH, [[list_to_binary(file_display_name(I)), "\n"] || I <- Files]),
    file:close(TempfileH),
    process_flag(trap_exit, true),
    P = open_port({spawn, "tar -c -z --files-from " ++ Tempfile},
                  [{cd, Dir},use_stdio, binary, exit_status]),
    stream_loop(YPid, P, fun() -> file:delete(Tempfile) end).
dir_contains_indexfile(_Dir, []) ->
false;
dir_contains_indexfile(Dir, [File|R]) ->
case file:read_file_info(filename:join(Dir, File)) of
{ok, _} ->
true;
_Else ->
dir_contains_indexfile(Dir, R)
end.
dir_contains_indexfile(Dir) ->
Indexfiles = [".yaws.auth", "index.yaws", "index.html", "index.htm"],
dir_contains_indexfile(Dir, Indexfiles).
dig_through_dir(Basedirlen, Dir, ForbiddenPaths, RE_ForbiddenNames) ->
Dir1 = string:sub_string(Dir, Basedirlen),
case {lists:member(Dir1, ForbiddenPaths),
dir_contains_indexfile(Dir)} of
{true,_} ->
[];
{_,true} ->
[];
{false, false} ->
{ok, Files} = file:list_dir(Dir),
lists:foldl(fun(I, Acc) ->
Filename = filename:join(Dir, I),
case {file:read_file_info(Filename),
re:run(Filename, RE_ForbiddenNames)} of
{_, {match, _}} ->
Acc;
{{ok, #file_info{type=directory}}, _} ->
Acc ++ dig_through_dir(
Basedirlen,
Filename,
ForbiddenPaths,
RE_ForbiddenNames);
{{ok, #file_info{type=regular}}, _} ->
Acc ++ [string:sub_string(
Filename, Basedirlen)];
                            _Else ->
                                Acc
                        end
end, [], Files)
end.
dig_through_dir(Dir, ForbiddenPaths, RE_ForbiddenNames) ->
dig_through_dir(length(Dir) + 1,
Dir,
ForbiddenPaths,
RE_ForbiddenNames).
stream_loop(YPid, P, FinishedFun) ->
receive
{P, {data, Data}} ->
yaws_api:stream_chunk_deliver_blocking(YPid, Data),
stream_loop(YPid, P, FinishedFun);
{P, {exit_status, _}} ->
yaws_api:stream_chunk_end(YPid),
FinishedFun();
{'EXIT', YPid, Status} ->
FinishedFun(),
exit(Status);
Else ->
FinishedFun(),
error_logger:error_msg("Could not deliver zip file: ~p\n", [Else])
end.
file_entry({ok, FI}, _DirName, Name, Qry, Descriptions) ->
?Debug("file_entry(~p) ", [Name]),
Ext = filename:extension(Name),
{Gif, Alt} = list_gif(FI#file_info.type, Ext),
QryStr = if FI#file_info.type == directory -> Qry;
true -> ""
end,
EncName = file_display_name(Name),
Description = get_description(Name,Descriptions),
Entry =
?F(" <tr>\n"
" <td><img src=~p alt=~p/><a href=~p title=\"~ts\">~ts</a></td>\n"
" <td>~s</td>\n"
" <td>~s</td>\n"
" <td>~s</td>\n"
" </tr>\n",
["/icons/" ++ Gif,
Alt,
yaws_api:url_encode(Name) ++ QryStr,
list_to_binary(EncName),
list_to_binary(trim(EncName,?FILE_LEN_SZ)),
datestr(FI),
sizestr(FI),
Description]),
?Debug("Entry:~p", [Entry]),
{true, {EncName, FI#file_info.mtime, FI#file_info.size, Description, Entry}};
file_entry(_Err, _, _Name, _, _) ->
?Debug("no entry for ~p: ~p", [_Name, _Err]),
false.
trim(L,N) ->
trim(L,N,[]).
trim([_H1,_H2,_H3]=[H|T], 3=I, Acc) ->
trim(T, I-1, [H|Acc]);
trim([H1,H2,H3|_T], 3=_I, Acc) when H1 < 128, H2 < 128, H3 < 128 ->
lists:reverse(Acc) ++ "..>";
trim([H1,H2,H3|_T], 3=_I, [H0|Acc]) ->
    %% Drop UTF8 continuation bytes: 10xxxxxx
Hs0 = lists:dropwhile(fun(Byte) -> Byte bsr 6 == 2#10 end, [H3,H2,H1,H0]),
    %% Drop UTF8 leading byte: 11xxxxxx
Hs = lists:dropwhile(fun(Byte) -> Byte bsr 6 == 2#11 end, Hs0),
lists:reverse(Hs++Acc) ++ "..>";
trim([H|T], I, Acc) ->
trim(T, I-1, [H|Acc]);
trim([], _I, Acc) ->
lists:reverse(Acc).
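%% Illustrative example (not part of the original module): trim("abcdefgh", 5)
%% yields "ab..>", while strings of at most N characters, e.g. trim("abc", 3),
%% are returned unchanged; the UTF-8 clauses avoid cutting a multi-byte char.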
datestr(FI) ->
{{Year, Month, Day}, {Hour, Min, _}} = FI#file_info.mtime,
io_lib:format("~s-~s-~w ~s:~s",
[yaws:mk2(Day),yaws:month(Month),Year,
yaws:mk2(Hour),yaws:mk2(Min)]).
sizestr(FI) when FI#file_info.size > 1000000 ->
?F("~.1fM", [FI#file_info.size / 1000000]);
sizestr(FI) when FI#file_info.size > 1000 ->
?F("~wk", [trunc(FI#file_info.size / 1000)]);
sizestr(FI) when FI#file_info.size == 0 ->
?F("0k", []);
sizestr(_FI) ->
    ?F("1k", []).

list_gif(directory, ".") ->
{"back.gif", "[DIR]"};
list_gif(regular, ".txt") ->
{"text.gif", "[TXT]"};
list_gif(regular, ".c") ->
{"c.gif", "[ ]"};
list_gif(regular, ".dvi") ->
{"dvi.gif", "[ ]"};
list_gif(regular, ".pdf") ->
{"pdf.gif", "[ ]"};
list_gif(regular, _) ->
{"layout.gif", "[ ]"};
list_gif(directory, _) ->
{"dir.gif", "[DIR]"};
list_gif(zip, _) ->
{"compressed.gif", "[DIR]"};
list_gif(_, _) ->
{"unknown.gif", "[OTH]"}.
%% Assume that all file names are UTF-8 encoded. If the VM uses the ISO-latin-1
%% representation of file names, they can be used as is. If the VM uses UTF-8,
%% we need to do a little conversion before the name can be displayed.
file_display_name(Name) ->
case file:native_name_encoding() of
latin1 -> Name;
utf8 -> binary_to_list(unicode:characters_to_binary(Name))
end.
|
3f47510c9149ea88ed743161d7c5efeb31133a605eb3b3006d90adb6d63f3411 | nvim-treesitter/nvim-treesitter | injections.scm | (comment) @comment
| null | https://raw.githubusercontent.com/nvim-treesitter/nvim-treesitter/75d98eaac424661812cc18de11f3d8037be8e8f4/queries/po/injections.scm | scheme | (comment) @comment
|
|
37ccdb3a17c4af288f8e0c9e908bf1dd2deb24dbc8e30c6fb43aaaf6381f2960 | vouillon/osm | routing_profile.mli | (* OSM tools
* Copyright (C) 2013 Jérôme Vouillon
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, with linking exception;
* either version 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*)
module type API = sig
type str
val s : string -> str
val nil : str
module Table : sig
type 'a t
val create : (string * 'a) list -> 'a t
val create_set : string list -> unit t
val mem : 'a t -> str -> bool
val find : 'a t -> str -> 'a
end
module Tags : sig
type t
val mem : t -> str -> str -> bool
val find : t -> str -> str
end
type direction = [`BIDIRECTIONAL | `ONEWAY | `OPPOSITE]
type params =
{ mutable speed : float;
mutable backward_speed : float; (* < 0 if same as speed *)
mutable duration : float; (* < 0 if not defined *)
mutable direction : direction }
end
module type PROFILE =
functor (X : API) ->
sig
open X
val name : string
val way : Tags.t -> params -> unit
end
module Register (F : PROFILE) : sig end
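(* A minimal example of a profile implementation (illustrative sketch only;
   the tag names and speed values below are assumptions, not part of this
   interface):

     module Foot (X : API) = struct
       open X
       let name = "foot"
       let speeds = Table.create ["footway", 5.; "path", 4.]
       let way tags p =
         let h = Tags.find tags (s "highway") in
         if Table.mem speeds h then begin
           p.speed <- Table.find speeds h;
           p.direction <- `BIDIRECTIONAL
         end
     end

     module M = Register (Foot)
*)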
(****)
type direction = [`BIDIRECTIONAL | `ONEWAY | `OPPOSITE]
type params =
{ mutable speed : float;
mutable backward_speed : float;
mutable duration : float; (* < 0 if not defined *)
mutable direction : direction }
val find : Dictionary.t -> string ->
(int * int) list -> params
| null | https://raw.githubusercontent.com/vouillon/osm/a42d1bcc82a4ad73c26c81ac7a75f9f1c7470344/osm/routing_profile.mli | ocaml | < 0 if same as speed
< 0 if not defined
**
< 0 if not defined | OSM tools
* Copyright ( C ) 2013
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation , with linking exception ;
* either version 2.1 of the License , or ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
* Copyright (C) 2013 Jérôme Vouillon
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, with linking exception;
* either version 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*)
module type API = sig
type str
val s : string -> str
val nil : str
module Table : sig
type 'a t
val create : (string * 'a) list -> 'a t
val create_set : string list -> unit t
val mem : 'a t -> str -> bool
val find : 'a t -> str -> 'a
end
module Tags : sig
type t
val mem : t -> str -> str -> bool
val find : t -> str -> str
end
type direction = [`BIDIRECTIONAL | `ONEWAY | `OPPOSITE]
type params =
{ mutable speed : float;
mutable direction : direction }
end
module type PROFILE =
functor (X : API) ->
sig
open X
val name : string
val way : Tags.t -> params -> unit
end
module Register (F : PROFILE) : sig end
type direction = [`BIDIRECTIONAL | `ONEWAY | `OPPOSITE]
type params =
{ mutable speed : float;
mutable backward_speed : float;
mutable direction : direction }
val find : Dictionary.t -> string ->
(int * int) list -> params
|
3ca85ee389c5546ef1179b24ebfae0ee079682bf65e5f10779163ee22ad2834b | janegca/htdp2e | Exercise-071-EditorV1.rkt | ;; The first three lines of this file were inserted by DrRacket. They record metadata
;; about the language level of this file in a form that our tools can easily process.
#reader(lib "htdp-beginner-reader.ss" "lang")((modname Exercise-071-EditorV1) (read-case-sensitive #t) (teachpacks ((lib "image.rkt" "teachpack" "2htdp") (lib "universe.rkt" "teachpack" "2htdp"))) (htdp-settings #(#t constructor repeating-decimal #f #t none #f ((lib "image.rkt" "teachpack" "2htdp") (lib "universe.rkt" "teachpack" "2htdp")))))
; Exercise 71.
;
; Design edit. The function consumes two inputs, an editor ed and a KeyEvent
; ke, and it produces another editor. Its task is to add a single-character
; KeyEvent ke to the end of the pre field of ed, unless
; ke denotes the backspace ("\b") key. In that case, it deletes the character
; immediately to the left of the cursor (if there are any). The function
; ignores the tab key ("\t") and the return key ("\r").
;
; The function pays attention to only two KeyEvents longer than one letter:
; "left" and "right". The left arrow moves the cursor one character to the
; left (if any), and the right arrow moves it one character to the right
; (if any). All other such KeyEvents are ignored.
;
; Develop a good number of examples for edit, paying attention to special
; cases. When we solved this exercise, we created 20 examples and turned all
; of them into tests.
;
; Hint Think of this function as consuming KeyEvents, a collection that is
; specified as an enumeration. It uses auxiliary functions to deal with the
; Editor structure. Keep a wish list handy; you will need to design additional
; functions for most of these auxiliary functions, such as string-first,
; string-rest, string-last, and string-remove-last. If you haven't done so,
; solve the exercises in Functions.
(define-struct editor [pre post])
; Editor = (make-editor String String)
; interpretation (make-editor s t) means the text in the editor is
; (string-append s t) with the cursor displayed between s and t
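; Example (illustrative, not part of the original exercise): the value
; (make-editor "hello " "world") represents the text "hello world" with the
; cursor sitting between "hello " and "world".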
; Physical constants
(define WIDTH 200)
(define HEIGHT 20)
(define YCENTER (/ HEIGHT 2))
(define FONT-SIZE 16)
(define FONT-COLOR "black")
; Graphic constants
(define FIELD (empty-scene WIDTH HEIGHT))
(define CURSOR (rectangle 1 HEIGHT "solid" "red"))
; Functions
; Editor -> Image
; returns the editor text as an image
(check-expect (render (make-editor "hello " "world"))
(overlay/align "left" "center"
(beside (text "hello " FONT-SIZE FONT-COLOR)
CURSOR
(text "world" FONT-SIZE FONT-COLOR))
FIELD))
(define (render e)
(place-image/align
(beside (text (editor-pre e) FONT-SIZE FONT-COLOR)
CURSOR
(text (editor-post e) FONT-SIZE FONT-COLOR))
0 YCENTER "left" "center" FIELD))
; Editor -> Editor
; modifies the contents of the editor based on the given key event
(check-expect (edit (make-editor "hel" "lo") "left")
(make-editor "he" "llo"))
(check-expect (edit (make-editor "hel" "lo") "right")
(make-editor "hell" "o"))
(check-expect (edit (make-editor "hel" "o") "l")
(make-editor "hell" "o"))
(check-expect (edit (make-editor "hello" "world") "up")
(make-editor "hello" "world"))
(define (edit ed ke)
(cond
[(string=? ke "left") (move-left ed)] ; left-arrow key event
[(string=? ke "right") (move-right ed)] ; right-arrow key event
[(= (string-length ke) 1) (process-key ed ke)] ; single character key event
[else ed])) ; ignore other key events
; Editor -> Editor
; move the cursor one letter to the left, if possible
(check-expect (move-left (make-editor "hello" ""))
(make-editor "hell" "o"))
(check-expect (move-left (make-editor "" "hello"))
(make-editor "" "hello"))
(define (move-left ed)
(cond
[(string=? "" (editor-pre ed)) ed]
[ else (make-editor (string-remove-last (editor-pre ed))
(string-append (string-last (editor-pre ed))
(editor-post ed)))]))
; Editor -> Editor
; move the cursor one letter to the right, if possible
(check-expect (move-right (make-editor "hello" ""))
(make-editor "hello" ""))
(check-expect (move-right (make-editor "hell" "o"))
(make-editor "hello" ""))
(check-expect (move-right (make-editor "hello" "world"))
(make-editor "hellow" "orld"))
(define (move-right ed)
(cond
[(string=? "" (editor-post ed)) ed]
[else (make-editor (string-append (editor-pre ed)
(string-first (editor-post ed)))
(string-rest (editor-post ed)))]))
; Editor -> Editor
; delete one letter to the left if backspace key event,
; otherwise append the character
(check-expect (process-key (make-editor "hell" "o") "\b")
(make-editor "hel" "o"))
(check-expect (process-key (make-editor "hell" "o") "\t")
(make-editor "hell" "o"))
(check-expect (process-key (make-editor "hell" "o") "\r")
(make-editor "hell" "o"))
(check-expect (process-key (make-editor "hell" "o") "up")
(make-editor "hell" "o"))
(check-expect (process-key (make-editor "hello wo" "") "r")
(make-editor "hello wor" ""))
(check-expect (process-key (make-editor "hell" "world") "o")
(make-editor "hello" "world"))
(define (process-key ed ke)
(cond
[(string=? "\b" ke) (backspace ed)]
[(or (string=? "\t" ke) (string=? "\r" ke)) ed]
[(> (string-length ke) 1) ed]
[else (make-editor (string-append (editor-pre ed) ke)
(editor-post ed))]))
; Editor -> Editor
; delete a character at the cursor position, if one exists
(check-expect (backspace (make-editor "hell" "o"))
(make-editor "hel" "o"))
(check-expect (backspace (make-editor "" "hello"))
(make-editor "" "hello"))
(define (backspace ed)
(cond
[(string=? "" (editor-pre ed)) ed]
[ else (make-editor (string-remove-last (editor-pre ed))
(editor-post ed))]))
; String -> String
; returns the first character from a non-empty string.
(check-expect (string-first "hello") "h")
(define (string-first str)
(string-ith str 0))
; String -> String
; returns the last character from a non-empty string
(check-expect (string-last "hello") "o")
(define (string-last str)
(string-ith str (- (string-length str) 1)))
; String -> String
; return all but the first letter in the given string
(check-expect (string-rest "hello") "ello")
(define (string-rest str)
(substring str 1 (string-length str)))
; String -> String
; return all but the last character of the given string
(check-expect (string-remove-last "hello") "hell")
(define (string-remove-last str)
(substring str 0 (- (string-length str) 1)))
| null | https://raw.githubusercontent.com/janegca/htdp2e/2d50378135edc2b8b1816204021f8763f8b2707b/01-FixedSizeData/Exercise-071-EditorV1.rkt | racket | about the language level of this file in a form that our tools can easily process.
immediately to the left of the cursor (if there are any). The function
"left" and "right". The left arrow moves the cursor one character to the
of them into tests.
specified as an enumeration. It uses auxiliary functions to deal with the
Editor structure. Keep a wish list handy; you will need to design additional
functions for most of these auxiliary functions, such as string-first,
string-rest, string-last, and string-remove-last. If you haven’t done so,
solve the exercises in Functions.
interpretation (make-editor s t) means the text in the editor is
(string-append s t) with the cursor displayed between s and t
Graphic constants
Functions
Editor -> Image
returns the editor text as an image
modifies the contents of the editor based on the given key event
left-arrow key event
right-arrow key event
single character key event
ignore other key events
Editor -> Editor
Editor -> Editor
otherwise append the character
Editor -> Editor
delete a character at the cursor position, if one exists
String -> String
String -> String
returns the last character from a non-empty string
String -> String
String -> String
return all but the last character of the given string | The first three lines of this file were inserted by . They record metadata
#reader(lib "htdp-beginner-reader.ss" "lang")((modname Exercise-071-EditorV1) (read-case-sensitive #t) (teachpacks ((lib "image.rkt" "teachpack" "2htdp") (lib "universe.rkt" "teachpack" "2htdp"))) (htdp-settings #(#t constructor repeating-decimal #f #t none #f ((lib "image.rkt" "teachpack" "2htdp") (lib "universe.rkt" "teachpack" "2htdp")))))
Exercise 71 .
Design edit . The function consumes two inputs , an editor ed and a
, and it produces another editor . Its task is to add a
single - character to the end of the pre field of ed , unless
ke denotes the backspace ( " \b " ) key . In that case , it deletes the character
ignores the tab key ( " \t " ) and the return key ( " \r " ) .
The function pays attention to only two KeyEvents longer than one letter :
left ( if any ) , and the right arrow moves it one character to the right
( if any ) . All other such KeyEvents are ignored .
Develop a good number of examples for edit , paying attention to special
cases . When we solved this exercise , we created 20 examples and turned all
Hint Think of this function as consuming KeyEvents , a collection that is
(define-struct editor [pre post])
Editor = ( make - editor )
Pyhsical constants
(define WIDTH 200)
(define HEIGHT 20)
(define YCENTER (/ HEIGHT 2))
(define FONT-SIZE 16)
(define FONT-COLOR "black")
(define FIELD (empty-scene WIDTH HEIGHT))
(define CURSOR (rectangle 1 HEIGHT "solid" "red"))
(check-expect (render (make-editor "hello " "world"))
(overlay/align "left" "center"
(beside (text "hello " FONT-SIZE FONT-COLOR)
CURSOR
(text "world" FONT-SIZE FONT-COLOR))
FIELD))
(define (render e)
(place-image/align
(beside (text (editor-pre e) FONT-SIZE FONT-COLOR)
CURSOR
(text (editor-post e) FONT-SIZE FONT-COLOR))
0 YCENTER "left" "center" FIELD))
Editor - > Editor
(check-expect (edit (make-editor "hel" "lo") "left")
(make-editor "he" "llo"))
(check-expect (edit (make-editor "hel" "lo") "right")
(make-editor "hell" "o"))
(check-expect (edit (make-editor "hel" "o") "l")
(make-editor "hell" "o"))
(check-expect (edit (make-editor "hello" "world") "up")
(make-editor "hello" "world"))
(define (edit ed ke)
(cond
move the cursor one letter to the left , if possible
(check-expect (move-left (make-editor "hello" ""))
(make-editor "hell" "o"))
(check-expect (move-left (make-editor "" "hello"))
(make-editor "" "hello"))
(define (move-left ed)
(cond
[(string=? "" (editor-pre ed)) ed]
[ else (make-editor (string-remove-last (editor-pre ed))
(string-append (string-last (editor-pre ed))
(editor-post ed)))]))
move the cursor one letter to the right , if possible
(check-expect (move-right (make-editor "hello" ""))
(make-editor "hello" ""))
(check-expect (move-right (make-editor "hell" "o"))
(make-editor "hello" ""))
(check-expect (move-right (make-editor "hello" "world"))
(make-editor "hellow" "orld"))
(define (move-right ed)
(cond
[(string=? "" (editor-post ed)) ed]
[else (make-editor (string-append (editor-pre ed)
(string-first (editor-post ed)))
(string-rest (editor-post ed)))]))
Editor - > Editor
delete one letter to the left if backspace key event ,
(check-expect (process-key (make-editor "hell" "o") "\b")
(make-editor "hel" "o"))
(check-expect (process-key (make-editor "hell" "o") "\t")
(make-editor "hell" "o"))
(check-expect (process-key (make-editor "hell" "o") "\r")
(make-editor "hell" "o"))
(check-expect (process-key (make-editor "hell" "o") "up")
(make-editor "hell" "o"))
(check-expect (process-key (make-editor "hello wo" "") "r")
(make-editor "hello wor" ""))
(check-expect (process-key (make-editor "hell" "world") "o")
(make-editor "hello" "world"))
(define (process-key ed ke)
(cond
[(string=? "\b" ke) (backspace ed)]
[(or (string=? "\t" ke) (string=? "\r" ke)) ed]
[(> (string-length ke) 1) ed]
[else (make-editor (string-append (editor-pre ed) ke)
(editor-post ed))]))
(check-expect (backspace (make-editor "hell" "o"))
(make-editor "hel" "o"))
(check-expect (backspace (make-editor "" "hello"))
(make-editor "" "hello"))
(define (backspace ed)
(cond
[(string=? "" (editor-pre ed)) ed]
[ else (make-editor (string-remove-last (editor-pre ed))
(editor-post ed))]))
returns the first character from a non - empty string .
(check-expect (string-first "hello") "h")
(define (string-first str)
(string-ith str 0))
(check-expect (string-last "hello") "o")
(define (string-last str)
(string-ith str (- (string-length str) 1)))
return all but the first letter in the given string
(check-expect (string-rest "hello") "ello")
(define (string-rest str)
(substring str 1 (string-length str)))
(check-expect (string-remove-last "hello") "hell")
(define (string-remove-last str)
(substring str 0 (- (string-length str) 1)))
|
0b51a45a5d71567c0de226b9c6738973bf32fd0e76121890f20f94d15d815ef0 | ocamllabs/ocaml-modular-implicits | hashtbl.ml | (***********************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the GNU Library General Public License , with
(* the special exception on linking described in file ../LICENSE. *)
(* *)
(***********************************************************************)
(* Hash tables *)
external seeded_hash_param :
int -> int -> int -> 'a -> int = "caml_hash" "noalloc"
external old_hash_param :
int -> int -> 'a -> int = "caml_hash_univ_param" "noalloc"
let hash x = seeded_hash_param 10 100 0 x
let hash_param n1 n2 x = seeded_hash_param n1 n2 0 x
let seeded_hash seed x = seeded_hash_param 10 100 seed x
(* We do dynamic hashing, and resize the table and rehash the elements
when buckets become too long. *)
type ('a, 'b) t =
{ mutable size: int; (* number of entries *)
mutable data: ('a, 'b) bucketlist array; (* the buckets *)
mutable seed: int; (* for randomization *)
initial_size: int; (* initial array size *)
}
and ('a, 'b) bucketlist =
Empty
| Cons of 'a * 'b * ('a, 'b) bucketlist
(* To pick random seeds if requested *)
let randomized_default =
let params =
try Sys.getenv "OCAMLRUNPARAM" with Not_found ->
try Sys.getenv "CAMLRUNPARAM" with Not_found -> "" in
String.contains params 'R'
let randomized = ref randomized_default
let randomize () = randomized := true
let prng = lazy (Random.State.make_self_init())
(* Creating a fresh, empty table *)
let rec power_2_above x n =
if x >= n then x
else if x * 2 > Sys.max_array_length then x
else power_2_above (x * 2) n
let create ?(random = !randomized) initial_size =
let s = power_2_above 16 initial_size in
let seed = if random then Random.State.bits (Lazy.force prng) else 0 in
{ initial_size = s; size = 0; seed = seed; data = Array.make s Empty }
let clear h =
h.size <- 0;
let len = Array.length h.data in
for i = 0 to len - 1 do
h.data.(i) <- Empty
done
let reset h =
let len = Array.length h.data in
if Obj.size (Obj.repr h) < 4 (* compatibility with old hash tables *)
|| len = h.initial_size then
clear h
else begin
h.size <- 0;
h.data <- Array.make h.initial_size Empty
end
let copy h = { h with data = Array.copy h.data }
let length h = h.size
let resize indexfun h =
let odata = h.data in
let osize = Array.length odata in
let nsize = osize * 2 in
if nsize < Sys.max_array_length then begin
let ndata = Array.make nsize Empty in
h.data <- ndata; (* so that indexfun sees the new bucket count *)
let rec insert_bucket = function
Empty -> ()
| Cons(key, data, rest) ->
insert_bucket rest; (* preserve original order of elements *)
let nidx = indexfun h key in
ndata.(nidx) <- Cons(key, data, ndata.(nidx)) in
for i = 0 to osize - 1 do
insert_bucket odata.(i)
done
end
let key_index h key =
(* compatibility with old hash tables *)
if Obj.size (Obj.repr h) >= 3
then (seeded_hash_param 10 100 h.seed key) land (Array.length h.data - 1)
else (old_hash_param 10 100 key) mod (Array.length h.data)
let add h key info =
let i = key_index h key in
let bucket = Cons(key, info, h.data.(i)) in
h.data.(i) <- bucket;
h.size <- h.size + 1;
if h.size > Array.length h.data lsl 1 then resize key_index h
let remove h key =
let rec remove_bucket = function
| Empty ->
Empty
| Cons(k, i, next) ->
if compare k key = 0
then begin h.size <- h.size - 1; next end
else Cons(k, i, remove_bucket next) in
let i = key_index h key in
h.data.(i) <- remove_bucket h.data.(i)
let rec find_rec key = function
| Empty ->
raise Not_found
| Cons(k, d, rest) ->
if compare key k = 0 then d else find_rec key rest
let find h key =
match h.data.(key_index h key) with
| Empty -> raise Not_found
| Cons(k1, d1, rest1) ->
if compare key k1 = 0 then d1 else
match rest1 with
| Empty -> raise Not_found
| Cons(k2, d2, rest2) ->
if compare key k2 = 0 then d2 else
match rest2 with
| Empty -> raise Not_found
| Cons(k3, d3, rest3) ->
if compare key k3 = 0 then d3 else find_rec key rest3
let find_all h key =
let rec find_in_bucket = function
| Empty ->
[]
| Cons(k, d, rest) ->
if compare k key = 0
then d :: find_in_bucket rest
else find_in_bucket rest in
find_in_bucket h.data.(key_index h key)
let replace h key info =
let rec replace_bucket = function
| Empty ->
raise Not_found
| Cons(k, i, next) ->
if compare k key = 0
then Cons(key, info, next)
else Cons(k, i, replace_bucket next) in
let i = key_index h key in
let l = h.data.(i) in
try
h.data.(i) <- replace_bucket l
with Not_found ->
h.data.(i) <- Cons(key, info, l);
h.size <- h.size + 1;
if h.size > Array.length h.data lsl 1 then resize key_index h
let mem h key =
let rec mem_in_bucket = function
| Empty ->
false
| Cons(k, d, rest) ->
compare k key = 0 || mem_in_bucket rest in
mem_in_bucket h.data.(key_index h key)
let iter f h =
let rec do_bucket = function
| Empty ->
()
| Cons(k, d, rest) ->
f k d; do_bucket rest in
let d = h.data in
for i = 0 to Array.length d - 1 do
do_bucket d.(i)
done
let fold f h init =
let rec do_bucket b accu =
match b with
Empty ->
accu
| Cons(k, d, rest) ->
do_bucket rest (f k d accu) in
let d = h.data in
let accu = ref init in
for i = 0 to Array.length d - 1 do
accu := do_bucket d.(i) !accu
done;
!accu
type statistics = {
num_bindings: int;
num_buckets: int;
max_bucket_length: int;
bucket_histogram: int array
}
let rec bucket_length accu = function
| Empty -> accu
| Cons(_, _, rest) -> bucket_length (accu + 1) rest
let stats h =
let mbl =
Array.fold_left (fun m b -> max m (bucket_length 0 b)) 0 h.data in
let histo = Array.make (mbl + 1) 0 in
Array.iter
(fun b ->
let l = bucket_length 0 b in
histo.(l) <- histo.(l) + 1)
h.data;
{ num_bindings = h.size;
num_buckets = Array.length h.data;
max_bucket_length = mbl;
bucket_histogram = histo }
(* Functorial interface *)
module type HashedType =
sig
type t
val equal: t -> t -> bool
val hash: t -> int
end
module type SeededHashedType =
sig
type t
val equal: t -> t -> bool
val hash: int -> t -> int
end
module type S =
sig
type key
type 'a t
val create: int -> 'a t
val clear : 'a t -> unit
val reset : 'a t -> unit
val copy: 'a t -> 'a t
val add: 'a t -> key -> 'a -> unit
val remove: 'a t -> key -> unit
val find: 'a t -> key -> 'a
val find_all: 'a t -> key -> 'a list
val replace : 'a t -> key -> 'a -> unit
val mem : 'a t -> key -> bool
val iter: (key -> 'a -> unit) -> 'a t -> unit
val fold: (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b
val length: 'a t -> int
val stats: 'a t -> statistics
end
module type SeededS =
sig
type key
type 'a t
val create : ?random:bool -> int -> 'a t
val clear : 'a t -> unit
val reset : 'a t -> unit
val copy : 'a t -> 'a t
val add : 'a t -> key -> 'a -> unit
val remove : 'a t -> key -> unit
val find : 'a t -> key -> 'a
val find_all : 'a t -> key -> 'a list
val replace : 'a t -> key -> 'a -> unit
val mem : 'a t -> key -> bool
val iter : (key -> 'a -> unit) -> 'a t -> unit
val fold : (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b
val length : 'a t -> int
val stats: 'a t -> statistics
end
module MakeSeeded(H: SeededHashedType): (SeededS with type key = H.t) =
struct
type key = H.t
type 'a hashtbl = (key, 'a) t
type 'a t = 'a hashtbl
let create = create
let clear = clear
let reset = reset
let copy = copy
let key_index h key =
(H.hash h.seed key) land (Array.length h.data - 1)
let add h key info =
let i = key_index h key in
let bucket = Cons(key, info, h.data.(i)) in
h.data.(i) <- bucket;
h.size <- h.size + 1;
if h.size > Array.length h.data lsl 1 then resize key_index h
let remove h key =
let rec remove_bucket = function
| Empty ->
Empty
| Cons(k, i, next) ->
if H.equal k key
then begin h.size <- h.size - 1; next end
else Cons(k, i, remove_bucket next) in
let i = key_index h key in
h.data.(i) <- remove_bucket h.data.(i)
let rec find_rec key = function
| Empty ->
raise Not_found
| Cons(k, d, rest) ->
if H.equal key k then d else find_rec key rest
let find h key =
match h.data.(key_index h key) with
| Empty -> raise Not_found
| Cons(k1, d1, rest1) ->
if H.equal key k1 then d1 else
match rest1 with
| Empty -> raise Not_found
| Cons(k2, d2, rest2) ->
if H.equal key k2 then d2 else
match rest2 with
| Empty -> raise Not_found
| Cons(k3, d3, rest3) ->
if H.equal key k3 then d3 else find_rec key rest3
let find_all h key =
let rec find_in_bucket = function
| Empty ->
[]
| Cons(k, d, rest) ->
if H.equal k key
then d :: find_in_bucket rest
else find_in_bucket rest in
find_in_bucket h.data.(key_index h key)
let replace h key info =
let rec replace_bucket = function
| Empty ->
raise Not_found
| Cons(k, i, next) ->
if H.equal k key
then Cons(key, info, next)
else Cons(k, i, replace_bucket next) in
let i = key_index h key in
let l = h.data.(i) in
try
h.data.(i) <- replace_bucket l
with Not_found ->
h.data.(i) <- Cons(key, info, l);
h.size <- h.size + 1;
if h.size > Array.length h.data lsl 1 then resize key_index h
let mem h key =
let rec mem_in_bucket = function
| Empty ->
false
| Cons(k, d, rest) ->
H.equal k key || mem_in_bucket rest in
mem_in_bucket h.data.(key_index h key)
let iter = iter
let fold = fold
let length = length
let stats = stats
end
module Make(H: HashedType): (S with type key = H.t) =
struct
include MakeSeeded(struct
type t = H.t
let equal = H.equal
let hash (seed: int) x = H.hash x
end)
let create sz = create ~random:false sz
end
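(* Example use of the functorial interface above (illustrative sketch, not
   part of the original source): client code instantiates [Make] with a
   hashed type and gets a type-specialized table module.

     module StringTbl = Hashtbl.Make(struct
       type t = string
       let equal = (=)
       let hash = Hashtbl.hash
     end)

     let () =
       let t = StringTbl.create 16 in
       StringTbl.add t "one" 1;
       assert (StringTbl.find t "one" = 1)
*)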
| null | https://raw.githubusercontent.com/ocamllabs/ocaml-modular-implicits/92e45da5c8a4c2db8b2cd5be28a5bec2ac2181f1/stdlib/hashtbl.ml | ocaml | *********************************************************************
OCaml
the special exception on linking described in file ../LICENSE.
*********************************************************************
Hash tables
We do dynamic hashing, and resize the table and rehash the elements
when buckets become too long.
number of entries
the buckets
for randomization
initial array size
To pick random seeds if requested
Creating a fresh, empty table
compatibility with old hash tables
so that indexfun sees the new bucket count
preserve original order of elements
compatibility with old hash tables | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the GNU Library General Public License , with
external seeded_hash_param :
int -> int -> int -> 'a -> int = "caml_hash" "noalloc"
external old_hash_param :
int -> int -> 'a -> int = "caml_hash_univ_param" "noalloc"
let hash x = seeded_hash_param 10 100 0 x
let hash_param n1 n2 x = seeded_hash_param n1 n2 0 x
let seeded_hash seed x = seeded_hash_param 10 100 seed x
type ('a, 'b) t =
}
and ('a, 'b) bucketlist =
Empty
| Cons of 'a * 'b * ('a, 'b) bucketlist
let randomized_default =
let params =
try Sys.getenv "OCAMLRUNPARAM" with Not_found ->
try Sys.getenv "CAMLRUNPARAM" with Not_found -> "" in
String.contains params 'R'
let randomized = ref randomized_default
let randomize () = randomized := true
let prng = lazy (Random.State.make_self_init())
let rec power_2_above x n =
if x >= n then x
else if x * 2 > Sys.max_array_length then x
else power_2_above (x * 2) n
let create ?(random = !randomized) initial_size =
let s = power_2_above 16 initial_size in
let seed = if random then Random.State.bits (Lazy.force prng) else 0 in
{ initial_size = s; size = 0; seed = seed; data = Array.make s Empty }
let clear h =
h.size <- 0;
let len = Array.length h.data in
for i = 0 to len - 1 do
h.data.(i) <- Empty
done
let reset h =
let len = Array.length h.data in
|| len = h.initial_size then
clear h
else begin
h.size <- 0;
h.data <- Array.make h.initial_size Empty
end
let copy h = { h with data = Array.copy h.data }
let length h = h.size
let resize indexfun h =
let odata = h.data in
let osize = Array.length odata in
let nsize = osize * 2 in
if nsize < Sys.max_array_length then begin
let ndata = Array.make nsize Empty in
let rec insert_bucket = function
Empty -> ()
| Cons(key, data, rest) ->
let nidx = indexfun h key in
ndata.(nidx) <- Cons(key, data, ndata.(nidx)) in
for i = 0 to osize - 1 do
insert_bucket odata.(i)
done
end
let key_index h key =
if Obj.size (Obj.repr h) >= 3
then (seeded_hash_param 10 100 h.seed key) land (Array.length h.data - 1)
else (old_hash_param 10 100 key) mod (Array.length h.data)
let add h key info =
let i = key_index h key in
let bucket = Cons(key, info, h.data.(i)) in
h.data.(i) <- bucket;
h.size <- h.size + 1;
if h.size > Array.length h.data lsl 1 then resize key_index h
let remove h key =
let rec remove_bucket = function
| Empty ->
Empty
| Cons(k, i, next) ->
if compare k key = 0
then begin h.size <- h.size - 1; next end
else Cons(k, i, remove_bucket next) in
let i = key_index h key in
h.data.(i) <- remove_bucket h.data.(i)
let rec find_rec key = function
| Empty ->
raise Not_found
| Cons(k, d, rest) ->
if compare key k = 0 then d else find_rec key rest
let find h key =
match h.data.(key_index h key) with
| Empty -> raise Not_found
| Cons(k1, d1, rest1) ->
if compare key k1 = 0 then d1 else
match rest1 with
| Empty -> raise Not_found
| Cons(k2, d2, rest2) ->
if compare key k2 = 0 then d2 else
match rest2 with
| Empty -> raise Not_found
| Cons(k3, d3, rest3) ->
if compare key k3 = 0 then d3 else find_rec key rest3
let find_all h key =
let rec find_in_bucket = function
| Empty ->
[]
| Cons(k, d, rest) ->
if compare k key = 0
then d :: find_in_bucket rest
else find_in_bucket rest in
find_in_bucket h.data.(key_index h key)
let replace h key info =
let rec replace_bucket = function
| Empty ->
raise Not_found
| Cons(k, i, next) ->
if compare k key = 0
then Cons(key, info, next)
else Cons(k, i, replace_bucket next) in
let i = key_index h key in
let l = h.data.(i) in
try
h.data.(i) <- replace_bucket l
with Not_found ->
h.data.(i) <- Cons(key, info, l);
h.size <- h.size + 1;
if h.size > Array.length h.data lsl 1 then resize key_index h
let mem h key =
let rec mem_in_bucket = function
| Empty ->
false
| Cons(k, d, rest) ->
compare k key = 0 || mem_in_bucket rest in
mem_in_bucket h.data.(key_index h key)
let iter f h =
let rec do_bucket = function
| Empty ->
()
| Cons(k, d, rest) ->
f k d; do_bucket rest in
let d = h.data in
for i = 0 to Array.length d - 1 do
do_bucket d.(i)
done
let fold f h init =
let rec do_bucket b accu =
match b with
Empty ->
accu
| Cons(k, d, rest) ->
do_bucket rest (f k d accu) in
let d = h.data in
let accu = ref init in
for i = 0 to Array.length d - 1 do
accu := do_bucket d.(i) !accu
done;
!accu
type statistics = {
num_bindings: int;
num_buckets: int;
max_bucket_length: int;
bucket_histogram: int array
}
let rec bucket_length accu = function
| Empty -> accu
| Cons(_, _, rest) -> bucket_length (accu + 1) rest
let stats h =
let mbl =
Array.fold_left (fun m b -> max m (bucket_length 0 b)) 0 h.data in
let histo = Array.make (mbl + 1) 0 in
Array.iter
(fun b ->
let l = bucket_length 0 b in
histo.(l) <- histo.(l) + 1)
h.data;
{ num_bindings = h.size;
num_buckets = Array.length h.data;
max_bucket_length = mbl;
bucket_histogram = histo }
Functorial interface
module type HashedType =
sig
type t
val equal: t -> t -> bool
val hash: t -> int
end
module type SeededHashedType =
sig
type t
val equal: t -> t -> bool
val hash: int -> t -> int
end
module type S =
sig
type key
type 'a t
val create: int -> 'a t
val clear : 'a t -> unit
val reset : 'a t -> unit
val copy: 'a t -> 'a t
val add: 'a t -> key -> 'a -> unit
val remove: 'a t -> key -> unit
val find: 'a t -> key -> 'a
val find_all: 'a t -> key -> 'a list
val replace : 'a t -> key -> 'a -> unit
val mem : 'a t -> key -> bool
val iter: (key -> 'a -> unit) -> 'a t -> unit
val fold: (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b
val length: 'a t -> int
val stats: 'a t -> statistics
end
module type SeededS =
sig
type key
type 'a t
val create : ?random:bool -> int -> 'a t
val clear : 'a t -> unit
val reset : 'a t -> unit
val copy : 'a t -> 'a t
val add : 'a t -> key -> 'a -> unit
val remove : 'a t -> key -> unit
val find : 'a t -> key -> 'a
val find_all : 'a t -> key -> 'a list
val replace : 'a t -> key -> 'a -> unit
val mem : 'a t -> key -> bool
val iter : (key -> 'a -> unit) -> 'a t -> unit
val fold : (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b
val length : 'a t -> int
val stats: 'a t -> statistics
end
module MakeSeeded(H: SeededHashedType): (SeededS with type key = H.t) =
struct
type key = H.t
type 'a hashtbl = (key, 'a) t
type 'a t = 'a hashtbl
let create = create
let clear = clear
let reset = reset
let copy = copy
let key_index h key =
(H.hash h.seed key) land (Array.length h.data - 1)
let add h key info =
let i = key_index h key in
let bucket = Cons(key, info, h.data.(i)) in
h.data.(i) <- bucket;
h.size <- h.size + 1;
if h.size > Array.length h.data lsl 1 then resize key_index h
let remove h key =
let rec remove_bucket = function
| Empty ->
Empty
| Cons(k, i, next) ->
if H.equal k key
then begin h.size <- h.size - 1; next end
else Cons(k, i, remove_bucket next) in
let i = key_index h key in
h.data.(i) <- remove_bucket h.data.(i)
let rec find_rec key = function
| Empty ->
raise Not_found
| Cons(k, d, rest) ->
if H.equal key k then d else find_rec key rest
let find h key =
match h.data.(key_index h key) with
| Empty -> raise Not_found
| Cons(k1, d1, rest1) ->
if H.equal key k1 then d1 else
match rest1 with
| Empty -> raise Not_found
| Cons(k2, d2, rest2) ->
if H.equal key k2 then d2 else
match rest2 with
| Empty -> raise Not_found
| Cons(k3, d3, rest3) ->
if H.equal key k3 then d3 else find_rec key rest3
let find_all h key =
let rec find_in_bucket = function
| Empty ->
[]
| Cons(k, d, rest) ->
if H.equal k key
then d :: find_in_bucket rest
else find_in_bucket rest in
find_in_bucket h.data.(key_index h key)
let replace h key info =
let rec replace_bucket = function
| Empty ->
raise Not_found
| Cons(k, i, next) ->
if H.equal k key
then Cons(key, info, next)
else Cons(k, i, replace_bucket next) in
let i = key_index h key in
let l = h.data.(i) in
try
h.data.(i) <- replace_bucket l
with Not_found ->
h.data.(i) <- Cons(key, info, l);
h.size <- h.size + 1;
if h.size > Array.length h.data lsl 1 then resize key_index h
let mem h key =
let rec mem_in_bucket = function
| Empty ->
false
| Cons(k, d, rest) ->
H.equal k key || mem_in_bucket rest in
mem_in_bucket h.data.(key_index h key)
let iter = iter
let fold = fold
let length = length
let stats = stats
end
module Make(H: HashedType): (S with type key = H.t) =
struct
include MakeSeeded(struct
type t = H.t
let equal = H.equal
let hash (seed: int) x = H.hash x
end)
let create sz = create ~random:false sz
end
|
0b70df579a7b8155dea5c53a34b6a9e0fae4572010d1d648e5c2499d20b7d4f3 | takikawa/racket-ppa | teach-module-begin.rkt | #lang racket/base
;; Once upon a time, there were three different variants. Preserve the
;; ability to do this.
(provide beginner-module-begin
beginner-abbr-module-begin
intermediate-module-begin
intermediate-lambda-module-begin
advanced-module-begin)
(require deinprogramm/signature/signature
lang/private/signature-syntax
(only-in test-engine/syntax test))
(require (for-syntax scheme/base)
(for-syntax racket/list)
(for-syntax syntax/boundmap)
(for-syntax syntax/kerncase))
(require (for-syntax "firstorder.rkt"))
(define-syntax (print-results stx)
(syntax-case stx ()
((_ expr)
(not (syntax-property #'expr 'test-call))
(syntax-property
(syntax-property
#'(#%app call-with-values (lambda () expr)
do-print-results)
'stepper-skipto
'(syntax-e cdr cdr car syntax-e cdr cdr car))
'certify-mode
'transparent))
((_ expr) #'expr)))
(define (do-print-results . vs)
(for-each (current-print) vs)
;; Returning 0 values avoids any further result printing
;; (even if void values are printed)
(values))
(define-syntaxes (beginner-module-begin
beginner-abbr-module-begin
intermediate-module-begin
intermediate-lambda-module-begin
advanced-module-begin
module-continue)
(let ()
;; takes a list of syntax objects (the result of syntax-e) and returns all the syntax objects that correspond to
;; a signature declaration. Syntax: (: id signature)
(define extract-signatures
(lambda (lostx)
(let* ((table (make-bound-identifier-mapping))
(non-signatures
(filter-map (lambda (maybe)
(syntax-case maybe (:)
((: ?exp ?sig)
(not (identifier? #'?exp))
#'(apply-signature/blame (signature ?sig) ?exp))
((: ?id ?sig)
(begin
(let ((real-id (first-order->higher-order #'?id)))
(cond
((bound-identifier-mapping-get table real-id (lambda () #f))
=> (lambda (old-sig-stx)
(unless (equal? (syntax->datum old-sig-stx)
(syntax->datum #'?sig))
(raise-syntax-error #f
"Second signature declaration for the same name."
maybe))))
(else
(bound-identifier-mapping-put! table real-id #'?sig)))
#f)))
((: ?id)
(raise-syntax-error #f "Signature declaration is missing a signature." maybe))
((: ?id ?sig ?stuff0 ?stuff1 ...)
(raise-syntax-error #f "The : form expects a name and a signature; there is more."
(syntax/loc #'?stuff0
(?stuff0 ?stuff1 ...))))
(_ maybe)))
lostx)))
(values table non-signatures))))
(define local-expand-stop-list
(append (list #': #'define-signature)
(kernel-form-identifier-list)))
(define (expand-signature-expressions signature-table expressions)
(let loop ((exprs expressions))
(cond
((null? exprs)
(bound-identifier-mapping-for-each signature-table
(lambda (id thing)
(when thing
(if (identifier-binding id)
(raise-syntax-error #f "Cannot declare a signature for a built-in form." id)
(raise-syntax-error #f "There is no definition for this signature declaration." id)))))
#'(begin))
(else
(let ((expanded (car exprs)))
(syntax-case expanded (begin define-values)
((define-values (?id ...) ?e1)
(with-syntax (((?enforced ...)
(map (lambda (id)
(cond
((bound-identifier-mapping-get signature-table id (lambda () #f))
=> (lambda (sig)
(bound-identifier-mapping-put! signature-table id #f) ; check for orphaned signatures
(with-syntax ((?id id)
(?sig sig))
#'(?id (signature ?sig)))))
(else
id)))
(syntax->list #'(?id ...))))
(?rest (loop (cdr exprs))))
(with-syntax ((?defn
(syntax-track-origin
#'(define-values/signature (?enforced ...)
?e1)
(car exprs)
(car (syntax-e expanded)))))
(syntax/loc (car exprs)
(begin
?defn
?rest)))))
((begin e1 ...)
(loop (append (syntax-e (syntax (e1 ...))) (cdr exprs))))
(else
(with-syntax ((?first expanded)
(?rest (loop (cdr exprs))))
(syntax/loc (car exprs)
(begin
?first ?rest))))))))))
(define (mk-module-begin options)
(lambda (stx)
(syntax-case stx ()
((_ e1 ...)
;; module-begin-continue takes a sequence of expanded
;; exprs and a sequence of to-expand exprs; that way,
;; the module-expansion machinery can be used to handle
;; requires, etc.:
#`(#%plain-module-begin
(module-continue (e1 ...) () ())
(module configure-runtime racket/base
(require htdp/bsl/runtime)
(configure '#,options))
(module+ test (test)))))))
(values
;; module-begin
(mk-module-begin '())
(mk-module-begin '(abbreviate-cons-as-list
read-accept-quasiquote))
(mk-module-begin '(abbreviate-cons-as-list
read-accept-quasiquote))
(mk-module-begin '(abbreviate-cons-as-list
read-accept-quasiquote))
(mk-module-begin '(abbreviate-cons-as-list
read-accept-quasiquote
show-sharing))
;; module-continue
(lambda (stx)
(syntax-case stx ()
((_ () (e1 ...) (defined-id ...))
;; Local-expanded all body elements, lifted out requires, etc.
;; Now process the result.
(begin
;; The expansion for signatures breaks the way that beginner-define, etc.,
;; check for duplicate definitions, so we have to re-check here.
;; A better strategy might be to turn every define into a define-syntax
;; to redirect the binding, and then the identifier-binding check in
;; beginner-define, etc. will work.
(let ((defined-ids (make-bound-identifier-mapping)))
(for-each (lambda (id)
(when (bound-identifier-mapping-get defined-ids id (lambda () #f))
(raise-syntax-error
#f
"this name was defined previously and cannot be re-defined"
id))
(bound-identifier-mapping-put! defined-ids id #t))
(reverse (syntax->list #'(defined-id ...)))))
;; Now handle signatures:
(let ((top-level (reverse (syntax->list (syntax (e1 ...))))))
(let-values (((sig-table expr-list)
(extract-signatures top-level)))
(expand-signature-expressions sig-table expr-list)))))
((frm e3s e1s def-ids)
(let loop ((e3s #'e3s)
(e1s #'e1s)
(def-ids #'def-ids))
(syntax-case e3s ()
(()
#`(frm () #,e1s #,def-ids))
((e2 . e3s)
(let ((e2 (local-expand #'e2 'module local-expand-stop-list)))
;; Lift out certain forms to make them visible to the module
;; expander:
(syntax-case e2 (#%require #%provide #%declare
define-syntaxes begin-for-syntax define-values begin
define-signature :)
((#%require . __)
#`(begin #,e2 (frm e3s #,e1s #,def-ids)))
((#%provide . __)
#`(begin #,e2 (frm e3s #,e1s #,def-ids)))
((#%declare . __)
#`(begin #,e2 (frm e3s #,e1s #,def-ids)))
((define-syntaxes (id ...) . _)
#`(begin #,e2 (frm e3s #,e1s (id ... . #,def-ids))))
((begin-for-syntax . _)
#`(begin #,e2 (frm e3s #,e1s #,def-ids)))
((begin b1 ...)
(syntax-track-origin
(loop (append (syntax->list #'(b1 ...)) #'e3s) e1s def-ids)
e2
(car (syntax-e e2))))
((define-values (id ...) . _)
(loop #'e3s (cons e2 e1s) (append (syntax->list #'(id ...)) def-ids)))
((define-signature id ctr)
(loop #'e3s (cons e2 e1s) def-ids))
((: stuff ...)
(loop #'e3s (cons e2 e1s) def-ids))
(_
(loop #'e3s (cons #`(print-results #,e2) e1s) def-ids)))))))))))))
| null | https://raw.githubusercontent.com/takikawa/racket-ppa/d336bb10e3e0ec3a20020e9ade9e77d2f6f80b6d/share/pkgs/htdp-lib/lang/private/teach-module-begin.rkt | racket | ability to do this.
Returning 0 values avoids any further result printing
(even if void values are printed)
takes a list of syntax objects (the result of syntax-e) and returns all the syntax objects that correspond to
a signature declaration. Syntax: (: id signature)
check for orphaned signatures
module-begin-continue takes a sequence of expanded
exprs and a sequence of to-expand exprs; that way,
the module-expansion machinery can be used to handle
requires, etc.:
module-begin
module-continue
Local-expanded all body elements, lifted out requires, etc.
Now process the result.
The expansion for signatures breaks the way that beginner-define, etc.,
check for duplicate definitions, so we have to re-check here.
A better strategy might be to turn every define into a define-syntax
to redirect the binding, and then the identifier-binding check in
beginner-define, etc. will work.
Now handle signatures:
Lift out certain forms to make them visible to the module
expander: | #lang racket/base
Once upon a time , there were three different variants . Preserve the
(provide beginner-module-begin
beginner-abbr-module-begin
intermediate-module-begin
intermediate-lambda-module-begin
advanced-module-begin)
(require deinprogramm/signature/signature
lang/private/signature-syntax
(only-in test-engine/syntax test))
(require (for-syntax scheme/base)
(for-syntax racket/list)
(for-syntax syntax/boundmap)
(for-syntax syntax/kerncase))
(require (for-syntax "firstorder.rkt"))
(define-syntax (print-results stx)
(syntax-case stx ()
((_ expr)
(not (syntax-property #'expr 'test-call))
(syntax-property
(syntax-property
#'(#%app call-with-values (lambda () expr)
do-print-results)
'stepper-skipto
'(syntax-e cdr cdr car syntax-e cdr cdr car))
'certify-mode
'transparent))
((_ expr) #'expr)))
(define (do-print-results . vs)
(for-each (current-print) vs)
(values))
(define-syntaxes (beginner-module-begin
beginner-abbr-module-begin
intermediate-module-begin
intermediate-lambda-module-begin
advanced-module-begin
module-continue)
(let ()
(define extract-signatures
(lambda (lostx)
(let* ((table (make-bound-identifier-mapping))
(non-signatures
(filter-map (lambda (maybe)
(syntax-case maybe (:)
((: ?exp ?sig)
(not (identifier? #'?exp))
#'(apply-signature/blame (signature ?sig) ?exp))
((: ?id ?sig)
(begin
(let ((real-id (first-order->higher-order #'?id)))
(cond
((bound-identifier-mapping-get table real-id (lambda () #f))
=> (lambda (old-sig-stx)
(unless (equal? (syntax->datum old-sig-stx)
(syntax->datum #'?sig))
(raise-syntax-error #f
"Second signature declaration for the same name."
maybe))))
(else
(bound-identifier-mapping-put! table real-id #'?sig)))
#f)))
((: ?id)
(raise-syntax-error #f "Signature declaration is missing a signature." maybe))
((: ?id ?sig ?stuff0 ?stuff1 ...)
(raise-syntax-error #f "The : form expects a name and a signature; there is more."
(syntax/loc #'?stuff0
(?stuff0 ?stuff1 ...))))
(_ maybe)))
lostx)))
(values table non-signatures))))
(define local-expand-stop-list
(append (list #': #'define-signature)
(kernel-form-identifier-list)))
(define (expand-signature-expressions signature-table expressions)
(let loop ((exprs expressions))
(cond
((null? exprs)
(bound-identifier-mapping-for-each signature-table
(lambda (id thing)
(when thing
(if (identifier-binding id)
(raise-syntax-error #f "Cannot declare a signature for a built-in form." id)
(raise-syntax-error #f "There is no definition for this signature declaration." id)))))
#'(begin))
(else
(let ((expanded (car exprs)))
(syntax-case expanded (begin define-values)
((define-values (?id ...) ?e1)
(with-syntax (((?enforced ...)
(map (lambda (id)
(cond
((bound-identifier-mapping-get signature-table id (lambda () #f))
=> (lambda (sig)
(with-syntax ((?id id)
(?sig sig))
#'(?id (signature ?sig)))))
(else
id)))
(syntax->list #'(?id ...))))
(?rest (loop (cdr exprs))))
(with-syntax ((?defn
(syntax-track-origin
#'(define-values/signature (?enforced ...)
?e1)
(car exprs)
(car (syntax-e expanded)))))
(syntax/loc (car exprs)
(begin
?defn
?rest)))))
((begin e1 ...)
(loop (append (syntax-e (syntax (e1 ...))) (cdr exprs))))
(else
(with-syntax ((?first expanded)
(?rest (loop (cdr exprs))))
(syntax/loc (car exprs)
(begin
?first ?rest))))))))))
(define (mk-module-begin options)
(lambda (stx)
(syntax-case stx ()
((_ e1 ...)
#`(#%plain-module-begin
(module-continue (e1 ...) () ())
(module configure-runtime racket/base
(require htdp/bsl/runtime)
(configure '#,options))
(module+ test (test)))))))
(values
(mk-module-begin '())
(mk-module-begin '(abbreviate-cons-as-list
read-accept-quasiquote))
(mk-module-begin '(abbreviate-cons-as-list
read-accept-quasiquote))
(mk-module-begin '(abbreviate-cons-as-list
read-accept-quasiquote))
(mk-module-begin '(abbreviate-cons-as-list
read-accept-quasiquote
show-sharing))
(lambda (stx)
(syntax-case stx ()
((_ () (e1 ...) (defined-id ...))
(begin
(let ((defined-ids (make-bound-identifier-mapping)))
(for-each (lambda (id)
(when (bound-identifier-mapping-get defined-ids id (lambda () #f))
(raise-syntax-error
#f
"this name was defined previously and cannot be re-defined"
id))
(bound-identifier-mapping-put! defined-ids id #t))
(reverse (syntax->list #'(defined-id ...)))))
(let ((top-level (reverse (syntax->list (syntax (e1 ...))))))
(let-values (((sig-table expr-list)
(extract-signatures top-level)))
(expand-signature-expressions sig-table expr-list)))))
((frm e3s e1s def-ids)
(let loop ((e3s #'e3s)
(e1s #'e1s)
(def-ids #'def-ids))
(syntax-case e3s ()
(()
#`(frm () #,e1s #,def-ids))
((e2 . e3s)
(let ((e2 (local-expand #'e2 'module local-expand-stop-list)))
(syntax-case e2 (#%require #%provide #%declare
define-syntaxes begin-for-syntax define-values begin
define-signature :)
((#%require . __)
#`(begin #,e2 (frm e3s #,e1s #,def-ids)))
((#%provide . __)
#`(begin #,e2 (frm e3s #,e1s #,def-ids)))
((#%declare . __)
#`(begin #,e2 (frm e3s #,e1s #,def-ids)))
((define-syntaxes (id ...) . _)
#`(begin #,e2 (frm e3s #,e1s (id ... . #,def-ids))))
((begin-for-syntax . _)
#`(begin #,e2 (frm e3s #,e1s #,def-ids)))
((begin b1 ...)
(syntax-track-origin
(loop (append (syntax->list #'(b1 ...)) #'e3s) e1s def-ids)
e2
(car (syntax-e e2))))
((define-values (id ...) . _)
(loop #'e3s (cons e2 e1s) (append (syntax->list #'(id ...)) def-ids)))
((define-signature id ctr)
(loop #'e3s (cons e2 e1s) def-ids))
((: stuff ...)
(loop #'e3s (cons e2 e1s) def-ids))
(_
(loop #'e3s (cons #`(print-results #,e2) e1s) def-ids)))))))))))))
|
8387ea4113e6ff80052ec6923d3f52d3552cb835f91e8ddc5e990fd62db9da01 | superhuman/rxxr2 | RegexParser.mli | type token =
| Literal of ((int * int) * char)
| Anchor of ((int * int) * ParsingData.pred)
| GrpOpen of (int * ParsingData.gkind)
| BeginQuote of ((int * int))
| EndQuote of ((int * int))
| TkDot of (int)
| ModsGrpOpen of (int)
| Mod of (int)
| GrpClose of (int)
| ClsClose of (int)
| TkBackref of ((int * int) * int)
| ClsOpen of (int * bool)
| ClsRange of (char * char)
| ClsNamed of ((int * int) * ((char * char) list))
| Repetition of (int * (int * int * ParsingData.qfier))
| VBar
| NegMods
| EndMods
| Eos
val parse :
(Lexing.lexbuf -> token) -> Lexing.lexbuf -> ParsingData.regex
| null | https://raw.githubusercontent.com/superhuman/rxxr2/0eea5e9f0e0cde6c39e0fc12614f64edb6189cd5/code/RegexParser.mli | ocaml |
|
|
3001d864c92e967e4898323ea851c2e69f97a25b21919dc9f9dd35a73d83c49f | Frama-C/Frama-C-snapshot | Pdg.mli | (**************************************************************************)
(* *)
(*  This file is part of Frama-C.                                         *)
(*                                                                        *)
(*  Copyright (C) 2007-2019                                               *)
(*    CEA (Commissariat à l'énergie atomique et aux énergies              *)
(*         alternatives)                                                  *)
(*                                                                        *)
(*  you can redistribute it and/or modify it under the terms of the GNU   *)
(*  Lesser General Public License as published by the Free Software       *)
(*  Foundation, version 2.1.                                              *)
(*                                                                        *)
(*  It is distributed in the hope that it will be useful,                 *)
(*  but WITHOUT ANY WARRANTY; without even the implied warranty of        *)
(*  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the         *)
(*  GNU Lesser General Public License for more details.                   *)
(*                                                                        *)
(*  See the GNU Lesser General Public License version 2.1                 *)
(*  for more details (enclosed in the file licenses/LGPLv2.1).            *)
(* *)
(**************************************************************************)
(** Program Dependences Graph. *)
(** Functions for this plugin are registered through the [Db] module,
the dynamic API, and the module below. *)
module Register : module type of Marks
| null | https://raw.githubusercontent.com/Frama-C/Frama-C-snapshot/639a3647736bf8ac127d00ebe4c4c259f75f9b87/src/plugins/pdg/Pdg.mli | ocaml |
5c3b2cd5fa64f81f6d2acfb6062d0ed0eb1aa211be78e620c6104502a001518c | yuriy-chumak/ol | sleep_sort.scm | ; #Ol
(define (sleep-sort lst)
(for-each (lambda (timeout)
(async (lambda ()
(sleep timeout)
(print timeout))))
lst))
(sleep-sort '(5 8 2 7 9 10 5))
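; Illustrative note (not part of the original test): each element is printed after
; sleeping that many ticks, so the call above is expected to echo the input in
; ascending order, e.g. 2 5 5 7 8 9 10 (one value per line); elements with equal
; delays may race, which is inherent to sleep sort.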
| null | https://raw.githubusercontent.com/yuriy-chumak/ol/4bfd67d8025d0c4fcf5fc691520fe2141124f85a/tests/rosettacode/sorting_algorithms/sleep_sort.scm | scheme |
|
d80a46956a97b2b9ac251e0d2e75b0bb8213e54e67b72214aa3fd5d36bc76c5a | waddlaw/TAPL | Record.hs | | 図 11.7 レコード
module Language.FullSimpleLambda.System.Record
( Term (..),
Ty (..),
Context (..),
eval,
typeof,
)
where
import Language.FullSimpleLambda.Class
import RIO
import qualified RIO.List.Partial as L.Partial
data Record
type Value = Term Record
type FieldLabel = Text
instance System Record where
data Term Record
= -- | 変数
TmVar Int
| -- | ラムダ抽象
TmLam VarName (Ty Record) (Term Record)
| -- | 関数適用
TmApp (Term Record) (Term Record)
| -- | レコード
TmRecord [(FieldLabel, Term Record)]
| -- | 射影
TmRecordProj FieldLabel (Term Record)
deriving stock (Show, Eq)
data Ty Record
= -- | 関数の型
TyArr (Ty Record) (Ty Record)
| -- | レコードの型
TyRecord [(FieldLabel, Ty Record)]
deriving stock (Show, Eq)
data Context Record
= -- | 空の文脈
CtxEmpty
| -- | 項変数の束縛
CtxVar (Context Record) VarName (Ty Record)
deriving stock (Show, Eq)
data Pattern Record
eval :: Term Record -> Term Record
eval = \case
TmApp t1@(TmLam _ _ t12) t2
-- E-APP1
| not (isValue t1) -> TmApp (eval t1) t2
-- E-APP2
| isValue t1 && not (isValue t2) -> TmApp t1 (eval t2)
-- E-APPABS
| isValue t1 && isValue t2 -> shift 0 (-1) $ subst 0 (shift 0 1 t2) t12
TmRecordProj label t@(TmRecord fields)
-- E-PROJRCD
| isValue t -> fromMaybe (error "field label not found (E-PROJRCD)") $ lookup label fields
-- E-PROJ
| otherwise -> TmRecordProj label (eval t)
-- E-RCD
t@(TmRecord _)
| isValue t -> t
| otherwise -> TmRecord (vfs ++ [(label, eval tj)] ++ tfs)
where
(vfs, (label, tj), tfs) = splitRecord t
_ -> error "unexpected term"
typeof :: Context Record -> Term Record -> Ty Record
typeof ctx = \case
-- T-VAR
TmVar i -> case getTypeFromContext i ctx of
Nothing -> error "Not found type variable in Context"
Just ty -> ty
-- T-ABS
TmLam x tyT1 t2 -> TyArr tyT1 tyT2
where
tyT2 = typeof ctx' t2
ctx' = CtxVar ctx x tyT1
-- T-APP
TmApp t1 t2 ->
case tyT1 of
TyArr tyT11 tyT12 ->
if tyT2 == tyT11
then tyT12
else
error . unlines $
[ "parameter type mismatch (T-APP): ",
"tyT2: " <> show tyT2,
"tyT11: " <> show tyT11
]
_ -> error "arrow type expected (T-APP)"
where
tyT1 = typeof ctx t1
tyT2 = typeof ctx t2
-- T-RCD
TmRecord fields -> TyRecord $ map (second (typeof ctx)) fields
-- T-PROJ
TmRecordProj label t -> case typeof ctx t of
TyRecord fields -> fromMaybe (error "field label not found (T-PROJ)") $ lookup label fields
_ -> error "type mismatch (T-PROJ)"
desugar :: Term Record -> Term Record
desugar = id
isValue :: Term Record -> Bool
isValue = \case
TmLam {} -> True -- ラムダ抽象値
TmRecord fs -> all (isValue . snd) fs -- レコードの値
_ -> False
subst :: Int -> Value -> Term Record -> Term Record
subst j s = \case
t@(TmVar k)
| k == j -> s
| otherwise -> t
TmLam x ty t -> TmLam x ty $ subst (j + 1) (shift 0 1 s) t
TmApp t1 t2 -> (TmApp `on` subst j s) t1 t2
TmRecord rs -> TmRecord $ map (second (subst j s)) rs
TmRecordProj l t -> TmRecordProj l $ subst j s t
shift :: Int -> Int -> Term Record -> Term Record
shift c d = \case
TmVar k
| k < c -> TmVar k
| otherwise -> TmVar (k + d)
TmLam x ty t -> TmLam x ty $ shift (c + 1) d t
TmApp t1 t2 -> (TmApp `on` shift c d) t1 t2
TmRecord rs -> TmRecord $ map (second (shift c d)) rs
TmRecordProj l t -> TmRecordProj l $ shift c d t
getTypeFromContext :: Int -> Context Record -> Maybe (Ty Record)
getTypeFromContext 0 = \case
CtxEmpty -> Nothing
CtxVar _ _ ty -> Just ty
getTypeFromContext i = \case
CtxEmpty -> Nothing
CtxVar ctx' _ _ -> getTypeFromContext (i -1) ctx'
-- | レコードのみ想定
splitRecord :: Term Record -> ([(FieldLabel, Value)], (FieldLabel, Term Record), [(FieldLabel, Term Record)])
splitRecord (TmRecord fs) = (vfs, L.Partial.head tfs, L.Partial.tail tfs)
where
(vfs, tfs) = span (isValue . snd) fs
splitRecord _ = error "only record"
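-- Usage sketch (not part of the original module; assumes OverloadedStrings for
-- the Text field labels):
--
-- >>> let r = TmRecord [("x", TmRecord []), ("y", TmRecord [])]
-- >>> typeof CtxEmpty (TmRecordProj "y" r)
-- TyRecord []
-- >>> eval (TmRecordProj "y" r)
-- TmRecord []
--
-- T-RCD types r as TyRecord [("x", TyRecord []), ("y", TyRecord [])], T-PROJ
-- selects the "y" component, and E-PROJRCD performs the projection because every
-- field of r is already a value.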
| null | https://raw.githubusercontent.com/waddlaw/TAPL/94576e46821aaf7abce6d1d828fc3ce6d05a40b8/subs/lambda-fullsimple/src/Language/FullSimpleLambda/System/Record.hs | haskell |
b1080ab201b35270feaf2abb0550e4b46af906c39aa2aea4d5c96acda7d8bb8f | dramforever/clash-with-stack | UpDown.hs | module PlayClash.UpDown where
import Clash.Prelude
import PlayClash.Utils ()
{-# ANN topEntity Synthesize
  { t_name = "up_down"
  , t_inputs = ["clk", "rst", "in"]
  , t_output = "out"
  } #-}
topEntity
:: Clock XilinxSystem
-> Reset XilinxSystem
-> Signal XilinxSystem Bool
-> Signal XilinxSystem (Unsigned 2)
topEntity clk rst inp =
exposeClockResetEnable
(upDown 1 inp)
clk rst enableGen
upDown :: _
=> a
-> Signal dom Bool
-> Signal dom a
upDown initial = mealy go initial
where
go x True = (x + 1, x + 1)
go x False = (x - 1, x - 1)
| null | https://raw.githubusercontent.com/dramforever/clash-with-stack/e67e82dfa9c44c39e0962ee8542387c119e05285/src/PlayClash/UpDown.hs | haskell | module PlayClash.UpDown where
import Clash.Prelude
import PlayClash.Utils ()
# ANN topEntity Synthesize
{ t_name = " up_down "
, t_inputs = [ " clk " , " rst " , " in " ]
, t_output = " out "
} #
{ t_name = "up_down"
, t_inputs = ["clk", "rst", "in"]
, t_output = "out"
} #-}
topEntity
:: Clock XilinxSystem
-> Reset XilinxSystem
-> Signal XilinxSystem Bool
-> Signal XilinxSystem (Unsigned 2)
topEntity clk rst inp =
exposeClockResetEnable
(upDown 1 inp)
clk rst enableGen
upDown :: _
=> a
-> Signal dom Bool
-> Signal dom a
upDown initial = mealy go initial
where
go x True = (x + 1, x + 1)
go x False = (x - 1, x - 1)
|
|
1375b02d2bb4e266e4b2b5425287859ba6d3ea71f77541a7b554ef287ef9825e | imdea-software/leap | solve.ml |
(***********************************************************************)
(* *)
(*                                  LEAP                               *)
(*                                                                     *)
(*               , IMDEA Software Institute                            *)
(*                                                                     *)
(*                                                                     *)
(*      Copyright 2011 IMDEA Software Institute                        *)
(*                                                                     *)
(*  Licensed under the Apache License, Version 2.0 (the "License");    *)
(*  you may not use this file except in compliance with the License.   *)
(*  You may obtain a copy of the License at                            *)
(*                                                                     *)
(*      http://www.apache.org/licenses/LICENSE-2.0                     *)
(*                                                                     *)
(*  Unless required by applicable law or agreed to in writing,         *)
(*  software distributed under the License is distributed on an        *)
(*  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,       *)
(* either express or implied. *)
(* See the License for the specific language governing permissions *)
(* and limitations under the License. *)
(* *)
(***********************************************************************)
open Printf
open LeapLib
module Eparser = ExprParser
module Elexer = ExprLexer
module Expr = Expression
module Symtbl = ExprSymTable
module SolOpt = SolverOptions
(****************)
(* main *)
(****************)
let _ =
try
SolveArgs.parse_args ();
let ch = SolveArgs.open_input () in
let phi = Parser.parse ch (Eparser.single_formula Elexer.norm) in
SolveArgs.close_input ();
(* Choose decision procedure *)
Printf.printf "Parsed formula is:\n%s\n\n" (Expr.formula_to_str phi);
(* Solver options *)
let opt = SolOpt.new_opt () in
SolOpt.set_cutoff_strategy opt !SolveArgs.coType;
SolOpt.set_use_quantifiers opt !SolveArgs.use_quantifiers;
SolOpt.set_use_arrangement_generator opt !SolveArgs.arrangement_gen;
let sol =
match !SolveArgs.dpType with
| DP.NoDP -> (print_endline "NO DP PROVIDED"; Valid.Invalid)
| DP.Loc -> Valid.Invalid
| DP.Num -> let module Num = (val (NumSolver.choose BackendSolvers.Yices.identifier)) in
Num.compute_model true;
let sol = Num.check_valid (NumInterface.formula_to_int_formula phi) in
if not (Valid.is_valid sol) then Num.print_model();
sol
| DP.Pairs -> let module Pairs = (val (PairsSolver.choose BackendSolvers.Yices.identifier)) in
Pairs.compute_model true;
let sol = Pairs.check_valid (PairsInterface.formula_to_pairs_formula phi) in
if not (Valid.is_valid sol) then Pairs.print_model();
sol
| DP.Tll -> let module Tll = (val (TllSolver.choose BackendSolvers.Z3.identifier)) in
Tll.compute_model true;
let tll_phi = TLLInterface.formula_to_tll_formula phi in
let sol = Tll.check_valid opt tll_phi in
if not (Valid.is_valid sol) then Tll.print_model();
sol
| DP.Tslk k -> let module Tslk = (val (TslkSolver.choose BackendSolvers.Z3.identifier k)) in
let module TSLKIntf = TSLKInterface.Make(Tslk.TslkExp) in
Tslk.compute_model true;
let tslk_phi = TSLKIntf.formula_to_tslk_formula phi in
let sol = Tslk.check_valid opt tslk_phi in
if not (Valid.is_valid sol) then Tslk.print_model();
sol
| DP.Tsl -> let tsl_phi = TSLInterface.formula_to_tsl_formula phi in
TslSolver.compute_model true;
let sol = TslSolver.check_valid opt tsl_phi in
if not (Valid.is_valid sol) then TslSolver.print_model();
sol
| DP.Thm -> let thm_phi = THMInterface.formula_to_thm_formula phi in
ThmSolver.compute_model true;
let sol = ThmSolver.check_valid opt thm_phi in
if not (Valid.is_valid sol) then TslSolver.print_model();
sol
in
if Valid.is_valid sol then
print_endline "VALID"
else
print_endline "NOT VALID"
with
| Global.ParserError msg -> Interface.Err.msg "Parsing error" msg
| Parsing.Parse_error -> Interface.Err.msg "Parsing error" $
sprintf "Unexpected symbol \"%s\" at line %i" (Global.get_last()) (Global.get_linenum())
| e -> raise(e)
let _ = LeapDebug.flush()
| null | https://raw.githubusercontent.com/imdea-software/leap/5f946163c0f80ff9162db605a75b7ce2e27926ef/src/progs/solve/solve.ml | ocaml | *********************************************************************
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
either express or implied.
See the License for the specific language governing permissions
and limitations under the License.
*********************************************************************
**************
main
**************
Choose decision procedure
Solver options |
LEAP
, IMDEA Software Institute
Copyright 2011 IMDEA Software Institute
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
you may not use this file except in compliance with the License .
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND ,
open Printf
open LeapLib
module Eparser = ExprParser
module Elexer = ExprLexer
module Expr = Expression
module Symtbl = ExprSymTable
module SolOpt = SolverOptions
let _ =
try
SolveArgs.parse_args ();
let ch = SolveArgs.open_input () in
let phi = Parser.parse ch (Eparser.single_formula Elexer.norm) in
SolveArgs.close_input ();
Printf.printf "Parsed formula is:\n%s\n\n" (Expr.formula_to_str phi);
let opt = SolOpt.new_opt () in
SolOpt.set_cutoff_strategy opt !SolveArgs.coType;
SolOpt.set_use_quantifiers opt !SolveArgs.use_quantifiers;
SolOpt.set_use_arrangement_generator opt !SolveArgs.arrangement_gen;
let sol =
match !SolveArgs.dpType with
| DP.NoDP -> (print_endline "NO DP PROVIDED"; Valid.Invalid)
| DP.Loc -> Valid.Invalid
| DP.Num -> let module Num = (val (NumSolver.choose BackendSolvers.Yices.identifier)) in
Num.compute_model true;
let sol = Num.check_valid (NumInterface.formula_to_int_formula phi) in
if not (Valid.is_valid sol) then Num.print_model();
sol
| DP.Pairs -> let module Pairs = (val (PairsSolver.choose BackendSolvers.Yices.identifier)) in
Pairs.compute_model true;
let sol = Pairs.check_valid (PairsInterface.formula_to_pairs_formula phi) in
if not (Valid.is_valid sol) then Pairs.print_model();
sol
| DP.Tll -> let module Tll = (val (TllSolver.choose BackendSolvers.Z3.identifier)) in
Tll.compute_model true;
let tll_phi = TLLInterface.formula_to_tll_formula phi in
let sol = Tll.check_valid opt tll_phi in
if not (Valid.is_valid sol) then Tll.print_model();
sol
| DP.Tslk k -> let module Tslk = (val (TslkSolver.choose BackendSolvers.Z3.identifier k)) in
let module TSLKIntf = TSLKInterface.Make(Tslk.TslkExp) in
Tslk.compute_model true;
let tslk_phi = TSLKIntf.formula_to_tslk_formula phi in
let sol = Tslk.check_valid opt tslk_phi in
if not (Valid.is_valid sol) then Tslk.print_model();
sol
| DP.Tsl -> let tsl_phi = TSLInterface.formula_to_tsl_formula phi in
TslSolver.compute_model true;
let sol = TslSolver.check_valid opt tsl_phi in
if not (Valid.is_valid sol) then TslSolver.print_model();
sol
| DP.Thm -> let thm_phi = THMInterface.formula_to_thm_formula phi in
ThmSolver.compute_model true;
let sol = ThmSolver.check_valid opt thm_phi in
if not (Valid.is_valid sol) then TslSolver.print_model();
sol
in
if Valid.is_valid sol then
print_endline "VALID"
else
print_endline "NOT VALID"
with
| Global.ParserError msg -> Interface.Err.msg "Parsing error" msg
| Parsing.Parse_error -> Interface.Err.msg "Parsing error" $
sprintf "Unexpected symbol \"%s\" at line %i" (Global.get_last()) (Global.get_linenum())
| e -> raise(e)
let _ = LeapDebug.flush()
|
b59a18a91a658d76c2be8d3f54a7aeaf60f3df9209d2d065e7e28d16cae553bb | travelping/eradius | eradius_lib.erl | -module(eradius_lib).
-export([del_attr/2, get_attr/2, encode_request/1, encode_reply/1, decode_request/2, decode_request/3, decode_request_id/1]).
-export([random_authenticator/0, zero_authenticator/0, pad_to/2, set_attr/3, get_attributes/1, set_attributes/2]).
-export([timestamp/0, timestamp/1, printable_peer/2, make_addr_info/1]).
-export_type([command/0, secret/0, authenticator/0, attribute_list/0]).
% -compile(bin_opt_info).
-ifdef(TEST).
-export([encode_value/2, decode_value/2, scramble/3, ascend/3]).
-export([salt_encrypt/4, salt_decrypt/3, encode_attribute/3, decode_attribute/5]).
-endif.
-include("eradius_lib.hrl").
-include("eradius_dict.hrl").
-type command() :: 'request' | 'accept' | 'challenge' | 'reject' | 'accreq' | 'accresp' | 'coareq' | 'coaack' | 'coanak' | 'discreq' | 'discack' | 'discnak'.
-type secret() :: binary().
-type authenticator() :: <<_:128>>.
-type salt() :: binary().
-type attribute_list() :: list({eradius_dict:attribute(), term()}).
-define(IS_ATTR(Key, Attr), ?IS_KEY(Key, element(1, Attr))).
-define(IS_KEY(Key, Attr), ((is_record(Attr, attribute) andalso (element(2, Attr) == Key))
orelse
(Attr == Key)) ).
%% ------------------------------------------------------------------------------------------
%% -- Request Accessors
-spec random_authenticator() -> authenticator().
random_authenticator() -> crypto:strong_rand_bytes(16).
-spec zero_authenticator() -> authenticator().
zero_authenticator() -> <<0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0>>.
-spec set_attributes(#radius_request{}, attribute_list()) -> #radius_request{}.
set_attributes(Req = #radius_request{attrs = Attrs}, NewAttrs) ->
Req#radius_request{attrs = NewAttrs ++ Attrs}.
-spec get_attributes(#radius_request{}) -> attribute_list().
get_attributes(#radius_request{attrs = Attrs}) ->
Attrs.
-spec set_attr(#radius_request{}, eradius_dict:attribute_id(), eradius_dict:attr_value()) -> #radius_request{}.
set_attr(Req = #radius_request{attrs = Attrs}, Id, Val) ->
Req#radius_request{attrs = [{Id, Val} | Attrs]}.
-spec get_attr(#radius_request{}, eradius_dict:attribute_id()) -> eradius_dict:attr_value() | undefined.
get_attr(#radius_request{attrs = Attrs}, Id) ->
get_attr_loop(Id, Attrs).
del_attr(Req = #radius_request{attrs = Attrs}, Id) ->
Req#radius_request{attrs = lists:reverse(lists:foldl(fun(Attr, Acc) when ?IS_ATTR(Id, Attr) -> Acc;
(Attr, Acc) -> [Attr | Acc]
end, [], Attrs))}.
get_attr_loop(Key, [{Id, Val}|_T]) when ?IS_KEY(Key, Id) -> Val;
get_attr_loop(Key, [_|T]) -> get_attr_loop(Key, T);
get_attr_loop(_, []) -> undefined.
%% ------------------------------------------------------------------------------------------
%% -- Wire Encoding
%% @doc Convert a RADIUS request to the wire format.
%% The Message-Authenticator MUST be used in Access-Request packets that include an EAP-Message attribute [RFC 3579].
-spec encode_request(#radius_request{}) -> {binary(), binary()}.
encode_request(Req = #radius_request{reqid = ReqID, cmd = Command, attrs = Attributes}) when (Command == request) ->
Authenticator = random_authenticator(),
Req1 = Req#radius_request{authenticator = Authenticator},
EncReq1 = encode_attributes(Req1, Attributes),
EncReq2 = encode_eap_message(Req1, EncReq1),
{Body, BodySize} = encode_message_authenticator(Req1, EncReq2),
{Authenticator, <<(encode_command(Command)):8, ReqID:8, (BodySize + 20):16, Authenticator:16/binary, Body/binary>>};
encode_request(Req = #radius_request{reqid = ReqID, cmd = Command, attrs = Attributes}) ->
{Body, BodySize} = encode_attributes(Req, Attributes),
Head = <<(encode_command(Command)):8, ReqID:8, (BodySize + 20):16>>,
Authenticator = crypto:hash(md5, [Head, zero_authenticator(), Body, Req#radius_request.secret]),
{Authenticator, <<Head/binary, Authenticator:16/binary, Body/binary>>}.
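%% Example (sketch, not part of the module): encoding a minimal Access-Request.
%% Attribute 1 is User-Name in the standard dictionary; the exact attribute
%% representation depends on the dictionaries loaded at runtime.
%%
%%   Req = #radius_request{cmd = request, reqid = 1, secret = <<"secret">>,
%%                         attrs = [{1, <<"alice">>}]},
%%   {Auth, Packet} = eradius_lib:encode_request(Req).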
%% @doc Convert a RADIUS reply to the wire format.
%% This function performs the same task as {@link encode_request/2},
%% except that it includes the authenticator substitution required for replies.
%% The Message-Authenticator MUST be used in Access-Accept, Access-Reject or Access-Challenge
%% replies that include an EAP-Message attribute [RFC 3579].
-spec encode_reply(#radius_request{}) -> binary().
encode_reply(Req = #radius_request{reqid = ReqID, cmd = Command, authenticator = RequestAuthenticator, attrs = Attributes}) ->
EncReq1 = encode_attributes(Req, Attributes),
EncReq2 = encode_eap_message(Req, EncReq1),
{Body, BodySize} = encode_message_authenticator(Req, EncReq2),
Head = <<(encode_command(Command)):8, ReqID:8, (BodySize + 20):16>>,
ReplyAuthenticator = crypto:hash(md5, [Head, <<RequestAuthenticator:16/binary>>, Body, Req#radius_request.secret]),
<<Head/binary, ReplyAuthenticator:16/binary, Body/binary>>.
-spec encode_command(command()) -> byte().
encode_command(request) -> ?RAccess_Request;
encode_command(accept) -> ?RAccess_Accept;
encode_command(challenge) -> ?RAccess_Challenge;
encode_command(reject) -> ?RAccess_Reject;
encode_command(accreq) -> ?RAccounting_Request;
encode_command(accresp) -> ?RAccounting_Response;
encode_command(coareq) -> ?RCoa_Request;
encode_command(coaack) -> ?RCoa_Ack;
encode_command(coanak) -> ?RCoa_Nak;
encode_command(discreq) -> ?RDisconnect_Request;
encode_command(discack) -> ?RDisconnect_Ack;
encode_command(discnak) -> ?RDisconnect_Nak.
-spec encode_message_authenticator(#radius_request{}, {binary(), non_neg_integer()}) -> {binary(), non_neg_integer()}.
encode_message_authenticator(_Req = #radius_request{msg_hmac = false}, Request) ->
Request;
encode_message_authenticator(Req = #radius_request{reqid = ReqID, cmd = Command, authenticator = Authenticator, msg_hmac = true}, {Body, BodySize}) ->
Head = <<(encode_command(Command)):8, ReqID:8, (BodySize + 20 + 2 +16):16>>,
ReqAuth = <<Authenticator:16/binary>>,
HMAC = message_authenticator(Req#radius_request.secret, [Head, ReqAuth, Body, <<?RMessage_Authenticator,18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0>>]),
{<<Body/binary, ?RMessage_Authenticator, 18, HMAC/binary>>, BodySize + 2 + 16}.
chunk(Bin, Length) ->
case Bin of
<<First:Length/bytes, Rest/binary>> -> {First, Rest};
_ -> {Bin, <<>>}
end.
encode_eap_attribute({<<>>, _}, EncReq) ->
EncReq;
encode_eap_attribute({Value, Rest}, {Body, BodySize}) ->
EncAttr = <<?REAP_Message, (byte_size(Value) + 2):8, Value/binary>>,
EncReq = {<<Body/binary, EncAttr/binary>>, BodySize + byte_size(EncAttr)},
encode_eap_attribute(chunk(Rest, 253), EncReq).
-spec encode_eap_message(#radius_request{}, {binary(), non_neg_integer()}) -> {binary(), non_neg_integer()}.
encode_eap_message(#radius_request{eap_msg = EAP}, EncReq)
when is_binary(EAP); size(EAP) > 0 ->
encode_eap_attribute(chunk(EAP, 253), EncReq);
encode_eap_message(#radius_request{eap_msg = <<>>}, EncReq) ->
EncReq.
-spec encode_attributes(#radius_request{}, attribute_list()) -> {binary(), non_neg_integer()}.
encode_attributes(Req, Attributes) ->
F = fun ({A = #attribute{}, Val}, {Body, BodySize}) ->
EncAttr = encode_attribute(Req, A, Val),
{<<Body/binary, EncAttr/binary>>, BodySize + byte_size(EncAttr)};
({ID, Val}, {Body, BodySize}) ->
case eradius_dict:lookup(attribute, ID) of
AttrRec = #attribute{} ->
EncAttr = encode_attribute(Req, AttrRec, Val),
{<<Body/binary, EncAttr/binary>>, BodySize + byte_size(EncAttr)};
_ ->
{Body, BodySize}
end
end,
lists:foldl(F, {<<>>, 0}, Attributes).
-spec encode_attribute(#radius_request{}, #attribute{}, term()) -> binary().
encode_attribute(_Req, _Attr = #attribute{id = ?RMessage_Authenticator}, _) ->
%% the Message-Authenticator attribute is handled through the msg_hmac flag
<<>>;
encode_attribute(_Req, _Attr = #attribute{id = ?REAP_Message}, _) ->
%% EAP-Message attributes are handled through the eap_msg field
<<>>;
encode_attribute(Req, Attr = #attribute{id = {Vendor, ID}}, Value) ->
EncValue = encode_attribute(Req, Attr#attribute{id = ID}, Value),
if byte_size(EncValue) + 6 > 255 ->
error(badarg, [{Vendor, ID}, Value]);
true -> ok
end,
<<?RVendor_Specific:8, (byte_size(EncValue) + 6):8, Vendor:32, EncValue/binary>>;
encode_attribute(Req, #attribute{type = {tagged, Type}, id = ID, enc = Enc}, Value) ->
case Value of
{Tag, UntaggedValue} when Tag >= 1, Tag =< 16#1F -> ok;
UntaggedValue -> Tag = 0
end,
EncValue = encrypt_value(Req, encode_value(Type, UntaggedValue), Enc),
if byte_size(EncValue) + 3 > 255 ->
error(badarg, [ID, Value]);
true -> ok
end,
<<ID, (byte_size(EncValue) + 3):8, Tag:8, EncValue/binary>>;
encode_attribute(Req, #attribute{type = Type, id = ID, enc = Enc}, Value)->
EncValue = encrypt_value(Req, encode_value(Type, Value), Enc),
if byte_size(EncValue) + 2 > 255 ->
error(badarg, [ID, Value]);
true -> ok
end,
<<ID, (byte_size(EncValue) + 2):8, EncValue/binary>>.
-spec encrypt_value(#radius_request{}, binary(), eradius_dict:attribute_encryption()) -> binary().
encrypt_value(Req, Val, scramble) -> scramble(Req#radius_request.secret, Req#radius_request.authenticator, Val);
encrypt_value(Req, Val, salt_crypt) -> salt_encrypt(generate_salt(), Req#radius_request.secret, Req#radius_request.authenticator, Val);
encrypt_value(Req, Val, ascend) -> ascend(Req#radius_request.secret, Req#radius_request.authenticator, Val);
encrypt_value(_Req, Val, no) -> Val.
-spec encode_value(eradius_dict:attribute_prim_type(), term()) -> binary().
encode_value(_, V) when is_binary(V) ->
V;
encode_value(binary, V) ->
V;
encode_value(integer, V) ->
<<V:32>>;
encode_value(integer24, V) ->
<<V:24>>;
encode_value(integer64, V) ->
<<V:64>>;
encode_value(ipaddr, {A,B,C,D}) ->
<<A:8, B:8, C:8, D:8>>;
encode_value(ipv6addr, {A,B,C,D,E,F,G,H}) ->
<<A:16, B:16, C:16, D:16, E:16, F:16, G:16, H:16>>;
encode_value(ipv6prefix, {{A,B,C,D,E,F,G,H}, PLen}) ->
L = (PLen + 7) div 8,
<<IP:L/bytes, _R/binary>> = <<A:16, B:16, C:16, D:16, E:16, F:16, G:16, H:16>>,
<<0, PLen, IP/binary>>;
encode_value(string, V) when is_list(V) ->
unicode:characters_to_binary(V);
encode_value(octets, V) when is_list(V) ->
iolist_to_binary(V);
encode_value(octets, V) when is_integer(V) ->
<<V:32>>;
encode_value(date, V) when is_list(V) ->
unicode:characters_to_binary(V);
encode_value(date, Date = {{_,_,_},{_,_,_}}) ->
EpochSecs = calendar:datetime_to_gregorian_seconds(Date) - calendar:datetime_to_gregorian_seconds({{1970,1,1},{0,0,0}}),
<<EpochSecs:32>>.
%% ------------------------------------------------------------------------------------------
%% -- Wire Decoding
-spec decode_request_id(binary()) -> {0..255, binary()} | {bad_pdu, list()}.
decode_request_id(Req = <<_Cmd:8, ReqId:8, _Rest/binary>>) -> {ReqId, Req};
decode_request_id(_Req) -> {bad_pdu, "invalid request id"}.
-spec decode_request(binary(), secret()) -> #radius_request{} | {bad_pdu, list()}.
decode_request(Packet, Secret) ->
decode_request(Packet, Secret, undefined).
-spec decode_request(binary(), secret(), authenticator()) -> #radius_request{} | {bad_pdu, list()}.
decode_request(Packet, Secret, Authenticator) ->
case (catch decode_request0(Packet, Secret, Authenticator)) of
{'EXIT', _} -> {bad_pdu, "decode packet error"};
Else -> Else
end.
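%% Example (sketch): a reply captured off the wire can be decoded and validated
%% against the original request authenticator with the three-argument form, e.g.
%%
%%   case eradius_lib:decode_request(Packet, Secret, ReqAuth) of
%%       #radius_request{cmd = accept} = Reply -> {ok, Reply};
%%       {bad_pdu, Reason}                     -> {error, Reason}
%%   end.
%%
%% Variable names here are illustrative.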
-spec decode_request0(binary(), secret(), authenticator() | 'undefined') -> #radius_request{}.
decode_request0(<<Cmd:8, ReqId:8, Len:16, PacketAuthenticator:16/binary, Body0/binary>>, Secret, RequestAuthenticator) ->
ActualBodySize = byte_size(Body0),
GivenBodySize = Len - 20,
Body = if
ActualBodySize > GivenBodySize ->
throw({bad_pdu, "false packet size"});
ActualBodySize == GivenBodySize ->
Body0;
true ->
binary:part(Body0, 0, GivenBodySize)
end,
Command = decode_command(Cmd),
PartialRequest = #radius_request{cmd = Command, reqid = ReqId, authenticator = PacketAuthenticator, secret = Secret, msg_hmac = false},
DecodedState = decode_attributes(PartialRequest, RequestAuthenticator, Body),
Request = PartialRequest#radius_request{attrs = lists:reverse(DecodedState#decoder_state.attrs),
eap_msg = list_to_binary(lists:reverse(DecodedState#decoder_state.eap_msg))},
validate_authenticator(Command, <<Cmd:8, ReqId:8, Len:16>>, RequestAuthenticator, PacketAuthenticator, Body, Secret),
if
is_integer(DecodedState#decoder_state.hmac_pos) ->
validate_packet_authenticator(Cmd, ReqId, Len, Body, DecodedState#decoder_state.hmac_pos, Secret, PacketAuthenticator, RequestAuthenticator),
Request#radius_request{msg_hmac = true};
true -> Request
end.
-spec validate_packet_authenticator(non_neg_integer(), non_neg_integer(), non_neg_integer(), non_neg_integer(), binary(), binary(), authenticator(), authenticator() | 'undefined') -> ok.
validate_packet_authenticator(Cmd, ReqId, Len, Body, Pos, Secret, PacketAuthenticator, undefined) ->
validate_packet_authenticator(Cmd, ReqId, Len, PacketAuthenticator, Body, Pos, Secret);
validate_packet_authenticator(Cmd, ReqId, Len, Body, Pos, Secret, _PacketAuthenticator, RequestAuthenticator) ->
validate_packet_authenticator(Cmd, ReqId, Len, RequestAuthenticator, Body, Pos, Secret).
-spec validate_packet_authenticator(non_neg_integer(), non_neg_integer(), non_neg_integer(), authenticator(), non_neg_integer(), binary(), binary()) -> ok.
validate_packet_authenticator(Cmd, ReqId, Len, Auth, Body, Pos, Secret) ->
case Body of
<<Before:Pos/bytes, Value:16/bytes, After/binary>> ->
case message_authenticator(Secret, [<<Cmd:8, ReqId:8, Len:16>>, Auth, Before, zero_authenticator(), After]) of
Value ->
ok;
_ ->
throw({bad_pdu, "Message-Authenticator Attribute is invalid"})
end;
_ ->
throw({bad_pdu, "Message-Authenticator Attribute is malformed"})
end.
validate_authenticator(accreq, Head, _RequestAuthenticator, PacketAuthenticator, Body, Secret) ->
compare_authenticator(crypto:hash(md5, [Head, zero_authenticator(), Body, Secret]), PacketAuthenticator);
validate_authenticator(Cmd, Head, RequestAuthenticator, PacketAuthenticator, Body, Secret)
when
(Cmd =:= accept) orelse
(Cmd =:= reject) orelse
(Cmd =:= accresp) orelse
(Cmd =:= coaack) orelse
(Cmd =:= coanak) orelse
(Cmd =:= discack) orelse
(Cmd =:= discnak) orelse
(Cmd =:= challenge) ->
compare_authenticator(crypto:hash(md5, [Head, RequestAuthenticator, Body, Secret]), PacketAuthenticator);
validate_authenticator(_Cmd, _Head, _RequestAuthenticator, _PacketAuthenticator,
_Body, _Secret) ->
true.
compare_authenticator(Authenticator, Authenticator) ->
true;
compare_authenticator(_RequestAuthenticator, _PacketAuthenticator) ->
throw({bad_pdu, "Authenticator Attribute is invalid"}).
-spec decode_command(byte()) -> command().
decode_command(?RAccess_Request) -> request;
decode_command(?RAccess_Accept) -> accept;
decode_command(?RAccess_Reject) -> reject;
decode_command(?RAccess_Challenge) -> challenge;
decode_command(?RAccounting_Request) -> accreq;
decode_command(?RAccounting_Response) -> accresp;
decode_command(?RCoa_Request) -> coareq;
decode_command(?RCoa_Ack) -> coaack;
decode_command(?RCoa_Nak) -> coanak;
decode_command(?RDisconnect_Request) -> discreq;
decode_command(?RDisconnect_Ack) -> discack;
decode_command(?RDisconnect_Nak) -> discnak;
decode_command(_) -> error({bad_pdu, "unknown request type"}).
append_attr(Attr, State) ->
State#decoder_state{attrs = [Attr | State#decoder_state.attrs]}.
-spec decode_attributes(#radius_request{}, binary(), binary()) -> #decoder_state{}.
decode_attributes(Req, RequestAuthenticator, As) ->
decode_attributes(Req, As, 0, #decoder_state{request_authenticator = RequestAuthenticator}).
-spec decode_attributes(#radius_request{}, binary(), non_neg_integer(), #decoder_state{}) -> #decoder_state{}.
decode_attributes(_Req, <<>>, _Pos, State) ->
State;
decode_attributes(Req, <<Type:8, ChunkLength:8, ChunkRest/binary>>, Pos, State) ->
ValueLength = ChunkLength - 2,
<<Value:ValueLength/binary, PacketRest/binary>> = ChunkRest,
NewState = case eradius_dict:lookup(attribute, Type) of
AttrRec = #attribute{} ->
decode_attribute(Value, Req, AttrRec, Pos + 2, State);
_ ->
append_attr({Type, Value}, State)
end,
decode_attributes(Req, PacketRest, Pos + ChunkLength, NewState).
%% gotcha: the function returns a LIST of attribute-value pairs because
%% a vendor-specific attribute blob might contain more than one attribute.
-spec decode_attribute(binary(), #radius_request{}, #attribute{}, non_neg_integer(), #decoder_state{}) -> #decoder_state{}.
decode_attribute(<<VendorID:32/integer, ValueBin/binary>>, Req, #attribute{id = ?RVendor_Specific}, Pos, State) ->
decode_vendor_specific_attribute(Req, VendorID, ValueBin, Pos + 4, State);
decode_attribute(<<Value/binary>>, _Req, Attr = #attribute{id = ?REAP_Message}, _Pos, State) ->
NewState = State#decoder_state{eap_msg = [Value | State#decoder_state.eap_msg]},
append_attr({Attr, Value}, NewState);
decode_attribute(<<EncValue/binary>>, Req, Attr = #attribute{id = ?RMessage_Authenticator, type = Type, enc = Encryption}, Pos, State) ->
append_attr({Attr, decode_value(decrypt_value(Req, State, EncValue, Encryption), Type)}, State#decoder_state{hmac_pos = Pos});
decode_attribute(<<EncValue/binary>>, Req, Attr = #attribute{type = Type, enc = Encryption}, _Pos, State) when is_atom(Type) ->
append_attr({Attr, decode_value(decrypt_value(Req, State, EncValue, Encryption), Type)}, State);
decode_attribute(WholeBin = <<Tag:8, Bin/binary>>, Req, Attr = #attribute{type = {tagged, Type}}, _Pos, State) ->
case {decode_tag_value(Tag), Attr#attribute.enc} of
{0, no} ->
% decode including tag byte if tag is out of range
append_attr({Attr, {0, decode_value(WholeBin, Type)}}, State);
{TagV, no} ->
append_attr({Attr, {TagV, decode_value(Bin, Type)}}, State);
{TagV, Encryption} ->
% for encrypted attributes, tag byte is never part of the value
append_attr({Attr, {TagV, decode_value(decrypt_value(Req, State, Bin, Encryption), Type)}}, State)
end.
-compile({inline, decode_tag_value/1}).
decode_tag_value(Tag) when (Tag >= 1) and (Tag =< 16#1F) -> Tag;
decode_tag_value(_OtherTag) -> 0.
-spec decode_value(binary(), eradius_dict:attribute_prim_type()) -> term().
decode_value(<<Bin/binary>>, Type) ->
case Type of
octets ->
Bin;
binary ->
Bin;
abinary ->
Bin;
string ->
Bin;
integer ->
decode_integer(Bin);
integer24 ->
decode_integer(Bin);
integer64 ->
decode_integer(Bin);
date ->
case decode_integer(Bin) of
Int when is_integer(Int) ->
calendar:now_to_universal_time({Int div 1000000, Int rem 1000000, 0});
_ ->
Bin
end;
ipaddr ->
<<B,C,D,E>> = Bin,
{B,C,D,E};
ipv6addr ->
<<B:16,C:16,D:16,E:16,F:16,G:16,H:16,I:16>> = Bin,
{B,C,D,E,F,G,H,I};
ipv6prefix ->
<<0,PLen,P/binary>> = Bin,
<<B:16,C:16,D:16,E:16,F:16,G:16,H:16,I:16>> = pad_to(16, P),
{{B,C,D,E,F,G,H,I}, PLen}
end.
-compile({inline, decode_integer/1}).
decode_integer(Bin) ->
ISize = bit_size(Bin),
case Bin of
<<Int:ISize/integer>> -> Int;
_ -> Bin
end.
-spec decrypt_value(#radius_request{}, #decoder_state{}, binary(),
eradius_dict:attribute_encryption()) -> eradius_dict:attr_value().
decrypt_value(#radius_request{secret = Secret, authenticator = Authenticator},
_, <<Val/binary>>, scramble) ->
scramble(Secret, Authenticator, Val);
decrypt_value(#radius_request{secret = Secret},
#decoder_state{request_authenticator = RequestAuthenticator},
<<Val/binary>>, salt_crypt)
when is_binary(RequestAuthenticator) ->
salt_decrypt(Secret, RequestAuthenticator, Val);
decrypt_value(#radius_request{secret = Secret, authenticator = Authenticator},
_, <<Val/binary>>, ascend) ->
ascend(Secret, Authenticator, Val);
decrypt_value(_Req, _State, <<Val/binary>>, _Type) ->
Val.
-spec decode_vendor_specific_attribute(#radius_request{}, non_neg_integer(), binary(), non_neg_integer(), #decoder_state{}) -> #decoder_state{}.
decode_vendor_specific_attribute(_Req, _VendorID, <<>>, _Pos, State) ->
State;
decode_vendor_specific_attribute(Req, VendorID, <<Type:8, ChunkLength:8, ChunkRest/binary>>, Pos, State) ->
ValueLength = ChunkLength - 2,
<<Value:ValueLength/binary, PacketRest/binary>> = ChunkRest,
VendorAttrKey = {VendorID, Type},
NewState = case eradius_dict:lookup(attribute, VendorAttrKey) of
Attr = #attribute{} ->
decode_attribute(Value, Req, Attr, Pos + 2, State);
_ ->
append_attr({VendorAttrKey, Value}, State)
end,
decode_vendor_specific_attribute(Req, VendorID, PacketRest, Pos + ChunkLength, NewState).
%% ------------------------------------------------------------------------------------------
%% -- Attribute Encryption
-spec scramble(secret(), authenticator(), binary()) -> binary().
scramble(SharedSecret, RequestAuthenticator, <<PlainText/binary>>) ->
B = crypto:hash(md5, [SharedSecret, RequestAuthenticator]),
do_scramble(SharedSecret, B, pad_to(16, PlainText), << >>).
do_scramble(SharedSecret, B, <<PlainText:16/binary, Remaining/binary>>, CipherText) ->
NewCipherText = crypto:exor(PlainText, B),
Bnext = crypto:hash(md5, [SharedSecret, NewCipherText]),
do_scramble(SharedSecret, Bnext, Remaining, <<CipherText/binary, NewCipherText/binary>>);
do_scramble(_SharedSecret, _B, << >>, CipherText) ->
CipherText.
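%% Round-trip sketch (not part of the module): for a value that fits in a single
%% 16-byte block the scramble transform is its own inverse under the same secret
%% and request authenticator, e.g.
%%
%%   Auth = eradius_lib:random_authenticator(),
%%   Enc  = scramble(<<"secret">>, Auth, pad_to(16, <<"password">>)),
%%   <<"password", 0:64>> = scramble(<<"secret">>, Auth, Enc).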
-spec generate_salt() -> salt().
generate_salt() ->
<<Salt1, Salt2>> = crypto:strong_rand_bytes(2),
<<(Salt1 bor 16#80), Salt2>>.
-spec salt_encrypt(salt(), secret(), authenticator(), binary()) -> binary().
salt_encrypt(Salt, SharedSecret, RequestAuthenticator, PlainText) ->
CipherText = do_salt_crypt(encrypt, Salt, SharedSecret, RequestAuthenticator, (pad_to(16, << (byte_size(PlainText)):8, PlainText/binary >>))),
<<Salt/binary, CipherText/binary>>.
-spec salt_decrypt(secret(), authenticator(), binary()) -> binary().
salt_decrypt(SharedSecret, RequestAuthenticator, <<Salt:2/binary, CipherText/binary>>) ->
<< Length:8/integer, PlainText/binary >> = do_salt_crypt(decrypt, Salt, SharedSecret, RequestAuthenticator, CipherText),
if
Length < byte_size(PlainText) ->
binary:part(PlainText, 0, Length);
true ->
PlainText
end.
do_salt_crypt(Op, Salt, SharedSecret, RequestAuthenticator, <<CipherText/binary>>) ->
B = crypto:hash(md5, [SharedSecret, RequestAuthenticator, Salt]),
salt_crypt(Op, SharedSecret, B, CipherText, << >>).
salt_crypt(Op, SharedSecret, B, <<PlainText:16/binary, Remaining/binary>>, CipherText) ->
NewCipherText = crypto:exor(PlainText, B),
Bnext = case Op of
decrypt -> crypto:hash(md5, [SharedSecret, PlainText]);
encrypt -> crypto:hash(md5, [SharedSecret, NewCipherText])
end,
salt_crypt(Op, SharedSecret, Bnext, Remaining, <<CipherText/binary, NewCipherText/binary>>);
salt_crypt(_Op, _SharedSecret, _B, << >>, CipherText) ->
CipherText.
-spec ascend(secret(), authenticator(), binary()) -> binary().
ascend(SharedSecret, RequestAuthenticator, <<PlainText/binary>>) ->
Digest = crypto:hash(md5, [RequestAuthenticator, SharedSecret]),
crypto:exor(Digest, pad_to(16, PlainText)).
%% @doc pad binary to specific length
%% See <a href="-questions/2008-December/040709.html">
%% -questions/2008-December/040709.html
%% </a>
-compile({inline, pad_to/2}).
pad_to(Width, Binary) ->
case (Width - byte_size(Binary) rem Width) rem Width of
0 -> Binary;
N -> <<Binary/binary, 0:(N*8)>>
end.
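%% Example: pad_to(16, <<"abc">>) yields <<"abc", 0:104>> (the 3 input bytes
%% followed by 13 zero bytes), while a binary whose size is already a multiple
%% of Width, e.g. pad_to(16, <<0:128>>), is returned unchanged.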
-spec timestamp() -> erlang:timestamp().
timestamp() ->
erlang:system_time(milli_seconds).
timestamp(Units) ->
erlang:system_time(Units).
-spec make_addr_info({term(), {inet:ip_address(), integer()}}) -> atom_address().
make_addr_info({undefined, {IP, Port}}) ->
{socket_to_atom(IP, Port), ip_to_atom(IP), port_to_atom(Port)};
make_addr_info({Name, {IP, Port}}) ->
{to_atom(Name), ip_to_atom(IP), port_to_atom(Port)}.
to_atom(Value) when is_atom(Value) -> Value;
to_atom(Value) when is_binary(Value) -> binary_to_atom(Value, latin1);
to_atom(Value) when is_list(Value) -> list_to_atom(Value).
socket_to_atom(IP, undefined) ->
ip_to_atom(IP);
socket_to_atom(IP, Port) when is_tuple(IP) ->
list_to_atom(inet:ntoa(IP) ++ ":" ++ integer_to_list(Port));
socket_to_atom(IP, Port) when is_binary(IP) ->
binary_to_atom(erlang:iolist_to_binary([IP, <<":">>, Port]), latin1);
socket_to_atom(IP, Port) when is_atom(IP) ->
binary_to_atom(erlang:iolist_to_binary([atom_to_binary(IP, latin1), <<":">>, Port]), latin1).
ip_to_atom(IP) when is_atom(IP) -> IP;
ip_to_atom(IP) -> list_to_atom(inet:ntoa(IP)).
port_to_atom(undefined) -> undefined;
port_to_atom(Port) when is_atom(Port) -> Port;
port_to_atom(Port) -> list_to_atom(integer_to_list(Port)).
-spec printable_peer(inet:ip4_address(),eradius_server:port_number()) -> io_lib:chars().
printable_peer({IA,IB,IC,ID}, Port) ->
io_lib:format("~b.~b.~b.~b:~b",[IA,IB,IC,ID,Port]).
%% @doc calculate the MD5 message authenticator
-if(?OTP_RELEASE >= 23).
%% crypto API changes in OTP >= 23
message_authenticator(Secret, Msg) ->
crypto:mac(hmac, md5, Secret, Msg).
-else.
message_authenticator(Secret, Msg) ->
crypto:hmac(md5, Secret, Msg).
-endif.
| null | https://raw.githubusercontent.com/travelping/eradius/bac1a92f547ac4f8e009e9052f28c430b6f9b82d/src/eradius_lib.erl | erlang |
-export([del_attr/2, get_attr/2, encode_request/1, encode_reply/1, decode_request/2, decode_request/3, decode_request_id/1]).
-export([random_authenticator/0, zero_authenticator/0, pad_to/2, set_attr/3, get_attributes/1, set_attributes/2]).
-export([timestamp/0, timestamp/1, printable_peer/2, make_addr_info/1]).
-export_type([command/0, secret/0, authenticator/0, attribute_list/0]).
-ifdef(TEST).
-export([encode_value/2, decode_value/2, scramble/3, ascend/3]).
-export([salt_encrypt/4, salt_decrypt/3, encode_attribute/3, decode_attribute/5]).
-endif.
-include("eradius_lib.hrl").
-include("eradius_dict.hrl").
-type command() :: 'request' | 'accept' | 'challenge' | 'reject' | 'accreq' | 'accresp' | 'coareq' | 'coaack' | 'coanak' | 'discreq' | 'discack' | 'discnak'.
-type secret() :: binary().
-type authenticator() :: <<_:128>>.
-type salt() :: binary().
-type attribute_list() :: list({eradius_dict:attribute(), term()}).
-define(IS_ATTR(Key, Attr), ?IS_KEY(Key, element(1, Attr))).
-define(IS_KEY(Key, Attr), ((is_record(Attr, attribute) andalso (element(2, Attr) == Key))
orelse
(Attr == Key)) ).
-spec random_authenticator() -> authenticator().
random_authenticator() -> crypto:strong_rand_bytes(16).
-spec zero_authenticator() -> authenticator().
zero_authenticator() -> <<0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0>>.
-spec set_attributes(#radius_request{}, attribute_list()) -> #radius_request{}.
set_attributes(Req = #radius_request{attrs = Attrs}, NewAttrs) ->
Req#radius_request{attrs = NewAttrs ++ Attrs}.
-spec get_attributes(#radius_request{}) -> attribute_list().
get_attributes(#radius_request{attrs = Attrs}) ->
Attrs.
-spec set_attr(#radius_request{}, eradius_dict:attribute_id(), eradius_dict:attr_value()) -> #radius_request{}.
set_attr(Req = #radius_request{attrs = Attrs}, Id, Val) ->
Req#radius_request{attrs = [{Id, Val} | Attrs]}.
-spec get_attr(#radius_request{}, eradius_dict:attribute_id()) -> eradius_dict:attr_value() | undefined.
get_attr(#radius_request{attrs = Attrs}, Id) ->
get_attr_loop(Id, Attrs).
del_attr(Req = #radius_request{attrs = Attrs}, Id) ->
Req#radius_request{attrs = lists:reverse(lists:foldl(fun(Attr, Acc) when ?IS_ATTR(Id, Attr) -> Acc;
(Attr, Acc) -> [Attr | Acc]
end, [], Attrs))}.
get_attr_loop(Key, [{Id, Val}|_T]) when ?IS_KEY(Key, Id) -> Val;
get_attr_loop(Key, [_|T]) -> get_attr_loop(Key, T);
get_attr_loop(_, []) -> undefined.
The Message - Authenticator MUST be used in Access - Request that include an EAP - Message attribute [ RFC 3579 ] .
-spec encode_request(#radius_request{}) -> {binary(), binary()}.
encode_request(Req = #radius_request{reqid = ReqID, cmd = Command, attrs = Attributes}) when (Command == request) ->
Authenticator = random_authenticator(),
Req1 = Req#radius_request{authenticator = Authenticator},
EncReq1 = encode_attributes(Req1, Attributes),
EncReq2 = encode_eap_message(Req1, EncReq1),
{Body, BodySize} = encode_message_authenticator(Req1, EncReq2),
{Authenticator, <<(encode_command(Command)):8, ReqID:8, (BodySize + 20):16, Authenticator:16/binary, Body/binary>>};
encode_request(Req = #radius_request{reqid = ReqID, cmd = Command, attrs = Attributes}) ->
{Body, BodySize} = encode_attributes(Req, Attributes),
Head = <<(encode_command(Command)):8, ReqID:8, (BodySize + 20):16>>,
Authenticator = crypto:hash(md5, [Head, zero_authenticator(), Body, Req#radius_request.secret]),
{Authenticator, <<Head/binary, Authenticator:16/binary, Body/binary>>}.
The Message - Authenticator MUST be used in Access - Accept , Access - Reject or Access - Chalange
-spec encode_reply(#radius_request{}) -> binary().
encode_reply(Req = #radius_request{reqid = ReqID, cmd = Command, authenticator = RequestAuthenticator, attrs = Attributes}) ->
EncReq1 = encode_attributes(Req, Attributes),
EncReq2 = encode_eap_message(Req, EncReq1),
{Body, BodySize} = encode_message_authenticator(Req, EncReq2),
Head = <<(encode_command(Command)):8, ReqID:8, (BodySize + 20):16>>,
ReplyAuthenticator = crypto:hash(md5, [Head, <<RequestAuthenticator:16/binary>>, Body, Req#radius_request.secret]),
<<Head/binary, ReplyAuthenticator:16/binary, Body/binary>>.
-spec encode_command(command()) -> byte().
encode_command(request) -> ?RAccess_Request;
encode_command(accept) -> ?RAccess_Accept;
encode_command(challenge) -> ?RAccess_Challenge;
encode_command(reject) -> ?RAccess_Reject;
encode_command(accreq) -> ?RAccounting_Request;
encode_command(accresp) -> ?RAccounting_Response;
encode_command(coareq) -> ?RCoa_Request;
encode_command(coaack) -> ?RCoa_Ack;
encode_command(coanak) -> ?RCoa_Nak;
encode_command(discreq) -> ?RDisconnect_Request;
encode_command(discack) -> ?RDisconnect_Ack;
encode_command(discnak) -> ?RDisconnect_Nak.
-spec encode_message_authenticator(#radius_request{}, {binary(), non_neg_integer()}) -> {binary(), non_neg_integer()}.
encode_message_authenticator(_Req = #radius_request{msg_hmac = false}, Request) ->
Request;
encode_message_authenticator(Req = #radius_request{reqid = ReqID, cmd = Command, authenticator = Authenticator, msg_hmac = true}, {Body, BodySize}) ->
Head = <<(encode_command(Command)):8, ReqID:8, (BodySize + 20 + 2 +16):16>>,
ReqAuth = <<Authenticator:16/binary>>,
HMAC = message_authenticator(Req#radius_request.secret, [Head, ReqAuth, Body, <<?RMessage_Authenticator,18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0>>]),
{<<Body/binary, ?RMessage_Authenticator, 18, HMAC/binary>>, BodySize + 2 + 16}.
chunk(Bin, Length) ->
case Bin of
<<First:Length/bytes, Rest/binary>> -> {First, Rest};
_ -> {Bin, <<>>}
end.
encode_eap_attribute({<<>>, _}, EncReq) ->
EncReq;
encode_eap_attribute({Value, Rest}, {Body, BodySize}) ->
EncAttr = <<?REAP_Message, (byte_size(Value) + 2):8, Value/binary>>,
EncReq = {<<Body/binary, EncAttr/binary>>, BodySize + byte_size(EncAttr)},
encode_eap_attribute(chunk(Rest, 253), EncReq).
-spec encode_eap_message(#radius_request{}, {binary(), non_neg_integer()}) -> {binary(), non_neg_integer()}.
encode_eap_message(#radius_request{eap_msg = EAP}, EncReq)
when is_binary(EAP); size(EAP) > 0 ->
encode_eap_attribute(chunk(EAP, 253), EncReq);
encode_eap_message(#radius_request{eap_msg = <<>>}, EncReq) ->
EncReq.
-spec encode_attributes(#radius_request{}, attribute_list()) -> {binary(), non_neg_integer()}.
encode_attributes(Req, Attributes) ->
F = fun ({A = #attribute{}, Val}, {Body, BodySize}) ->
EncAttr = encode_attribute(Req, A, Val),
{<<Body/binary, EncAttr/binary>>, BodySize + byte_size(EncAttr)};
({ID, Val}, {Body, BodySize}) ->
case eradius_dict:lookup(attribute, ID) of
AttrRec = #attribute{} ->
EncAttr = encode_attribute(Req, AttrRec, Val),
{<<Body/binary, EncAttr/binary>>, BodySize + byte_size(EncAttr)};
_ ->
{Body, BodySize}
end
end,
lists:foldl(F, {<<>>, 0}, Attributes).
-spec encode_attribute(#radius_request{}, #attribute{}, term()) -> binary().
encode_attribute(_Req, _Attr = #attribute{id = ?RMessage_Authenticator}, _) ->
message authenticator is handled through the flag
<<>>;
encode_attribute(_Req, _Attr = #attribute{id = ?REAP_Message}, _) ->
EAP - Message attributes are handled through the eap_msg field
<<>>;
encode_attribute(Req, Attr = #attribute{id = {Vendor, ID}}, Value) ->
EncValue = encode_attribute(Req, Attr#attribute{id = ID}, Value),
if byte_size(EncValue) + 6 > 255 ->
error(badarg, [{Vendor, ID}, Value]);
true -> ok
end,
<<?RVendor_Specific:8, (byte_size(EncValue) + 6):8, Vendor:32, EncValue/binary>>;
encode_attribute(Req, #attribute{type = {tagged, Type}, id = ID, enc = Enc}, Value) ->
case Value of
{Tag, UntaggedValue} when Tag >= 1, Tag =< 16#1F -> ok;
UntaggedValue -> Tag = 0
end,
EncValue = encrypt_value(Req, encode_value(Type, UntaggedValue), Enc),
if byte_size(EncValue) + 3 > 255 ->
error(badarg, [ID, Value]);
true -> ok
end,
<<ID, (byte_size(EncValue) + 3):8, Tag:8, EncValue/binary>>;
encode_attribute(Req, #attribute{type = Type, id = ID, enc = Enc}, Value)->
EncValue = encrypt_value(Req, encode_value(Type, Value), Enc),
if byte_size(EncValue) + 2 > 255 ->
error(badarg, [ID, Value]);
true -> ok
end,
<<ID, (byte_size(EncValue) + 2):8, EncValue/binary>>.
-spec encrypt_value(#radius_request{}, binary(), eradius_dict:attribute_encryption()) -> binary().
encrypt_value(Req, Val, scramble) -> scramble(Req#radius_request.secret, Req#radius_request.authenticator, Val);
encrypt_value(Req, Val, salt_crypt) -> salt_encrypt(generate_salt(), Req#radius_request.secret, Req#radius_request.authenticator, Val);
encrypt_value(Req, Val, ascend) -> ascend(Req#radius_request.secret, Req#radius_request.authenticator, Val);
encrypt_value(_Req, Val, no) -> Val.
-spec encode_value(eradius_dict:attribute_prim_type(), term()) -> binary().
encode_value(_, V) when is_binary(V) ->
V;
encode_value(binary, V) ->
V;
encode_value(integer, V) ->
<<V:32>>;
encode_value(integer24, V) ->
<<V:24>>;
encode_value(integer64, V) ->
<<V:64>>;
encode_value(ipaddr, {A,B,C,D}) ->
<<A:8, B:8, C:8, D:8>>;
encode_value(ipv6addr, {A,B,C,D,E,F,G,H}) ->
<<A:16, B:16, C:16, D:16, E:16, F:16, G:16, H:16>>;
encode_value(ipv6prefix, {{A,B,C,D,E,F,G,H}, PLen}) ->
L = (PLen + 7) div 8,
<<IP:L/bytes, _R/binary>> = <<A:16, B:16, C:16, D:16, E:16, F:16, G:16, H:16>>,
<<0, PLen, IP/binary>>;
encode_value(string, V) when is_list(V) ->
unicode:characters_to_binary(V);
encode_value(octets, V) when is_list(V) ->
iolist_to_binary(V);
encode_value(octets, V) when is_integer(V) ->
<<V:32>>;
encode_value(date, V) when is_list(V) ->
unicode:characters_to_binary(V);
encode_value(date, Date = {{_,_,_},{_,_,_}}) ->
EpochSecs = calendar:datetime_to_gregorian_seconds(Date) - calendar:datetime_to_gregorian_seconds({{1970,1,1},{0,0,0}}),
<<EpochSecs:32>>.
-spec decode_request_id(binary()) -> {0..255, binary()} | {bad_pdu, list()}.
decode_request_id(Req = <<_Cmd:8, ReqId:8, _Rest/binary>>) -> {ReqId, Req};
decode_request_id(_Req) -> {bad_pdu, "invalid request id"}.
-spec decode_request(binary(), secret()) -> #radius_request{} | {bad_pdu, list()}.
decode_request(Packet, Secret) ->
decode_request(Packet, Secret, undefined).
-spec decode_request(binary(), secret(), authenticator()) -> #radius_request{} | {bad_pdu, list()}.
decode_request(Packet, Secret, Authenticator) ->
case (catch decode_request0(Packet, Secret, Authenticator)) of
{'EXIT', _} -> {bad_pdu, "decode packet error"};
Else -> Else
end.
-spec decode_request0(binary(), secret(), authenticator() | 'undefined') -> #radius_request{}.
decode_request0(<<Cmd:8, ReqId:8, Len:16, PacketAuthenticator:16/binary, Body0/binary>>, Secret, RequestAuthenticator) ->
ActualBodySize = byte_size(Body0),
GivenBodySize = Len - 20,
Body = if
ActualBodySize > GivenBodySize ->
throw({bad_pdu, "false packet size"});
ActualBodySize == GivenBodySize ->
Body0;
true ->
binary:part(Body0, 0, GivenBodySize)
end,
Command = decode_command(Cmd),
PartialRequest = #radius_request{cmd = Command, reqid = ReqId, authenticator = PacketAuthenticator, secret = Secret, msg_hmac = false},
DecodedState = decode_attributes(PartialRequest, RequestAuthenticator, Body),
Request = PartialRequest#radius_request{attrs = lists:reverse(DecodedState#decoder_state.attrs),
eap_msg = list_to_binary(lists:reverse(DecodedState#decoder_state.eap_msg))},
validate_authenticator(Command, <<Cmd:8, ReqId:8, Len:16>>, RequestAuthenticator, PacketAuthenticator, Body, Secret),
if
is_integer(DecodedState#decoder_state.hmac_pos) ->
validate_packet_authenticator(Cmd, ReqId, Len, Body, DecodedState#decoder_state.hmac_pos, Secret, PacketAuthenticator, RequestAuthenticator),
Request#radius_request{msg_hmac = true};
true -> Request
end.
-spec validate_packet_authenticator(non_neg_integer(), non_neg_integer(), non_neg_integer(), non_neg_integer(), binary(), binary(), authenticator(), authenticator() | 'undefined') -> ok.
validate_packet_authenticator(Cmd, ReqId, Len, Body, Pos, Secret, PacketAuthenticator, undefined) ->
validate_packet_authenticator(Cmd, ReqId, Len, PacketAuthenticator, Body, Pos, Secret);
validate_packet_authenticator(Cmd, ReqId, Len, Body, Pos, Secret, _PacketAuthenticator, RequestAuthenticator) ->
validate_packet_authenticator(Cmd, ReqId, Len, RequestAuthenticator, Body, Pos, Secret).
-spec validate_packet_authenticator(non_neg_integer(), non_neg_integer(), non_neg_integer(), authenticator(), binary(), non_neg_integer(), binary()) -> ok.
validate_packet_authenticator(Cmd, ReqId, Len, Auth, Body, Pos, Secret) ->
case Body of
<<Before:Pos/bytes, Value:16/bytes, After/binary>> ->
case message_authenticator(Secret, [<<Cmd:8, ReqId:8, Len:16>>, Auth, Before, zero_authenticator(), After]) of
Value ->
ok;
_ ->
throw({bad_pdu, "Message-Authenticator Attribute is invalid"})
end;
_ ->
throw({bad_pdu, "Message-Authenticator Attribute is malformed"})
end.
validate_authenticator(accreq, Head, _RequestAuthenticator, PacketAuthenticator, Body, Secret) ->
compare_authenticator(crypto:hash(md5, [Head, zero_authenticator(), Body, Secret]), PacketAuthenticator);
validate_authenticator(Cmd, Head, RequestAuthenticator, PacketAuthenticator, Body, Secret)
when
(Cmd =:= accept) orelse
(Cmd =:= reject) orelse
(Cmd =:= accresp) orelse
(Cmd =:= coaack) orelse
(Cmd =:= coanak) orelse
(Cmd =:= discack) orelse
(Cmd =:= discnak) orelse
(Cmd =:= challenge) ->
compare_authenticator(crypto:hash(md5, [Head, RequestAuthenticator, Body, Secret]), PacketAuthenticator);
validate_authenticator(_Cmd, _Head, _RequestAuthenticator, _PacketAuthenticator,
_Body, _Secret) ->
true.
compare_authenticator(Authenticator, Authenticator) ->
true;
compare_authenticator(_RequestAuthenticator, _PacketAuthenticator) ->
throw({bad_pdu, "Authenticator Attribute is invalid"}).
-spec decode_command(byte()) -> command().
decode_command(?RAccess_Request) -> request;
decode_command(?RAccess_Accept) -> accept;
decode_command(?RAccess_Reject) -> reject;
decode_command(?RAccess_Challenge) -> challenge;
decode_command(?RAccounting_Request) -> accreq;
decode_command(?RAccounting_Response) -> accresp;
decode_command(?RCoa_Request) -> coareq;
decode_command(?RCoa_Ack) -> coaack;
decode_command(?RCoa_Nak) -> coanak;
decode_command(?RDisconnect_Request) -> discreq;
decode_command(?RDisconnect_Ack) -> discack;
decode_command(?RDisconnect_Nak) -> discnak;
decode_command(_) -> error({bad_pdu, "unknown request type"}).
append_attr(Attr, State) ->
State#decoder_state{attrs = [Attr | State#decoder_state.attrs]}.
-spec decode_attributes(#radius_request{}, binary(), binary()) -> #decoder_state{}.
decode_attributes(Req, RequestAuthenticator, As) ->
decode_attributes(Req, As, 0, #decoder_state{request_authenticator = RequestAuthenticator}).
-spec decode_attributes(#radius_request{}, binary(), non_neg_integer(), #decoder_state{}) -> #decoder_state{}.
decode_attributes(_Req, <<>>, _Pos, State) ->
State;
decode_attributes(Req, <<Type:8, ChunkLength:8, ChunkRest/binary>>, Pos, State) ->
ValueLength = ChunkLength - 2,
<<Value:ValueLength/binary, PacketRest/binary>> = ChunkRest,
NewState = case eradius_dict:lookup(attribute, Type) of
AttrRec = #attribute{} ->
decode_attribute(Value, Req, AttrRec, Pos + 2, State);
_ ->
append_attr({Type, Value}, State)
end,
decode_attributes(Req, PacketRest, Pos + ChunkLength, NewState).
%% a vendor-specific attribute blob might contain more than one attribute.
-spec decode_attribute(binary(), #radius_request{}, #attribute{}, non_neg_integer(), #decoder_state{}) -> #decoder_state{}.
decode_attribute(<<VendorID:32/integer, ValueBin/binary>>, Req, #attribute{id = ?RVendor_Specific}, Pos, State) ->
decode_vendor_specific_attribute(Req, VendorID, ValueBin, Pos + 4, State);
decode_attribute(<<Value/binary>>, _Req, Attr = #attribute{id = ?REAP_Message}, _Pos, State) ->
NewState = State#decoder_state{eap_msg = [Value | State#decoder_state.eap_msg]},
append_attr({Attr, Value}, NewState);
decode_attribute(<<EncValue/binary>>, Req, Attr = #attribute{id = ?RMessage_Authenticator, type = Type, enc = Encryption}, Pos, State) ->
append_attr({Attr, decode_value(decrypt_value(Req, State, EncValue, Encryption), Type)}, State#decoder_state{hmac_pos = Pos});
decode_attribute(<<EncValue/binary>>, Req, Attr = #attribute{type = Type, enc = Encryption}, _Pos, State) when is_atom(Type) ->
append_attr({Attr, decode_value(decrypt_value(Req, State, EncValue, Encryption), Type)}, State);
decode_attribute(WholeBin = <<Tag:8, Bin/binary>>, Req, Attr = #attribute{type = {tagged, Type}}, _Pos, State) ->
case {decode_tag_value(Tag), Attr#attribute.enc} of
{0, no} ->
append_attr({Attr, {0, decode_value(WholeBin, Type)}}, State);
{TagV, no} ->
append_attr({Attr, {TagV, decode_value(Bin, Type)}}, State);
{TagV, Encryption} ->
append_attr({Attr, {TagV, decode_value(decrypt_value(Req, State, Bin, Encryption), Type)}}, State)
end.
-compile({inline, decode_tag_value/1}).
decode_tag_value(Tag) when (Tag >= 1) and (Tag =< 16#1F) -> Tag;
decode_tag_value(_OtherTag) -> 0.
-spec decode_value(binary(), eradius_dict:attribute_prim_type()) -> term().
decode_value(<<Bin/binary>>, Type) ->
case Type of
octets ->
Bin;
binary ->
Bin;
abinary ->
Bin;
string ->
Bin;
integer ->
decode_integer(Bin);
integer24 ->
decode_integer(Bin);
integer64 ->
decode_integer(Bin);
date ->
case decode_integer(Bin) of
Int when is_integer(Int) ->
calendar:now_to_universal_time({Int div 1000000, Int rem 1000000, 0});
_ ->
Bin
end;
ipaddr ->
<<B,C,D,E>> = Bin,
{B,C,D,E};
ipv6addr ->
<<B:16,C:16,D:16,E:16,F:16,G:16,H:16,I:16>> = Bin,
{B,C,D,E,F,G,H,I};
ipv6prefix ->
<<0,PLen,P/binary>> = Bin,
<<B:16,C:16,D:16,E:16,F:16,G:16,H:16,I:16>> = pad_to(16, P),
{{B,C,D,E,F,G,H,I}, PLen}
end.
-compile({inline, decode_integer/1}).
decode_integer(Bin) ->
ISize = bit_size(Bin),
case Bin of
<<Int:ISize/integer>> -> Int;
_ -> Bin
end.
-spec decrypt_value(#radius_request{}, #decoder_state{}, binary(),
eradius_dict:attribute_encryption()) -> eradius_dict:attr_value().
decrypt_value(#radius_request{secret = Secret, authenticator = Authenticator},
_, <<Val/binary>>, scramble) ->
scramble(Secret, Authenticator, Val);
decrypt_value(#radius_request{secret = Secret},
#decoder_state{request_authenticator = RequestAuthenticator},
<<Val/binary>>, salt_crypt)
when is_binary(RequestAuthenticator) ->
salt_decrypt(Secret, RequestAuthenticator, Val);
decrypt_value(#radius_request{secret = Secret, authenticator = Authenticator},
_, <<Val/binary>>, ascend) ->
ascend(Secret, Authenticator, Val);
decrypt_value(_Req, _State, <<Val/binary>>, _Type) ->
Val.
-spec decode_vendor_specific_attribute(#radius_request{}, non_neg_integer(), binary(), non_neg_integer(), #decoder_state{}) -> #decoder_state{}.
decode_vendor_specific_attribute(_Req, _VendorID, <<>>, _Pos, State) ->
State;
decode_vendor_specific_attribute(Req, VendorID, <<Type:8, ChunkLength:8, ChunkRest/binary>>, Pos, State) ->
ValueLength = ChunkLength - 2,
<<Value:ValueLength/binary, PacketRest/binary>> = ChunkRest,
VendorAttrKey = {VendorID, Type},
NewState = case eradius_dict:lookup(attribute, VendorAttrKey) of
Attr = #attribute{} ->
decode_attribute(Value, Req, Attr, Pos + 2, State);
_ ->
append_attr({VendorAttrKey, Value}, State)
end,
decode_vendor_specific_attribute(Req, VendorID, PacketRest, Pos + ChunkLength, NewState).
-spec scramble(secret(), authenticator(), binary()) -> binary().
scramble(SharedSecret, RequestAuthenticator, <<PlainText/binary>>) ->
B = crypto:hash(md5, [SharedSecret, RequestAuthenticator]),
do_scramble(SharedSecret, B, pad_to(16, PlainText), << >>).
do_scramble(SharedSecret, B, <<PlainText:16/binary, Remaining/binary>>, CipherText) ->
NewCipherText = crypto:exor(PlainText, B),
Bnext = crypto:hash(md5, [SharedSecret, NewCipherText]),
do_scramble(SharedSecret, Bnext, Remaining, <<CipherText/binary, NewCipherText/binary>>);
do_scramble(_SharedSecret, _B, << >>, CipherText) ->
CipherText.
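%% Illustrative usage sketch (not part of the original module; names and values are
%% made up): for a value that fits in a single 16-byte block, scramble/3 is its own
%% inverse, so hiding and un-hiding a password with the same secret and authenticator
%% yields the zero-padded plaintext.
%%
%%   Secret = <<"secret">>,
%%   Auth   = crypto:strong_rand_bytes(16),
%%   Hidden = scramble(Secret, Auth, <<"password">>),
%%   <<"password", 0:64>> = scramble(Secret, Auth, Hidden).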
-spec generate_salt() -> salt().
generate_salt() ->
<<Salt1, Salt2>> = crypto:strong_rand_bytes(2),
<<(Salt1 bor 16#80), Salt2>>.
-spec salt_encrypt(salt(), secret(), authenticator(), binary()) -> binary().
salt_encrypt(Salt, SharedSecret, RequestAuthenticator, PlainText) ->
CipherText = do_salt_crypt(encrypt, Salt, SharedSecret, RequestAuthenticator, (pad_to(16, << (byte_size(PlainText)):8, PlainText/binary >>))),
<<Salt/binary, CipherText/binary>>.
-spec salt_decrypt(secret(), authenticator(), binary()) -> binary().
salt_decrypt(SharedSecret, RequestAuthenticator, <<Salt:2/binary, CipherText/binary>>) ->
<< Length:8/integer, PlainText/binary >> = do_salt_crypt(decrypt, Salt, SharedSecret, RequestAuthenticator, CipherText),
if
Length < byte_size(PlainText) ->
binary:part(PlainText, 0, Length);
true ->
PlainText
end.
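%% Illustrative round trip (example values only, not from this module): salt_decrypt/3
%% recovers the original value because the length byte prepended by salt_encrypt/4
%% says how much of the padded plaintext to keep.
%%
%%   Secret = <<"secret">>,
%%   Auth   = crypto:strong_rand_bytes(16),
%%   Enc    = salt_encrypt(generate_salt(), Secret, Auth, <<"hello">>),
%%   <<"hello">> = salt_decrypt(Secret, Auth, Enc).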
do_salt_crypt(Op, Salt, SharedSecret, RequestAuthenticator, <<CipherText/binary>>) ->
B = crypto:hash(md5, [SharedSecret, RequestAuthenticator, Salt]),
salt_crypt(Op, SharedSecret, B, CipherText, << >>).
salt_crypt(Op, SharedSecret, B, <<PlainText:16/binary, Remaining/binary>>, CipherText) ->
NewCipherText = crypto:exor(PlainText, B),
Bnext = case Op of
decrypt -> crypto:hash(md5, [SharedSecret, PlainText]);
encrypt -> crypto:hash(md5, [SharedSecret, NewCipherText])
end,
salt_crypt(Op, SharedSecret, Bnext, Remaining, <<CipherText/binary, NewCipherText/binary>>);
salt_crypt(_Op, _SharedSecret, _B, << >>, CipherText) ->
CipherText.
-spec ascend(secret(), authenticator(), binary()) -> binary().
ascend(SharedSecret, RequestAuthenticator, <<PlainText/binary>>) ->
Digest = crypto:hash(md5, [RequestAuthenticator, SharedSecret]),
crypto:exor(Digest, pad_to(16, PlainText)).
-compile({inline, pad_to/2}).
pad_to(Width, Binary) ->
case (Width - byte_size(Binary) rem Width) rem Width of
0 -> Binary;
N -> <<Binary/binary, 0:(N*8)>>
end.
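%% For example (illustrative only): pad_to(16, <<"abc">>) yields a 16-byte binary
%% ending in 13 zero bytes, while an already-aligned binary such as <<0:128>> is
%% returned unchanged.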
-spec timestamp() -> integer().
timestamp() ->
erlang:system_time(milli_seconds).
timestamp(Units) ->
erlang:system_time(Units).
-spec make_addr_info({term(), {inet:ip_address(), integer()}}) -> atom_address().
make_addr_info({undefined, {IP, Port}}) ->
{socket_to_atom(IP, Port), ip_to_atom(IP), port_to_atom(Port)};
make_addr_info({Name, {IP, Port}}) ->
{to_atom(Name), ip_to_atom(IP), port_to_atom(Port)}.
to_atom(Value) when is_atom(Value) -> Value;
to_atom(Value) when is_binary(Value) -> binary_to_atom(Value, latin1);
to_atom(Value) when is_list(Value) -> list_to_atom(Value).
socket_to_atom(IP, undefined) ->
ip_to_atom(IP);
socket_to_atom(IP, Port) when is_tuple(IP) ->
list_to_atom(inet:ntoa(IP) ++ ":" ++ integer_to_list(Port));
socket_to_atom(IP, Port) when is_binary(IP) ->
binary_to_atom(erlang:iolist_to_binary([IP, <<":">>, Port]), latin1);
socket_to_atom(IP, Port) when is_atom(IP) ->
binary_to_atom(erlang:iolist_to_binary([atom_to_binary(IP, latin1), <<":">>, Port]), latin1).
ip_to_atom(IP) when is_atom(IP) -> IP;
ip_to_atom(IP) -> list_to_atom(inet:ntoa(IP)).
port_to_atom(undefined) -> undefined;
port_to_atom(Port) when is_atom(Port) -> Port;
port_to_atom(Port) -> list_to_atom(integer_to_list(Port)).
-spec printable_peer(inet:ip4_address(),eradius_server:port_number()) -> io_lib:chars().
printable_peer({IA,IB,IC,ID}, Port) ->
io_lib:format("~b.~b.~b.~b:~b",[IA,IB,IC,ID,Port]).
%% @doc calculate the MD5 message authenticator
-if(?OTP_RELEASE >= 23).
%% crypto API changes in OTP >= 23
message_authenticator(Secret, Msg) ->
crypto:mac(hmac, md5, Secret, Msg).
-else.
message_authenticator(Secret, Msg) ->
crypto:hmac(md5, Secret, Msg).
-endif.
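%% Illustrative example (not part of the module): both branches compute the same
%% 16-byte HMAC-MD5 over the iodata, e.g.
%%
%%   16 = byte_size(message_authenticator(<<"secret">>, [<<"some">>, <<"iodata">>])).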
|
9f9e527ddc0e174854a960f2faf20076a4ab2d97ead90fbb98efd64511aba5f8 | jonase/eastwood | linter_executor.clj | (defn custom-map [f & colls]
(swap! @(resolve 'eastwood.linter-executor-test/proof)
conj
:hello)
(apply map f colls))
(set-linter-executor! custom-map)
| null | https://raw.githubusercontent.com/jonase/eastwood/c5b7d9f8ad8f8b38dc7138d853cc65f6987d6058/test-resources/eastwood/config/linter_executor.clj | clojure | (defn custom-map [f & colls]
(swap! @(resolve 'eastwood.linter-executor-test/proof)
conj
:hello)
(apply map f colls))
(set-linter-executor! custom-map)
|
|
ec41cd36795657fcfcc9ba2e00b3062acceccc3ec241d24154c415758fd1c25d | froggey/Mezzano | data-types.lisp | ;;;; Low-level definitions for data types
(in-package :mezzano.internals)
(defconstant +n-fixnum-bits+ 1)
(defconstant +fixnum-tag-mask+ (1- (ash 1 +n-fixnum-bits+)))
;;; Fields in the object header.
(defconstant +object-type-shift+ 2)
(defconstant +object-type-size+ 6)
(defconstant +object-data-shift+ 8)
(defconstant +object-data-size+ 56)
;;; Low 4 bits of a value are tag bits
(defconstant +tag-fixnum-000+ #b0000)
;; +TAG-CONS+ and +TAG-OBJECT+ have been carefully chosen so they have
;; exactly one bit different. This allows ordinary object pointers to
;; be detected trivially: (eql (logand val #b111) 1)
(defconstant +tag-cons+ #b0001)
(defconstant +tag-fixnum-001+ #b0010)
;;#b0011
(defconstant +tag-fixnum-010+ #b0100)
(defconstant +tag-immediate+ #b0101)
(defconstant +tag-fixnum-011+ #b0110)
;; Low two bits of this one must be set, high two bits must match low
;; two bits of +object-tag-instance+.
;; See %FAST-INSTANCE-LAYOUT-EQ-P.
;; Low two bits must be set.
(defconstant +tag-fixnum-100+ #b1000)
(defconstant +tag-object+ #b1001)
(defconstant +tag-fixnum-101+ #b1010)
# b1011
(defconstant +tag-fixnum-110+ #b1100)
(defconstant +tag-dx-root-object+ #b1101)
(defconstant +tag-fixnum-111+ #b1110)
(defconstant +tag-gc-forward+ #b1111)
(defconstant +tag-field+ (byte 4 0))
(defconstant +immediate-tag+ (byte 2 4))
(defconstant +immediate-tag-character+ #b00)
(defconstant +immediate-tag-single-float+ #b01)
(defconstant +immediate-tag-byte-specifier+ #b10)
# b11
;;; Simple 1D arrays.
;; Array type T == simple vector.
;; Is zero to allow for faster type checking.
(defconstant +object-tag-array-t+ #b000000)
(defconstant +object-tag-array-fixnum+ #b000001)
(defconstant +object-tag-array-bit+ #b000010)
(defconstant +object-tag-array-unsigned-byte-2+ #b000011)
(defconstant +object-tag-array-unsigned-byte-4+ #b000100)
(defconstant +object-tag-array-unsigned-byte-8+ #b000101)
(defconstant +object-tag-array-unsigned-byte-16+ #b000110)
(defconstant +object-tag-array-unsigned-byte-32+ #b000111)
(defconstant +object-tag-array-unsigned-byte-64+ #b001000)
(defconstant +object-tag-array-signed-byte-1+ #b001001)
(defconstant +object-tag-array-signed-byte-2+ #b001010)
(defconstant +object-tag-array-signed-byte-4+ #b001011)
(defconstant +object-tag-array-signed-byte-8+ #b001100)
(defconstant +object-tag-array-signed-byte-16+ #b001101)
(defconstant +object-tag-array-signed-byte-32+ #b001110)
(defconstant +object-tag-array-signed-byte-64+ #b001111)
(defconstant +object-tag-array-single-float+ #b010000)
(defconstant +object-tag-array-double-float+ #b010001)
(defconstant +object-tag-array-short-float+ #b010010)
(defconstant +object-tag-array-long-float+ #b010011)
(defconstant +object-tag-array-complex-single-float+ #b010100)
(defconstant +object-tag-array-complex-double-float+ #b010101)
(defconstant +object-tag-array-complex-short-float+ #b010110)
(defconstant +object-tag-array-complex-long-float+ #b010111)
(defconstant +first-simple-1d-array-object-tag+ +object-tag-array-t+)
(defconstant +last-simple-1d-array-object-tag+ +object-tag-array-complex-long-float+)
;;#b011000
;;#b011001
;;#b011010
;;#b011011
;; Strings. Simple strings are the same as normal strings, except marked as simple.
;; These are actually character arrays, they're only strings when rank = 1.
(defconstant +object-tag-simple-string+ #b011100)
(defconstant +object-tag-string+ #b011101)
;; Other arrays.
(defconstant +object-tag-simple-array+ #b011110)
(defconstant +object-tag-array+ #b011111)
(defconstant +first-complex-array-object-tag+ +object-tag-simple-string+)
(defconstant +last-complex-array-object-tag+ +object-tag-array+)
(defconstant +first-array-object-tag+ +object-tag-array-t+)
(defconstant +last-array-object-tag+ +object-tag-array+)
;; When set, the array or string is not simple.
;; Only valid on object with +object-tag(-simple)-{string/array}+ tags.
(defconstant +array-type-simple-bit+ #b000001)
;;; All these object tags, along with immediate fixnums and single-floats are numbers.
(defconstant +object-tag-bignum+ #b100000)
(defconstant +object-tag-ratio+ #b100001)
(defconstant +object-tag-double-float+ #b100010)
(defconstant +object-tag-short-float+ #b100011)
(defconstant +object-tag-long-float+ #b100100)
(defconstant +object-tag-complex-rational+ #b100101)
(defconstant +object-tag-complex-single-float+ #b100110)
(defconstant +object-tag-complex-double-float+ #b100111)
(defconstant +object-tag-complex-short-float+ #b101000)
(defconstant +object-tag-complex-long-float+ #b101001)
(defconstant +first-rational-object-tag+ +object-tag-bignum+) ; plus fixnum
(defconstant +last-rational-object-tag+ +object-tag-ratio+)
(defconstant +first-float-object-tag+ +object-tag-double-float+) ; plus single-float
(defconstant +last-float-object-tag+ +object-tag-long-float+)
;; plus fixnum & single-float
(defconstant +last-real-object-tag+ +object-tag-long-float+)
(defconstant +first-complex-object-tag+ +object-tag-complex-rational+)
(defconstant +last-complex-object-tag+ +object-tag-complex-long-float+)
;; plus fixnum & single-float
(defconstant +last-numeric-object-tag+ +object-tag-complex-long-float+)
;;#b101010
;;#b101011
;;#b101100
;;#b101101
(defconstant +object-tag-symbol-value-cell+ #b101110)
(defconstant +object-tag-mmx-vector+ #b101111)
(defconstant +object-tag-symbol+ #b110000)
;;#b110001
;;#b110010
(defconstant +object-tag-sse-vector+ #b110011)
;;#b110100
;; Low two bits must match high two bits of +tag-instance-header+.
;; Must be one bit different from +object-tag-funcallable-instance+.
(defconstant +object-tag-instance+ #b110101)
(defconstant +object-tag-function-reference+ #b110110)
(defconstant +object-tag-interrupt-frame+ #b110111)
;; Conses get an object header when allocated in a non-cons area, purely
;; to allow heap walking. The header is two words long, with the length
;; field containing 0 and the second header word containing 0.
;; Cons values always point to the pair of pointers, never to the header.
(defconstant +object-tag-cons+ #b111000)
(defconstant +object-tag-freelist-entry+ #b111001)
(defconstant +object-tag-weak-pointer+ #b111010)
;; Function objects.
(defconstant +object-tag-delimited-continuation+ #b111011)
(defconstant +object-tag-function+ #b111100)
;; Low two bits must match high two bits of +tag-instance-header+.
;; Must be one bit different from +object-tag-instance+.
(defconstant +object-tag-funcallable-instance+ #b111101)
(defconstant +object-tag-closure+ #b111110)
(defconstant +first-function-object-tag+ +object-tag-delimited-continuation+)
(defconstant +last-function-object-tag+ +object-tag-closure+)
;;#b111111
;;; Layout of symbols.
(defconstant +symbol-name+ 0)
(defconstant +symbol-package+ 1)
(defconstant +symbol-value+ 2) ; actually the global symbol-value-cell
;; actually an fref
(defconstant +symbol-type+ 4)
(defconstant +symbol-value-cell-symbol+ 1)
(defconstant +symbol-value-cell-value+ 2)
(defconstant +symbol-header-mode+ (byte 3 0))
(defconstant +symbol-mode-nil+ 0)
(defconstant +symbol-mode-special+ 1)
(defconstant +symbol-mode-constant+ 2)
(defconstant +symbol-mode-symbol-macro+ 3)
(defconstant +symbol-mode-global+ 4)
;;; Layout of a function's header.
;;; Only applies to compiled functions.
;; Machine code size is measured in paragraphs (16 byte units) and starts
;; at the beginning of the object, including the header.
(defconstant +function-header-code-size+ (byte 16 8))
(defconstant +function-header-pool-size+ (byte 16 24))
(defconstant +function-header-metadata-size+ (byte 16 40))
;;; Layout of functions.
;;; Common to all functions.
;; Entry point of the function, used by function call machinery.
(defconstant +function-entry-point+ 0)
;;; Closures.
;;; Only the position of the function is specified. The compiler may arrange
;;; closure environments however it wants, including inlining them into the
;;; closure object.
(defconstant +closure-function+ 1)
;;; Funcallable instances.
;; Layout is important. Update (setf funcallable-std-instance-function) if
;; it changes.
(defconstant +funcallable-instance-function+ 1)
;;; Delimited continuations.
(defconstant +delimited-continuation-stack+ 1)
(defconstant +delimited-continuation-stack-pointer+ 2)
(defconstant +delimited-continuation-state+ 3)
(defconstant +delimited-continuation-prompt+ 4)
;;; Layout of function-references.
(defconstant +fref-undefined-entry-point+ 0)
(defconstant +fref-name+ 1)
(defconstant +fref-function+ 2)
(defconstant +fref-code+ 3)
;;; Layout of complex arrays.
(defconstant +complex-array-storage+ 0)
(defconstant +complex-array-fill-pointer+ 1)
(defconstant +complex-array-info+ 2)
(defconstant +complex-array-axis-0+ 3)
;;; Layout of weak pointers.
(defconstant +weak-pointer-header-livep+ 0
"Set by the GC when the value is live.")
(defconstant +weak-pointer-header-weakness+ (byte 2 1))
(defconstant +weak-pointer-weakness-key+ 0
"The weak pointer is live as long as the key is reachable.")
(defconstant +weak-pointer-weakness-value+ 1
"The weak pointer is live as long as the value is reachable.")
(defconstant +weak-pointer-weakness-and+ 2
"The weak pointer is live as long as both the key and the value are reachable.")
(defconstant +weak-pointer-weakness-or+ 3
"The weak pointer is live as long as either the key or the value are reachable.")
(defconstant +weak-pointer-link+ 0)
(defconstant +weak-pointer-key+ 1)
(defconstant +weak-pointer-value+ 2)
(defconstant +weak-pointer-finalizer+ 3)
(defconstant +weak-pointer-finalizer-link+ 4)
;;; Layout of ratios.
(defconstant +ratio-numerator+ 0)
(defconstant +ratio-denominator+ 1)
;;; Layout of complex numbers.
(defconstant +complex-realpart+ 0)
(defconstant +complex-imagpart+ 1)
;; Some bits are stored in the high(ish) bits of the address.
;; These are used to support the GC.
(defconstant +address-tag-shift+ 45)
(defconstant +address-tag-size+ 3)
(defconstant +address-tag+ (byte +address-tag-size+ +address-tag-shift+))
(defconstant +address-old-generation+ (expt 2 43)
"This bit distingushes the young and old generations.")
(defconstant +address-semispace+ (expt 2 44)
"This bit defines the semispaces for the young and old generations.")
;; Pinned must be zero, a number of critical objects are pinned & wired and stored
;; below 2GB to permit fast access to them.
(defconstant +address-tag-pinned+ #b000)
(defconstant +address-tag-stack+ #b001)
(defconstant +address-tag-general+ #b010)
(defconstant +address-tag-cons+ #b011)
(defconstant +card-size+ #x1000) ; Match page size for now.
(defconstant +card-table-entry-size+ 4)
(defconstant +card-table-entry-offset+ (byte 16 0)
"A negative 16-bit offset from the start of the card to the start
of the first object in the card. Measured in 16-byte units.
An offset of all ones (1- (expt 2 16)) indicates that the start of the
object is further away than what can be encoded and the the system
should continue looking backwards.")
(defconstant +card-table-entry-dirty-gen+ (byte 2 16))
;; Bits 31-18 available.
;; Cover the whole address space.
(defconstant +card-table-size+ (* (/ (expt 2 47) +card-size+)
+card-table-entry-size+))
;; 256GB, mostly arbitrary but in the wired area
;; VM regions must meet this allocation requirement so that the card table
;; entries associated with an allocation cover an exact number of pages.
;; This allows the pager to map/unmap regions in the card table without worrying
;; about partial page coverage.
;; NOTE: Stacks don't have card table entries and aren't subject to this
;; alignment constraint. They must still be page-aligned.
;; This must also be chosen so that (>= (/ +a-m-a+ 16 8) page-size), to support
;; the mark bit region.
(defconstant +allocation-minimum-alignment+ (* (/ #x1000 +card-table-entry-size+)
+card-size+))
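;; Worked out with the values above (illustrative arithmetic, not in the original):
;; (* (/ #x1000 4) #x1000) = (* 1024 4096) = 4194304, i.e. a 4 MiB allocation
;; alignment, so each region's card-table entries span whole pages.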
;;; GC mark bit region.
(defconstant +octets-per-mark-bit+ 16)
(defconstant +mark-bit-region-base+ #x0000100000000000)
;; 48 address bits, every 2 words/16 bytes needs a mark bit, 8 bits per byte.
(defconstant +mark-bit-region-size+ (/ (expt 2 48) +octets-per-mark-bit+ 8))
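;; Illustrative arithmetic (not in the original): (/ (expt 2 48) 16 8) = (expt 2 41),
;; so the mark-bit region reserves 2 TiB of address space to cover the full
;; 48-bit heap at one mark bit per 16 bytes.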
(defconstant +block-map-present+ #x01
"Entry is present. This entry may still have a block associated with it, even if it is not present.")
;; FIXME: This isn't really respected properly.
(defconstant +block-map-writable+ #x02
"Entry is writable.")
(defconstant +block-map-zero-fill+ #x04
"Entry should be zero-filled.")
(defconstant +block-map-committed+ #x08
"This block is owned by the currently running system, not by a previous snapshot and can be written to safely.
Internal to the pager, should not be used by other code.")
(defconstant +block-map-wired+ #x10
"Entry should be wired in memory.")
(defconstant +block-map-track-dirty+ #x20
"Dirty bit tracking is enabled for this entry.
When the page is written to, the corresponding dirty bit in the card table will be set and this flag will be cleared.")
(defconstant +block-map-transient+ #x40
"Entry is transient and won't be saved by snapshots.
Accesses to stale memory will signal an error.
Internal to sg-vec, should not be used by other code.")
(defconstant +block-map-flag-mask+ #xFF)
(defconstant +block-map-id-shift+ 8)
;; keep it a few bits smaller than 56 to avoid bignums.
(defconstant +block-map-id-lazy+ (1- (ash 1 +block-map-id-size+))
"When stored in the ID field, this value indicates that space has been
reserved on the disk, but no specific block has been allocated.")
(defconstant +block-map-id-not-allocated+ 0)
(defparameter *llf-version* 36)
(defconstant +llf-arch-x86-64+ 1)
(defconstant +llf-arch-arm64+ 2)
(defconstant +llf-end-of-load+ #xFF)
(defconstant +llf-backlink+ #x01)
(defconstant +llf-function+ #x02)
(defconstant +llf-cons+ #x03)
(defconstant +llf-symbol+ #x04)
(defconstant +llf-uninterned-symbol+ #x05)
(defconstant +llf-string+ #x07)
(defconstant +llf-integer+ #x09)
(defconstant +llf-simple-vector+ #x0C)
(defconstant +llf-character+ #x0D)
(defconstant +llf-structure-definition+ #x0E)
(defconstant +llf-single-float+ #x10)
(defconstant +llf-proper-list+ #x11)
(defconstant +llf-package+ #x12)
;; A vector consisting entirely of integers.
(defconstant +llf-integer-vector+ #x13)
(defconstant +llf-add-backlink+ #x14)
(defconstant +llf-ratio+ #x15)
(defconstant +llf-array+ #x16)
(defconstant +llf-bit-vector+ #x18)
(defconstant +llf-function-reference+ #x19)
(defconstant +llf-character-with-bits+ #x1A)
(defconstant +llf-structure-slot-definition+ #x1B)
(defconstant +llf-byte+ #x1C)
(defconstant +llf-double-float+ #x1D)
(defconstant +llf-typed-array+ #x1E)
;; Call a function with N arguments, push the result.
(defconstant +llf-funcall-n+ #x1F)
;; Discard the top of stack value.
(defconstant +llf-drop+ #x20)
(defconstant +llf-complex-rational+ #x21)
(defconstant +llf-complex-single-float+ #x22)
(defconstant +llf-complex-double-float+ #x23)
(defconstant +llf-instance-header+ #x24)
(defconstant +llf-symbol-global-value-cell+ #x25)
(defconstant +llf-if+ #x26)
(defconstant +llf-else+ #x27)
(defconstant +llf-fi+ #x28)
(defconstant +llf-layout+ #x29)
(defconstant +llf-initialize-array+ #x2A)
(defconstant +llf-short-float+ #x2B)
(defconstant +llf-complex-short-float+ #x2C)
;;; Fields in the Unicode info tables.
(defconstant +unicode-info-name-offset+ (byte 20 0)
"Offset of the name in the name table.")
(defconstant +unicode-info-name-length+ (byte 6 20)
"Length of the name in the name table.")
(defconstant +unicode-info-othercase-code+ (byte 21 26)
"Character code for the alternate case character.")
(defconstant +unicode-info-general-category+ (byte 5 47)
"General category code.")
(eval-when (:compile-toplevel :load-toplevel :execute)
(defun unicode-general-category-encode (category)
(ecase category
(:uppercase-letter 0)
(:lowercase-letter 1)
(:titlecase-letter 2)
(:modifier-letter 3)
(:other-letter 4)
(:nonspacing-mark 5)
(:spacing-mark 6)
(:enclosing-mark 7)
(:decimal-number 8)
(:letter-number 9)
(:other-number 10)
(:connector-punctuation 11)
(:dash-punctuation 12)
(:open-punctuation 13)
(:close-punctuation 14)
(:initial-punctuation 15)
(:final-punctuation 16)
(:other-punctuation 17)
(:math-symbol 18)
(:currency-symbol 19)
(:modifier-symbol 20)
(:other-symbol 21)
(:space-separator 22)
(:line-separator 23)
(:paragraph-separator 24)
(:control 25)
(:format 26)
(:surrogate 27)
(:private-use 28)
(:unassigned 29)))
(defun unicode-general-category-decode (code)
(ecase code
(0 :uppercase-letter)
(1 :lowercase-letter)
(2 :titlecase-letter)
(3 :modifier-letter)
(4 :other-letter)
(5 :nonspacing-mark)
(6 :spacing-mark)
(7 :enclosing-mark)
(8 :decimal-number)
(9 :letter-number)
(10 :other-number)
(11 :connector-punctuation)
(12 :dash-punctuation)
(13 :open-punctuation)
(14 :close-punctuation)
(15 :initial-punctuation)
(16 :final-punctuation)
(17 :other-punctuation)
(18 :math-symbol)
(19 :currency-symbol)
(20 :modifier-symbol)
(21 :other-symbol)
(22 :space-separator)
(23 :line-separator)
(24 :paragraph-separator)
(25 :control)
(26 :format)
(27 :surrogate)
(28 :private-use)
(29 :unassigned)))
)
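;; Round-trip sketch (illustrative only): the two tables are inverses, e.g.
;;   (unicode-general-category-decode (unicode-general-category-encode :math-symbol))
;;   => :math-symbol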
(defconstant +debug-end-entry-op+ #x00)
(defconstant +debug-add-var-op+ #x10)
(defconstant +debug-add-hidden-var-op+ #x11)
(defconstant +debug-drop-n-op+ #x40)
(defconstant +debug-drop-op+ #x50)
(defconstant +debug-update-n-op+ #x60)
(defconstant +debug-update-op+ #x70)
(defconstant +debug-repr-value+ 0)
(defconstant +debug-repr-single-float+ 1)
(defconstant +debug-repr-double-float+ 2)
(defconstant +debug-repr-mmx-vector+ 3)
(defconstant +debug-repr-sse-vector+ 4)
(defconstant +debug-repr-fixnum+ 5)
(defconstant +debug-repr-unsigned-byte-64+ 6)
(defconstant +debug-repr-signed-byte-64+ 7)
(defconstant +debug-repr-short-float+ 8)
(defvar *debug-x86-64-register-encodings* #(:rax :rcx :rdx :rbx :rsp :rbp :rsi :rdi
:r8 :r9 :r10 :r11 :r12 :r13 :r14 :r15
:mm0 :mm1 :mm2 :mm3 :mm4 :mm5 :mm6 :mm7
:xmm0 :xmm1 :xmm2 :xmm3 :xmm4 :xmm5 :xmm6 :xmm7
:xmm8 :xmm9 :xmm10 :xmm11 :xmm12 :xmm13 :xmm14 :xmm15))
(defvar *debug-arm64-register-encodings* #(:x0 :x1 :x2 :x3 :x4 :x5 :x6 :x7
:x8 :x9 :x10 :x11 :x12 :x13 :x14 :x15
:x16 :x17 :x18 :x19 :x20 :x21 :x22 :x23
:x24 :x25 :x26 :x27 :x28 :x29 :x30 :xzr
:q0 :q1 :q2 :q3 :q4 :q5 :q6 :q7
:q8 :q9 :q10 :q11 :q12 :q13 :q14 :q15
:q16 :q17 :q18 :q19 :q20 :q21 :q22 :q23
:q24 :q25 :q26 :q27 :q28 :q29 :q30 :q31))
(defconstant +gcmd-flag0-frame+ 0)
(defconstant +gcmd-flag0-interrupt+ 1)
(defconstant +gcmd-flag0-block-or-tagbody-thunk+ 2)
(defconstant +gcmd-flag0-incoming-arguments+ 3)
(defconstant +gcmd-flag0-pushed-values-register+ 4)
(defconstant +gcmd-flag0-extra-registers+ (byte 2 5))
(defconstant +gcmd-flag0-restart+ 7)
(defconstant +gcmd-flag1-multiple-values+ (byte 4 0))
(defconstant +gcmd-flag1-incoming-arguments-location+ (byte 4 4))
| null | https://raw.githubusercontent.com/froggey/Mezzano/f0eeb2a3f032098b394e31e3dfd32800f8a51122/system/data-types.lisp | lisp | Low-level definitions for data types
Low 4 bits of a value are tag bits
exactly one bit different. This allows ordinary object pointers to
#b0011
See %FAST-INSTANCE-LAYOUT-EQ-P.
Simple 1D arrays.
Array type T == simple vector.
#b011000
#b011010
#b011011
Strings. Simple strings are the same as normal strings, except marked as simple.
Other arrays.
When set, the array or string is not simple.
Only valid on object with +object-tag(-simple)-{string/array}+ tags.
All these object tags, along with immediate fixnums and single-floats are numbers.
plus fixnum
plus single-float
#b101010
#b101101
#b110001
#b110100
Must be one bit different from +object-tag-funcallable-instance+.
Cons values always point to the pair of pointers, never to the header.
Function objects.
Must be one bit different from +object-tag-instance+.
#b111111
actually the global symbol-value-cell
Only applies to compiled functions.
at the beginning of the object, including the header.
Common to all functions.
Entry point of the function, used by function call machinery.
Closures.
Only the position of the function is specified. The compiler may arrange
closure environments however it wants, including inlining them into the
closure object.
Funcallable instances.
it changes.
Some bits are stored in the high(ish) bits of the address.
below 2GB to permit fast access to them.
Match page size for now.
Bits 31-18 available.
Cover the whole address space.
VM regions must meet this allocation requirement so that the card table
entries associated with an allocation cover an exact number of pages.
This allows the pager to map/unmap regions in the card table without worrying
about partial page coverage.
NOTE: Stacks don't have card table entries and aren't subject to this
alignment constraint. They must still be page-aligned.
the mark bit region.
FIXME: This isn't really respected properly.
A vector consisting entirely of integers.
Call a function with N arguments, push the result.
Discard the top of stack value. |
(in-package :mezzano.internals)
(defconstant +n-fixnum-bits+ 1)
(defconstant +fixnum-tag-mask+ (1- (ash 1 +n-fixnum-bits+)))
Fields in the object header .
(defconstant +object-type-shift+ 2)
(defconstant +object-type-size+ 6)
(defconstant +object-data-shift+ 8)
(defconstant +object-data-size+ 56)
(defconstant +tag-fixnum-000+ #b0000)
+ TAG - CONS+ and + TAG - OBJECT+ have been carefully chosen so they have
be detected trivially : ( eql ( logand val # b111 ) 1 )
(defconstant +tag-cons+ #b0001)
(defconstant +tag-fixnum-001+ #b0010)
(defconstant +tag-fixnum-010+ #b0100)
(defconstant +tag-immediate+ #b0101)
(defconstant +tag-fixnum-011+ #b0110)
Low two bits of this one must be set , high two bits must match low
two bits of + object - tag - instance+ .
Low two bits must be set .
(defconstant +tag-fixnum-100+ #b1000)
(defconstant +tag-object+ #b1001)
(defconstant +tag-fixnum-101+ #b1010)
# b1011
(defconstant +tag-fixnum-110+ #b1100)
(defconstant +tag-dx-root-object+ #b1101)
(defconstant +tag-fixnum-111+ #b1110)
(defconstant +tag-gc-forward+ #b1111)
(defconstant +tag-field+ (byte 4 0))
(defconstant +immediate-tag+ (byte 2 4))
(defconstant +immediate-tag-character+ #b00)
(defconstant +immediate-tag-single-float+ #b01)
(defconstant +immediate-tag-byte-specifier+ #b10)
# b11
Is zero to allow for faster type checking .
(defconstant +object-tag-array-t+ #b000000)
(defconstant +object-tag-array-fixnum+ #b000001)
(defconstant +object-tag-array-bit+ #b000010)
(defconstant +object-tag-array-unsigned-byte-2+ #b000011)
(defconstant +object-tag-array-unsigned-byte-4+ #b000100)
(defconstant +object-tag-array-unsigned-byte-8+ #b000101)
(defconstant +object-tag-array-unsigned-byte-16+ #b000110)
(defconstant +object-tag-array-unsigned-byte-32+ #b000111)
(defconstant +object-tag-array-unsigned-byte-64+ #b001000)
(defconstant +object-tag-array-signed-byte-1+ #b001001)
(defconstant +object-tag-array-signed-byte-2+ #b001010)
(defconstant +object-tag-array-signed-byte-4+ #b001011)
(defconstant +object-tag-array-signed-byte-8+ #b001100)
(defconstant +object-tag-array-signed-byte-16+ #b001101)
(defconstant +object-tag-array-signed-byte-32+ #b001110)
(defconstant +object-tag-array-signed-byte-64+ #b001111)
(defconstant +object-tag-array-single-float+ #b010000)
(defconstant +object-tag-array-double-float+ #b010001)
(defconstant +object-tag-array-short-float+ #b010010)
(defconstant +object-tag-array-long-float+ #b010011)
(defconstant +object-tag-array-complex-single-float+ #b010100)
(defconstant +object-tag-array-complex-double-float+ #b010101)
(defconstant +object-tag-array-complex-short-float+ #b010110)
(defconstant +object-tag-array-complex-long-float+ #b010111)
(defconstant +first-simple-1d-array-object-tag+ +object-tag-array-t+)
(defconstant +last-simple-1d-array-object-tag+ +object-tag-array-complex-long-float+)
# b011001
These are actually character arrays , they 're only string when rank = 1 .
(defconstant +object-tag-simple-string+ #b011100)
(defconstant +object-tag-string+ #b011101)
(defconstant +object-tag-simple-array+ #b011110)
(defconstant +object-tag-array+ #b011111)
(defconstant +first-complex-array-object-tag+ +object-tag-simple-string+)
(defconstant +last-complex-array-object-tag+ +object-tag-array+)
(defconstant +first-array-object-tag+ +object-tag-array-t+)
(defconstant +last-array-object-tag+ +object-tag-array+)
(defconstant +array-type-simple-bit+ #b000001)
(defconstant +object-tag-bignum+ #b100000)
(defconstant +object-tag-ratio+ #b100001)
(defconstant +object-tag-double-float+ #b100010)
(defconstant +object-tag-short-float+ #b100011)
(defconstant +object-tag-long-float+ #b100100)
(defconstant +object-tag-complex-rational+ #b100101)
(defconstant +object-tag-complex-single-float+ #b100110)
(defconstant +object-tag-complex-double-float+ #b100111)
(defconstant +object-tag-complex-short-float+ #b101000)
(defconstant +object-tag-complex-long-float+ #b101001)
(defconstant +last-rational-object-tag+ +object-tag-ratio+)
(defconstant +last-float-object-tag+ +object-tag-long-float+)
plus fixnum & single - float
(defconstant +last-real-object-tag+ +object-tag-long-float+)
(defconstant +first-complex-object-tag+ +object-tag-complex-rational+)
(defconstant +last-complex-object-tag+ +object-tag-complex-long-float+)
plus fixnum & single - float
(defconstant +last-numeric-object-tag+ +object-tag-complex-long-float+)
# b101011
# b101100
(defconstant +object-tag-symbol-value-cell+ #b101110)
(defconstant +object-tag-mmx-vector+ #b101111)
(defconstant +object-tag-symbol+ #b110000)
# b110010
(defconstant +object-tag-sse-vector+ #b110011)
Low two bits must match high two bits of + tag - instance - header+ .
(defconstant +object-tag-instance+ #b110101)
(defconstant +object-tag-function-reference+ #b110110)
(defconstant +object-tag-interrupt-frame+ #b110111)
Conses get an object header when allocated in a non - cons area , purely
to allow heap walking . The header is two words long , with the length
field containing 0 and the second header word containing 0 .
(defconstant +object-tag-cons+ #b111000)
(defconstant +object-tag-freelist-entry+ #b111001)
(defconstant +object-tag-weak-pointer+ #b111010)
(defconstant +object-tag-delimited-continuation+ #b111011)
(defconstant +object-tag-function+ #b111100)
Low two bits must match high two bits of + tag - instance - header+ .
(defconstant +object-tag-funcallable-instance+ #b111101)
(defconstant +object-tag-closure+ #b111110)
(defconstant +first-function-object-tag+ +object-tag-delimited-continuation+)
(defconstant +last-function-object-tag+ +object-tag-closure+)
Layout of symbols .
(defconstant +symbol-name+ 0)
(defconstant +symbol-package+ 1)
actually an fref
(defconstant +symbol-type+ 4)
(defconstant +symbol-value-cell-symbol+ 1)
(defconstant +symbol-value-cell-value+ 2)
(defconstant +symbol-header-mode+ (byte 3 0))
(defconstant +symbol-mode-nil+ 0)
(defconstant +symbol-mode-special+ 1)
(defconstant +symbol-mode-constant+ 2)
(defconstant +symbol-mode-symbol-macro+ 3)
(defconstant +symbol-mode-global+ 4)
Layout of a function 's header .
Machine code size is measured in paragraphs ( 16 byte units ) and starts
(defconstant +function-header-code-size+ (byte 16 8))
(defconstant +function-header-pool-size+ (byte 16 24))
(defconstant +function-header-metadata-size+ (byte 16 40))
Layout of functions .
(defconstant +function-entry-point+ 0)
(defconstant +closure-function+ 1)
Layout is important . Update ( setf funcallable - std - instance - function ) if
(defconstant +funcallable-instance-function+ 1)
Delimited continuations .
(defconstant +delimited-continuation-stack+ 1)
(defconstant +delimited-continuation-stack-pointer+ 2)
(defconstant +delimited-continuation-state+ 3)
(defconstant +delimited-continuation-prompt+ 4)
Layout of function - references .
(defconstant +fref-undefined-entry-point+ 0)
(defconstant +fref-name+ 1)
(defconstant +fref-function+ 2)
(defconstant +fref-code+ 3)
Layout of complex arrays .
(defconstant +complex-array-storage+ 0)
(defconstant +complex-array-fill-pointer+ 1)
(defconstant +complex-array-info+ 2)
(defconstant +complex-array-axis-0+ 3)
Layout of weak pointers .
(defconstant +weak-pointer-header-livep+ 0
"Set by the GC when the value is live.")
(defconstant +weak-pointer-header-weakness+ (byte 2 1))
(defconstant +weak-pointer-weakness-key+ 0
"The weak pointer is live as long as the key is reachable.")
(defconstant +weak-pointer-weakness-value+ 1
"The weak pointer is live as long as the value is reachable.")
(defconstant +weak-pointer-weakness-and+ 2
"The weak pointer is live as long as both the key and the value are reachable.")
(defconstant +weak-pointer-weakness-or+ 3
"The weak pointer is live as long as either the key or the value are reachable.")
(defconstant +weak-pointer-link+ 0)
(defconstant +weak-pointer-key+ 1)
(defconstant +weak-pointer-value+ 2)
(defconstant +weak-pointer-finalizer+ 3)
(defconstant +weak-pointer-finalizer-link+ 4)
Layout of ratios .
(defconstant +ratio-numerator+ 0)
(defconstant +ratio-denominator+ 1)
Layout of complex numbers .
(defconstant +complex-realpart+ 0)
(defconstant +complex-imagpart+ 1)
These are used to support the GC .
(defconstant +address-tag-shift+ 45)
(defconstant +address-tag-size+ 3)
(defconstant +address-tag+ (byte +address-tag-size+ +address-tag-shift+))
(defconstant +address-old-generation+ (expt 2 43)
"This bit distingushes the young and old generations.")
(defconstant +address-semispace+ (expt 2 44)
"This bit defines the semispaces for the young and old generations.")
Pinned must be zero , a number of critical objects are pinned & wired and stored
(defconstant +address-tag-pinned+ #b000)
(defconstant +address-tag-stack+ #b001)
(defconstant +address-tag-general+ #b010)
(defconstant +address-tag-cons+ #b011)
(defconstant +card-table-entry-size+ 4)
(defconstant +card-table-entry-offset+ (byte 16 0)
"A negative 16-bit offset from the start of the card to the start
of the first object in the card. Measured in 16-byte units.
An offset of all ones (1- (expt 2 16)) indicates that the start of the
object is further away than what can be encoded and the the system
should continue looking backwards.")
(defconstant +card-table-entry-dirty-gen+ (byte 2 16))
(defconstant +card-table-size+ (* (/ (expt 2 47) +card-size+)
+card-table-entry-size+))
256 GB , mostly arbitrary but in the wired area
This must also be chosen so that ( > = ( / + a - m - a+ 16 8) page - size ) , to support
(defconstant +allocation-minimum-alignment+ (* (/ #x1000 +card-table-entry-size+)
+card-size+))
GC mark bit region .
(defconstant +octets-per-mark-bit+ 16)
(defconstant +mark-bit-region-base+ #x0000100000000000)
48 address bits , every 2 words/16 bytes needs a mark bit , 8 bits per byte .
(defconstant +mark-bit-region-size+ (/ (expt 2 48) +octets-per-mark-bit+ 8))
(defconstant +block-map-present+ #x01
"Entry is present. This entry may still have a block associated with it, even if it is not present.")
(defconstant +block-map-writable+ #x02
"Entry is writable.")
(defconstant +block-map-zero-fill+ #x04
"Entry should be zero-filled.")
(defconstant +block-map-committed+ #x08
"This block is owned by the currently running system, not by a previous snapshot and can be written to safely.
Internal to the pager, should not be used by other code.")
(defconstant +block-map-wired+ #x10
"Entry should be wired in memory.")
(defconstant +block-map-track-dirty+ #x20
"Dirty bit tracking is enabled for this entry.
When the page is written to, the corresponding dirty bit in the card table will be set and this flag will be cleared.")
(defconstant +block-map-transient+ #x40
"Entry is transient and won't be saved by snapshots.
Accesses to stale memory will signal an error.
Internal to sg-vec, should not be used by other code.")
(defconstant +block-map-flag-mask+ #xFF)
(defconstant +block-map-id-shift+ 8)
keep it a few bits smaller than 56 to avoid bignums .
(defconstant +block-map-id-lazy+ (1- (ash 1 +block-map-id-size+))
"When stored in the ID field, this value indicates that space has been
reserved on the disk, but no specific block has been allocated.")
(defconstant +block-map-id-not-allocated+ 0)
(defparameter *llf-version* 36)
(defconstant +llf-arch-x86-64+ 1)
(defconstant +llf-arch-arm64+ 2)
(defconstant +llf-end-of-load+ #xFF)
(defconstant +llf-backlink+ #x01)
(defconstant +llf-function+ #x02)
(defconstant +llf-cons+ #x03)
(defconstant +llf-symbol+ #x04)
(defconstant +llf-uninterned-symbol+ #x05)
(defconstant +llf-string+ #x07)
(defconstant +llf-integer+ #x09)
(defconstant +llf-simple-vector+ #x0C)
(defconstant +llf-character+ #x0D)
(defconstant +llf-structure-definition+ #x0E)
(defconstant +llf-single-float+ #x10)
(defconstant +llf-proper-list+ #x11)
(defconstant +llf-package+ #x12)
(defconstant +llf-integer-vector+ #x13)
(defconstant +llf-add-backlink+ #x14)
(defconstant +llf-ratio+ #x15)
(defconstant +llf-array+ #x16)
(defconstant +llf-bit-vector+ #x18)
(defconstant +llf-function-reference+ #x19)
(defconstant +llf-character-with-bits+ #x1A)
(defconstant +llf-structure-slot-definition+ #x1B)
(defconstant +llf-byte+ #x1C)
(defconstant +llf-double-float+ #x1D)
(defconstant +llf-typed-array+ #x1E)
(defconstant +llf-funcall-n+ #x1F)
(defconstant +llf-drop+ #x20)
(defconstant +llf-complex-rational+ #x21)
(defconstant +llf-complex-single-float+ #x22)
(defconstant +llf-complex-double-float+ #x23)
(defconstant +llf-instance-header+ #x24)
(defconstant +llf-symbol-global-value-cell+ #x25)
(defconstant +llf-if+ #x26)
(defconstant +llf-else+ #x27)
(defconstant +llf-fi+ #x28)
(defconstant +llf-layout+ #x29)
(defconstant +llf-initialize-array+ #x2A)
(defconstant +llf-short-float+ #x2B)
(defconstant +llf-complex-short-float+ #x2C)
Fields in the Unicode info tables .
(defconstant +unicode-info-name-offset+ (byte 20 0)
"Offset of the name in the name table.")
(defconstant +unicode-info-name-length+ (byte 6 20)
"Length of the name in the name table.")
(defconstant +unicode-info-othercase-code+ (byte 21 26)
"Character code for the alternate case character.")
(defconstant +unicode-info-general-category+ (byte 5 47)
"General category code.")
(eval-when (:compile-toplevel :load-toplevel :execute)
(defun unicode-general-category-encode (category)
(ecase category
(:uppercase-letter 0)
(:lowercase-letter 1)
(:titlecase-letter 2)
(:modifier-letter 3)
(:other-letter 4)
(:nonspacing-mark 5)
(:spacing-mark 6)
(:enclosing-mark 7)
(:decimal-number 8)
(:letter-number 9)
(:other-number 10)
(:connector-punctuation 11)
(:dash-punctuation 12)
(:open-punctuation 13)
(:close-punctuation 14)
(:initial-punctuation 15)
(:final-punctuation 16)
(:other-punctuation 17)
(:math-symbol 18)
(:currency-symbol 19)
(:modifier-symbol 20)
(:other-symbol 21)
(:space-separator 22)
(:line-separator 23)
(:paragraph-separator 24)
(:control 25)
(:format 26)
(:surrogate 27)
(:private-use 28)
(:unassigned 29)))
(defun unicode-general-category-decode (code)
(ecase code
(0 :uppercase-letter)
(1 :lowercase-letter)
(2 :titlecase-letter)
(3 :modifier-letter)
(4 :other-letter)
(5 :nonspacing-mark)
(6 :spacing-mark)
(7 :enclosing-mark)
(8 :decimal-number)
(9 :letter-number)
(10 :other-number)
(11 :connector-punctuation)
(12 :dash-punctuation)
(13 :open-punctuation)
(14 :close-punctuation)
(15 :initial-punctuation)
(16 :final-punctuation)
(17 :other-punctuation)
(18 :math-symbol)
(19 :currency-symbol)
(20 :modifier-symbol)
(21 :other-symbol)
(22 :space-separator)
(23 :line-separator)
(24 :paragraph-separator)
(25 :control)
(26 :format)
(27 :surrogate)
(28 :private-use)
(29 :unassigned)))
)
(defconstant +debug-end-entry-op+ #x00)
(defconstant +debug-add-var-op+ #x10)
(defconstant +debug-add-hidden-var-op+ #x11)
(defconstant +debug-drop-n-op+ #x40)
(defconstant +debug-drop-op+ #x50)
(defconstant +debug-update-n-op+ #x60)
(defconstant +debug-update-op+ #x70)
(defconstant +debug-repr-value+ 0)
(defconstant +debug-repr-single-float+ 1)
(defconstant +debug-repr-double-float+ 2)
(defconstant +debug-repr-mmx-vector+ 3)
(defconstant +debug-repr-sse-vector+ 4)
(defconstant +debug-repr-fixnum+ 5)
(defconstant +debug-repr-unsigned-byte-64+ 6)
(defconstant +debug-repr-signed-byte-64+ 7)
(defconstant +debug-repr-short-float+ 8)
(defvar *debug-x86-64-register-encodings* #(:rax :rcx :rdx :rbx :rsp :rbp :rsi :rdi
:r8 :r9 :r10 :r11 :r12 :r13 :r14 :r15
:mm0 :mm1 :mm2 :mm3 :mm4 :mm5 :mm6 :mm7
:xmm0 :xmm1 :xmm2 :xmm3 :xmm4 :xmm5 :xmm6 :xmm7
:xmm8 :xmm9 :xmm10 :xmm11 :xmm12 :xmm13 :xmm14 :xmm15))
(defvar *debug-arm64-register-encodings* #(:x0 :x1 :x2 :x3 :x4 :x5 :x6 :x7
:x8 :x9 :x10 :x11 :x12 :x13 :x14 :x15
:x16 :x17 :x18 :x19 :x20 :x21 :x22 :x23
:x24 :x25 :x26 :x27 :x28 :x29 :x30 :xzr
:q0 :q1 :q2 :q3 :q4 :q5 :q6 :q7
:q8 :q9 :q10 :q11 :q12 :q13 :q14 :q15
:q16 :q17 :q18 :q19 :q20 :q21 :q22 :q23
:q24 :q25 :q26 :q27 :q28 :q29 :q30 :q31))
(defconstant +gcmd-flag0-frame+ 0)
(defconstant +gcmd-flag0-interrupt+ 1)
(defconstant +gcmd-flag0-block-or-tagbody-thunk+ 2)
(defconstant +gcmd-flag0-incoming-arguments+ 3)
(defconstant +gcmd-flag0-pushed-values-register+ 4)
(defconstant +gcmd-flag0-extra-registers+ (byte 2 5))
(defconstant +gcmd-flag0-restart+ 7)
(defconstant +gcmd-flag1-multiple-values+ (byte 4 0))
(defconstant +gcmd-flag1-incoming-arguments-location+ (byte 4 4))
|
bdff268d039ade5f7aba45c03dabfa2dbe0b2b4d282883583afcfb4cc3ce2649 | dalaing/websockets-reflex | List.hs | {-# LANGUAGE RankNTypes #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE RecursiveDo #-}
{-# LANGUAGE ScopedTypeVariables #-}
module List (
listHoldWithKey
, listWithKey
, listWithKey'
, listWithKeyShallowDiff
, listViewWithKey
, selectViewListWithKey
, selectViewListWithKey_
, list
, simpleList
) where
import Control.Monad (void)
import Control.Monad.Fix (MonadFix)
import Data.Either
import Data.Functor.Misc
import Data.Align
import Data.These
import Data.Map (Map)
import qualified Data.Map as Map
import Reflex
applyMap :: Ord k => Map k (Maybe v) -> Map k v -> Map k v
applyMap patch old = insertions `Map.union` (old `Map.difference` deletions)
where (deletions, insertions) = mapPartitionEithers $ maybeToEither <$> patch
maybeToEither = \case
Nothing -> Left ()
Just r -> Right r
mapPartitionEithers :: Map k (Either a b) -> (Map k a, Map k b)
mapPartitionEithers m = (fromLeft <$> ls, fromRight <$> rs)
where (ls, rs) = Map.partition isLeft m
fromLeft (Left l) = l
fromLeft _ = error "mapPartitionEithers: fromLeft received a Right value; this should be impossible"
fromRight (Right r) = r
fromRight _ = error "mapPartitionEithers: fromRight received a Left value; this should be impossible"
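-- Illustrative example (not part of the original module): a 'Just' entry in the
-- patch inserts or overwrites, a 'Nothing' entry deletes, everything else is kept.
--
-- >>> applyMap (Map.fromList [(1, Just 'x'), (2, Nothing)]) (Map.fromList [(2, 'b'), (3, 'c')])
-- fromList [(1,'x'),(3,'c')]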
listHoldWithKey :: forall t m k v a. (Ord k, MonadAdjust t m, MonadHold t m) => Map k v -> Event t (Map k (Maybe v)) -> (k -> v -> m a) -> m (Dynamic t (Map k a))
listHoldWithKey m0 m' f = do
let dm0 = mapWithFunctorToDMap $ Map.mapWithKey f m0
dm' = fmap (PatchDMap . mapWithFunctorToDMap . Map.mapWithKey (\k v -> ComposeMaybe $ fmap (f k) v)) m'
(a0, a') <- sequenceDMapWithAdjust dm0 dm'
fmap dmapToMap . incrementalToDynamic <$> holdIncremental a0 a' --TODO: Move the dmapToMap to the righthand side so it doesn't get fully redone every time
--TODO: Something better than Dynamic t (Map k v) - we want something where the Events carry diffs, not the whole value
listWithKey :: forall t k v m a. (Ord k, MonadAdjust t m, PostBuild t m, MonadFix m, MonadHold t m) => Dynamic t (Map k v) -> (k -> Dynamic t v -> m a) -> m (Dynamic t (Map k a))
listWithKey vals mkChild = do
postBuild <- getPostBuild
let childValChangedSelector = fanMap $ updated vals
-- We keep track of changes to children values in the function we pass to listHoldWithKey
-- The other changes we need to keep track of are child insertions and deletions. diffOnlyKeyChanges
-- keeps track of insertions and deletions but ignores value changes, since they're already accounted for.
diffOnlyKeyChanges olds news = flip Map.mapMaybe (align olds news) $ \case
This _ -> Just Nothing
These _ _ -> Nothing
That new -> Just $ Just new
rec sentVals :: Dynamic t (Map k v) <- foldDyn applyMap Map.empty changeVals
let changeVals :: Event t (Map k (Maybe v))
changeVals = attachWith diffOnlyKeyChanges (current sentVals) $ leftmost
[ updated vals
, tag (current vals) postBuild --TODO: This should probably be added to the attachWith, not to the updated; if we were using diffMap instead of diffMapNoEq, I think it might not work
]
listHoldWithKey Map.empty changeVals $ \k v ->
mkChild k =<< holdDyn v (select childValChangedSelector $ Const2 k)
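-- Minimal usage sketch (hypothetical widget names, not part of this module):
-- build one child per key and get back a Dynamic map of the children's results.
--
--   children <- listWithKey itemsDyn $ \k itemDyn ->
--     renderItem k itemDyn   -- 'renderItem' is an assumed builder :: k -> Dynamic t v -> m a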
{-# DEPRECATED listWithKey' "listWithKey' has been renamed to listWithKeyShallowDiff; also, its behavior has changed to fix a bug where children were always rebuilt (never updated)" #-}
listWithKey' :: (Ord k, MonadAdjust t m, MonadFix m, MonadHold t m) => Map k v -> Event t (Map k (Maybe v)) -> (k -> v -> Event t v -> m a) -> m (Dynamic t (Map k a))
listWithKey' = listWithKeyShallowDiff
-- | Display the given map of items (in key order) using the builder function provided, and update it with the given event. 'Nothing' update entries will delete the corresponding children, and 'Just' entries will create them if they do not exist or send an update event to them if they do.
listWithKeyShallowDiff :: (Ord k, MonadAdjust t m, MonadFix m, MonadHold t m) => Map k v -> Event t (Map k (Maybe v)) -> (k -> v -> Event t v -> m a) -> m (Dynamic t (Map k a))
listWithKeyShallowDiff initialVals valsChanged mkChild = do
let childValChangedSelector = fanMap $ fmap (Map.mapMaybe id) valsChanged
sentVals <- foldDyn applyMap Map.empty $ fmap (fmap void) valsChanged
let relevantPatch patch _ = case patch of
Nothing -> Just Nothing -- Even if we let a Nothing through when the element doesn't already exist, this doesn't cause a problem because it is ignored
Just _ -> Nothing -- We don't want to let spurious re-creations of items through
listHoldWithKey initialVals (attachWith (flip (Map.differenceWith relevantPatch)) (current sentVals) valsChanged) $ \k v ->
mkChild k v $ select childValChangedSelector $ Const2 k
--TODO: Something better than Dynamic t (Map k v) - we want something where the Events carry diffs, not the whole value
-- | Create a dynamically-changing set of Event-valued widgets.
-- This is like listWithKey, specialized for widgets returning (Event t a). listWithKey would return 'Dynamic t (Map k (Event t a))' in this scenario, but listViewWithKey flattens this to 'Event t (Map k a)' via 'switch'.
listViewWithKey :: (Ord k, MonadAdjust t m, PostBuild t m, MonadHold t m, MonadFix m) => Dynamic t (Map k v) -> (k -> Dynamic t v -> m (Event t a)) -> m (Event t (Map k a))
listViewWithKey vals mkChild = switch . fmap mergeMap <$> listViewWithKey' vals mkChild
listViewWithKey' :: (Ord k, MonadAdjust t m, PostBuild t m, MonadHold t m, MonadFix m) => Dynamic t (Map k v) -> (k -> Dynamic t v -> m a) -> m (Behavior t (Map k a))
listViewWithKey' vals mkChild = current <$> listWithKey vals mkChild
-- | Create a dynamically-changing set of widgets, one of which is selected at any time.
selectViewListWithKey :: forall t m k v a. (MonadAdjust t m, Ord k, PostBuild t m, MonadHold t m, MonadFix m)
=> Dynamic t k -- ^ Current selection key
-> Dynamic t (Map k v) -- ^ Dynamic key/value map
-> (k -> Dynamic t v -> Dynamic t Bool -> m (Event t a)) -- ^ Function to create a widget for a given key from Dynamic value and Dynamic Bool indicating if this widget is currently selected
-> m (Event t (k, a)) -- ^ Event that fires when any child's return Event fires. Contains key of an arbitrary firing widget.
selectViewListWithKey selection vals mkChild = do
let selectionDemux = demux selection -- For good performance, this value must be shared across all children
selectChild <- listWithKey vals $ \k v -> do
let selected = demuxed selectionDemux k
selectSelf <- mkChild k v selected
return $ fmap ((,) k) selectSelf
return $ switchPromptlyDyn $ leftmost . Map.elems <$> selectChild
selectViewListWithKey_ :: forall t m k v a. (MonadAdjust t m, Ord k, PostBuild t m, MonadHold t m, MonadFix m)
=> Dynamic t k -- ^ Current selection key
-> Dynamic t (Map k v) -- ^ Dynamic key/value map
-> (k -> Dynamic t v -> Dynamic t Bool -> m (Event t a)) -- ^ Function to create a widget for a given key from Dynamic value and Dynamic Bool indicating if this widget is currently selected
-> m (Event t k) -- ^ Event that fires when any child's return Event fires. Contains key of an arbitrary firing widget.
selectViewListWithKey_ selection vals mkChild = fmap fst <$> selectViewListWithKey selection vals mkChild
-- | Create a dynamically-changing set of widgets from a Dynamic key/value map.
-- Unlike the 'withKey' variants, the child widgets are insensitive to which key they're associated with.
list :: (Ord k, MonadAdjust t m, MonadHold t m, PostBuild t m, MonadFix m) => Dynamic t (Map k v) -> (Dynamic t v -> m a) -> m (Dynamic t (Map k a))
list dm mkChild = listWithKey dm (\_ dv -> mkChild dv)
-- | Create a dynamically-changing set of widgets from a Dynamic list.
simpleList :: (MonadAdjust t m, MonadHold t m, PostBuild t m, MonadFix m) => Dynamic t [v] -> (Dynamic t v -> m a) -> m (Dynamic t [a])
simpleList xs mkChild = fmap (fmap (map snd . Map.toList)) $ flip list mkChild $ fmap (Map.fromList . zip [(1::Int)..]) xs
| null | https://raw.githubusercontent.com/dalaing/websockets-reflex/65bee7560442f5aae96f6f64fb12abeb501f5427/other-example/server/List.hs | haskell | # LANGUAGE RankNTypes #
TODO: Move the dmapToMap to the righthand side so it doesn't get fully redone every time
TODO: Something better than Dynamic t (Map k v) - we want something where the Events carry diffs, not the whole value
The other changes we need to keep track of are child insertions and deletions. diffOnlyKeyChanges
keeps track of insertions and deletions but ignores value changes, since they're already accounted for.
# DEPRECATED listWithKey' "listWithKey' has been renamed to listWithKeyShallowDiff; also, its behavior has changed to fix a bug where children were always rebuilt (never updated)" #
| Display the given map of items (in key order) using the builder function provided, and update it with the given event. 'Nothing' update entries will delete the corresponding children, and 'Just' entries will create them if they do not exist or send an update event to them if they do.
Even if we let a Nothing through when the element doesn't already exist, this doesn't cause a problem because it is ignored
We don't want to let spurious re-creations of items through
TODO: Something better than Dynamic t (Map k v) - we want something where the Events carry diffs, not the whole value
| Create a dynamically-changing set of Event-valued widgets.
This is like listWithKey, specialized for widgets returning (Event t a). listWithKey would return 'Dynamic t (Map k (Event t a))' in this scenario, but listViewWithKey flattens this to 'Event t (Map k a)' via 'switch'.
^ Current selection key
^ Dynamic key/value map
^ Event that fires when any child's return Event fires. Contains key of an arbitrary firing widget.
For good performance, this value must be shared across all children
^ Current selection key
^ Dynamic key/value map
^ Event that fires when any child's return Event fires. Contains key of an arbitrary firing widget.
| Create a dynamically-changing set of widgets from a Dynamic key/value map.
Unlike the 'withKey' variants, the child widgets are insensitive to which key they're associated with. | # LANGUAGE LambdaCase #
{-# LANGUAGE RecursiveDo #-}
{-# LANGUAGE ScopedTypeVariables #-}
module List (
listHoldWithKey
, listWithKey
, listWithKey'
, listWithKeyShallowDiff
, listViewWithKey
, selectViewListWithKey
, selectViewListWithKey_
, list
, simpleList
) where
import Control.Monad (void)
import Control.Monad.Fix (MonadFix)
import Data.Either
import Data.Functor.Misc
import Data.Align
import Data.These
import Data.Map (Map)
import qualified Data.Map as Map
import Reflex
applyMap :: Ord k => Map k (Maybe v) -> Map k v -> Map k v
applyMap patch old = insertions `Map.union` (old `Map.difference` deletions)
where (deletions, insertions) = mapPartitionEithers $ maybeToEither <$> patch
maybeToEither = \case
Nothing -> Left ()
Just r -> Right r
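-- Added note (not in the original source): in applyMap, a patch entry of
-- 'Just v' inserts or replaces that key while 'Nothing' deletes it, e.g.
-- applyMap (Map.fromList [(1, Just 'a'), (2, Nothing)]) old adds key 1 and
-- removes key 2 from 'old'.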
mapPartitionEithers :: Map k (Either a b) -> (Map k a, Map k b)
mapPartitionEithers m = (fromLeft <$> ls, fromRight <$> rs)
where (ls, rs) = Map.partition isLeft m
fromLeft (Left l) = l
fromLeft _ = error "mapPartitionEithers: fromLeft received a Right value; this should be impossible"
fromRight (Right r) = r
fromRight _ = error "mapPartitionEithers: fromRight received a Left value; this should be impossible"
listHoldWithKey :: forall t m k v a. (Ord k, MonadAdjust t m, MonadHold t m) => Map k v -> Event t (Map k (Maybe v)) -> (k -> v -> m a) -> m (Dynamic t (Map k a))
listHoldWithKey m0 m' f = do
let dm0 = mapWithFunctorToDMap $ Map.mapWithKey f m0
dm' = fmap (PatchDMap . mapWithFunctorToDMap . Map.mapWithKey (\k v -> ComposeMaybe $ fmap (f k) v)) m'
  (a0, a') <- sequenceDMapWithAdjust dm0 dm'
  fmap dmapToMap . incrementalToDynamic <$> holdIncremental a0 a'
listWithKey :: forall t k v m a. (Ord k, MonadAdjust t m, PostBuild t m, MonadFix m, MonadHold t m) => Dynamic t (Map k v) -> (k -> Dynamic t v -> m a) -> m (Dynamic t (Map k a))
listWithKey vals mkChild = do
postBuild <- getPostBuild
let childValChangedSelector = fanMap $ updated vals
      -- We keep track of changes to children values in the function we pass to listHoldWithKey
diffOnlyKeyChanges olds news = flip Map.mapMaybe (align olds news) $ \case
This _ -> Just Nothing
These _ _ -> Nothing
That new -> Just $ Just new
rec sentVals :: Dynamic t (Map k v) <- foldDyn applyMap Map.empty changeVals
let changeVals :: Event t (Map k (Maybe v))
changeVals = attachWith diffOnlyKeyChanges (current sentVals) $ leftmost
[ updated vals
          , tag (current vals) postBuild --TODO: This should probably be added to the attachWith, not to the updated; if we were using diffMap instead of diffMapNoEq, I think it might not work
]
listHoldWithKey Map.empty changeVals $ \k v ->
mkChild k =<< holdDyn v (select childValChangedSelector $ Const2 k)
listWithKey' :: (Ord k, MonadAdjust t m, MonadFix m, MonadHold t m) => Map k v -> Event t (Map k (Maybe v)) -> (k -> v -> Event t v -> m a) -> m (Dynamic t (Map k a))
listWithKey' = listWithKeyShallowDiff
listWithKeyShallowDiff :: (Ord k, MonadAdjust t m, MonadFix m, MonadHold t m) => Map k v -> Event t (Map k (Maybe v)) -> (k -> v -> Event t v -> m a) -> m (Dynamic t (Map k a))
listWithKeyShallowDiff initialVals valsChanged mkChild = do
let childValChangedSelector = fanMap $ fmap (Map.mapMaybe id) valsChanged
sentVals <- foldDyn applyMap Map.empty $ fmap (fmap void) valsChanged
  let relevantPatch patch _ = case patch of
        Nothing -> Just Nothing
        Just _ -> Nothing
listHoldWithKey initialVals (attachWith (flip (Map.differenceWith relevantPatch)) (current sentVals) valsChanged) $ \k v ->
mkChild k v $ select childValChangedSelector $ Const2 k
listViewWithKey :: (Ord k, MonadAdjust t m, PostBuild t m, MonadHold t m, MonadFix m) => Dynamic t (Map k v) -> (k -> Dynamic t v -> m (Event t a)) -> m (Event t (Map k a))
listViewWithKey vals mkChild = switch . fmap mergeMap <$> listViewWithKey' vals mkChild
listViewWithKey' :: (Ord k, MonadAdjust t m, PostBuild t m, MonadHold t m, MonadFix m) => Dynamic t (Map k v) -> (k -> Dynamic t v -> m a) -> m (Behavior t (Map k a))
listViewWithKey' vals mkChild = current <$> listWithKey vals mkChild
-- | Create a dynamically-changing set of widgets, one of which is selected at any time.
selectViewListWithKey :: forall t m k v a. (MonadAdjust t m, Ord k, PostBuild t m, MonadHold t m, MonadFix m)
  => Dynamic t k
  -> Dynamic t (Map k v)
  -> (k -> Dynamic t v -> Dynamic t Bool -> m (Event t a))
  -> m (Event t (k, a))
selectViewListWithKey selection vals mkChild = do
  let selectionDemux = demux selection
selectChild <- listWithKey vals $ \k v -> do
let selected = demuxed selectionDemux k
selectSelf <- mkChild k v selected
return $ fmap ((,) k) selectSelf
return $ switchPromptlyDyn $ leftmost . Map.elems <$> selectChild
selectViewListWithKey_ :: forall t m k v a. (MonadAdjust t m, Ord k, PostBuild t m, MonadHold t m, MonadFix m)
  => Dynamic t k
  -> Dynamic t (Map k v)
  -> (k -> Dynamic t v -> Dynamic t Bool -> m (Event t a))
  -> m (Event t k)
selectViewListWithKey_ selection vals mkChild = fmap fst <$> selectViewListWithKey selection vals mkChild
list :: (Ord k, MonadAdjust t m, MonadHold t m, PostBuild t m, MonadFix m) => Dynamic t (Map k v) -> (Dynamic t v -> m a) -> m (Dynamic t (Map k a))
list dm mkChild = listWithKey dm (\_ dv -> mkChild dv)
-- | Create a dynamically-changing set of widgets from a Dynamic list.
simpleList :: (MonadAdjust t m, MonadHold t m, PostBuild t m, MonadFix m) => Dynamic t [v] -> (Dynamic t v -> m a) -> m (Dynamic t [a])
simpleList xs mkChild = fmap (fmap (map snd . Map.toList)) $ flip list mkChild $ fmap (Map.fromList . zip [(1::Int)..]) xs
|
f681f23e99f08e0b170ed1d7b48c8a8fa4f131f3a01f20c76ca2daa7191c89a9 | S8A/htdp-exercises | ex413.rkt | The first three lines of this file were inserted by . They record metadata
;; about the language level of this file in a form that our tools can easily process.
#reader(lib "htdp-intermediate-lambda-reader.ss" "lang")((modname ex413) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
(define-struct inex [mantissa sign exponent])
; An Inex is a structure:
;   (make-inex N99 S N99)
; An S is one of:
; – 1
; – -1
; An N99 is an N between 0 and 99 (inclusive).
; N Number N -> Inex
; makes an instance of Inex after checking the arguments
(define (create-inex m s e)
(cond
[(and (<= 0 m 99) (<= 0 e 99) (or (= s 1) (= s -1)))
(make-inex m s e)]
[else (error "bad values given")]))
; Inex -> Number
; converts an inex into its numeric equivalent
(define (inex->number an-inex)
(* (inex-mantissa an-inex)
(expt 10 (* (inex-sign an-inex) (inex-exponent an-inex)))))
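; Illustrative examples (added, not from the original file):
; (inex->number (create-inex 15 1 2)) evaluates to 15 * 10^2 = 1500, while
; (inex->number (create-inex 15 -1 2)) evaluates to 15 * 10^(-2) = 15/100.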
; N N N Symbol String -> Inex
; Checks if the given mantissa, sign and exponent form a valid inex instance,
; and otherwise produces an error with symbol s and text msg
(define (check-inex mantissa sign exp s msg)
(cond
[(and (<= 0 mantissa 99) (<= 0 exp 99) (or (= sign 1) (= sign -1)))
(make-inex mantissa sign exp)]
[else (error s msg)]))
; Inex Inex -> Inex
; adds two inex numbers that have the same exponent
(define (inex+ x y)
(local ((define exponent (inex-exponent x))
(define sign (inex-sign x))
(define sum (+ (inex-mantissa x) (inex-mantissa y)))
(define mantissa
(if (> sum 99) (round (/ sum 10)) sum))
(define exp
(if (> sum 99) (add1 exponent) exponent)))
(check-inex mantissa sign exp 'inex+ "sum out of range")))
(check-expect (inex+ (create-inex 1 1 0) (create-inex 2 1 0))
(create-inex 3 1 0))
(check-expect (inex+ (create-inex 55 1 0) (create-inex 55 1 0))
(create-inex 11 1 1))
(check-expect (inex+ (create-inex 56 1 0) (create-inex 56 1 0))
(create-inex 11 1 1))
(check-error (inex+ (create-inex 99 1 99) (create-inex 1 1 99)))
; Inex Inex -> Inex
; multiplies two inex numbers
(define (inex* x y)
(local ((define product (* (inex-mantissa x) (inex-mantissa y)))
(define expsum (+ (* (inex-sign x) (inex-exponent x))
(* (inex-sign y) (inex-exponent y))))
(define exp (abs expsum))
(define sign (sgn expsum))
(define mantissa
(if (> product 99) (round (/ product 10)) product))
(define exponent
(if (> product 99) (add1 exp) exp)))
(check-inex mantissa sign exponent 'inex* "product out of range")))
(check-expect (inex* (create-inex 2 1 4) (create-inex 8 1 10))
(create-inex 16 1 14))
(check-expect (inex* (create-inex 20 1 1) (create-inex 5 1 4))
(create-inex 10 1 6))
(check-expect (inex* (create-inex 27 -1 1) (create-inex 7 1 4))
(create-inex 19 1 4))
(check-error (inex* (create-inex 10 1 99) (create-inex 10 1 99)))
| null | https://raw.githubusercontent.com/S8A/htdp-exercises/578e49834a9513f29ef81b7589b28081c5e0b69f/ex413.rkt | racket | about the language level of this file in a form that our tools can easily process.
An Inex is a structure:
– -1
Inex -> Number
converts an inex into its numeric equivalent
Checks if the given mantissa, sign and exponent form a valid inex instance,
and otherwise produces an error with symbol s and text msg | The first three lines of this file were inserted by . They record metadata
#reader(lib "htdp-intermediate-lambda-reader.ss" "lang")((modname ex413) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
(define-struct inex [mantissa sign exponent])
;   (make-inex N99 S N99)
; An S is one of:
; – 1
; An N99 is an N between 0 and 99 (inclusive).
; N Number N -> Inex
; makes an instance of Inex after checking the arguments
(define (create-inex m s e)
(cond
[(and (<= 0 m 99) (<= 0 e 99) (or (= s 1) (= s -1)))
(make-inex m s e)]
[else (error "bad values given")]))
(define (inex->number an-inex)
(* (inex-mantissa an-inex)
(expt 10 (* (inex-sign an-inex) (inex-exponent an-inex)))))
; N N N Symbol String -> Inex
(define (check-inex mantissa sign exp s msg)
(cond
[(and (<= 0 mantissa 99) (<= 0 exp 99) (or (= sign 1) (= sign -1)))
(make-inex mantissa sign exp)]
[else (error s msg)]))
; Inex Inex -> Inex
; adds two inex numbers that have the same exponent
(define (inex+ x y)
(local ((define exponent (inex-exponent x))
(define sign (inex-sign x))
(define sum (+ (inex-mantissa x) (inex-mantissa y)))
(define mantissa
(if (> sum 99) (round (/ sum 10)) sum))
(define exp
(if (> sum 99) (add1 exponent) exponent)))
(check-inex mantissa sign exp 'inex+ "sum out of range")))
(check-expect (inex+ (create-inex 1 1 0) (create-inex 2 1 0))
(create-inex 3 1 0))
(check-expect (inex+ (create-inex 55 1 0) (create-inex 55 1 0))
(create-inex 11 1 1))
(check-expect (inex+ (create-inex 56 1 0) (create-inex 56 1 0))
(create-inex 11 1 1))
(check-error (inex+ (create-inex 99 1 99) (create-inex 1 1 99)))
; Inex Inex -> Inex
; multiplies two inex numbers
(define (inex* x y)
(local ((define product (* (inex-mantissa x) (inex-mantissa y)))
(define expsum (+ (* (inex-sign x) (inex-exponent x))
(* (inex-sign y) (inex-exponent y))))
(define exp (abs expsum))
(define sign (sgn expsum))
(define mantissa
(if (> product 99) (round (/ product 10)) product))
(define exponent
(if (> product 99) (add1 exp) exp)))
(check-inex mantissa sign exponent 'inex* "product out of range")))
(check-expect (inex* (create-inex 2 1 4) (create-inex 8 1 10))
(create-inex 16 1 14))
(check-expect (inex* (create-inex 20 1 1) (create-inex 5 1 4))
(create-inex 10 1 6))
(check-expect (inex* (create-inex 27 -1 1) (create-inex 7 1 4))
(create-inex 19 1 4))
(check-error (inex* (create-inex 10 1 99) (create-inex 10 1 99)))
|
3a3ef0d4c2a354eb386178d08aadfe0ffbf487942bcbe63dd594e19879c27c2b | FlowForwarding/loom | icontrol_ofsh.erl | %%------------------------------------------------------------------------------
%% Copyright 2014 FlowForwarding.org
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%%     http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%-----------------------------------------------------------------------------
%%% @author Erlang Solutions Ltd. < >
%%% @copyright 2014 FlowForwarding.org
%%% @doc
%%% Simple network executive callback handler for ofs_handler.
%%% @end
-module(icontrol_ofsh).
-include_lib("ofs_handler/include/ofs_handler.hrl").
-include_lib("of_protocol/include/of_protocol.hrl").
-export([
init/7,
connect/8,
disconnect/1,
failover/1,
handle_message/2,
handle_error/2,
terminate/1
]).
-behaviour(ofs_handler).
% State held by ofs_handler.
% This state holds onto the datapath id and aux connection id.
% There is one state for each connection.
-define(OFS_STATE, icontrol_ofs_state).
-record(?OFS_STATE, {
datapath_id,
aux_id = 0
}).
-type ofs_state() :: #?OFS_STATE{}.
% callbacks from ofs_handler
% The callback functions in turn call icontrol_logic for processing.
-spec init(handler_mode(), ipaddress(), datapath_id(), features(), of_version(), connection(), options()) -> {ok, ofs_state()}.
init(Mode, IpAddr, DatapathId, _Features, Version, Connection, _Opts) ->
% new main connection
ok = icontrol_logic:ofsh_init(Mode, IpAddr, DatapathId, Version, Connection),
State = #?OFS_STATE{datapath_id = DatapathId},
{ok, State}.
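% Added note (not in the original source): ofs_handler calls init/7 for the
% main connection to a switch and connect/8 for each auxiliary connection;
% both forward to icontrol_logic and keep the datapath id (plus aux id) in
% #?OFS_STATE{} so later callbacks can report which connection an event
% belongs to.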
-spec connect(handler_mode(), ipaddress(), datapath_id(), features(), of_version(), connection(), auxid(), options()) -> {ok, ofs_state()}.
connect(Mode, IpAddr, DatapathId, _Features, Version, Connection, AuxId, _Opts) ->
% new auxiliary connection
ok = icontrol_logic:ofsh_connect(Mode, IpAddr, DatapathId, Version, Connection, AuxId),
State = #?OFS_STATE{datapath_id = DatapathId, aux_id = AuxId},
{ok, State}.
-spec disconnect(ofs_state()) -> ok.
disconnect(State) ->
% lost an auxiliary connection
#?OFS_STATE{
datapath_id = DatapathId,
aux_id = AuxId
} = State,
ok = icontrol_logic:ofsh_disconnect(AuxId, DatapathId).
-spec failover(ofs_state()) -> {ok, ofs_state()}.
failover(State) ->
% State of new active
% TODO: not failover not implement in ofs_handler
ok = icontrol_logic:ofsh_failover(),
{ok, State}.
-spec handle_error(error_reason(), ofs_state()) -> ok.
handle_error(Reason, State) ->
% Error on the connection
DatapathId = State#?OFS_STATE.datapath_id,
ok = icontrol_logic:ofsh_handle_error(DatapathId, Reason).
-spec handle_message(ofp_message(), ofs_state()) -> ok.
handle_message(Msg, State) ->
% received a message on the connection
DatapathId = State#?OFS_STATE.datapath_id,
ok = icontrol_logic:ofsh_handle_message(DatapathId, Msg).
-spec terminate(ofs_state()) -> ok.
terminate(State) ->
% lost the main connection
DatapathId = State#?OFS_STATE.datapath_id,
ok = icontrol_logic:ofsh_terminate(DatapathId).
| null | https://raw.githubusercontent.com/FlowForwarding/loom/86a9c5aa8b7d4776062365716c9a3dbbf3330bc5/icontrol/apps/icontrol/src/icontrol_ofsh.erl | erlang | ------------------------------------------------------------------------------
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-----------------------------------------------------------------------------
@doc
Simple network executive callback handler for ofs_handler.
@end
This state holds onto the datapath id and aux connection id.
callbacks from ofs_handler
The callback functions in turn call icontrol_logic for processing.
new main connection
new auxiliary connection
lost an auxiliary connection
TODO: not failover not implement in ofs_handler
Error on the connection
received a message on the connection
lost the main connection | Copyright 2014 FlowForwarding.org
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
@author Erlang Solutions Ltd. < >
2014 FlowForwarding.org
-module(icontrol_ofsh).
-include_lib("ofs_handler/include/ofs_handler.hrl").
-include_lib("of_protocol/include/of_protocol.hrl").
-export([
init/7,
connect/8,
disconnect/1,
failover/1,
handle_message/2,
handle_error/2,
terminate/1
]).
-behaviour(ofs_handler).
State held by ofs_handler .
There is one state for each connection .
-define(OFS_STATE, icontrol_ofs_state).
-record(?OFS_STATE, {
datapath_id,
aux_id = 0
}).
-type ofs_state() :: #?OFS_STATE{}.
-spec init(handler_mode(), ipaddress(), datapath_id(), features(), of_version(), connection(), options()) -> {ok, ofs_state()}.
init(Mode, IpAddr, DatapathId, _Features, Version, Connection, _Opts) ->
ok = icontrol_logic:ofsh_init(Mode, IpAddr, DatapathId, Version, Connection),
State = #?OFS_STATE{datapath_id = DatapathId},
{ok, State}.
-spec connect(handler_mode(), ipaddress(), datapath_id(), features(), of_version(), connection(), auxid(), options()) -> {ok, ofs_state()}.
connect(Mode, IpAddr, DatapathId, _Features, Version, Connection, AuxId, _Opts) ->
ok = icontrol_logic:ofsh_connect(Mode, IpAddr, DatapathId, Version, Connection, AuxId),
State = #?OFS_STATE{datapath_id = DatapathId, aux_id = AuxId},
{ok, State}.
-spec disconnect(ofs_state()) -> ok.
disconnect(State) ->
#?OFS_STATE{
datapath_id = DatapathId,
aux_id = AuxId
} = State,
ok = icontrol_logic:ofsh_disconnect(AuxId, DatapathId).
-spec failover(ofs_state()) -> {ok, ofs_state()}.
failover(State) ->
State of new active
ok = icontrol_logic:ofsh_failover(),
{ok, State}.
-spec handle_error(error_reason(), ofs_state()) -> ok.
handle_error(Reason, State) ->
DatapathId = State#?OFS_STATE.datapath_id,
ok = icontrol_logic:ofsh_handle_error(DatapathId, Reason).
-spec handle_message(ofp_message(), ofs_state()) -> ok.
handle_message(Msg, State) ->
DatapathId = State#?OFS_STATE.datapath_id,
ok = icontrol_logic:ofsh_handle_message(DatapathId, Msg).
-spec terminate(ofs_state()) -> ok.
terminate(State) ->
DatapathId = State#?OFS_STATE.datapath_id,
ok = icontrol_logic:ofsh_terminate(DatapathId).
|
3ac26b5bb2e3b958097adaaa521879201692d67515ac15131e4cad9edcb1ca24 | jvranish/TheExperiment | Expression.hs | module Language.TheExperiment.Parser.Expression where
import Text.Parsec
import Text.Parsec.Expr
import Data.List
import Data.Function
import Language.TheExperiment.AST.Expression
import Language.TheExperiment.Parser.Lexer
import Language.TheExperiment.Parser.Literal
type ParsedExpr = Expr ()
anExpr :: EParser ParsedExpr
anExpr = do
Operators opList <- getState
let opTable = fmap (fmap fst) $ groupBy ((==) `on` snd) $ reverse $ sortBy (compare `on` snd) opList
--let opTable = [ op | (op, prec) <- opList, then reverse ... sortWith by prec, then group by prec]
buildExpressionParser opTable aSmplExpr <?> "expression"
aSmplExpr :: EParser ParsedExpr
aSmplExpr = anIdOrCall
<|> parens anExpr
<|> liftMp Literal aLiteral
<?> "simple expression"
aCall :: EParser ParsedExpr
aCall = do
expr <- anIdOrCall
case expr of
Call {} -> return expr
_ -> parserZero <?> "function call"
anIdOrCall :: EParser ParsedExpr
anIdOrCall = do
pos <- getPosition
name <- lowerIdent
args <- optionMaybe $ parens $ sepBy anExpr comma
let ident = Identifier pos () name NotOperator
return $ case args of
Nothing -> ident
Just xs -> Call pos () ident xs
| null | https://raw.githubusercontent.com/jvranish/TheExperiment/54ca832d2f62a928a992b4c23dadf9653d13a5a7/src/Language/TheExperiment/Parser/Expression.hs | haskell | let opTable = [ op | (op, prec) <- opList, then reverse ... sortWith by prec, then group by prec] | module Language.TheExperiment.Parser.Expression where
import Text.Parsec
import Text.Parsec.Expr
import Data.List
import Data.Function
import Language.TheExperiment.AST.Expression
import Language.TheExperiment.Parser.Lexer
import Language.TheExperiment.Parser.Literal
type ParsedExpr = Expr ()
anExpr :: EParser ParsedExpr
anExpr = do
Operators opList <- getState
let opTable = fmap (fmap fst) $ groupBy ((==) `on` snd) $ reverse $ sortBy (compare `on` snd) opList
buildExpressionParser opTable aSmplExpr <?> "expression"
aSmplExpr :: EParser ParsedExpr
aSmplExpr = anIdOrCall
<|> parens anExpr
<|> liftMp Literal aLiteral
<?> "simple expression"
aCall :: EParser ParsedExpr
aCall = do
expr <- anIdOrCall
case expr of
Call {} -> return expr
_ -> parserZero <?> "function call"
anIdOrCall :: EParser ParsedExpr
anIdOrCall = do
pos <- getPosition
name <- lowerIdent
args <- optionMaybe $ parens $ sepBy anExpr comma
let ident = Identifier pos () name NotOperator
return $ case args of
Nothing -> ident
Just xs -> Call pos () ident xs
|
2d8a2206b753970dec488e1cc9cdadbda31cf24a77e66b473abc03ed3b51f97f | schemedoc/cookbook | www-lowdown-colorize.scm | This code is ported from gist :
;;
;; This is an extension to the Chicken 5 `lowdown` egg to recognize
;; GitHub Flavored Markdown code blocks. These code blocks may contain
;; blank lines, and the line with the opening ``` can say the name of
;; the language being used in the block. We also add syntax coloring.
;; Example:
;; ```Scheme
( display " Hello world " )
;; (newline)
;; ```
;; TODO:
;; - (html-colorize lang*) returns a HTML string, which is parsed back
;;   into SXML by (html->sxml), then turned into HTML again by the
;; code that calls this module. Send a patch to the `colorize` egg
;;   so it can return SXML directly and this module does not have to
;; depend on html-parser.
;; - Send a patch to add the features from this module into the
;; `lowdown` egg. The (lowdown extra) module would be a good place.
(define (fenced-code-block-end fence)
(any-of end-of-input
(skip non-indent-space
(char-seq fence)
(zero-or-more (is (string-ref fence 0)))
space*
line-end)))
(define (fenced-code-block-lines indent code-block-end)
(zero-or-more
(preceded-by (none-of code-block-end)
(repeated (is #\space) max: (length indent))
line)))
(define fenced-code-block-info-string
(as-string (zero-or-more (none-of* (is #\`) normal-line-end item))))
(define fenced-code-block
(sequence* ((indent non-indent-space)
(fence (as-string (repeated (in #\` #\~) min: 3)))
(_ space*)
(info fenced-code-block-info-string)
(_ normal-line-end))
(let ((code-block-end (fenced-code-block-end fence)))
(sequence* ((code-lines (fenced-code-block-lines
indent code-block-end))
(_ code-block-end))
(result `(verbatim (info ,(string-trim-both info))
(code ,@code-lines)))))))
(define (dashes->spaces string)
(string-map (lambda (c) (if (char=? c #\-) #\space c)) string))
(define (spaces->dashes string)
(string-map (lambda (c) (if (char=? c #\space) #\- c)) string))
(define (coloring-type-string->symbol string)
(let ((string (dashes->spaces string)))
(let loop ((names (coloring-type-names)))
(and (not (null? names))
(if (string-ci= string (cdar names))
(caar names)
(loop (cdr names)))))))
(define fenced-code-block-conversion-rules*
`((verbatim
. ,(lambda (_ contents)
(or (and-let* (((pair? contents))
((pair? (car contents)))
(info (alist-ref 'info contents))
(code (alist-ref 'code contents))
(code* (string-intersperse code ""))
(raw-lang (car info))
(lang-sym (coloring-type-string->symbol raw-lang))
(lang-dashed (spaces->dashes
(string-downcase raw-lang))))
(if (coloring-type-exists? lang-sym)
`(pre (code (@ (class ,(string-append
"colorize"
" language-" lang-dashed)))
,@(->> code*
(html-colorize lang-sym)
(html->sxml)
(cdr))))
`(pre (code ,code*))))
`(pre (code ,@contents)))))))
(define (enable-www-lowdown-colorize!)
(block-hook (cons fenced-code-block (block-hook)))
(markdown-html-conversion-rules*
(append fenced-code-block-conversion-rules*
(markdown-html-conversion-rules*)))
(void))
| null | https://raw.githubusercontent.com/schemedoc/cookbook/20fcb619303b0e0a567a492006b219eb9ea381c4/www-lowdown-colorize.scm | scheme |
This is an extension to the Chicken 5 `lowdown` egg to recognize
GitHub Flavored Markdown code blocks. These code blocks may contain
blank lines, and the line with the opening ``` can say the name of
the language being used in the block. We also add syntax coloring.
Example:
```Scheme
(newline)
```
TODO:
- (html-colorize lang*) returns a HTML string, which is parsed back
code that calls this module. Send a patch to the `colorize` egg
depend on html-parser.
- Send a patch to add the features from this module into the
`lowdown` egg. The (lowdown extra) module would be a good place. | This code is ported from gist :
( display " Hello world " )
into SXML by ( html->sxml ) , then turned into HTML again by the
so it can return SXML directly and this module does not have to
(define (fenced-code-block-end fence)
(any-of end-of-input
(skip non-indent-space
(char-seq fence)
(zero-or-more (is (string-ref fence 0)))
space*
line-end)))
(define (fenced-code-block-lines indent code-block-end)
(zero-or-more
(preceded-by (none-of code-block-end)
(repeated (is #\space) max: (length indent))
line)))
(define fenced-code-block-info-string
(as-string (zero-or-more (none-of* (is #\`) normal-line-end item))))
(define fenced-code-block
(sequence* ((indent non-indent-space)
(fence (as-string (repeated (in #\` #\~) min: 3)))
(_ space*)
(info fenced-code-block-info-string)
(_ normal-line-end))
(let ((code-block-end (fenced-code-block-end fence)))
(sequence* ((code-lines (fenced-code-block-lines
indent code-block-end))
(_ code-block-end))
(result `(verbatim (info ,(string-trim-both info))
(code ,@code-lines)))))))
(define (dashes->spaces string)
(string-map (lambda (c) (if (char=? c #\-) #\space c)) string))
(define (spaces->dashes string)
(string-map (lambda (c) (if (char=? c #\space) #\- c)) string))
(define (coloring-type-string->symbol string)
(let ((string (dashes->spaces string)))
(let loop ((names (coloring-type-names)))
(and (not (null? names))
(if (string-ci= string (cdar names))
(caar names)
(loop (cdr names)))))))
(define fenced-code-block-conversion-rules*
`((verbatim
. ,(lambda (_ contents)
(or (and-let* (((pair? contents))
((pair? (car contents)))
(info (alist-ref 'info contents))
(code (alist-ref 'code contents))
(code* (string-intersperse code ""))
(raw-lang (car info))
(lang-sym (coloring-type-string->symbol raw-lang))
(lang-dashed (spaces->dashes
(string-downcase raw-lang))))
(if (coloring-type-exists? lang-sym)
`(pre (code (@ (class ,(string-append
"colorize"
" language-" lang-dashed)))
,@(->> code*
(html-colorize lang-sym)
(html->sxml)
(cdr))))
`(pre (code ,code*))))
`(pre (code ,@contents)))))))
(define (enable-www-lowdown-colorize!)
(block-hook (cons fenced-code-block (block-hook)))
(markdown-html-conversion-rules*
(append fenced-code-block-conversion-rules*
(markdown-html-conversion-rules*)))
(void))
|
3599d261860e40d3c86f122c60dc005135333faa3d5e8e179587e8a09c58bcee | dwayne/haskell-programming | Main.hs | module Main where
import Control.Monad (forever, when)
import Data.List (intercalate)
import Data.Traversable (traverse)
import Morse (stringToMorse, morseToChar)
import System.Environment (getArgs)
import System.Exit (exitFailure, exitSuccess)
import System.IO (hGetLine, hIsEOF, stdin)
main :: IO ()
main = do
mode <- getArgs
case mode of
[arg] ->
case arg of
"from" -> convertFromMorse
"to" -> convertToMorse
_ -> argError
_ -> argError
where
argError = do
putStrLn "Please specify the\
\ first argument\
\ as being 'from' or\
\ 'to' morse,\
\ such as: morse to"
exitFailure
convertToMorse :: IO ()
convertToMorse = forever $ do
weAreDone <- hIsEOF stdin
when weAreDone exitSuccess
-- otherwise, proceed.
line <- hGetLine stdin
convertLine line
where
convertLine line = do
let morse = stringToMorse line
case morse of
Just str -> putStrLn $ intercalate " " str
Nothing -> do
putStrLn $ "ERROR: " ++ line
exitFailure
convertFromMorse :: IO ()
convertFromMorse = forever $ do
weAreDone <- hIsEOF stdin
when weAreDone exitSuccess
-- otherwise, proceed.
line <- hGetLine stdin
convertLine line
where
convertLine line = do
let decoded :: Maybe String
decoded = traverse morseToChar (words line)
case decoded of
Just s -> putStrLn s
Nothing -> do
putStrLn $ "ERROR: " ++ line
exitFailure
| null | https://raw.githubusercontent.com/dwayne/haskell-programming/d08679e76cfd39985fa2ee3cd89d55c9aedfb531/ch14/morse/app/Main.hs | haskell | otherwise, proceed.
otherwise, proceed. | module Main where
import Control.Monad (forever, when)
import Data.List (intercalate)
import Data.Traversable (traverse)
import Morse (stringToMorse, morseToChar)
import System.Environment (getArgs)
import System.Exit (exitFailure, exitSuccess)
import System.IO (hGetLine, hIsEOF, stdin)
main :: IO ()
main = do
mode <- getArgs
case mode of
[arg] ->
case arg of
"from" -> convertFromMorse
"to" -> convertToMorse
_ -> argError
_ -> argError
where
argError = do
putStrLn "Please specify the\
\ first argument\
\ as being 'from' or\
\ 'to' morse,\
\ such as: morse to"
exitFailure
convertToMorse :: IO ()
convertToMorse = forever $ do
weAreDone <- hIsEOF stdin
when weAreDone exitSuccess
line <- hGetLine stdin
convertLine line
where
convertLine line = do
let morse = stringToMorse line
case morse of
Just str -> putStrLn $ intercalate " " str
Nothing -> do
putStrLn $ "ERROR: " ++ line
exitFailure
convertFromMorse :: IO ()
convertFromMorse = forever $ do
weAreDone <- hIsEOF stdin
when weAreDone exitSuccess
line <- hGetLine stdin
convertLine line
where
convertLine line = do
let decoded :: Maybe String
decoded = traverse morseToChar (words line)
case decoded of
Just s -> putStrLn s
Nothing -> do
putStrLn $ "ERROR: " ++ line
exitFailure
|
5d67b20898de7921e6675fa1f49d6060a313b8cec2f8c322f95569db10224713 | YoshikuniJujo/test_haskell | VulkanComponentEnum.hs | # OPTIONS_GHC -Wall -fno - warn - tabs #
module VulkanComponentEnum where
import MakeEnum
make :: IO ()
make = createFileWithDefault vulkanCore "Component.Enum" ["Data.Word"] [
( Just "SwizzleIdentity", [],
("Swizzle", "VkComponentSwizzle", ["Show", "Eq", "Storable"]) )
] []
| null | https://raw.githubusercontent.com/YoshikuniJujo/test_haskell/6ea44c1048805a62979669c185ab32ba9f4d2e02/themes/gui/vulkan/try-vulkan-middle/tools/VulkanComponentEnum.hs | haskell | # OPTIONS_GHC -Wall -fno - warn - tabs #
module VulkanComponentEnum where
import MakeEnum
make :: IO ()
make = createFileWithDefault vulkanCore "Component.Enum" ["Data.Word"] [
( Just "SwizzleIdentity", [],
("Swizzle", "VkComponentSwizzle", ["Show", "Eq", "Storable"]) )
] []
|
|
d6749e13501a1d3a6cd5d62116b1bb7f68a187283e425926553213904ad4dbdb | peti/titlecase | Unit.hs | module Unit where
import Data.Text.Titlecase
import qualified Data.Text.Titlecase.Internal as Titlecase
import Data.Text.Titlecase.Internal hiding (articles, conjunctions, prepositions)
import Test.Tasty
import Test.Tasty.HUnit
import Data.Char ( toLower )
tests :: TestTree
tests = testGroup "Unit tests" [articles, conjunctions, prepositions]
articles :: TestTree
articles = testGroup "Articles" [articleFirst, articleLast, articleIgnored]
conjunctions :: TestTree
conjunctions = testGroup "Conjunctions" [conjunctionFirst, conjunctionLast, conjunctionIgnored]
prepositions :: TestTree
prepositions = testGroup "Prepositions" [prepositionFirst, prepositionLast, prepositionIgnored]
testTitlecase, testFirst, testLast, testIgnored :: String -> Assertion
testTitlecase t = titlecase (map toLower t) @?= t
toTitleFirst :: String -> String
toTitleFirst t = unwords $ case words t of
[] -> []
(x:xs) -> toTitle x : xs
toTitleLast :: String -> String
toTitleLast t = unwords $ go $ words t
where
go [] = []
go [x] = [toTitle x]
go (x:xs) = x : go xs
testFirst t = testTitlecase $ toTitleFirst t <#> "Is First, so It Is Capitalized"
testLast t = testTitlecase $ "This Sentence Capitalizes" <#> toTitleLast t
testIgnored t = testTitlecase $ "This Sentence Keeps" <#> t <#> "As Is"
articleFirst, articleLast, articleIgnored :: TestTree
articleFirst = testCase "article is first" $ mapM_ (testFirst . unArticle) Titlecase.articles
articleLast = testCase "article is last" $ mapM_ (testLast . unArticle) Titlecase.articles
articleIgnored = testCase "article is ignored" $ mapM_ (testIgnored . unArticle) Titlecase.articles
conjunctionFirst, conjunctionLast, conjunctionIgnored :: TestTree
conjunctionFirst = testCase "conjunction is first" $ mapM_ (testFirst . unConjunction) Titlecase.conjunctions
conjunctionLast = testCase "conjunction is last" $ mapM_ (testLast . unConjunction) Titlecase.conjunctions
conjunctionIgnored = testCase "conjunction is ignored" $ mapM_ (testIgnored . unConjunction) Titlecase.conjunctions
prepositionFirst, prepositionLast, prepositionIgnored :: TestTree
prepositionFirst = testCase "preposition is first" $ mapM_ (testFirst . unPreposition) Titlecase.prepositions
prepositionLast = testCase "preposition is last" $ mapM_ (testLast . unPreposition) Titlecase.prepositions
prepositionIgnored = testCase "preposition is ignored" $ mapM_ (testIgnored . unPreposition) Titlecase.prepositions
| null | https://raw.githubusercontent.com/peti/titlecase/d968000d7cdf62b816e71ddb1de47aa6fdb6fa91/tests/Unit.hs | haskell | module Unit where
import Data.Text.Titlecase
import qualified Data.Text.Titlecase.Internal as Titlecase
import Data.Text.Titlecase.Internal hiding (articles, conjunctions, prepositions)
import Test.Tasty
import Test.Tasty.HUnit
import Data.Char ( toLower )
tests :: TestTree
tests = testGroup "Unit tests" [articles, conjunctions, prepositions]
articles :: TestTree
articles = testGroup "Articles" [articleFirst, articleLast, articleIgnored]
conjunctions :: TestTree
conjunctions = testGroup "Conjunctions" [conjunctionFirst, conjunctionLast, conjunctionIgnored]
prepositions :: TestTree
prepositions = testGroup "Prepositions" [prepositionFirst, prepositionLast, prepositionIgnored]
testTitlecase, testFirst, testLast, testIgnored :: String -> Assertion
testTitlecase t = titlecase (map toLower t) @?= t
toTitleFirst :: String -> String
toTitleFirst t = unwords $ case words t of
[] -> []
(x:xs) -> toTitle x : xs
toTitleLast :: String -> String
toTitleLast t = unwords $ go $ words t
where
go [] = []
go [x] = [toTitle x]
go (x:xs) = x : go xs
testFirst t = testTitlecase $ toTitleFirst t <#> "Is First, so It Is Capitalized"
testLast t = testTitlecase $ "This Sentence Capitalizes" <#> toTitleLast t
testIgnored t = testTitlecase $ "This Sentence Keeps" <#> t <#> "As Is"
articleFirst, articleLast, articleIgnored :: TestTree
articleFirst = testCase "article is first" $ mapM_ (testFirst . unArticle) Titlecase.articles
articleLast = testCase "article is last" $ mapM_ (testLast . unArticle) Titlecase.articles
articleIgnored = testCase "article is ignored" $ mapM_ (testIgnored . unArticle) Titlecase.articles
conjunctionFirst, conjunctionLast, conjunctionIgnored :: TestTree
conjunctionFirst = testCase "conjunction is first" $ mapM_ (testFirst . unConjunction) Titlecase.conjunctions
conjunctionLast = testCase "conjunction is last" $ mapM_ (testLast . unConjunction) Titlecase.conjunctions
conjunctionIgnored = testCase "conjunction is ignored" $ mapM_ (testIgnored . unConjunction) Titlecase.conjunctions
prepositionFirst, prepositionLast, prepositionIgnored :: TestTree
prepositionFirst = testCase "preposition is first" $ mapM_ (testFirst . unPreposition) Titlecase.prepositions
prepositionLast = testCase "preposition is last" $ mapM_ (testLast . unPreposition) Titlecase.prepositions
prepositionIgnored = testCase "preposition is ignored" $ mapM_ (testIgnored . unPreposition) Titlecase.prepositions
|
|
e31161e4a31767b6e3267d63a40f1178b883cb985df3875abe6e4869428adf84 | jyh/metaprl | itt_comment.ml | doc <:doc<
@module[Itt_comment]
Terms used for comments in the @Nuprl type theory.
@docoff
----------------------------------------------------------------
@begin[license]
Copyright (C) 2000 Jason Hickey, Caltech
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
Author: Jason Hickey
@email{}
@end[license]
>>
extends Base_theory
(************************************************************************
* UNIVERSES AND EQUALITY
************************************************************************)
prec prec_type
prec prec_equal
(************************************************
* TeX mode.
*)
dform math_type_df1 : mode[tex] :: math_type{'t} =
slot{'t}
izone `"\\,\\mathtt{" ezone
`"Type"
izone "}" ezone
dform math_equal_df1 : mode[tex] :: math_equal{'T; 'a; 'b} =
izone `"{" ezone
slot{'a}
izone `" = " ezone
slot{'b}
izone `" \\in " ezone
slot{'T}
izone `"}" ezone
dform math_member_df1 : mode[tex] :: math_member{'T; 'a} =
izone `"{" ezone
slot{'a}
izone `" \\in " ezone
slot{'T}
izone `"}" ezone
dform math_cumulativity_df1 : mode[tex] :: math_cumulativity{'i; 'j} =
izone `"{{\\it cumulativity}[" ezone
slot{'i}
izone `", " ezone
slot{'j}
izone `"]}" ezone
(************************************************
* Normal mode.
*)
dform equal_df : except_mode[tex] :: parens :: "prec"[prec_equal] :: math_equal{'T; 'a; 'b} =
szone pushm slot{'a} space `"= " slot{'b} space Mpsymbols!member `" " slot{'T} popm ezone
dform member_df2 : mode[tex] :: parens :: "prec"[prec_equal] :: math_member{'T; 'a} =
szone pushm slot{'a} space `"IN" hspace slot{'T} popm ezone
dform type_df1 : except_mode[tex] :: parens :: "prec"[prec_type] :: math_type{'a} =
slot{'a} " " `"Type"
dform univ_df1 : math_univ{'i} =
mathbbU sub{'i}
dform cumulativity_df : except_mode[tex] :: math_cumulativity{'i; 'j} =
slot{'i} `" < " subl slot{'j}
(************************************************************************
* VOID
************************************************************************)
dform math_False_df1 : mode[tex] :: math_false =
izone `"{\\bot}" ezone
dform math_False_df2 : except_mode[tex] :: math_false =
it["False"]
(************************************************************************
* UNIT
************************************************************************)
dform math_Unit_df1 : math_unit =
math_i["Unit"]
dform math_True_df1 : mode[tex] :: math_true =
izone `"{\\top}" ezone
dform math_True_df2 : except_mode[tex] :: math_true =
it["True"]
dform math_it_df1 : mode[tex] :: math_it =
izone `"\\cdot " ezone
dform math_it_df2 : except_mode[tex] :: math_it =
Mpsymbols!cdot
(************************************************************************
* ATOM
************************************************************************)
dform math_Atom_df1 : math_atom =
math_i["Atom"]
dform math_token_df1 : math_token{'t} =
math_i["token"] `"(" slot{'t} `")"
(************************************************************************
* BOOL
************************************************************************)
dform math_Bool_df1 : math_bool =
math_i["Bool"]
dform math_btrue_df1 : math_btrue =
math_i["tt"]
dform math_bfalse_df1 : math_bfalse =
math_i["ff"]
(************************************************
* TeX mode
*)
dform math_bor_df1 : mode[tex] :: math_bor{'a; 'b} =
izone `"{" ezone
slot{'a}
izone `"\\vee_b " ezone
slot{'b}
izone `"}" ezone
dform math_band_df1 : mode[tex] :: math_band{'a; 'b} =
izone `"{" ezone
slot{'a}
izone `"\\wedge_b " ezone
slot{'b}
izone `"}" ezone
dform math_bimplies_df1 : mode[tex] :: math_bimplies{'a; 'b} =
izone `"{" ezone
slot{'a}
izone `"\\Rightarrow_b " ezone
slot{'b}
izone `"}" ezone
dform math_bnot_df1 : mode[tex] :: math_bnot{'a} =
izone `"{\\neg_b " ezone
slot{'a}
izone `"}" ezone
dform math_if_df1 : mode[tex] :: math_if{'a; 'b; 'c} =
izone `"\\mathop{\\bf if}" ezone
szone{'a}
izone `"\\mathrel{\\bf then}" ezone
szone{'b}
izone `"\\mathrel{\\bf else}" ezone
szone{'c}
(************************************************
* Normal mode.
*)
prec prec_bimplies
prec prec_bor
prec prec_band
prec prec_bnot
prec prec_bimplies < prec_bor
prec prec_bor < prec_band
prec prec_band < prec_bnot
dform bor_df : parens :: "prec"[prec_bor] :: except_mode[tex] :: math_bor{'a; 'b} =
slot{'a} " " vee subb " " slot{'b}
dform band_df : parens :: "prec"[prec_band] :: except_mode[tex] :: math_band{'a; 'b} =
slot{'a} " " wedge subb " " slot{'b}
dform bimplies_df : parens :: "prec"[prec_bimplies] :: except_mode[tex] :: math_bimplies{'a; 'b} =
slot{'a} " " Rightarrow subb " " slot{'b}
dform bnot_df : parens :: "prec"[prec_bnot] :: except_mode[tex] :: math_bnot{'a} =
tneg subb slot{'a}
dform ifthenelse_df : parens :: "prec"[prec_bor] :: except_mode[tex] :: math_if{'e1; 'e2; 'e3} =
szone pushm[0] pushm[3] `"if" `" " szone{slot{'e1}} `" " `"then" hspace
szone{slot{'e2}} popm hspace
pushm[3] `"else" hspace szone{slot{'e3}} popm popm ezone
(************************************************************************
* INTEGERS
************************************************************************)
dform math_int_df1 : mode[tex] :: math_int =
izone `"{\\mathbb Z}" ezone
dform math_number_df1 : mode[tex] :: math_number{'i} =
izone `"{{\\it number}[" ezone
slot{'i}
izone `"]}" ezone
dform math_ind_df1 : mode[tex] :: math_ind{'i; 'a; 'b; 'down; 'base; 'c; 'd; 'up} =
izone `"{\\it ind}(" ezone
slot{'i}
izone `"; " ezone
slot{'a}
izone `", " ezone
slot{'b}
izone `". " ezone
slot{'down}
izone `"; " ezone
slot{'base}
izone `"; " ezone
slot{'c}
izone `", " ezone
slot{'c}
izone `". " ezone
slot{'up}
izone `")" ezone
dform math_add_df1 : mode[tex] :: math_add{'i; 'j} =
slot{'i}
izone `"+" ezone
slot{'j}
dform math_sub_df1 : mode[tex] :: math_sub{'i; 'j} =
slot{'i}
izone `"-" ezone
slot{'j}
dform math_mul_df1 : mode[tex] :: math_mul{'i; 'j} =
slot{'i}
izone `"*" ezone
slot{'j}
dform math_div_df1 : mode[tex] :: math_div{'i; 'j} =
slot{'i}
izone `"/" ezone
slot{'j}
dform math_rem_df1 : mode[tex] :: math_rem{'i; 'j} =
slot{'i}
izone `"\\mathrel{\\bf rem}" ezone
slot{'j}
dform math_gt_df1 : mode[tex] :: math_gt{'i; 'j} =
slot{'i}
izone `">" ezone
slot{'j}
dform math_ge_df1 : mode[tex] :: math_ge{'i; 'j} =
slot{'i}
izone `"\\ge " ezone
slot{'j}
dform math_lt_df1 : mode[tex] :: math_lt{'i; 'j} =
slot{'i}
izone `"<" ezone
slot{'j}
dform math_le_df1 : mode[tex] :: math_le{'i; 'j} =
slot{'i}
izone `"\\le " ezone
slot{'j}
(************************************************
* Normal mode
*)
prec prec_compare
prec prec_add
prec prec_mul
dform int_prl_df : except_mode[src] :: math_int = mathbbZ
dform number_df : except_mode[tex] :: math_number{'n} =
slot{'n}
dform add_df1 : except_mode[tex] :: parens :: "prec"[prec_add] :: math_add{'a; 'b} =
slot["le"]{'a} `" + " slot["lt"]{'b}
dform sub_df1 : except_mode[tex] :: parens :: "prec"[prec_add] :: math_sub{'a; 'b} =
slot["lt"]{'a} `" - " slot["le"]{'b}
dform mul_df1 : except_mode[tex] :: parens :: "prec"[prec_mul] :: math_mul{'a; 'b} =
slot["lt"]{'a} `" * " slot["le"]{'b}
dform div_df1 : except_mode[tex] :: parens :: "prec"[prec_mul] :: math_div{'a; 'b} =
slot["lt"]{'a} Mpsymbols!"div" slot["le"]{'b}
dform rem_df1 : except_mode[tex] :: parens :: "prec"[prec_mul] :: math_rem{'a; 'b} =
slot["lt"]{'a} `" % " slot["le"]{'b}
dform lt_df1 : except_mode[tex] :: parens :: "prec"[prec_compare] :: math_lt{'a; 'b} =
slot["le"]{'a} `" < " slot["le"]{'b}
dform le_df1 : except_mode[tex] :: parens :: "prec"[prec_compare] :: math_le{'a; 'b} =
slot["lt"]{'a} Mpsymbols!le slot["le"]{'b}
dform ge_df1 : except_mode[tex] :: parens :: "prec"[prec_compare] :: math_ge{'a; 'b} =
slot["lt"]{'a} Mpsymbols!ge slot["le"]{'b}
dform gt_df1 : except_mode[tex] :: parens :: "prec"[prec_compare] :: math_gt{'a; 'b} =
slot["lt"]{'a} `" > " slot["le"]{'b}
(************************************************************************
* UNION
************************************************************************)
(************************************************
* TeX mode
*)
dform math_union_df1 : mode[tex] :: math_union{'A; 'B} =
izone `"{" ezone
slot{'A}
izone `"+" ezone
slot{'B}
izone `"}" ezone
dform math_inl_df1 : mode[tex] :: math_inl{'x} =
izone `"{{\\it inl}(" ezone
slot{'x}
izone `")}" ezone
dform math_inr_df1 : mode[tex] :: math_inr{'x} =
izone `"{{\\it inr}(" ezone
slot{'x}
izone `")}" ezone
dform math_decide_df1 : mode[tex] :: math_decide{'x; 'y; 'a; 'z; 'b} =
izone `"{\\mathop{\\bf match}" ezone
slot{'x}
izone `"\\mathrel{\\bf with}" ezone
math_inl{'y}
izone `"\\rightarrow " ezone
slot{'a} `"|" math_inr{'z}
izone `"\\rightarrow " ezone
slot{'b}
izone `"}" ezone
dform math_or_df1 : mode[tex] :: math_or{'a; 'b} =
izone `"{" ezone
slot{'a}
izone `"\\vee " ezone
slot{'b}
izone `"}" ezone
dform math_cor_df1 : mode[tex] :: math_cor{'a; 'b} =
izone `"{" ezone
slot{'a}
izone `"\\vee_c " ezone
slot{'b}
izone `"}" ezone
(************************************************
* Normal display.
*)
prec prec_inl
prec prec_union
prec prec_or
dform union_df : except_mode[tex] :: parens :: "prec"[prec_union] :: math_union{'A; 'B} =
slot{'A} " " `"+" " " slot{'B}
dform inl_df : except_mode[tex] :: parens :: "prec"[prec_inl] :: math_inl{'a} =
`"inl" " " slot{'a}
dform inr_df : except_mode[tex] :: parens :: "prec"[prec_inl] :: math_inr{'a} =
`"inr" " " slot{'a}
dform decide_df : except_mode[tex] :: math_decide{'x; 'y; 'a; 'z; 'b} =
szone pushm[0] pushm[3] `"match" " " slot{'x} " " `"with" hspace
`"inl " slot{'y} `" -> " slot{'a} popm hspace
pushm[3] `" | inr " slot{'z} `" -> " slot{'b} popm popm ezone
declare or_df{'a : Dform} : Dform
dform or_df1 : parens :: "prec"[prec_or] :: math_or{'a; 'b} =
szone pushm[0] slot["le"]{'a} or_df{'b} popm ezone
dform or_df2 : or_df{math_or{'a; 'b}} =
or_df{'a} or_df{'b}
dform or_df3 : or_df{'a} =
hspace Mpsymbols!vee " " slot{'a}
declare cor_df{'a : Dform} : Dform
dform cor_df1 : except_mode[tex] :: parens :: "prec"[prec_or] :: math_cor{'a; 'b} =
szone pushm[0] slot["le"]{'a} cor_df{'b} popm ezone
dform cor_df2 : cor_df{math_cor{'a; 'b}} =
cor_df{'a} cor_df{'b}
dform cor_df3 : cor_df{'a} =
hspace Mpsymbols!vee `"c" " " slot{'a}
(************************************************************************
* FUNCTIONS
************************************************************************)
(************************************************
* TeX mode
*)
dform math_rfun_df1 : mode[tex] :: math_rfun[x]{'f; 'A; 'B} =
izone `"\\left\\{" ezone
'f `"|" 'x
izone `"\\colon " ezone
'A
izone `"\\rightarrow " ezone
'B
izone `"\\right\\}" ezone
dform math_dfun_df1 : mode[tex] :: math_fun[x:s]{'A; 'B} =
ifvar[x:v]{'x}
ifvar[x:v]{izone} ifvar[x:v]{slot["\\colon "]} ifvar[x:v]{ezone}
'A
izone `"\\rightarrow " ezone
'B
dform math_lambda_df1 : mode[tex] :: math_lambda{'v; 'b} =
izone `"\\lambda " ezone
'v
izone `"." ezone
'b
dform math_apply_df1 : mode[tex] :: math_apply{'f; 'a} =
'f
izone `"\\ " ezone
'a
dform math_well_founded_df1 : mode[tex] :: math_well_founded{'A; 'x; 'y; 'R} =
izone `"{{\\it well\\_founded}(" ezone
'A
izone `";" ezone
'x
izone `"," ezone
'y
izone `"." ezone
'R
izone `")}" ezone
dform math_well_founded_assum_df1 : mode[tex] :: math_well_founded_assum{'A; 'x; 'y; 'R; 'P} =
izone `"{{\\it well\\_founded\\_assum}(" ezone
'A
izone `";" ezone
'x
izone `"," ezone
'y
izone `"." ezone
'R
izone `";" ezone
'P
izone `")}" ezone
dform math_well_founded_prop_df1 : mode[tex] :: math_well_founded_prop{'P} =
izone `"{{\\it well\\_founded\\_prop}(" ezone
'P
izone `")}" ezone
dform math_well_founded_apply_df1 : mode[tex] :: math_well_founded_apply{'P; 'a} =
izone `"{{\\it well\\_founded\\_apply}(" ezone
'P
izone `";" ezone
'a
izone `")}" ezone
dform math_fix_df1 : mode[tex] :: math_fix{'f; 'b} =
izone `"{\\it fix}(" ezone
'f
izone `"." ezone
'b
izone `")" ezone
dform math_all_df1 : mode[tex] :: math_all{'x; 'A; 'B} =
izone `"\\forall " ezone
'x
izone `"\\colon " ezone
'A
izone `"." ezone
'B
dform math_implies_df1 : mode[tex] :: math_implies{'A; 'B} =
'A
izone `"\\Rightarrow " ezone
'B
dform math_iff_df1 : mode[tex] :: math_iff{'A; 'B} =
'A
izone `"\\Leftrightarrow " ezone
'B
dform math_not_df1 : mode[tex] :: math_not{'A} =
izone `"\\neg " ezone
'A
(************************************************
* Normal mode.
*)
prec prec_fun
prec prec_apply
prec prec_lambda
prec prec_lambda < prec_apply
prec prec_fun < prec_apply
prec prec_fun < prec_lambda
prec prec_not
prec prec_quant
prec prec_iff
prec prec_implies
dform dfun_df2 : parens :: "prec"[prec_fun] :: except_mode[tex] :: math_fun[x:s]{'A; 'B} =
ifvar[x:v]{bvar{'x}} ifvar[x:v]{slot[":"]} slot{'A} " " rightarrow " " slot{'B}
dform fun_df3 : except_mode[tex] :: math_rfun[x]{'f; 'A; 'B} =
"{" " " slot{bvar{'f}} mid math_fun[x]{'A; 'B} `" }"
dform apply_df1 : parens :: "prec"[prec_apply] :: except_mode[tex] :: math_apply{'f; 'a} =
slot["lt"]{'f} " " slot["le"]{'a}
dform lambda_df1 : parens :: "prec"[prec_lambda] :: except_mode[tex] :: math_lambda{'x; 'b} =
Mpsymbols!lambda slot{'x} `"." slot{'b}
dform fix_df1 : except_mode[tex] :: except_mode[tex] :: math_fix{'f; 'b} =
`"fix" `"(" slot{'f} `"." slot{'b} `")"
dform well_founded_prop_df : except_mode[tex] :: except_mode[tex] :: math_well_founded_prop{'A} =
`"WellFounded " slot{'A} " " rightarrow `" Prop"
dform well_founded_apply_df : except_mode[tex] :: except_mode[tex] :: math_well_founded_apply{'P; 'a} =
slot{'P} `"[" slot{'a} `"]"
dform well_founded_assum_df : except_mode[tex] :: except_mode[tex] :: math_well_founded_assum{'A; 'a1; 'a2; 'R; 'P} =
szone pushm[3] `"WellFounded " Mpsymbols!forall slot{'a2} `":" slot{'A} `"."
`"(" Mpsymbols!forall slot{'a1} `":" slot{'A} `". " slot{'R} " " Rightarrow math_well_founded_apply{'P; 'a1} `")"
Rightarrow math_well_founded_apply{'P; 'a2} popm ezone
dform well_founded_df : except_mode[tex] :: except_mode[tex] :: math_well_founded{'A; 'a; 'b; 'R} =
szone pushm[3] `"WellFounded " slot{'a} `"," slot{'b} `":" slot{'A} `"." slot{'R} popm ezone
(*
* Quantifiers.
*)
dform not_df1 : except_mode[tex] :: parens :: "prec"[prec_not] :: math_not{'a} =
Mpsymbols!tneg slot["le"]{'a}
dform implies_df : except_mode[tex] :: parens :: "prec"[prec_implies] :: math_implies{'a; 'b} =
slot["le"]{'a} " " Mpsymbols!Rightarrow " " slot["lt"]{'b}
dform iff_df : except_mode[tex] :: parens :: "prec"[prec_iff] :: math_iff{'a; 'b} =
slot["le"]{'a} " " Mpsymbols!Leftrightarrow " " slot["lt"]{'b}
dform all_df1 : except_mode[tex] :: parens :: "prec"[prec_quant] :: except_mode[tex] :: math_all{'x; 'A; 'B} =
pushm[3] Mpsymbols!forall slot{'x} `":" slot{'A} sbreak["",". "] slot{'B} popm
(************************************************************************
* PRODUCT
************************************************************************)
(************************************************
* TeX mode.
*)
dform math_prod_df1 : mode[tex] :: math_prod{'x; 'A; 'B} =
izone `"{" ezone
slot{'x}
izone `"\\colon " ezone
slot{'A}
izone `"\\times " ezone
slot{'B}
izone `"}" ezone
dform math_prod_df2 : mode[tex] :: math_prod{'A; 'B} =
izone `"{" ezone
slot{'A}
izone `"\\times " ezone
slot{'B}
izone `"}" ezone
dform math_pair_df1 : mode[tex] :: math_pair{'a; 'b} =
izone `"{(" ezone
slot{'a}
izone `", " ezone
slot{'b}
izone `")}" ezone
dform math_spread_df1 : mode[tex] :: math_spread{'e; 'u; 'v; 'b} =
izone `"{\\mathop{{\\bf match}}" ezone
slot{'e}
izone `"\\mathrel{{\\bf with}}" ezone
math_pair{'u; 'v}
izone `"\\rightarrow " ezone
slot{'b}
izone `"}" ezone
dform math_fst_df1 : mode[tex] :: math_fst{'e} =
izone `"{{\\it fst}(" ezone
slot{'e}
izone `")}" ezone
dform math_snd_df1 : mode[tex] :: math_snd{'e} =
izone `"{{\\it snd}(" ezone
slot{'e}
izone `")}" ezone
dform math_and_df1 : mode[tex] :: math_and{'a; 'b} =
slot{'a}
izone `"\\wedge " ezone
slot{'b}
dform math_cand_df1 : mode[tex] :: math_cand{'a; 'b} =
slot{'a}
izone `"\\wedge" ezone subc
slot{'b}
dform math_exists_df1 : mode[tex] :: math_exists{'x; 'A; 'B} =
izone `"{\\exists " ezone
slot{'x}
izone `"\\colon " ezone
slot{'A}
izone `"." ezone
slot{'B}
izone `"}" ezone
dform math_exists_df1 : mode[tex] :: math_exists =
izone `"\\exists " ezone
(************************************************
* NORMAL MODE
*)
prec prec_prod
prec prec_spread
prec prec_and
prec prec_implies < prec_iff
prec prec_iff < prec_or
prec prec_or < prec_and
prec prec_and < prec_not
prec prec_quant < prec_iff
dform prod_df : parens :: "prec"[prec_prod] :: except_mode[tex] :: math_prod{'A; 'B} =
pushm[0] slot{'A} " " times " " slot{'B} popm
dform prod_df2 : parens :: "prec"[prec_prod] :: except_mode[tex] :: math_prod{'x; 'A; 'B} =
slot{'x} `":" slot{'A} " " times " " slot{'B}
dform pair_prl_df : except_mode[tex] :: except_mode[tex] :: math_pair{'a; 'b} =
pushm[0] `"(" slot{'a}`"," slot{'b} `")" popm
dform spread_prl_df1 : parens :: "prec"[prec_spread] :: except_mode[tex] :: except_mode[tex] :: math_spread{'e; 'u; 'v; 'b} =
szone pushm[1]
keyword["match"] `" " slot{'e} `" " keyword["with"] hspace
math_pair{'u; 'v} `" " Mpsymbols!rightarrow hspace
slot{'b}
popm ezone
dform fst_df1 : except_mode[tex] :: except_mode[tex] :: math_fst{'e} =
slot{'e} `".1"
dform snd_df1 : except_mode[tex] :: except_mode[tex] :: math_snd{'e} =
slot{'e} `".2"
declare and_df{'a : Dform} : Dform
dform and_df1 : except_mode[tex] :: parens :: "prec"[prec_and] :: math_and{'a; 'b} =
szone pushm[0] slot["le"]{'a} and_df{'b} popm ezone
dform and_df2 : and_df{math_and{'a; 'b}} =
and_df{'a} and_df{'b}
dform and_df3 : and_df{'a} =
hspace Mpsymbols!wedge " " slot{'a}
declare cand_df{'a : Dform} : Dform
dform cand_df1 : except_mode[tex] :: parens :: "prec"[prec_and] :: math_cand{'a; 'b} =
szone pushm[0] slot["le"]{'a} cand_df{'b} popm ezone
dform cand_df2 : and_df{math_cand{'a; 'b}} =
cand_df{'a} cand_df{'b}
dform cand_df3 : cand_df{'a} =
hspace Mpsymbols!wedge `"c" " " slot{'a}
dform exists_df1 : except_mode[tex] :: parens :: "prec"[prec_quant] :: except_mode[tex] :: math_exists{'x; 'A; 'B} =
pushm[3] Mpsymbols!"exists" slot{'x} `":" slot{'A} sbreak["",". "] slot{'B} popm
(************************************************************************
* SET TYPE
************************************************************************)
(************************************************
* TeX mode
*)
dform math_set_df1 : mode[tex] :: math_set{'x; 'A; 'B} =
izone `"\\{" ezone
slot{'x}
izone `"\\colon " ezone
slot{'A} `"|" slot{'B}
izone `"\\}" ezone
dform math_squash_df1 : mode[tex] :: math_squash{'A} =
izone `"\\sq{" ezone
slot{'A}
izone `"}" ezone
(************************************************
* Normal mode
*)
dform set_df1 : except_mode[tex] :: math_set{'x; 'A; 'B} =
pushm[3] `"{" bvar{'x} `":" slot{'A} mid slot{'B} `"}" popm
dform math_squash_df2 : except_mode[tex] :: math_squash{'A} = "[" 'A "]"
(************************************************************************
 ************************************************************************
* Decidable
************************************************************************)
(************************************************
* TeX mode
*)
dform math_decidable_df1 : mode[tex] :: math_decidable{'P} =
izone `"{{\\it decidable}(" ezone
slot{'P}
izone `")}" ezone
(************************************************
* Normal mode
*)
dform decidable_df1 : except_mode[tex] :: math_decidable{'A} =
`"decidable(" slot{'A} `")"
(************************************************************************
* INTERSECTION
************************************************************************)
(************************************************
* TeX mode
*)
dform math_isect_df1 : mode[tex] :: math_isect{'x; 'A; 'B} =
izone `"{\\bigcap_{" ezone
slot{'x}
izone `"\\colon " ezone
slot{'A}
izone `"} " ezone
slot{'B}
izone `"}" ezone
dform math_record_df1 : mode[tex] :: math_record{'t} =
izone `"{\\left\\{" ezone
slot{'t}
izone `"\\right\\}}" ezone
dform math_bisect_df1 : mode[tex] :: math_bisect{'A; 'B} =
izone `"{" ezone
slot{'A}
izone `"\\cap " ezone
slot{'B}
izone `"}" ezone
(************************************************
* Normal mode
*)
dform isect_df1 : except_mode[tex] :: math_isect{'x; 'A; 'B} =
cap slot{'x} `":" slot{'A} `"." slot{'B}
dform top_df : math_top =
math_i["Top"]
dform record_df : except_mode[tex] :: math_record{'t} =
pushm[0] szone `"{ " pushm[0] 't popm hspace `"}" ezone popm
prec prec_bisect
dform bisect_df : except_mode[tex] :: parens :: "prec"[prec_bisect] :: math_bisect{'A; 'B} =
slot["le"]{'A} `" " cap space slot{'B}
(************************************************************************
* UNION
************************************************************************)
(************************************************
* TeX mode
*)
dform math_tunion_df1 : mode[tex] :: math_tunion{'x; 'A; 'B} =
izone `"{\\bigcup_{" ezone
slot{'x}
izone `"\\colon " ezone
slot{'A}
izone `"} " ezone
slot{'B}
izone `"}" ezone
dform math_bunion_df1 : mode[tex] :: math_bunion{'A; 'B} =
izone `"{" ezone
slot{'A}
izone `"\\cup " ezone
slot{'B}
izone `"}" ezone
(************************************************
* Normal mode
*)
dform tunion_df1 : except_mode[tex] :: math_tunion{'x; 'A; 'B} =
cup slot{'x} `":" slot{'A} `"." slot{'B}
prec prec_bunion
dform bunion_df : except_mode[tex] :: parens :: "prec"[prec_bunion] :: math_bunion{'A; 'B} =
slot["le"]{'A} `" " cup space slot{'B}
(************************************************************************
* RECURSIVE TYPES
************************************************************************)
(************************************************
* TeX mode
*)
dform math_srec_df1 : mode[tex] :: math_srec{'T; 'B} =
izone `"{\\mu(" ezone
slot{'T}
izone `"." ezone
slot{'B}
izone `")}" ezone
dform math_srecind_df1 : mode[tex] :: math_srecind{'t; 'a; 'b; 'c} =
izone `"{{\\it srec\\_ind}(" ezone
slot{'t}
izone `";" ezone
slot{'a}
izone `"," ezone
slot{'b}
izone `"." ezone
slot{'c}
izone `")}" ezone
dform math_prec_df1 : mode[tex] :: math_prec{'T; 'x; 'B; 'a} =
izone `"{\\mu(" ezone
slot{'T}
izone `"," ezone
slot{'x}
izone `"." ezone
slot{'B}
izone `";" ezone
slot{'a}
izone `")}" ezone
dform math_precind_df1 : mode[tex] :: math_precind{'t; 'a; 'b; 'c} =
izone `"{{\\it prec\\_ind}(" ezone
slot{'t}
izone `";" ezone
slot{'a}
izone `"," ezone
slot{'b}
izone `"." ezone
slot{'c}
izone `")}" ezone
dform math_w_df1 : mode[tex] :: math_w{'x; 'A; 'B} =
izone `"{\\mathop{\\it W}(" ezone
slot{'x}
izone `"\\colon " ezone
slot{'A}
izone `"." ezone
slot{'B}
izone `")}" ezone
dform math_tree_df1 : mode[tex] :: math_tree{'A; 'B} =
izone `"{{\\it tree}(" ezone
slot{'A}
izone `";" ezone
slot{'B}
izone `")}" ezone
dform math_treeind_df1 : mode[tex] :: math_treeind{'t; 'a; 'b; 'c; 'd} =
izone `"{{\\it prec\\_ind}(" ezone
slot{'t}
izone `";" ezone
slot{'a}
izone `"," ezone
slot{'b}
izone `"," ezone
slot{'c}
izone `"." ezone
slot{'d}
izone `")}" ezone
dform math_nil_df1 : mode[tex] :: math_nil =
izone `"{\\it nil}" ezone
dform math_cons_df1 : mode[tex] :: math_cons{'h; 't} =
izone `"{{\\it cons}(" ezone
slot{'h}
izone `"," ezone
slot{'t}
izone `")}" ezone
dform math_list_df1 : mode[tex] :: math_list{'l} =
izone `"{{\\it list}(" ezone
slot{'l}
izone `")}" ezone
dform math_listind_df1 : mode[tex] :: math_listind{'e; 'base; 'h; 't; 'f; 'step} =
izone `"{\\mathop{\\bf match}" ezone
slot{'e}
izone `"\\mathrel{\\bf with}" ezone
math_cons{'h; 't}
izone `"." ezone
slot{'f}
izone `"\\rightarrow " ezone
slot{'step}
izone `"}" ezone
(************************************************
* Normal mode
*)
dform srec_df : except_mode[tex] :: math_srec{'T; 'B} =
szone mu `"{" slot{'T} `"." pushm[0] slot{'B} `"}" popm ezone
prec prec_w
prec prec_tree_ind
dform w_df : except_mode[tex] :: parens :: "prec"[prec_w] :: math_w{'x; 'A; 'B} =
mathbbW slot{'x} `":" slot{'A} `"." slot{'B}
dform tree_df : except_mode[tex] :: math_tree{'a; 'f} =
`"tree(" slot{'a} `"," " " slot{'f} `")"
dform tree_ind_df : except_mode[tex] :: parens :: "prec"[prec_tree_ind] :: math_treeind{'z; 'a; 'f; 'g; 'body} =
szone pushm[3] `"tree_ind(" slot{'g} `"." " "
pushm[3] `"let tree(" slot{'a} `", " slot{'f} `") =" space slot{'z} space `"in" popm space
slot{'body} popm ezone
(* unused
prec prec_cons
*)
prec prec_list
declare search{'a : Dform; 'b : Dform} : Dform
(* Empty list *)
dform nil_df : except_mode[tex] :: math_nil = `"[]"
(* Search for nil entry *)
dform cons_df : except_mode[tex] :: math_cons{'a; 'b} =
search{math_cons{'a; math_nil}; 'b}
(* Keep searching down the list *)
dform search_df1 : search{'a; math_cons{'b; 'c}} =
search{math_cons{'b; 'a}; 'c}
(* Found a nil terminator: use bracket notation *)
dform search_df2 : search{'a; math_nil} =
`"[" semicolons{'a} `"]"
(* No nil terminator, so use :: notation *)
dform search_df3 : search{'a; 'b} =
colons{'a} `"::" slot{'b}
(* Reverse entries and separate with ; *)
dform semicolons_df1 : semicolons{math_cons{'a; math_nil}} =
slot{'a}
dform semicolons_df2 : semicolons{math_cons{'a; 'b}} =
semicolons{'b} `";" slot{'a}
(* Reverse entries and separate with :: *)
dform colons_df1 : colons{math_cons{'a; math_nil}} =
slot{'a}
dform colons_df2 : colons{math_cons{'a; 'b}} =
colons{'b} `"::" slot{'a}
dform list_df1 : except_mode[tex] :: parens :: "prec"[prec_list] :: math_list{'a} =
slot{'a} `" List"
dform list_ind_df1 : except_mode[tex] :: parens :: "prec"[prec_list] :: math_listind{'e; 'base; 'h; 't; 'f; 'step} =
szone pushm[1] pushm[3]
`"match " slot{'e} `" with" hspace
`" [] ->" hspace slot{'base} popm hspace
`"| " pushm[0] slot{'h} `"::" slot{'t} `"." slot{'f} `" ->" hspace slot{'step} popm popm ezone
(************************************************************************
* QUOTIENT TYPE
************************************************************************)
(************************************************
* TeX
*)
dform math_quot_df1 : mode[tex] :: math_quot{'T; 'x; 'y; 'E} =
izone `"{" ezone
slot{'x}
izone `"," ezone
slot{'y}
izone `"\\colon " ezone
slot{'T}
izone `"// " ezone
slot{'E}
izone `"}" ezone
(************************************************
* Normal mode
*)
prec prec_quot
dform quot_df1 : except_mode[tex] :: parens :: "prec"[prec_quot] :: math_quot{'A; 'x; 'y; 'E} =
slot{'x} `", " slot{'y} `":" " " slot{'A} `" // " slot{'E}
(*
* -*-
* Local Variables:
* Caml-master: "compile"
* End:
* -*-
*)
| null | https://raw.githubusercontent.com/jyh/metaprl/51ba0bbbf409ecb7f96f5abbeb91902fdec47a19/theories/itt/core/itt_comment.ml | ocaml | ***********************************************************************
* UNIVERSES AND EQUALITY
***********************************************************************
***********************************************
* TeX mode.
***********************************************
* Normal mode.
***********************************************************************
* VOID
***********************************************************************
***********************************************************************
* UNIT
***********************************************************************
***********************************************************************
* ATOM
***********************************************************************
***********************************************************************
* BOOL
***********************************************************************
***********************************************
* TeX mode
***********************************************
* Normal mode.
***********************************************************************
* INTEGERS
***********************************************************************
***********************************************
* Normal mode
***********************************************************************
* UNION
***********************************************************************
***********************************************
* TeX mode
***********************************************
* Normal display.
***********************************************************************
* FUNCTIONS
***********************************************************************
***********************************************
* TeX mode
***********************************************
* Normal mode.
* Quantifiers.
***********************************************************************
* PRODUCT
***********************************************************************
***********************************************
* TeX mode.
***********************************************
* NORMAL MODE
***********************************************************************
* SET TYPE
***********************************************************************
***********************************************
* TeX mode
***********************************************
* Normal mode
***********************************************
* TeX mode
***********************************************
* Normal mode
***********************************************************************
* INTERSECTION
***********************************************************************
***********************************************
* TeX mode
***********************************************
* Normal mode
***********************************************************************
* UNION
***********************************************************************
***********************************************
* TeX mode
***********************************************
* Normal mode
***********************************************************************
* RECURSIVE TYPES
***********************************************************************
***********************************************
* TeX mode
***********************************************
* Normal mode
unused
prec prec_cons
Empty list
Search for nil entry
Keep searching down the list
Found a nil terminator: use bracket notation
No nil terminator, so use :: notation
Reverse entries and separate with ;
Reverse entries and separate with ::
***********************************************************************
* QUOTIENT TYPE
***********************************************************************
***********************************************
* TeX
***********************************************
* Normal mode
* -*-
* Local Variables:
* Caml-master: "compile"
* End:
* -*-
| doc <:doc<
@module[Itt_comment]
Terms used for comments in the @Nuprl type theory.
@docoff
----------------------------------------------------------------
@begin[license]
Copyright (C) 2000 Jason Hickey, Caltech
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
Author: Jason Hickey
@email{}
@end[license]
>>
extends Base_theory
prec prec_type
prec prec_equal
dform math_type_df1 : mode[tex] :: math_type{'t} =
slot{'t}
izone `"\\,\\mathtt{" ezone
`"Type"
izone "}" ezone
dform math_equal_df1 : mode[tex] :: math_equal{'T; 'a; 'b} =
izone `"{" ezone
slot{'a}
izone `" = " ezone
slot{'b}
izone `" \\in " ezone
slot{'T}
izone `"}" ezone
dform math_member_df1 : mode[tex] :: math_member{'T; 'a} =
izone `"{" ezone
slot{'a}
izone `" \\in " ezone
slot{'T}
izone `"}" ezone
dform math_cumulativity_df1 : mode[tex] :: math_cumulativity{'i; 'j} =
izone `"{{\\it cumulativity}[" ezone
slot{'i}
izone `", " ezone
slot{'j}
izone `"]}" ezone
dform equal_df : except_mode[tex] :: parens :: "prec"[prec_equal] :: math_equal{'T; 'a; 'b} =
szone pushm slot{'a} space `"= " slot{'b} space Mpsymbols!member `" " slot{'T} popm ezone
dform member_df2 : mode[tex] :: parens :: "prec"[prec_equal] :: math_member{'T; 'a} =
szone pushm slot{'a} space `"IN" hspace slot{'T} popm ezone
dform type_df1 : except_mode[tex] :: parens :: "prec"[prec_type] :: math_type{'a} =
slot{'a} " " `"Type"
dform univ_df1 : math_univ{'i} =
mathbbU sub{'i}
dform cumulativity_df : except_mode[tex] :: math_cumulativity{'i; 'j} =
slot{'i} `" < " subl slot{'j}
dform math_False_df1 : mode[tex] :: math_false =
izone `"{\\bot}" ezone
dform math_False_df2 : except_mode[tex] :: math_false =
it["False"]
dform math_Unit_df1 : math_unit =
math_i["Unit"]
dform math_True_df1 : mode[tex] :: math_true =
izone `"{\\top}" ezone
dform math_True_df2 : except_mode[tex] :: math_true =
it["True"]
dform math_it_df1 : mode[tex] :: math_it =
izone `"\\cdot " ezone
dform math_it_df2 : except_mode[tex] :: math_it =
Mpsymbols!cdot
dform math_Atom_df1 : math_atom =
math_i["Atom"]
dform math_token_df1 : math_token{'t} =
math_i["token"] `"(" slot{'t} `")"
dform math_Bool_df1 : math_bool =
math_i["Bool"]
dform math_btrue_df1 : math_btrue =
math_i["tt"]
dform math_bfalse_df1 : math_bfalse =
math_i["ff"]
dform math_bor_df1 : mode[tex] :: math_bor{'a; 'b} =
izone `"{" ezone
slot{'a}
izone `"\\vee_b " ezone
slot{'b}
izone `"}" ezone
dform math_band_df1 : mode[tex] :: math_band{'a; 'b} =
izone `"{" ezone
slot{'a}
izone `"\\wedge_b " ezone
slot{'b}
izone `"}" ezone
dform math_bimplies_df1 : mode[tex] :: math_bimplies{'a; 'b} =
izone `"{" ezone
slot{'a}
izone `"\\Rightarrow_b " ezone
slot{'b}
izone `"}" ezone
dform math_bnot_df1 : mode[tex] :: math_bnot{'a} =
izone `"{\\neg_b " ezone
slot{'a}
izone `"}" ezone
dform math_if_df1 : mode[tex] :: math_if{'a; 'b; 'c} =
izone `"\\mathop{\\bf if}" ezone
szone{'a}
izone `"\\mathrel{\\bf then}" ezone
szone{'b}
izone `"\\mathrel{\\bf else}" ezone
szone{'c}
prec prec_bimplies
prec prec_bor
prec prec_band
prec prec_bnot
prec prec_bimplies < prec_bor
prec prec_bor < prec_band
prec prec_band < prec_bnot
dform bor_df : parens :: "prec"[prec_bor] :: except_mode[tex] :: math_bor{'a; 'b} =
slot{'a} " " vee subb " " slot{'b}
dform band_df : parens :: "prec"[prec_band] :: except_mode[tex] :: math_band{'a; 'b} =
slot{'a} " " wedge subb " " slot{'b}
dform bimplies_df : parens :: "prec"[prec_bimplies] :: except_mode[tex] :: math_bimplies{'a; 'b} =
slot{'a} " " Rightarrow subb " " slot{'b}
dform bnot_df : parens :: "prec"[prec_bnot] :: except_mode[tex] :: math_bnot{'a} =
tneg subb slot{'a}
dform ifthenelse_df : parens :: "prec"[prec_bor] :: except_mode[tex] :: math_if{'e1; 'e2; 'e3} =
szone pushm[0] pushm[3] `"if" `" " szone{slot{'e1}} `" " `"then" hspace
szone{slot{'e2}} popm hspace
pushm[3] `"else" hspace szone{slot{'e3}} popm popm ezone
dform math_int_df1 : mode[tex] :: math_int =
izone `"{\\mathbb Z}" ezone
dform math_number_df1 : mode[tex] :: math_number{'i} =
izone `"{{\\it number}[" ezone
slot{'i}
izone `"]}" ezone
dform math_ind_df1 : mode[tex] :: math_ind{'i; 'a; 'b; 'down; 'base; 'c; 'd; 'up} =
izone `"{\\it ind}(" ezone
slot{'i}
izone `"; " ezone
slot{'a}
izone `", " ezone
slot{'b}
izone `". " ezone
slot{'down}
izone `"; " ezone
slot{'base}
izone `"; " ezone
slot{'c}
izone `", " ezone
slot{'c}
izone `". " ezone
slot{'up}
izone `")" ezone
dform math_add_df1 : mode[tex] :: math_add{'i; 'j} =
slot{'i}
izone `"+" ezone
slot{'j}
dform math_sub_df1 : mode[tex] :: math_sub{'i; 'j} =
slot{'i}
izone `"-" ezone
slot{'j}
dform math_mul_df1 : mode[tex] :: math_mul{'i; 'j} =
slot{'i}
izone `"*" ezone
slot{'j}
dform math_div_df1 : mode[tex] :: math_div{'i; 'j} =
slot{'i}
izone `"/" ezone
slot{'j}
dform math_rem_df1 : mode[tex] :: math_rem{'i; 'j} =
slot{'i}
izone `"\\mathrel{\\bf rem}" ezone
slot{'j}
dform math_gt_df1 : mode[tex] :: math_gt{'i; 'j} =
slot{'i}
izone `">" ezone
slot{'j}
dform math_ge_df1 : mode[tex] :: math_ge{'i; 'j} =
slot{'i}
izone `"\\ge " ezone
slot{'j}
dform math_lt_df1 : mode[tex] :: math_lt{'i; 'j} =
slot{'i}
izone `"<" ezone
slot{'j}
dform math_le_df1 : mode[tex] :: math_le{'i; 'j} =
slot{'i}
izone `"\\le " ezone
slot{'j}
prec prec_compare
prec prec_add
prec prec_mul
dform int_prl_df : except_mode[src] :: math_int = mathbbZ
dform number_df : except_mode[tex] :: math_number{'n} =
slot{'n}
dform add_df1 : except_mode[tex] :: parens :: "prec"[prec_add] :: math_add{'a; 'b} =
slot["le"]{'a} `" + " slot["lt"]{'b}
dform sub_df1 : except_mode[tex] :: parens :: "prec"[prec_add] :: math_sub{'a; 'b} =
slot["lt"]{'a} `" - " slot["le"]{'b}
dform mul_df1 : except_mode[tex] :: parens :: "prec"[prec_mul] :: math_mul{'a; 'b} =
slot["lt"]{'a} `" * " slot["le"]{'b}
dform div_df1 : except_mode[tex] :: parens :: "prec"[prec_mul] :: math_div{'a; 'b} =
slot["lt"]{'a} Mpsymbols!"div" slot["le"]{'b}
dform rem_df1 : except_mode[tex] :: parens :: "prec"[prec_mul] :: math_rem{'a; 'b} =
slot["lt"]{'a} `" % " slot["le"]{'b}
dform lt_df1 : except_mode[tex] :: parens :: "prec"[prec_compare] :: math_lt{'a; 'b} =
slot["le"]{'a} `" < " slot["le"]{'b}
dform le_df1 : except_mode[tex] :: parens :: "prec"[prec_compare] :: math_le{'a; 'b} =
slot["lt"]{'a} Mpsymbols!le slot["le"]{'b}
dform ge_df1 : except_mode[tex] :: parens :: "prec"[prec_compare] :: math_ge{'a; 'b} =
slot["lt"]{'a} Mpsymbols!ge slot["le"]{'b}
dform gt_df1 : except_mode[tex] :: parens :: "prec"[prec_compare] :: math_gt{'a; 'b} =
slot["lt"]{'a} `" > " slot["le"]{'b}
dform math_union_df1 : mode[tex] :: math_union{'A; 'B} =
izone `"{" ezone
slot{'A}
izone `"+" ezone
slot{'B}
izone `"}" ezone
dform math_inl_df1 : mode[tex] :: math_inl{'x} =
izone `"{{\\it inl}(" ezone
slot{'x}
izone `")}" ezone
dform math_inr_df1 : mode[tex] :: math_inr{'x} =
izone `"{{\\it inr}(" ezone
slot{'x}
izone `")}" ezone
dform math_decide_df1 : mode[tex] :: math_decide{'x; 'y; 'a; 'z; 'b} =
izone `"{\\mathop{\\bf match}" ezone
slot{'x}
izone `"\\mathrel{\\bf with}" ezone
math_inl{'y}
izone `"\\rightarrow " ezone
slot{'a} `"|" math_inr{'z}
izone `"\\rightarrow " ezone
slot{'b}
izone `"}" ezone
dform math_or_df1 : mode[tex] :: math_or{'a; 'b} =
izone `"{" ezone
slot{'a}
izone `"\\vee " ezone
slot{'b}
izone `"}" ezone
dform math_cor_df1 : mode[tex] :: math_cor{'a; 'b} =
izone `"{" ezone
slot{'a}
izone `"\\vee_c " ezone
slot{'b}
izone `"}" ezone
prec prec_inl
prec prec_union
prec prec_or
dform union_df : except_mode[tex] :: parens :: "prec"[prec_union] :: math_union{'A; 'B} =
slot{'A} " " `"+" " " slot{'B}
dform inl_df : except_mode[tex] :: parens :: "prec"[prec_inl] :: math_inl{'a} =
`"inl" " " slot{'a}
dform inr_df : except_mode[tex] :: parens :: "prec"[prec_inl] :: math_inr{'a} =
`"inr" " " slot{'a}
dform decide_df : except_mode[tex] :: math_decide{'x; 'y; 'a; 'z; 'b} =
szone pushm[0] pushm[3] `"match" " " slot{'x} " " `"with" hspace
`"inl " slot{'y} `" -> " slot{'a} popm hspace
pushm[3] `" | inr " slot{'z} `" -> " slot{'b} popm popm ezone
declare or_df{'a : Dform} : Dform
dform or_df1 : parens :: "prec"[prec_or] :: math_or{'a; 'b} =
szone pushm[0] slot["le"]{'a} or_df{'b} popm ezone
dform or_df2 : or_df{math_or{'a; 'b}} =
or_df{'a} or_df{'b}
dform or_df3 : or_df{'a} =
hspace Mpsymbols!vee " " slot{'a}
declare cor_df{'a : Dform} : Dform
dform cor_df1 : except_mode[tex] :: parens :: "prec"[prec_or] :: math_cor{'a; 'b} =
szone pushm[0] slot["le"]{'a} cor_df{'b} popm ezone
dform cor_df2 : cor_df{math_cor{'a; 'b}} =
cor_df{'a} cor_df{'b}
dform cor_df3 : cor_df{'a} =
hspace Mpsymbols!vee `"c" " " slot{'a}
dform math_rfun_df1 : mode[tex] :: math_rfun[x]{'f; 'A; 'B} =
izone `"\\left\\{" ezone
'f `"|" 'x
izone `"\\colon " ezone
'A
izone `"\\rightarrow " ezone
'B
izone `"\\right\\}" ezone
dform math_dfun_df1 : mode[tex] :: math_fun[x:s]{'A; 'B} =
ifvar[x:v]{'x}
ifvar[x:v]{izone} ifvar[x:v]{slot["\\colon "]} ifvar[x:v]{ezone}
'A
izone `"\\rightarrow " ezone
'B
dform math_lambda_df1 : mode[tex] :: math_lambda{'v; 'b} =
izone `"\\lambda " ezone
'v
izone `"." ezone
'b
dform math_apply_df1 : mode[tex] :: math_apply{'f; 'a} =
'f
izone `"\\ " ezone
'a
dform math_well_founded_df1 : mode[tex] :: math_well_founded{'A; 'x; 'y; 'R} =
izone `"{{\\it well\\_founded}(" ezone
'A
izone `";" ezone
'x
izone `"," ezone
'y
izone `"." ezone
'R
izone `")}" ezone
dform math_well_founded_assum_df1 : mode[tex] :: math_well_founded_assum{'A; 'x; 'y; 'R; 'P} =
izone `"{{\\it well\\_founded\\_assum}(" ezone
'A
izone `";" ezone
'x
izone `"," ezone
'y
izone `"." ezone
'R
izone `";" ezone
'P
izone `")}" ezone
dform math_well_founded_prop_df1 : mode[tex] :: math_well_founded_prop{'P} =
izone `"{{\\it well\\_founded\\_prop}(" ezone
'P
izone `")}" ezone
dform math_well_founded_apply_df1 : mode[tex] :: math_well_founded_apply{'P; 'a} =
izone `"{{\\it well\\_founded\\_apply}(" ezone
'P
izone `";" ezone
'a
izone `")}" ezone
dform math_fix_df1 : mode[tex] :: math_fix{'f; 'b} =
izone `"{\\it fix}(" ezone
'f
izone `"." ezone
'b
izone `")" ezone
dform math_all_df1 : mode[tex] :: math_all{'x; 'A; 'B} =
izone `"\\forall " ezone
'x
izone `"\\colon " ezone
'A
izone `"." ezone
'B
dform math_implies_df1 : mode[tex] :: math_implies{'A; 'B} =
'A
izone `"\\Rightarrow " ezone
'B
dform math_iff_df1 : mode[tex] :: math_iff{'A; 'B} =
'A
izone `"\\Leftrightarrow " ezone
'B
dform math_not_df1 : mode[tex] :: math_not{'A} =
izone `"\\neg " ezone
'A
prec prec_fun
prec prec_apply
prec prec_lambda
prec prec_lambda < prec_apply
prec prec_fun < prec_apply
prec prec_fun < prec_lambda
prec prec_not
prec prec_quant
prec prec_iff
prec prec_implies
dform dfun_df2 : parens :: "prec"[prec_fun] :: except_mode[tex] :: math_fun[x:s]{'A; 'B} =
ifvar[x:v]{bvar{'x}} ifvar[x:v]{slot[":"]} slot{'A} " " rightarrow " " slot{'B}
dform fun_df3 : except_mode[tex] :: math_rfun[x]{'f; 'A; 'B} =
"{" " " slot{bvar{'f}} mid math_fun[x]{'A; 'B} `" }"
dform apply_df1 : parens :: "prec"[prec_apply] :: except_mode[tex] :: math_apply{'f; 'a} =
slot["lt"]{'f} " " slot["le"]{'a}
dform lambda_df1 : parens :: "prec"[prec_lambda] :: except_mode[tex] :: math_lambda{'x; 'b} =
Mpsymbols!lambda slot{'x} `"." slot{'b}
dform fix_df1 : except_mode[tex] :: except_mode[tex] :: math_fix{'f; 'b} =
`"fix" `"(" slot{'f} `"." slot{'b} `")"
dform well_founded_prop_df : except_mode[tex] :: except_mode[tex] :: math_well_founded_prop{'A} =
`"WellFounded " slot{'A} " " rightarrow `" Prop"
dform well_founded_apply_df : except_mode[tex] :: except_mode[tex] :: math_well_founded_apply{'P; 'a} =
slot{'P} `"[" slot{'a} `"]"
dform well_founded_assum_df : except_mode[tex] :: except_mode[tex] :: math_well_founded_assum{'A; 'a1; 'a2; 'R; 'P} =
szone pushm[3] `"WellFounded " Mpsymbols!forall slot{'a2} `":" slot{'A} `"."
`"(" Mpsymbols!forall slot{'a1} `":" slot{'A} `". " slot{'R} " " Rightarrow math_well_founded_apply{'P; 'a1} `")"
Rightarrow math_well_founded_apply{'P; 'a2} popm ezone
dform well_founded_df : except_mode[tex] :: except_mode[tex] :: math_well_founded{'A; 'a; 'b; 'R} =
szone pushm[3] `"WellFounded " slot{'a} `"," slot{'b} `":" slot{'A} `"." slot{'R} popm ezone
dform not_df1 : except_mode[tex] :: parens :: "prec"[prec_not] :: math_not{'a} =
Mpsymbols!tneg slot["le"]{'a}
dform implies_df : except_mode[tex] :: parens :: "prec"[prec_implies] :: math_implies{'a; 'b} =
slot["le"]{'a} " " Mpsymbols!Rightarrow " " slot["lt"]{'b}
dform iff_df : except_mode[tex] :: parens :: "prec"[prec_iff] :: math_iff{'a; 'b} =
slot["le"]{'a} " " Mpsymbols!Leftrightarrow " " slot["lt"]{'b}
dform all_df1 : except_mode[tex] :: parens :: "prec"[prec_quant] :: except_mode[tex] :: math_all{'x; 'A; 'B} =
pushm[3] Mpsymbols!forall slot{'x} `":" slot{'A} sbreak["",". "] slot{'B} popm
dform math_prod_df1 : mode[tex] :: math_prod{'x; 'A; 'B} =
izone `"{" ezone
slot{'x}
izone `"\\colon " ezone
slot{'A}
izone `"\\times " ezone
slot{'B}
izone `"}" ezone
dform math_prod_df2 : mode[tex] :: math_prod{'A; 'B} =
izone `"{" ezone
slot{'A}
izone `"\\times " ezone
slot{'B}
izone `"}" ezone
dform math_pair_df1 : mode[tex] :: math_pair{'a; 'b} =
izone `"{(" ezone
slot{'a}
izone `", " ezone
slot{'b}
izone `")}" ezone
dform math_spread_df1 : mode[tex] :: math_spread{'e; 'u; 'v; 'b} =
izone `"{\\mathop{{\\bf match}}" ezone
slot{'e}
izone `"\\mathrel{{\\bf with}}" ezone
math_pair{'u; 'v}
izone `"\\rightarrow " ezone
slot{'b}
izone `"}" ezone
dform math_fst_df1 : mode[tex] :: math_fst{'e} =
izone `"{{\\it fst}(" ezone
slot{'e}
izone `")}" ezone
dform math_snd_df1 : mode[tex] :: math_snd{'e} =
izone `"{{\\it snd}(" ezone
slot{'e}
izone `")}" ezone
dform math_and_df1 : mode[tex] :: math_and{'a; 'b} =
slot{'a}
izone `"\\wedge " ezone
slot{'b}
dform math_cand_df1 : mode[tex] :: math_cand{'a; 'b} =
slot{'a}
izone `"\\wedge" ezone subc
slot{'b}
dform math_exists_df1 : mode[tex] :: math_exists{'x; 'A; 'B} =
izone `"{\\exists " ezone
slot{'x}
izone `"\\colon " ezone
slot{'A}
izone `"." ezone
slot{'B}
izone `"}" ezone
dform math_exists_df1 : mode[tex] :: math_exists =
izone `"\\exists " ezone
prec prec_prod
prec prec_spread
prec prec_and
prec prec_implies < prec_iff
prec prec_iff < prec_or
prec prec_or < prec_and
prec prec_and < prec_not
prec prec_quant < prec_iff
dform prod_df : parens :: "prec"[prec_prod] :: except_mode[tex] :: math_prod{'A; 'B} =
pushm[0] slot{'A} " " times " " slot{'B} popm
dform prod_df2 : parens :: "prec"[prec_prod] :: except_mode[tex] :: math_prod{'x; 'A; 'B} =
slot{'x} `":" slot{'A} " " times " " slot{'B}
dform pair_prl_df : except_mode[tex] :: except_mode[tex] :: math_pair{'a; 'b} =
pushm[0] `"(" slot{'a}`"," slot{'b} `")" popm
dform spread_prl_df1 : parens :: "prec"[prec_spread] :: except_mode[tex] :: except_mode[tex] :: math_spread{'e; 'u; 'v; 'b} =
szone pushm[1]
keyword["match"] `" " slot{'e} `" " keyword["with"] hspace
math_pair{'u; 'v} `" " Mpsymbols!rightarrow hspace
slot{'b}
popm ezone
dform fst_df1 : except_mode[tex] :: except_mode[tex] :: math_fst{'e} =
slot{'e} `".1"
dform snd_df1 : except_mode[tex] :: except_mode[tex] :: math_snd{'e} =
slot{'e} `".2"
declare and_df{'a : Dform} : Dform
dform and_df1 : except_mode[tex] :: parens :: "prec"[prec_and] :: math_and{'a; 'b} =
szone pushm[0] slot["le"]{'a} and_df{'b} popm ezone
dform and_df2 : and_df{math_and{'a; 'b}} =
and_df{'a} and_df{'b}
dform and_df3 : and_df{'a} =
hspace Mpsymbols!wedge " " slot{'a}
declare cand_df{'a : Dform} : Dform
dform cand_df1 : except_mode[tex] :: parens :: "prec"[prec_and] :: math_cand{'a; 'b} =
szone pushm[0] slot["le"]{'a} cand_df{'b} popm ezone
dform cand_df2 : and_df{math_cand{'a; 'b}} =
cand_df{'a} cand_df{'b}
dform cand_df3 : cand_df{'a} =
hspace Mpsymbols!wedge `"c" " " slot{'a}
dform exists_df1 : except_mode[tex] :: parens :: "prec"[prec_quant] :: except_mode[tex] :: math_exists{'x; 'A; 'B} =
pushm[3] Mpsymbols!"exists" slot{'x} `":" slot{'A} sbreak["",". "] slot{'B} popm
dform math_set_df1 : mode[tex] :: math_set{'x; 'A; 'B} =
izone `"\\{" ezone
slot{'x}
izone `"\\colon " ezone
slot{'A} `"|" slot{'B}
izone `"\\}" ezone
dform math_squash_df1 : mode[tex] :: math_squash{'A} =
izone `"\\sq{" ezone
slot{'A}
izone `"}" ezone
dform set_df1 : except_mode[tex] :: math_set{'x; 'A; 'B} =
pushm[3] `"{" bvar{'x} `":" slot{'A} mid slot{'B} `"}" popm
dform math_squash_df2 : except_mode[tex] :: math_squash{'A} = "[" 'A "]"
dform math_decidable_df1 : mode[tex] :: math_decidable{'P} =
izone `"{{\\it decidable}(" ezone
slot{'P}
izone `")}" ezone
dform decidable_df1 : except_mode[tex] :: math_decidable{'A} =
`"decidable(" slot{'A} `")"
dform math_isect_df1 : mode[tex] :: math_isect{'x; 'A; 'B} =
izone `"{\\bigcap_{" ezone
slot{'x}
izone `"\\colon " ezone
slot{'A}
izone `"} " ezone
slot{'B}
izone `"}" ezone
dform math_record_df1 : mode[tex] :: math_record{'t} =
izone `"{\\left\\{" ezone
slot{'t}
izone `"\\right\\}}" ezone
dform math_bisect_df1 : mode[tex] :: math_bisect{'A; 'B} =
izone `"{" ezone
slot{'A}
izone `"\\cap " ezone
slot{'B}
izone `"}" ezone
dform isect_df1 : except_mode[tex] :: math_isect{'x; 'A; 'B} =
cap slot{'x} `":" slot{'A} `"." slot{'B}
dform top_df : math_top =
math_i["Top"]
dform record_df : except_mode[tex] :: math_record{'t} =
pushm[0] szone `"{ " pushm[0] 't popm hspace `"}" ezone popm
prec prec_bisect
dform bisect_df : except_mode[tex] :: parens :: "prec"[prec_bisect] :: math_bisect{'A; 'B} =
slot["le"]{'A} `" " cap space slot{'B}
dform math_tunion_df1 : mode[tex] :: math_tunion{'x; 'A; 'B} =
izone `"{\\bigcup_{" ezone
slot{'x}
izone `"\\colon " ezone
slot{'A}
izone `"} " ezone
slot{'B}
izone `"}" ezone
dform math_bunion_df1 : mode[tex] :: math_bunion{'A; 'B} =
izone `"{" ezone
slot{'A}
izone `"\\cup " ezone
slot{'B}
izone `"}" ezone
dform tunion_df1 : except_mode[tex] :: math_tunion{'x; 'A; 'B} =
cup slot{'x} `":" slot{'A} `"." slot{'B}
prec prec_bunion
dform bunion_df : except_mode[tex] :: parens :: "prec"[prec_bunion] :: math_bunion{'A; 'B} =
slot["le"]{'A} `" " cup space slot{'B}
dform math_srec_df1 : mode[tex] :: math_srec{'T; 'B} =
izone `"{\\mu(" ezone
slot{'T}
izone `"." ezone
slot{'B}
izone `")}" ezone
dform math_srecind_df1 : mode[tex] :: math_srecind{'t; 'a; 'b; 'c} =
izone `"{{\\it srec\\_ind}(" ezone
slot{'t}
izone `";" ezone
slot{'a}
izone `"," ezone
slot{'b}
izone `"." ezone
slot{'c}
izone `")}" ezone
dform math_prec_df1 : mode[tex] :: math_prec{'T; 'x; 'B; 'a} =
izone `"{\\mu(" ezone
slot{'T}
izone `"," ezone
slot{'x}
izone `"." ezone
slot{'B}
izone `";" ezone
slot{'a}
izone `")}" ezone
dform math_precind_df1 : mode[tex] :: math_precind{'t; 'a; 'b; 'c} =
izone `"{{\\it prec\\_ind}(" ezone
slot{'t}
izone `";" ezone
slot{'a}
izone `"," ezone
slot{'b}
izone `"." ezone
slot{'c}
izone `")}" ezone
dform math_w_df1 : mode[tex] :: math_w{'x; 'A; 'B} =
izone `"{\\mathop{\\it W}(" ezone
slot{'x}
izone `"\\colon " ezone
slot{'A}
izone `"." ezone
slot{'B}
izone `")}" ezone
dform math_tree_df1 : mode[tex] :: math_tree{'A; 'B} =
izone `"{{\\it tree}(" ezone
slot{'A}
izone `";" ezone
slot{'B}
izone `")}" ezone
dform math_treeind_df1 : mode[tex] :: math_treeind{'t; 'a; 'b; 'c; 'd} =
izone `"{{\\it prec\\_ind}(" ezone
slot{'t}
izone `";" ezone
slot{'a}
izone `"," ezone
slot{'b}
izone `"," ezone
slot{'c}
izone `"." ezone
slot{'d}
izone `")}" ezone
dform math_nil_df1 : mode[tex] :: math_nil =
izone `"{\\it nil}" ezone
dform math_cons_df1 : mode[tex] :: math_cons{'h; 't} =
izone `"{{\\it cons}(" ezone
slot{'h}
izone `"," ezone
slot{'t}
izone `")}" ezone
dform math_list_df1 : mode[tex] :: math_list{'l} =
izone `"{{\\it list}(" ezone
slot{'l}
izone `")}" ezone
dform math_listind_df1 : mode[tex] :: math_listind{'e; 'base; 'h; 't; 'f; 'step} =
izone `"{\\mathop{\\bf match}" ezone
slot{'e}
izone `"\\mathrel{\\bf with}" ezone
math_cons{'h; 't}
izone `"." ezone
slot{'f}
izone `"\\rightarrow " ezone
slot{'step}
izone `"}" ezone
dform srec_df : except_mode[tex] :: math_srec{'T; 'B} =
szone mu `"{" slot{'T} `"." pushm[0] slot{'B} `"}" popm ezone
prec prec_w
prec prec_tree_ind
dform w_df : except_mode[tex] :: parens :: "prec"[prec_w] :: math_w{'x; 'A; 'B} =
mathbbW slot{'x} `":" slot{'A} `"." slot{'B}
dform tree_df : except_mode[tex] :: math_tree{'a; 'f} =
`"tree(" slot{'a} `"," " " slot{'f} `")"
dform tree_ind_df : except_mode[tex] :: parens :: "prec"[prec_tree_ind] :: math_treeind{'z; 'a; 'f; 'g; 'body} =
szone pushm[3] `"tree_ind(" slot{'g} `"." " "
pushm[3] `"let tree(" slot{'a} `", " slot{'f} `") =" space slot{'z} space `"in" popm space
slot{'body} popm ezone
prec prec_list
declare search{'a : Dform; 'b : Dform} : Dform
dform nil_df : except_mode[tex] :: math_nil = `"[]"
dform cons_df : except_mode[tex] :: math_cons{'a; 'b} =
search{math_cons{'a; math_nil}; 'b}
dform search_df1 : search{'a; math_cons{'b; 'c}} =
search{math_cons{'b; 'a}; 'c}
dform search_df2 : search{'a; math_nil} =
`"[" semicolons{'a} `"]"
dform search_df3 : search{'a; 'b} =
colons{'a} `"::" slot{'b}
dform semicolons_df1 : semicolons{math_cons{'a; math_nil}} =
slot{'a}
dform semicolons_df2 : semicolons{math_cons{'a; 'b}} =
semicolons{'b} `";" slot{'a}
dform colons_df1 : colons{math_cons{'a; math_nil}} =
slot{'a}
dform colons_df2 : colons{math_cons{'a; 'b}} =
colons{'b} `"::" slot{'a}
dform list_df1 : except_mode[tex] :: parens :: "prec"[prec_list] :: math_list{'a} =
slot{'a} `" List"
dform list_ind_df1 : except_mode[tex] :: parens :: "prec"[prec_list] :: math_listind{'e; 'base; 'h; 't; 'f; 'step} =
szone pushm[1] pushm[3]
`"match " slot{'e} `" with" hspace
`" [] ->" hspace slot{'base} popm hspace
`"| " pushm[0] slot{'h} `"::" slot{'t} `"." slot{'f} `" ->" hspace slot{'step} popm popm ezone
dform math_quot_df1 : mode[tex] :: math_quot{'T; 'x; 'y; 'E} =
izone `"{" ezone
slot{'x}
izone `"," ezone
slot{'y}
izone `"\\colon " ezone
slot{'T}
izone `"// " ezone
slot{'E}
izone `"}" ezone
prec prec_quot
dform quot_df1 : except_mode[tex] :: parens :: "prec"[prec_quot] :: math_quot{'A; 'x; 'y; 'E} =
slot{'x} `", " slot{'y} `":" " " slot{'A} `" // " slot{'E}
|
5948324644bc149497441c7dbb769fcb9cc98ac1600e4f6f2c4cb6fe209c5745 | ermine-language/ermine | Keywords.hs | --------------------------------------------------------------------
-- |
-- Copyright : (c) and 2012-2013
-- License : BSD2
-- Maintainer : < >
-- Stability : experimental
-- Portability: non-portable
--
--------------------------------------------------------------------
module Ermine.Parser.Keywords where
import Data.HashSet
import Data.Monoid
-- | This is the set of keywords that can only occur at the beginning of the line for auto-completion purposes.
startingKeywords :: HashSet String
startingKeywords = fromList
[ "abstract"
, "class"
, "data"
, "database"
, "export"
, "field"
, "foreign"
, "import"
, "instance"
, "private"
, "type"
]
-- | This is the set of keywords that can occur anywhere on the line for auto-completion purposes.
otherKeywords :: HashSet String
otherKeywords = fromList
[ "case"
, "constraint"
, "constructor"
, "do"
, "exists"
, "forall"
, "hole"
, "in"
, "infix"
, "infixl"
, "infixr"
, "let"
, "of"
, "phi"
, "postfix"
, "prefix"
, "rho"
, "subtype"
, "table"
, "where"
, "_"
, "Γ"
, "ρ"
, "φ"
]
-- | The set of all keywords.
--
-- @'keywords' = 'startingKeywords' '<>' 'otherKeywords'@
keywords :: HashSet String
keywords = startingKeywords <> otherKeywords
| null | https://raw.githubusercontent.com/ermine-language/ermine/bd58949ab56311be9e0d2506a900f3d77652566b/src/Ermine/Parser/Keywords.hs | haskell | ------------------------------------------------------------------
|
Stability : experimental
Portability: non-portable
------------------------------------------------------------------
| This is the set of keywords that can only occur at the beginning of the line for auto-completion purposes.
| This is the set of keywords that can occur anywhere on the line for auto-completion purposes.
| The set of all keywords.
@'keywords' = 'startingKeywords' '<>' 'otherKeywords'@ | Copyright : ( c ) and 2012 - 2013
License : BSD2
Maintainer : < >
module Ermine.Parser.Keywords where
import Data.HashSet
import Data.Monoid
startingKeywords :: HashSet String
startingKeywords = fromList
[ "abstract"
, "class"
, "data"
, "database"
, "export"
, "field"
, "foreign"
, "import"
, "instance"
, "private"
, "type"
]
otherKeywords :: HashSet String
otherKeywords = fromList
[ "case"
, "constraint"
, "constructor"
, "do"
, "exists"
, "forall"
, "hole"
, "in"
, "infix"
, "infixl"
, "infixr"
, "let"
, "of"
, "phi"
, "postfix"
, "prefix"
, "rho"
, "subtype"
, "table"
, "where"
, "_"
, "Γ"
, "ρ"
, "φ"
]
keywords :: HashSet String
keywords = startingKeywords <> otherKeywords
|
cb60952038b7222cf635ca845c0cdb1289c1ea9e1f967965dcc69791de0a3266 | OCamlPro/ocp-build | buildOCamlRules.ml | (**************************************************************************)
(* *)
(* Typerex Tools *)
(* *)
(* Copyright 2011-2017 OCamlPro SAS *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
(* the GNU General Public License version 3 described in the file *)
(* LICENSE. *)
(* *)
(**************************************************************************)
(* TODO:
- Support for shared libraries. Currently, ocp-build ONLY supports building
in custom mode. Shared mode is a bit more complex, as linking with a library
would depend on different files, if a shared library or a static library is
built.
*)
(* ocp-imports should be able to print this !
Warning 40: package_options was selected from type BuildOCPTypes.package.
Warning 40: package_dirname was selected from type BuildOCPTypes.package.
Warning 40: package_options was selected from type BuildOCPTypes.package.
Warning 40: package_name was selected from type BuildOCPTypes.package.
*)
(* special attributes *)
let mli_file_attr = "mli_file"
let file2string_attr = "file2string"
let binannot_attr = "binannot"
open OcpCompat
open BuildMisc
open BuildEngineTypes
open BuildEngineGlobals
open BuildEngineContext
open BuildEngineRules
open BuildValue.TYPES
open BuildTypes
open BuildGlobals
open BuildOCamlConfig
open BuildOCamlTypes
open BuildOCamlVariables
open BuildOCamlMisc
open BuildOCamlInstall.TYPES
(* renamed to record in replay log *)
let safe_mkdir (dir : string) =
BuildEngineReport.cmd_mkdir dir;
BuildMisc.safe_mkdir dir
let add_file lib dir name =
BuildEngineContext.add_file lib.lib.lib_package dir name
let add_temp_file lib dir name =
BuildEngineContext.add_temp_file lib.lib.lib_package dir name
let add_virtual_file lib dir name =
BuildEngineContext.add_virtual_file lib.lib.lib_package dir name
let add_dst_file lib dir name =
BuildOCamlMisc.add_dst_file lib.lib.lib_package dir name
let comp_deps w lib options =
let options = options :: lib.lib_opk.opk_options in
let comp_requires = comp_requires_option.get options in
BuildOCamlSyntaxes.get_tool_requires w "comp" lib comp_requires
let string_of_libloc lib =
Printf.sprintf "File %S, line 0, characters 0-1:\nPackage %S:"
lib.lib.lib_filename lib.lib.lib_name
(* TODO: [mut_dir] does not work for source files beginning with ".."
and for source files in other packages (package = "toto")
*)
let open_aliases_flag = "open-aliases"
let is_aliased options =
BuildValue.get_bool_with_default [options] open_aliases_flag true
let comp_alias_options lib options =
match lib.lib_alias with
| None -> []
| Some alias ->
let args = ["-w"; "-49"; "-no-alias-deps" ] in
if is_aliased options then
args @ [ "-open"; String.capitalize alias ]
else
args
let dep_alias_options lib options =
match lib.lib_alias with
| None -> []
| Some alias ->
let args = [] in
if is_aliased options then
args @ [ "-open"; String.capitalize alias ]
else
args
let ocamlc_command options ocamlc_specific ocamlc_generic =
let ocamlc_command = ocamlc_specific.get options in
if ocamlc_command = [] then
ocamlc_generic.get options
else ocamlc_command
let copy_dir lib src_file =
let b = lib.lib.lib_context in
let mut_dirname =
Filename.concat b.build_dir_filename "_mutable_tree" in
safe_mkdir mut_dirname;
let mut_dir = BuildEngineContext.add_directory b mut_dirname in
(*
let rec iter mut_dir file_dir =
(*
Printf.eprintf "src_dir = %S\n%!" lib.lib.lib_src_dir.dir_fullname;
Printf.eprintf "fil_dir = %S\n%!" file_dir.dir_fullname;
Printf.eprintf "mut_dir = %S\n%!" lib.lib_mut_dir.dir_fullname;
*)
if file_dir.dir_parent == file_dir
then mut_dir else
let parent_dir = file_dir.dir_parent in
Printf.eprintf " check parent\n " ;
(* assert (lib.lib_mut_dir.dir_fullname <> file_dir.dir_fullname); *)
let mut_dir = iter mut_dir parent_dir in
let subdir = Filename.concat mut_dir.dir_fullname file_dir.dir_basename
in
safe_mkdir subdir;
add_directory lib.lib.lib_context subdir
in
*)
try
let subdir = Filename.concat mut_dir.dir_fullname
src_file.file_dir.dir_basename in
safe_mkdir subdir;
let copy_dir = BuildEngineContext.add_directory
lib.lib.lib_context subdir in
(* let src_file.file_dir in
Printf.eprintf "COPY DIR of %S is %S\n%!"
(FileGen.to_string src_file.file_file) copy_dir.dir_fullname; *)
copy_dir
with Stack_overflow ->
Printf.eprintf "Error: Stack_overflow while computing mut_dir\n";
Printf.eprintf " of source file %S of package %S \n%!"
(FileGen.to_string src_file.file_file)
lib.lib.lib_name;
clean_exit 2
let verbose = OcpDebug.verbose_function ["B"; "BuildOCamlRules"]
let chop_prefix s prefix =
let prefix_len = String.length prefix in
String.sub s prefix_len (String.length s - prefix_len)
type package_temp_variables = {
mutable src_files : build_file IntMap.t;
mutable dep_files : build_file IntMap.t;
cmi_files : build_file list ref;
cmo_files : build_file list ref;
odoc_files : build_file list ref;
cmx_files : build_file list ref;
cmx_o_files : build_file list ref;
o_files : build_file list ref;
}
let new_package_temp_variables () = {
src_files = IntMap.empty;
dep_files = IntMap.empty;
cmi_files = ref [];
cmo_files = ref [];
odoc_files = ref [];
cmx_files = ref [];
cmx_o_files = ref []; (* .o files generated with .cmx files *)
o_files = ref [];
}
(* TODO: must do something more correct !! *)
let ocaml_version_greater_than version options =
let ocaml_version = ocaml_config_version.get options in
ocaml_version >= version
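(* Editor's note: an illustrative sketch, not part of ocp-build. The string
   comparison above (see the TODO) is only safe while major versions stay
   single-digit: with it, "10.0" >= "4" is false. A more robust check could
   compare the dotted components numerically; [String.split_on_char] assumes
   OCaml >= 4.04. *)
let _illustrative_version_ge v1 v2 =
  let ints v =
    List.map (fun s -> try int_of_string s with _ -> 0)
      (String.split_on_char '.' v) in
  compare (ints v1) (ints v2) >= 0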
let add_bin_annot_argument cmd options =
if ocaml_version_greater_than "4" options &&
BuildValue.get_bool_with_default options binannot_attr true
then
add_command_args cmd [S "-bin-annot" ]
let c_includes lib =
let added_dirs = ref IntMap.empty in
let includes = ref [] in
let add_include_dir dir =
if not (IntMap.mem dir.dir_id !added_dirs) then begin
added_dirs := IntMap.add dir.dir_id dir !added_dirs;
includes := !includes @ [S "-I"; S dir.dir_fullname];
end
in
add_include_dir lib.lib.lib_src_dir;
(* TODO: Fabrice: they should be reversed, no ?
We should search directories in the
reverse order of the topological order. *)
List.iter (fun dep ->
let lib = dep.dep_project in
match lib.lib.lib_type with
| ProgramPackage (* | ProjectToplevel *) -> ()
| TestPackage -> assert false
| LibraryPackage
| ObjectsPackage
| RulesPackage
->
if dep.dep_link || externals_only.get [dep.dep_options]
then begin
add_include_dir lib.lib.lib_src_dir;
end
| SyntaxPackage -> ()
) (List.rev lib.lib_requires);
!includes
let command_includes lib pack_for =
let includes =
match lib.lib_includes with
| Some includes -> includes
| None ->
let added_dirs = ref IntMap.empty in
let includes = ref [] in
let add_include_dir dir =
if not (IntMap.mem dir.dir_id !added_dirs) then begin
added_dirs := IntMap.add dir.dir_id dir !added_dirs;
includes := !includes @ ["-I"; dir.dir_fullname];
end
in
add_include_dir lib.lib.lib_dst_dir;
add_include_dir lib.lib.lib_src_dir;
(* TODO: Fabrice: they should be reversed, no ?
We should search directories in the
reverse order of the topological order. *)
List.iter (fun dep ->
let lib = dep.dep_project in
match lib.lib.lib_type with
| ProgramPackage (* | ProjectToplevel *) -> ()
| TestPackage -> assert false
| LibraryPackage
| ObjectsPackage
->
if dep.dep_link || externals_only.get [dep.dep_options] then begin
add_include_dir lib.lib.lib_dst_dir;
add_include_dir lib.lib.lib_src_dir;
end
| SyntaxPackage -> ()
| RulesPackage ->
add_include_dir lib.lib.lib_src_dir;
add_include_dir lib.lib.lib_dst_dir;
) (List.rev lib.lib_requires);
(* we put the source dir last in case there are some remaining object files there, since
we don't do any hygienic cleaning before. We don't do it because we want to be able to
support object files that are built by other means. *)
let includes = !includes in
lib.lib_includes <- Some includes;
includes
in
let rec add_internal_includes pack_for includes =
match pack_for with
[] -> includes
| _ :: tail ->
let includes = add_internal_includes tail includes in
"-I" :: (Filename.concat lib.lib.lib_dst_dir.dir_fullname
(String.concat "/" (List.rev pack_for))) ::
includes
in
add_internal_includes (List.rev pack_for) includes
(*
let command_pp ptmp options =
match string_option options pp_option with
| "" -> []
| pp -> ["-pp"; pp]
*)
let add_package_file lib filename =
let b = lib.lib.lib_context in
if Filename.is_relative filename then
add_file lib lib.lib.lib_src_dir filename
else
let dir =
let dirname = Filename.dirname filename in
try
find_directory b dirname
with Not_found ->
Printf.eprintf "Error: directory %S of %S is not a package directory\n%!" dirname lib.lib.lib_name;
exit 2
in
add_file lib dir (Filename.basename filename)
let add_more_rule_sources lib r deps options =
let more_rule_sources = rule_sources_option.get options
@ more_deps_option.get options in
List.iter (fun s ->
let s = BuildSubst.subst_global s in
let s = add_package_file lib s in
add_rule_source r s
) more_rule_sources;
List.iter (fun option ->
List.iter (fun s ->
let s = BuildSubst.subst_global s in
let s = add_package_file lib s in
add_rule_source r s
) (option.get options)
) deps
(*
let add_objects lib name_objs options =
List.map (fun s ->
let s = BuildSubst.subst_global s in
add_package_file lib s)
(BuildValue.get_strings_with_default options name_objs [])
*)
(* override [new_rule] to add [lib_ready] *)
let new_rule lib file cmds =
let r = new_rule lib.lib.lib_context lib.lib.lib_loc file cmds in
add_rule_sources r lib.lib_ready;
r
let add_c2o_rule b lib seq src_file target_file options =
let build_dir = BuildEngineContext.add_directory b (MinUnix.getcwd ()) in
let temp_file = add_temp_file lib build_dir target_file.file_basename in
let r = new_rule lib target_file
[Execute (new_command
( ocamlcc_cmd.get options
(*
(if bool_option_true lib.lib.lib_opk.opk_options byte_option then ocamlcc_cmd
else ocamlopt_cmd) *)
)
(c_includes lib @[
S "-ccopt"; S
(String.concat " " (cflags_option.get options));
S "-ccopt"; S (String.concat " " ( ccopt_option.get options));
S "-c"; S (file_filename src_file);
])
);
Move (false, F temp_file.file_file, F target_file.file_file)
]
in
add_more_rule_sources lib r [] options;
add_rule_source r src_file;
add_rule_sources r seq;
add_rule_temporary r temp_file
let add_mll2ml_rule lib src_file target_file options =
let envs = options :: lib.lib_opk.opk_options in
let r = new_rule lib target_file
[Execute (new_command (ocamllex_cmd.get envs )
[ S "-o"; BF target_file; BF src_file])
]
in
add_more_rule_sources lib r [ ocamllex_deps ] envs;
add_rule_source r src_file
let add_mly2ml_rule lib src_file ml_target_file mli_target_file options =
let envs = options :: lib.lib_opk.opk_options in
let src_dir = src_file.file_dir in
let temp_ml = add_temp_file lib src_dir ml_target_file.file_basename in
let temp_mli = add_temp_file lib src_dir mli_target_file.file_basename in
let r = new_rule lib ml_target_file
[Execute (new_command ( ocamlyacc_cmd.get envs) [BF src_file]);
Move (false, BF temp_ml, BF ml_target_file);
Move (false, BF temp_mli, BF mli_target_file);
]
in
add_more_rule_sources lib r [ ocamlyacc_deps ] envs;
add_rule_source r src_file;
add_rule_target r mli_target_file
(* reading dependencies is a bit complicated, as the format of make
dependencies is not OK on Windows (because : is used in
filenames). We should fix filenames in those cases.
Note that ocamldep will escape any space in a filename, so that
spaces are the only significant characters.
Read the full file. Convert \\\n sequences into spaces.
Instead, we should have a special format, such as:
CMO filename
DEP dependency
DEP dependency
TODO: add a String.unescaped, the inverse of String.escaped.
*)
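(* Editor's note: an illustrative sketch, not part of ocp-build (the real
   loader used below is [BuildOCamldep.load_modules_dependencies]). It only
   shows the "ocamldep -modules" text format discussed above, e.g.
   "dir/foo.ml: Bar Baz", and why splitting on the first ':' is fragile on
   Windows paths such as "C:\\foo.ml". Assumes an OCaml >= 4.05 stdlib. *)
let _illustrative_parse_ocamldep_line line =
  match String.index_opt line ':' with
  | None -> (line, [])
  | Some i ->
    let target = String.trim (String.sub line 0 i) in
    let rest = String.sub line (i + 1) (String.length line - i - 1) in
    let modules =
      List.filter (fun s -> s <> "")
        (String.split_on_char ' ' (String.trim rest)) in
    (target, modules)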
let add_flag option flag options flags =
if option.get options && not (List.mem (S flag) flags) then
(S flag) :: flags else flags
let add_nopervasives_flag = add_flag nopervasives "-nopervasives"
let add_asmdebug_flag = add_flag asmdebug_option "-g"
let add_bytedebug_flag = add_flag bytedebug_option "-g"
let add_debug_flag = add_flag debug_option "-g"
let bytelinkflags lib =
let options = lib.lib_opk.opk_options in
add_debug_flag options (
add_bytedebug_flag options (
add_nopervasives_flag options (
List.map argument_of_string (bytelink_option.get options)
)
)
)
let asmlinkflags lib =
let options = lib.lib_opk.opk_options in
add_debug_flag options (
add_asmdebug_flag options (
add_nopervasives_flag options (
List.map argument_of_string (asmlink_option.get options )
)
)
)
let depflags options =
List.map argument_of_string ( dep_option.get options)
let bytecompflags options =
add_debug_flag options (
add_bytedebug_flag options (
add_nopervasives_flag options (
List.map argument_of_string ( bytecomp_option.get options)
)))
let docflags options =
add_nopervasives_flag options (
List.map argument_of_string ( docflags_option.get options)
)
let asmcompflags options =
add_debug_flag options (
add_asmdebug_flag options (
add_nopervasives_flag options (
List.map argument_of_string (asmcomp_option.get options )
)))
let indocs envs = doc_option.get envs
let needs_odoc lib =
match lib.lib.lib_type with
LibraryPackage | ObjectsPackage -> true
| ProgramPackage | SyntaxPackage
| TestPackage | RulesPackage -> false
let add_ml2mldep_rule lib dst_dir pack_for force src_file target_file needs_odoc options =
let envs = options :: lib.lib_opk.opk_options in
let cmd = new_command (ocamldep_cmd.get envs)
(depflags envs) in
add_command_string cmd "-modules";
add_command_strings cmd (dep_alias_options lib options);
add_command_strings cmd (command_includes lib pack_for);
(* add_command_strings cmd (command_pp lib options); *)
if force = Force_IMPL || ml_file_option.get envs then
add_command_strings cmd [ "-impl" ]
else
if force = Force_INTF || mli_file_option.get envs then
add_command_strings cmd [ "-intf" ]
;
add_command_strings cmd [file_filename src_file];
add_command_pipe cmd (file_filename target_file);
let r = new_rule lib target_file [Execute cmd] in
add_more_rule_sources lib r [ ocamldep_deps ] envs;
add_rule_source r src_file;
(* We don't need to have all the sources available ! Actually, the
computation of dependencies is not done on the file-system, but on
the virtual image of the file system, so files don't need to be
present, they just need to be known to exist...
List.iter (fun pd ->
let lib = pd.dep_project in
IntMap.iter (fun _ file -> add_rule_source r file) lib.lib_dep_deps
) lib.lib.lib_requires;
*)
let mldep_file_loaded = add_virtual_file lib dst_dir
(target_file.file_basename ^ " loaded") in
let mldep_file_ok = add_virtual_file b dst_dir
( target_file.file_basename ^ " ok " ) in
let r_ok = new_rule b lib.lib_loc mldep_file_ok [ ] in
r_ok.rule_forced < - true ; ( * must be executed , even when no changes
let mldep_file_ok = add_virtual_file b dst_dir
(target_file.file_basename ^ " ok") in
let r_ok = new_rule b lib.lib_loc mldep_file_ok [] in
r_ok.rule_forced <- true; (* must be executed, even when no changes *)
add_rule_source r_ok mldep_file_loaded;
*)
let loader =
BuildOCamldep.load_modules_dependencies
lib options force dst_dir pack_for needs_odoc
(* not fully applied *)
in
let r_loaded = new_rule lib mldep_file_loaded [] in
add_rule_command r_loaded (LoadDeps (loader, target_file, r_loaded));
r_loaded.rule_forced <- true; (* must be executed, even when no changes *)
add_rule_source r_loaded target_file;
mldep_file_loaded
type 'a to_sort =
{
to_sort_value : 'a;
to_sort_node : OcpToposort.node;
mutable to_sort_deps : 'a to_sort list;
}
module FileSorter = OcpToposort.Make(struct
type t = build_file to_sort
let node to_sort = to_sort.to_sort_node
let iter_edges f to_sort = List.iter f to_sort.to_sort_deps
let name to_sort = file_filename to_sort.to_sort_value
let verbose = OcpDebug.verbose_function [ "BuildOCamlRules.FileSorter" ]
end)
(* We use the graph of build rules to sort topologically the object files *)
let sort_ocaml_files lib cmo_files =
if verbose 3 then begin
Printf.eprintf "Sorting:\n";
List.iter (fun file ->
Printf.eprintf "%s " file.file_basename;
) cmo_files;
end;
let map = ref StringMap.empty in
let list = ref [] in
let cmo_files = List.map (fun file ->
let modname = Filename.chop_extension file.file_basename in
let modname = String.capitalize modname in
let to_sort = {
to_sort_value = file;
to_sort_node = OcpToposort.new_node();
to_sort_deps = [];
} in
map := StringMap.add modname to_sort !map;
list := to_sort :: !list;
(file, to_sort)
) cmo_files in
(* reverse to keep original order *)
let list = List.rev !list in
List.iter (fun (file, to_sort) ->
List.iter (fun r ->
if r.rule_state <> RULE_INACTIVE then
IntMap.iter (fun _ file2 ->
try
let modname =
try Filename.chop_extension file2.file_basename
with _ -> raise Not_found in
let modname = String.capitalize modname in
let to_sort2 = StringMap.find modname !map in
if to_sort2 != to_sort then
to_sort.to_sort_deps <- to_sort2 :: to_sort.to_sort_deps
with Not_found -> ()
) r.rule_sources
) file.file_target_of
) cmo_files;
let (sorted, cycle, _others) = FileSorter.sort list in
if cycle <> [] then begin
Printf.eprintf
"Error: There is a cycle in the inter-dependencies inside package %S.\n"
lib.lib.lib_name;
Printf.eprintf
" You MUST specify the link order of modules by ordering 'files'\n";
Printf.eprintf
" and using 'sort=false' in the package description.\n%!";
exit 2
end;
let cmo_files =
List.map (fun to_sort -> to_sort.to_sort_value) sorted in
if verbose 3 then begin
Printf.eprintf "\n";
Printf.eprintf "Sorted:\n";
List.iter (fun file ->
Printf.eprintf "%s " file.file_basename;
) cmo_files;
Printf.eprintf "\n";
end;
cmo_files
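(* Editor's note: a hypothetical illustration of the function above — if the
   rule producing foo.cmo lists bar.cmo (or bar.cmi) among its sources, then
   [sort_ocaml_files lib [foo.cmo; bar.cmo]] yields [bar.cmo; foo.cmo], i.e.
   dependencies first, which is the order the OCaml linker expects; a true
   dependency cycle stops the build with exit code 2. *)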
let add_files_to_link_to_command lib case cmd options cmx_files =
if sort_files_option.get options then begin
DynamicAction (
(Printf.sprintf "sort for %s" case),
lazy (
let cmx_files = sort_ocaml_files lib cmx_files in
List.iter (fun cmx_file ->
add_command_args cmd [BF cmx_file]) cmx_files;
[Execute cmd]
)
)
end else begin
List.iter (fun cmx_file ->
add_command_args cmd [BF cmx_file]) cmx_files;
Execute cmd
end
let add_cmo2cma_rule lib ptmp cclib cmo_files cma_file =
if not lib.lib_opk.opk_installed then
let options = lib.lib_opk.opk_options in
let cmd = new_command
(ocamlc_command options ocamlc2cma_cmd ocamlc_cmd
) (bytelinkflags lib) in
add_command_args cmd [S "-a"; S "-o"; BF cma_file];
if cclib <> "" then
add_command_strings cmd [ "-custom" ; "-cclib"; cclib ];
if force_link_option.get options then
add_command_strings cmd [ "-linkall" ];
let cmd = add_files_to_link_to_command lib "byte lib" cmd options cmo_files in
let r = new_rule lib cma_file [cmd] in
add_more_rule_sources lib r [ ocamlc_deps; bytelink_deps; link_deps ] options;
add_rule_sources r cmo_files;
add_rule_sources r !(ptmp.cmi_files)
let cross_move r list =
r.rule_commands <- r.rule_commands @
(List.map (fun (f1, f2) ->
Move (false, f1, f2)
) list)
let cross_update r list =
r.rule_commands <- r.rule_commands @
(List.map (fun (f1, f2) ->
Move (true, f1, f2)
) list)
let add_cmx2cmxa_rule lib cclib cmi_files cmx_files cmx_o_files stubs_files =
let options = lib.lib_opk.opk_options in
let src_dir = lib.lib.lib_src_dir in
let dst_dir = lib.lib.lib_dst_dir in
let basename_cmxa = lib.lib_archive ^ ".cmxa" in
let basename_cmxs = lib.lib_archive ^ ".cmxs" in
let ext_lib = BuildOCamlConfig.ocaml_config_ext_lib.get options in
let basename_a = lib.lib_archive ^ ext_lib in
let cmxa_file = add_dst_file lib dst_dir basename_cmxa in
let a_file = add_dst_file lib dst_dir basename_a in
let cmxs_file = add_dst_file lib dst_dir basename_cmxs in
let has_cmxs = cmxs_plugin.get options in
let cmxs_files = if has_cmxs then [cmxs_file, CMXS] else [] in
if not lib.lib_opk.opk_installed then begin
(* Build the cmxa *)
let temp_cmxa = add_temp_file lib src_dir basename_cmxa in
let r = new_rule lib cmxa_file [] in
let temp_a = add_temp_file lib src_dir basename_a in
begin
let cmd = new_command (ocamlopt_cmd.get options ) (asmlinkflags lib) in
add_command_args cmd [S "-a"; S "-o"; BF temp_cmxa ];
if cclib <> "" then
add_command_strings cmd ["-cclib"; cclib];
if force_link_option.get options then
add_command_strings cmd [ "-linkall" ];
let cmd = add_files_to_link_to_command lib "asm lib" cmd options cmx_files in
add_rule_command r cmd;
add_rule_target r a_file;
add_rule_temporaries r [ temp_cmxa; temp_a ];
end;
add_more_rule_sources lib r
[ ocamlopt_deps; asmlink_deps; link_deps ] options;
add_rule_sources r cmx_files;
add_rule_sources r cmx_o_files;
add_rule_sources r cmi_files;
cross_move r [ F temp_cmxa.file_file, F cmxa_file.file_file;
F temp_a.file_file, F a_file.file_file;
];
if has_cmxs then begin
let temp_cmxs = add_temp_file lib src_dir basename_cmxs in
let asmlink_libs =
List.map (fun s ->
let s = BuildSubst.subst_global s in
add_package_file lib s
) (asmlink_libs.get options) in
let cmd = new_command
(ocamlopt_cmd.get options )
(
(asmlinkflags lib) @
[S "-shared"; S "-I";
S lib.lib.lib_dst_dir.dir_fullname;
S "-o"; BF temp_cmxs ] @
(if cclib = "" then [] else [S "-cclib"; S cclib]) @
(if force_link_option.get options then
[S "-linkall" ] else [] ) @
(List.map (fun f -> BF f) asmlink_libs)
)
in
let cmd = add_files_to_link_to_command lib "cmxs lib"
cmd options cmx_files in
          (* We can probably not build the .cmxs in parallel with the .cmxa.
             So, we just do them consecutively in the same rule. *)
add_rule_command r cmd;
add_rule_sources r asmlink_libs;
add_rule_target r cmxs_file;
add_rule_temporaries r [ temp_cmxs ];
add_more_rule_sources lib r
[ ocamlopt_deps; asmlink_deps; link_deps ] options;
add_rule_sources r stubs_files;
(* TODO: as we introduce this new dependency, we might want to
split generation of .cmxa from .cmxs to be able to do them in
parallel *)
cross_move r [
F temp_cmxs.file_file, F cmxs_file.file_file;
];
end;
end;
(cmxa_file, a_file, cmxs_files)
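(* [add_odocs2html_rule lib odoc_files docdir html_file] generates the HTML
   documentation: every .odoc dump is reloaded with "ocamldoc -load" and the
   pages are written under [docdir] with "-html -d". *)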
let add_odocs2html_rule lib odoc_files docdir html_file =
if not lib.lib_opk.opk_installed then
let options = lib.lib_opk.opk_options in
let cmd = new_command (ocamldoc_cmd.get options ) [] in
List.iter (fun odoc_file ->
add_command_args cmd [S "-load"; BF odoc_file]
) odoc_files;
add_command_args cmd [S "-html";S "-d"; BD docdir];
let r = new_rule lib html_file [Execute cmd] in
add_more_rule_sources lib r [ ocamldoc_deps ] options;
add_rule_sources r odoc_files
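(* [get_link_order lib] returns the list of required packages to link, in
   dependency order. When the package defines a 'link_order' option, that list
   is used to reorder the requirements: names absent from 'requires' are
   errors, while required packages missing from 'link_order' only produce a
   warning. *)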
let get_link_order lib =
let tolink =
List.fold_right (fun pd links ->
if pd.dep_link then
let lib2 = pd.dep_project in
lib2 :: links
else links)
lib.lib_requires []
in
let link_order = link_order.get lib.lib_opk.opk_options in
if link_order = [] then tolink else
let map = List.fold_left (fun map lib ->
StringMap.add lib.lib.lib_name (lib, ref false) map
) StringMap.empty tolink
in
let tolink =
List.map (fun name -> try
let (lib, used) = StringMap.find name map in
used := true;
lib
with Not_found ->
Printf.eprintf "Error with package %S: %S in 'link_order' is not specified in 'requires'\n%!" lib.lib.lib_name name;
exit 2
) link_order
in
StringMap.iter (fun name (_, used) ->
if not !used then
Printf.eprintf "Warning with package %S: required %S not specified in `link_order'\n%!" lib.lib.lib_name name
) map;
tolink
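(* [add_cmo2byte_rule] links a bytecode executable (or a toplevel with
   ocamlmktop when 'is_toplevel' is set). The command collects, in link order,
   the bytecode targets (.cma/.cmo) and stub archives of every linked
   dependency, switches to "-custom" as soon as external .o files are linked
   in, and finally appends the package's own .cmo files through
   [add_files_to_link_to_command]. *)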
let add_cmo2byte_rule lib ptmp linkflags cclib cmo_files o_files byte_file =
if not lib.lib_opk.opk_installed then
let options = lib.lib_opk.opk_options in
let ocamlc_command =
if is_toplevel.get options then
ocamlmktop_cmd.get options
else
ocamlc_command options ocamlc2byte_cmd ocamlc_cmd
in
let cmd = new_command ocamlc_command linkflags in
add_command_args cmd [S "-o"; BF byte_file];
let custom = ref false in
List.iter (fun o_file ->
custom := true;
add_command_args cmd [BF o_file]) o_files;
if cclib <> "" then
add_command_args cmd [S "-cclib"; S cclib ];
add_command_strings cmd (command_includes lib []);
Printf.eprintf " to_link for % S\n% ! " lib.lib_name ;
List.iter (fun lib2 ->
match lib2.lib.lib_type with
| LibraryPackage
| RulesPackage
| ObjectsPackage
| ProgramPackage ->
add_command_args cmd (bytelinkflags lib2);
if not lib2.lib_meta then begin
let has_ocaml_modules = ref false in
List.iter (fun (obj, kind) ->
match kind with
| CMA
| CMO ->
has_ocaml_modules := true;
add_command_arg cmd (BF obj)
| _ -> ()
) lib2.lib_byte_targets;
if not (lib2.lib_autolink && !has_ocaml_modules) then
List.iter (fun (obj, kind) ->
match kind with
| STUB_A -> add_command_arg cmd (BF obj)
(* [S "-cclib"; S ("-l" ^ lib2.lib_stubarchive)] *)
| _ -> ()
) lib2.lib_stub_targets;
end;
| SyntaxPackage -> ()
| TestPackage -> ()
) lib.lib_linkdeps;
if !custom then add_command_string cmd "-custom";
let bytelink_libs =
List.map (fun s ->
let s = BuildSubst.subst_global s in
add_package_file lib s
) (bytelink_libs.get options) in
List.iter (fun s -> add_command_arg cmd (BF s)) bytelink_libs;
let cmd = add_files_to_link_to_command lib "byte prog" cmd options cmo_files in
let r = new_rule lib byte_file [cmd] in
add_more_rule_sources lib r [ ocamlc_deps; bytelink_deps; link_deps ] options;
add_rule_sources r cmo_files;
add_rule_sources r !(ptmp.cmi_files);
add_rule_sources r o_files;
List.iter (fun lib2 ->
List.iter (fun (obj, kind) ->
match kind with
| CMA | CMO -> add_rule_source r obj
| _ -> ()
) lib2.lib_byte_targets;
List.iter (fun (obj, kind) ->
match kind with
| STUB_A -> add_rule_source r obj
| _ -> ()
) lib2.lib_stub_targets;
) lib.lib_linkdeps;
add_rule_sources r bytelink_libs
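(* [add_cmx2asm_rule] is the native counterpart of [add_cmo2byte_rule]: it
   links a native executable from the package's .cmx/.o files and from the
   .cmxa/.cmx and stub archives of its dependencies, and registers all of them
   as sources of the rule so the program is relinked whenever one of them
   changes. *)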
let add_cmx2asm_rule lib ptmp linkflags cclib cmx_files cmx_o_files o_files opt_file =
if not lib.lib_opk.opk_installed then
let options = lib.lib_opk.opk_options in
let cmd = new_command
(ocamlc_command options ocamlopt2asm_cmd ocamlopt_cmd)
linkflags in
add_command_args cmd [S "-o"; BF opt_file];
if cclib <> "" then
add_command_args cmd [S "-cclib"; S cclib];
List.iter (fun o_file ->
add_command_arg cmd (BF o_file)) o_files;
add_command_strings cmd (command_includes lib []);
Printf.eprintf " To link % S:\n% ! " lib.lib_name ;
List.iter (fun lib2 ->
Printf.eprintf " Lib % S\n% ! " lib2.lib_name ;
match lib2.lib.lib_type with
| LibraryPackage
| RulesPackage
| ObjectsPackage
| ProgramPackage ->
add_command_args cmd (asmlinkflags lib2);
let has_ocaml_modules = ref false in
List.iter (fun (obj, kind) ->
match kind with
| CMXA | CMX ->
has_ocaml_modules := true;
add_command_arg cmd (BF obj)
| _ -> ()
) lib2.lib_asm_targets;
if not (!has_ocaml_modules && lib2.lib_autolink) then
List.iter (fun (obj, kind) ->
match kind with
| STUB_A -> add_command_arg cmd (BF obj)
(* [S "-cclib"; S ("-l" ^ lib2.lib_stubarchive)] *)
| _ -> ()
) lib2.lib_stub_targets;
| SyntaxPackage -> ()
| TestPackage -> ()
) lib.lib_linkdeps;
let asmlink_libs =
List.map (fun s ->
let s = BuildSubst.subst_global s in
add_package_file lib s
) (asmlink_libs.get options) in
List.iter (fun s -> add_command_arg cmd (BF s)) asmlink_libs;
let cmd = add_files_to_link_to_command lib "asm prog" cmd options cmx_files in
let r = new_rule lib opt_file [cmd] in
add_more_rule_sources lib r [ ocamlopt_deps; asmlink_deps; link_deps ] options;
add_rule_sources r cmx_files;
add_rule_sources r cmx_o_files;
add_rule_sources r !(ptmp.cmi_files);
add_rule_sources r o_files;
List.iter (fun lib2 ->
List.iter (fun (obj, kind) ->
match kind with
| CMXA | CMXA_A
| CMX | CMX_O -> add_rule_source r obj
| _ -> ()
) lib2.lib_asm_targets;
List.iter (fun (obj, kind) ->
match kind with
| STUB_A -> add_rule_source r obj
| _ -> ()
) lib2.lib_stub_targets;
) lib.lib_linkdeps;
add_rule_sources r asmlink_libs;
()
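(* [add_os2a_rule lib o_files a_file] builds the C stub archive with
   ocamlmklib. The "lib" prefix and the platform library extension are
   stripped from the target before it is passed to "-o", since ocamlmklib adds
   them back itself. *)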
let add_os2a_rule lib o_files a_file =
let envs = lib.lib_opk.opk_options in
if not lib.lib_opk.opk_installed then
let target = a_file.file_basename in
let ext_lib = BuildOCamlConfig.ocaml_config_ext_lib.get envs in
let target_without_ext = Filename.chop_suffix target ext_lib in
let target_without_prefix = chop_prefix target_without_ext "lib" in
let target = FileGen.add_basename a_file.file_dir.dir_file target_without_prefix in
let cmd = new_command (ocamlmklib_cmd.get envs)
[S "-custom"; S "-o"; F target] in
List.iter (add_command_string cmd)
(mklib_option.get lib.lib_opk.opk_options );
List.iter (fun o_file ->
add_command_arg cmd (BF o_file)) o_files;
let r = new_rule lib a_file
[Execute cmd] in
add_more_rule_sources lib r [ ocamlmklib_deps ] envs;
add_rule_sources r o_files;
()
let add_c_source b lib ptmp c_file options =
let envs = options :: lib.lib_opk.opk_options in
let dst_dir = lib.lib.lib_dst_dir in
let basename = c_file.file_basename in
let kernel_name = Filename.chop_suffix basename ".c" in
let ext_obj = BuildOCamlConfig.ocaml_config_ext_obj.get envs in
let o_file = add_dst_file lib dst_dir (kernel_name ^ ext_obj) in
if not lib.lib_opk.opk_installed then
add_c2o_rule b lib [] c_file o_file envs;
ptmp.o_files := o_file :: !(ptmp.o_files)
let add_command_pack_args cmd modnames =
if modnames <> [] then
add_command_args cmd [S "-for-pack";
S (String.concat "." modnames)]
let move_compilation_garbage r copy_dir temp_dir kernel_name lib =
let move_to_sources dst_dir_virt exts =
let dst_dir = dst_dir_virt.dir_file in
List.iter (fun ext ->
let basename = kernel_name ^ ext in
let src_file = FileGen.add_basename temp_dir basename in
let dst_file = FileGen.add_basename dst_dir basename in
let _maybe_file = add_file lib lib.lib.lib_mut_dir basename in
add_rule_command r (MoveIfExists (F src_file, F dst_file, None))
) exts
in
move_to_sources lib.lib.lib_mut_dir [ ".annot"; ".s" ];
let move_to_build exts =
List.iter (fun ext ->
let basename = kernel_name ^ ext in
let src_file = FileGen.add_basename temp_dir basename in
let dst_file = add_file lib lib.lib.lib_dst_dir basename in
let link_file = add_file lib copy_dir (basename ^ ".link") in
add_rule_command r (MoveIfExists
(F src_file, BF dst_file, Some (BF link_file)))
) exts
in
move_to_build [ ".cmt"; ".cmti"; ".spit"; ".spot"; ]
let do_copy_objects_from lib src_lib kernel_name extension obj_files =
Printf.eprintf " do_copy_objects_from % s:%s.%s - > % s:%s.%s\n% ! "
src_lib.lib_name kernel_name extension lib.lib_name kernel_name extension ;
Printf.eprintf "do_copy_objects_from %s:%s.%s -> %s:%s.%s\n%!"
src_lib.lib_name kernel_name extension lib.lib_name kernel_name extension;
*)
let obj_basename = kernel_name ^ extension in
let obj_file_to_build =
try
let obj_file = find_dst_file lib.lib.lib_dst_dir obj_basename in
if obj_file.file_target_of = [] then Some obj_file else None
with NoSuchFileInDir _ ->
Some (add_dst_file lib lib.lib.lib_dst_dir obj_basename)
in
match obj_file_to_build with
| None -> ()
| Some dst_obj_file ->
try
let src_obj_file = find_dst_file src_lib.lib.lib_dst_dir obj_basename in
let r = new_rule lib dst_obj_file [] in
add_rule_command r (Copy (BF src_obj_file, BF dst_obj_file));
add_rule_source r src_obj_file;
obj_files := dst_obj_file :: !obj_files
with NoSuchFileInDir _ ->
Printf.eprintf "Error: %s:%s is supposed to be copied from %s:%s that does not exist\n%!"
lib.lib.lib_name obj_basename src_lib.lib.lib_name obj_basename;
clean_exit 2
let get_copy_objects_from lib envs =
match BuildValue.get_string_option_with_default envs "copy_objects_from" None with
None -> None
| Some name ->
let bc = lib.lib.lib_builder_context in
try
BuildOCamlGlobals.get_by_id (StringMap.find name bc.packages_by_name)
with Not_found ->
Printf.eprintf "Error: in package %S, copy_objects_from %S, no such package\n%!" lib.lib.lib_name name;
clean_exit 2
let copy_ml_objects_from lib ptmp src_lib kernel_name =
(* TODO: check that pack_for = [] *)
(* TODO: check that src_lib is in requires *)
let envs = lib.lib_opk.opk_options in
do_copy_objects_from lib src_lib kernel_name ".cmi" ptmp.cmi_files;
if lib.lib_opk.opk_has_byte then
do_copy_objects_from lib src_lib kernel_name ".cmo" ptmp.cmo_files;
if lib.lib_opk.opk_has_asm then
let ext_obj = BuildOCamlConfig.ocaml_config_ext_obj.get envs in
do_copy_objects_from lib src_lib kernel_name ".cmx" ptmp.cmx_files;
do_copy_objects_from lib src_lib kernel_name ext_obj ptmp.cmx_o_files
let object_dst_dir b lib pack_for =
let dst_dir = lib.lib.lib_dst_dir in
match pack_for with
[] -> dst_dir
| modnames ->
let name = String.concat "/" modnames in
let full_dirname = Filename.concat dst_dir.dir_fullname name in
if not lib.lib_opk.opk_installed then
safe_mkdir full_dirname;
BuildEngineContext.add_directory b full_dirname
let ml2odoc lib ptmp kernel_name envs before_cmd pack_for force temp_ml_file ml_file seq_order =
if needs_odoc lib && indocs envs then
let b = lib.lib.lib_context in
let dst_dir = object_dst_dir b lib pack_for in
let odoc_basename = kernel_name ^ ".odoc" in
let odoc_file = add_dst_file lib dst_dir odoc_basename in
let cmd = new_command (ocamldoc_cmd.get envs ) (docflags envs) in
let r = new_rule lib odoc_file before_cmd in
add_more_rule_sources lib r [ ocamldoc_deps ] envs;
add_command_args cmd [S "-dump"; T odoc_basename];
add_command_strings cmd (command_includes lib pack_for);
if force = Force_IMPL || ml_file_option.get envs then
add_command_string cmd "-impl";
add_command_arg cmd temp_ml_file;
add_rule_command r (Execute cmd);
add_rule_source r ml_file;
cross_move r [ T odoc_basename, BF odoc_file ];
ptmp.odoc_files := odoc_file :: !(ptmp.odoc_files);
add_rule_sources r seq_order;
()
let mli2odoc lib ptmp kernel_name envs pack_for force mli_file seq_order =
if needs_odoc lib && indocs envs then
let b = lib.lib.lib_context in
let dst_dir = object_dst_dir b lib pack_for in
let odoc_basename = kernel_name ^ ".odoc" in
let odoc_file = add_dst_file lib dst_dir odoc_basename in
let cmd = new_command (ocamldoc_cmd.get envs ) (docflags envs) in
let r = new_rule lib odoc_file [] in
add_more_rule_sources lib r [ ocamldoc_deps ] envs;
add_command_args cmd [S "-dump"; BF odoc_file];
add_command_strings cmd (command_includes lib pack_for);
if force = Force_INTF || mli_file_option.get envs then
add_command_string cmd "-intf";
add_command_args cmd [ BF mli_file];
add_rule_command r (Execute cmd);
add_rule_source r mli_file;
ptmp.odoc_files := odoc_file :: !(ptmp.odoc_files);
add_rule_sources r seq_order;
()
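(* [add_mli_source] registers an interface file: it runs the preprocessor if
   one is configured, adds the .mlimods dependency rule, compiles the .mli to
   a .cmi (with ocamlc, or with ocamlopt when the package has no bytecode
   part), records the module in [lib_internal_modules] and [lib_aliases], and
   schedules the .odoc generation. An interface must be listed before its
   implementation. *)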
let add_mli_source w b lib ptmp mli_file options =
let envs = options :: lib.lib_opk.opk_options in
if lib.lib_opk.opk_installed then () else
let _ = () in
let basename = mli_file.file_basename in
let kernel_name = Filename.chop_extension basename in
let kernel_modalias = String.capitalize kernel_name in
let kernel_name = match lib.lib_alias with
| None -> kernel_name
| Some alias -> Printf.sprintf "%s__%s" alias kernel_modalias
in
let kernel_modname = String.capitalize kernel_name in
let copy_objects_from = get_copy_objects_from lib envs in
match copy_objects_from with
| Some src_lib ->
(* TODO: check that pack_for = [] *)
(* TODO: check that src_lib is in requires *)
do_copy_objects_from lib src_lib kernel_name ".cmi" ptmp.cmi_files;
(* TODO: do the same for .odoc files ! *)
| None ->
if IntMap.mem mli_file.file_id ptmp.src_files then begin
Printf.eprintf "Error: interface %s should be specified before its implementation in project %s\n%!"
(file_filename mli_file) lib.lib.lib_name;
clean_exit 2
end;
let pack_for = BuildValue.get_strings_with_default envs "packed" [] in
let dst_dir = object_dst_dir b lib pack_for in
ptmp.src_files <- IntMap.add mli_file.file_id mli_file ptmp.src_files;
let copy_dir = copy_dir lib mli_file in
let ppv = BuildOCamlSyntaxes.get_pp "mli" w lib basename options in
let comp_deps = comp_deps w lib options in
let mli_file, force =
match ppv.pp_option with
[] -> mli_file, Force_not
| pp ->
        (* TODO: we should create the new_ml_file in the same subdirectory
           as the source file, not at the toplevel !! *)
let new_mli_file =
add_file lib lib.lib.lib_mut_dir (mli_file.file_basename ^ "pp")
in
let cmd = new_command pp (ppv.pp_flags @ [ BF mli_file ]) in
add_command_pipe cmd (FileGen.to_string new_mli_file.file_file);
let r = new_rule lib new_mli_file [] in
add_more_rule_sources lib r [] envs;
add_rule_command r (Execute cmd);
BuildOCamlSyntaxes.add_pp_requires r ppv;
add_more_rule_sources lib r [ pp_deps ] envs;
add_rule_source r mli_file;
add_rule_sources r comp_deps;
new_mli_file, Force_INTF
in
let mldep_file =
add_dst_file lib dst_dir (kernel_name ^ ".mlimods")
in
let needs_odoc = needs_odoc lib && indocs envs in
let mldep_file_ok =
add_ml2mldep_rule lib dst_dir pack_for force mli_file mldep_file needs_odoc options in
let seq_order = [mldep_file_ok] in
let cmi_basename = kernel_name ^ ".cmi" in
let cmi_temp = add_temp_file lib mli_file.file_dir cmi_basename in
let cmi_file = add_dst_file lib dst_dir cmi_basename in
let cmd, cmd_deps =
if lib.lib_opk.opk_has_byte then
let cmd = new_command (ocamlc_cmd.get envs ) (bytecompflags envs) in
add_bin_annot_argument cmd envs;
add_command_args cmd [S "-c"; S "-o"; BF cmi_temp];
add_command_strings cmd (comp_alias_options lib options);
add_command_strings cmd (command_includes lib pack_for);
(* add_command_strings cmd (command_pp lib options); *)
if force = Force_INTF || mli_file_option.get envs then
add_command_args cmd [S "-intf" ];
add_command_args cmd [BF mli_file];
cmd, ocamlc_deps
else
let cmd = new_command (ocamlopt_cmd.get envs ) (asmcompflags envs) in
add_bin_annot_argument cmd envs;
add_command_args cmd [S "-c"; S "-o"; BF cmi_temp];
add_command_strings cmd (comp_alias_options lib options);
add_command_strings cmd (command_includes lib pack_for);
add_command_pack_args cmd pack_for;
        (*      add_command_strings cmd (command_pp lib options); *)
if force = Force_INTF || mli_file_option.get envs then
add_command_string cmd "-intf" ;
add_command_args cmd [BF mli_file];
cmd, ocamlopt_deps
in
let r = new_rule lib cmi_file [Execute cmd] in
add_more_rule_sources lib r [cmd_deps] envs;
add_rule_sources r comp_deps;
if cmi_temp != cmi_file then begin
cross_move r [ BF cmi_temp, BF cmi_file ];
add_rule_temporary r cmi_temp;
end;
move_compilation_garbage r copy_dir mli_file.file_dir.dir_file
kernel_name lib;
add_rule_source r mli_file;
add_rule_sources r seq_order;
    (* TODO: we should actually rename all modules to fit
       their capitalized name in the _obuild directory *)
let lib_modules =
let pack_for = List.rev pack_for in
try
let (_, map) = StringsMap.find pack_for lib.lib_internal_modules in
map
with Not_found ->
let map = ref StringMap.empty in
lib.lib_internal_modules <-
StringsMap.add pack_for (dst_dir, map) lib.lib_internal_modules;
map
in
begin
let dep_info =
try
let (kind, basename) = StringMap.find kernel_modname !lib_modules in
match kind with
MLI
| MLandMLI -> None
| ML -> Some (MLandMLI, basename)
with Not_found ->
if verbose 5 then
Printf.eprintf "Adding MLI module %s to %s in %s\n"
kernel_modname kernel_name lib.lib.lib_name;
Some (MLI, DepBasename kernel_name)
in
match dep_info with
| None -> ()
| Some (kind, basename) ->
lib_modules :=
StringMap.add kernel_modname (kind, basename) !lib_modules;
if kernel_modname <> kernel_modalias then
lib.lib_aliases <-
StringMap.add kernel_modalias (kind, basename) lib.lib_aliases
end;
mli2odoc lib ptmp kernel_name
envs pack_for force mli_file seq_order;
if pack_for = [] then
ptmp.cmi_files := cmi_file :: !(ptmp.cmi_files)
let rec find_capital s len =
if len > 0 then
let pos = len-1 in
let c = s.[pos] in
if c = '/' || c = '\\' then len
else
find_capital s pos
else 0
let invert_capital s =
let len = String.length s in
let pos = find_capital s len in
Printf.eprintf " invert_capital % S at pos % d\n% ! " s pos ;
if pos < len then
let s= Bytes.of_string s in
let c = Bytes.get s pos in
begin
match c with
| 'a'..'z' -> s.[pos] <- Char.uppercase c
| 'A'..'Z' -> s.[pos] <- Char.lowercase c
| _ -> ()
end;
Bytes.to_string s
else s
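(* [find_source_with_extension b lib src_dir kernel_name exts] looks for
   [kernel_name].[ext] in [src_dir], trying each extension in turn and also
   the variant with the first letter of the basename capitalized or
   uncapitalized (see [invert_capital]); it exits with an error when no
   candidate exists. *)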
let rec find_source_with_extension b lib src_dir kernel_name exts =
match exts with
| [] ->
Printf.eprintf "Error: package %S, module %S, could not find\n"
lib.lib.lib_name kernel_name;
Printf.eprintf " matching source in source directory\n";
Printf.eprintf " %S\n%!" src_dir.dir_fullname;
clean_exit 2
| ext :: rem_exts ->
let basename1 = kernel_name ^ "." ^ ext in
let test1 = FileGen.add_basename src_dir.dir_file basename1 in
if FileGen.exists test1 then
(basename1, ext)
else
let basename2 = invert_capital (kernel_name ^ "." ^ ext) in
let test2 = FileGen.add_basename src_dir.dir_file basename2 in
if FileGen.exists test2 then
(basename2, ext)
else
find_source_with_extension b lib src_dir kernel_name rem_exts
let standard_source_exts = [ "mly"; "mll"; "ml"; "mli"; "c" ]
let get_packed_objects lib r src_dir pack_of obj_ext =
let options = lib.lib_opk.opk_options in
let packed_cmx_files = ref [] in
let b = r.rule_context in
List.iter (fun basename ->
let basename, extension = FileString.cut_at_last_extension basename in
let (filename, _obj_extension) =
if extension = "" then
find_source_with_extension b lib src_dir basename
[ obj_ext; "cmi" ]
else
let obj_extension = match String.lowercase extension with
"ml" | "mll" | "mly" -> obj_ext
| "mli" -> "cmi"
| ext ->
if List.mem ext (BuildValue.get_strings_with_default options
"impl_exts" []) then
obj_ext
else
if List.mem ext (BuildValue.get_strings_with_default options
"intf_exts" []) then
"cmi"
else
Printf.ksprintf failwith
"Bad extension [%s] for filename [%s]" extension basename
in
(basename ^ "." ^ obj_extension, obj_extension)
in
let object_file = add_file lib src_dir filename in
packed_cmx_files := object_file :: !packed_cmx_files;
add_rule_source r object_file;
) pack_of;
let packed_cmx_files = List.rev !packed_cmx_files in
packed_cmx_files
let bprintf_list b name list =
Printf.bprintf b "let %s = [\n" name;
List.iter (fun s -> Printf.bprintf b " %S;\n" s) list;
Printf.bprintf b " ]\n"
let (//) = Filename.concat
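(* [find_git_commit dir] walks up from [dir] looking for a .git directory,
   reads .git/HEAD and, when it contains a "ref: ..." indirection, reads the
   referenced file to obtain the commit hash. It returns "no-git-commit" when
   no repository is found. *)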
let rec find_git_commit dir =
let git_dir = dir // ".git" in
if Sys.file_exists git_dir then
let filename = git_dir // "HEAD" in
try
let ref =
let ic = open_in filename in
let line = input_line ic in
close_in ic;
line
in
let ref, file = OcpString.cut_at ref ' ' in
if ref = "ref:" then
let ic = open_in (git_dir // file) in
let line = input_line ic in
close_in ic;
line
else ref
with _ ->
let head = try FileString.string_of_file filename with _ -> "??" in
Printf.eprintf "Warning: unreadable-git-commit\nHEAD %S:\n%S\n%!"
filename head;
"unreadable-git-commit"
else
let new_dir = Filename.dirname dir in
if dir = new_dir then "no-git-commit"
else find_git_commit new_dir
let add_info b lib options name =
match name with
| "ocp::dates" ->
let (date, en_date) =
try
BuildValue.get_string options "ocp_date",
BuildValue.get_string options "ocp_en_date"
        (*
          ignore (Sys.getenv "OCPBUILD_NODATE");
          "NODATE", "NODATE (option OCPBUILD_NODATE)"
        *)
with _ ->
        (* Tue Jan 20 17:48:12 CET 2015 *)
let tm = MinUnix.localtime (MinUnix.time()) in
let date =
Printf.sprintf "%04d-%02d-%02d %02d:%02d"
(1900+tm.MinUnix.tm_year) (1+tm.MinUnix.tm_mon)
tm.MinUnix.tm_mday tm.MinUnix.tm_hour tm.MinUnix.tm_min
in
let en_date =
try
let date = MinUnix.strftime "%a %b %d %T %Z %Y" tm in
if date = "" then failwith "strftime";
date
with _ -> date
in
(date, en_date)
in
Printf.bprintf b "let date = %S\n" date;
Printf.bprintf b "let en_date = %S\n" en_date;
| "ocp::commit" ->
Printf.bprintf b "let commit = %S\n"
(let commit = find_git_commit lib.lib.lib_src_dir.dir_fullname in
try String.sub commit 0 8
with _ -> commit
)
| name ->
Printf.bprintf b "let %s = %S\n" name
(BuildValue.get_string_with_default options name "")
let create_ml_file_if_needed lib mut_dir options ml_file =
if BuildValue.get_bool_with_default options "ocp2ml" false then begin
let tmp_ml = add_file lib mut_dir ml_file.file_basename in
let tmp_ml_file = tmp_ml.file_file in
(* generate file in a buffer *)
let b = Buffer.create 1000 in
let opk = lib.lib_opk in
Printf.bprintf b "(* Generated by ocp-build *)\n";
Printf.bprintf b "let package = %S\n" lib.lib.lib_name;
Printf.bprintf b "let version = %S\n" opk.opk_version;
bprintf_list b "authors"
(
(BuildValue.get_strings_with_default options "author" [])
@
(BuildValue.get_strings_with_default options "authors" [])
);
List.iter (add_info b lib options) [
"copyright";
"license";
"description";
];
List.iter (fun variable ->
bprintf_list b variable ( BuildValue.get_strings_with_default options variable [] )
) (BuildValue.get_strings_with_default options "env_lists" []);
List.iter (add_info b lib options)
(BuildValue.get_strings_with_default options "env_strings" []);
List.iter (fun variable ->
Printf.bprintf b "let %s = %b\n" variable
(BuildValue.get_bool_with_default options variable false)
) (BuildValue.get_strings_with_default options "env_bools" []);
Printf.bprintf b "let requires = [\n";
List.iter (fun dep ->
let lib = dep.dep_project in
Printf.bprintf b " %S, %S;\n" lib.lib.lib_name
lib.lib_opk.opk_version;
) lib.lib_requires;
Printf.bprintf b " ]\n";
let ml_content = Buffer.contents b in
BuildEngineReport.cmd_file_from_content
(FileGen.to_string tmp_ml_file) ml_content;
if FileGen.exists tmp_ml_file then begin
let old_ml_content = FileGen.read_file tmp_ml_file in
if ml_content <> old_ml_content then begin
if verbose 2 then
Printf.fprintf stderr "create %s [outdated]\n%!"
(FileGen.to_string tmp_ml_file);
FileGen.write_file tmp_ml_file ml_content
end
end else begin
if verbose 2 then
Printf.fprintf stderr "create %s [unexisting] \n%!"
(FileGen.to_string tmp_ml_file);
FileGen.write_file tmp_ml_file ml_content;
end;
tmp_ml
end else ml_file
(* Instead of copy_mli_if_needed that copies the mli file during
   OCamlBuildRules, we should instead create a rule to generate this
   file, and make the .ml rules depend on it. *)
let copy_mli_if_needed lib mut_dir mll_file kernel_name =
try
let mli_file = FileGen.add_basename mll_file.file_dir.dir_file (kernel_name ^ ".mli") in
if FileGen.exists mli_file then begin
let mli_content = FileGen.read_file mli_file in
let tmp_mli = add_file lib mut_dir (kernel_name ^ ".mli") in
let tmp_mli_file = tmp_mli.file_file in
BuildEngineReport.cmd_copy (FileGen.to_string mli_file)
(FileGen.to_string tmp_mli_file);
if FileGen.exists tmp_mli_file then
let old_mli_content = FileGen.read_file tmp_mli_file in
if mli_content <> old_mli_content then begin
if verbose 2 then
Printf.fprintf stderr "cp %s %s [outdated]\n%!"
(FileGen.to_string mli_file) (FileGen.to_string tmp_mli_file);
FileGen.write_file tmp_mli_file mli_content
end else
()
else begin
if verbose 2 then
Printf.fprintf stderr "cp %s %s [unexisting] \n%!"
(FileGen.to_string mli_file) (FileGen.to_string tmp_mli_file);
FileGen.write_file tmp_mli_file mli_content;
      end
    end
    else
      () (*
      Printf.eprintf "MLI FILE %S does not exist\n%!"
        (FileGen.to_string mli_file); *)
with e ->
Printf.eprintf "copy_mli_if_needed error %s\n%!" (Printexc.to_string e);
clean_exit 2
(* Shall we infer the presence of the mli file ? We should probably ask the user
to tell the build system that the mli does not exist. *)
let content_generator new_file f =
function () ->
let b = Buffer.create 10000 in
f b;
let content = Buffer.contents b in
let file = file_filename new_file in
BuildEngineReport.cmd_file_from_content file content;
FileString.file_of_string file content;
()
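(* [add_ml_source] is the per-module workhorse: it handles module aliasing
   ('lib_alias'), 'copy_objects_from', the generated sources ('ocp2ml' and
   'file2string'), preprocessing, .mlmods dependency computation, and the
   ocamlc/ocamlopt compilation rules (including "-pack" when 'pack' is set),
   moving the produced objects from the rule's temporary directory to the
   package destination directory. *)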
let add_ml_source w b lib ptmp ml_file options =
let needs_odoc = needs_odoc lib in
let envs = options :: lib.lib_opk.opk_options in
let basename = ml_file.file_basename in
Printf.eprintf " basename = [ % s]\n " basename ;
let kernel_alias =
BuildValue.get_string_with_default envs "module"
(Filename.chop_extension basename) in
let kernel_modalias = String.capitalize kernel_alias in
let kernel_name = match lib.lib_alias with
| None -> kernel_alias
| Some alias ->
if is_aliased options then
Printf.sprintf "%s__%s" alias kernel_modalias
else
kernel_alias
in
let kernel_modname = String.capitalize kernel_name in
let has_byte = lib.lib_opk.opk_has_byte in
let has_asm = lib.lib_opk.opk_has_asm in
let orig_ml_file = ml_file in
let pack_for = BuildValue.get_strings_with_default envs "packed" [] in
if lib.lib_opk.opk_installed then begin
if pack_for = [] then begin
Printf.eprintf " add_ml_source : % s is already installed in % s\n% ! "
basename ( FileGen.to_string dst_dir.dir_file ) ;
Printf.eprintf " ml_file % s\n% ! " ( file_filename ml_file ) ;
Printf.eprintf "add_ml_source: %s is already installed in %s\n%!"
basename (FileGen.to_string dst_dir.dir_file);
Printf.eprintf "ml_file %s\n%!" (file_filename ml_file);
*)
let dst_dir = ml_file.file_dir in
let cmo_basename = kernel_name ^ ".cmo" in
let cmo_file = add_dst_file lib dst_dir cmo_basename in
let cmx_basename = kernel_name ^ ".cmx" in
let cmx_file = add_dst_file lib dst_dir cmx_basename in
let ext_obj = BuildOCamlConfig.ocaml_config_ext_obj.get envs in
let o_basename = kernel_name ^ ext_obj in
let o_file = add_dst_file lib dst_dir o_basename in
(* TODO: we should check that they do exist !! *)
if has_byte then
ptmp.cmo_files := cmo_file :: !(ptmp.cmo_files);
if has_asm then begin
ptmp.cmx_files := cmx_file :: !(ptmp.cmx_files);
ptmp.cmx_o_files := o_file :: !(ptmp.cmx_o_files)
end
end
end else
let comp_deps = comp_deps w lib options in
let copy_objects_from = get_copy_objects_from lib envs in
match copy_objects_from with
| Some src_lib ->
copy_ml_objects_from lib ptmp src_lib kernel_name
| None ->
let copy_dir = copy_dir lib ml_file in
let old_ml_file = ml_file in
let ml_file = create_ml_file_if_needed lib lib.lib.lib_mut_dir envs ml_file in
let ppv = BuildOCamlSyntaxes.get_pp "ml" w lib basename options in
(* [has_mli] = None | Some (build_file, in_source_directory_predicate) *)
let _has_mli =
if no_mli_option.get envs then None else
try
let mli_file = BuildValue.get_string envs mli_file_attr in
Some (add_package_file lib mli_file, false)
with Var_not_found _ ->
let mli_name = kernel_alias ^ ".mli" in
let mli_file =
Filename.concat
orig_ml_file.file_dir.dir_fullname
mli_name
in
            (* do that before pp_option changes it! *)
if Sys.file_exists mli_file then
Some (add_file lib orig_ml_file.file_dir mli_name, true)
else
try
Some (find_dst_file lib.lib.lib_src_dir (kernel_alias ^ ".mli"), true)
with NoSuchFileInDir _ -> None
in
let ml_file =
let file2string = BuildValue.get_strings_with_default envs
file2string_attr [] in
if file2string = [] then ml_file else
let new_ml_file = add_file lib lib.lib.lib_mut_dir ml_file.file_basename
in
let r = new_rule lib new_ml_file [] in
(* TODO: for bytecode, we should generate the .mli too *)
let sources = List.map (fun file ->
file, add_file lib orig_ml_file.file_dir file
) file2string in
add_rule_sources r (List.map snd sources);
add_rule_command r (
Function ("file2string",
(fun b ->
List.iter (fun (file, _) ->
Printf.bprintf b "%s\n" file
) sources
),
content_generator new_ml_file
(fun b ->
Printf.bprintf b "let files = [\n";
List.iter (fun (file, src_file) ->
Printf.bprintf b "%S, %S;"
file (FileString.string_of_file (file_filename src_file))
) sources;
Printf.bprintf b " ]\n";
)));
new_ml_file
in
let ml_file, force =
match ppv.pp_option with
[] -> ml_file, Force_not
| pp ->
            (* TODO: we should create the new_ml_file in the same subdirectory
               as the source file, not at the toplevel !! *)
let new_ml_file =
add_file lib lib.lib.lib_mut_dir (ml_file.file_basename ^ "pp")
in
let cmd = new_command pp (ppv.pp_flags @ [ BF ml_file ]) in
add_command_pipe cmd (FileGen.to_string new_ml_file.file_file);
let r = new_rule lib new_ml_file [] in
add_more_rule_sources lib r [] envs;
add_rule_command r (Execute cmd);
BuildOCamlSyntaxes.add_pp_requires r ppv;
add_more_rule_sources lib r [ pp_deps ] envs;
add_rule_source r ml_file;
add_rule_sources r comp_deps;
new_ml_file, Force_IMPL
in
if old_ml_file != ml_file then begin
        (* Why are javascript files rebuilt?
Printf.eprintf "Need to copy mli file for %S\n%!"
(file_filename old_ml_file);
*)
copy_mli_if_needed lib lib.lib.lib_mut_dir old_ml_file kernel_alias;
end;
let dst_dir = object_dst_dir b lib pack_for in
let pack_of = pack_option.get envs in
      (*
        if pack_of <> [] then
          List.iter (fun pack -> Printf.eprintf "pack %s\n" pack) pack_of;
      *)
let cmi_name = kernel_name ^ ".cmi" in
(* TODO: we already check for this previously in _has_mli. Why not use
it ? *)
let needs_cmi =
try
          (* This case corresponds to a .mli file present in "files"
             before the .ml *)
let cmi_file = find_dst_file dst_dir cmi_name in
Some cmi_file
with NoSuchFileInDir _ ->
let mli_name = kernel_alias ^ ".mli" in
let mli_file =
Filename.concat
orig_ml_file.file_dir.dir_fullname
mli_name
in
if not (no_mli_option.get envs ) then
            (* do that before pp_option changes it! *)
let mli_file =
if Sys.file_exists mli_file then
Some (add_file lib orig_ml_file.file_dir mli_name)
else
try
Some (find_dst_file lib.lib.lib_src_dir (kernel_alias ^ ".mli"))
with NoSuchFileInDir _ -> None
in
match mli_file with
| Some mli_file ->
(* MLI file does exist !!! We should probably put a warning, as we
have no information on how to compile this file !!*)
ignore (add_mli_source w b lib ptmp mli_file (BuildValue.set_bool options "ml" false) : unit);
let cmi_file = find_dst_file dst_dir cmi_name in
Some cmi_file
| None -> None
else
None
in
ptmp.src_files <- IntMap.add ml_file.file_id ml_file ptmp.src_files;
let seq_order =
if pack_of <> [] ||
(* do not compute dependencies for alias source *)
(lib.lib_alias <> None && not (is_aliased options)) then
[] (* don't compute dependencies when we already know them *)
else
let mldep_file =
add_dst_file lib dst_dir (kernel_name ^ ".mlmods")
in
let mldep_file_ok = add_ml2mldep_rule lib dst_dir pack_for force ml_file mldep_file
(needs_odoc && needs_cmi = None) options in
ptmp.dep_files <- IntMap.add mldep_file.file_id mldep_file ptmp.dep_files;
[mldep_file_ok]
in
let seq_order = match needs_cmi with
None -> seq_order
| Some cmi_file -> cmi_file :: seq_order in
let gen_cmi = match needs_cmi with
None -> [add_dst_file lib dst_dir cmi_name ]
| Some _ -> []
in
let lib_modules =
let pack_for = List.rev pack_for in
try
let (_, map) = StringsMap.find pack_for lib.lib_internal_modules in
map
with Not_found ->
let map = ref StringMap.empty in
lib.lib_internal_modules <- StringsMap.add pack_for (dst_dir, map) lib.lib_internal_modules;
map
in
begin
let dep =
try
let (kind, basename) = StringMap.find kernel_modname !lib_modules
in
match kind with
ML
| MLandMLI -> None
| MLI -> Some (MLandMLI, basename)
with Not_found ->
if verbose 5 then
Printf.eprintf "Adding ML module %s to %s.CMO in %s\n"
kernel_modname kernel_name lib.lib.lib_name;
match lib.lib_alias with
| Some _ when kernel_modname = kernel_modalias ->
Some (ML, DepAlias lib)
| _ ->
Some (ML, DepBasename kernel_name)
in
match dep with
| None ->
Printf.eprintf
"ERROR: The file(s) %s appears more than once in %s\n%!"
(kernel_name ^ ".ml")
lib.lib.lib_filename
| Some (kind, basename) ->
lib_modules := StringMap.add kernel_modname (kind, basename)
!lib_modules;
if kernel_modname <> kernel_modalias then
lib.lib_aliases <-
StringMap.add kernel_modalias (kind, basename) lib.lib_aliases
end;
let cmi_basename = kernel_name ^ ".cmi" in
let cmi_file = add_dst_file lib dst_dir cmi_basename in
let (before_cmd, temp_ml_file) =
if no_mli_option.get envs then
let temp_ml_file = T (kernel_name ^ ".ml") in
([ NeedTempDir; Copy (BF ml_file, temp_ml_file)], temp_ml_file)
else
([], BF ml_file)
in
let needs_cmo =
if has_byte then begin
let cmo_basename = kernel_name ^ ".cmo" in
let cmo_file = add_dst_file lib dst_dir cmo_basename in
let r = new_rule lib cmo_file before_cmd in
add_more_rule_sources lib r [ ocamlc_deps ] envs;
add_rule_sources r comp_deps;
if pack_of = [] then begin
let cmd = new_command (ocamlc_cmd.get envs ) [] in
add_bin_annot_argument cmd envs;
add_command_args cmd [S "-c"; S "-o"; T cmo_basename];
add_command_strings cmd (comp_alias_options lib options);
add_command_pack_args cmd pack_for;
add_command_strings cmd (command_includes lib pack_for);
add_command_args cmd (bytecompflags envs);
(* add_command_strings cmd (command_pp ptmp options); *)
if force = Force_IMPL || ml_file_option.get envs then
add_command_string cmd "-impl";
add_command_arg cmd temp_ml_file;
add_rule_command r (Execute cmd);
add_rule_source r ml_file;
end else begin
let cmd = new_command (ocamlc_cmd.get envs ) [] in
add_bin_annot_argument cmd envs;
add_command_args cmd (bytecompflags envs);
add_command_args cmd [S "-pack"; S "-o"; T cmo_basename];
add_command_pack_args cmd pack_for;
let src_dir =
Filename.concat dst_dir.dir_fullname kernel_modname in
Printf.eprintf " Pack in % s [ % s]\n " src_dir modname ;
let src_dir = BuildEngineContext.add_directory b src_dir in
let cmo_files = get_packed_objects lib r src_dir pack_of "cmo" in
let cmd = add_files_to_link_to_command
lib "byte pack" cmd envs cmo_files in
add_rule_command r cmd
end;
cross_move r [ T cmo_basename, BF cmo_file ];
begin match needs_cmi with
None ->
cross_update r [T cmi_basename, BF cmi_file]
| _ -> ();
end;
if pack_for = [] then
ptmp.cmo_files := cmo_file :: !(ptmp.cmo_files);
move_compilation_garbage r copy_dir
(BuildEngineRules.rule_temp_dir r) kernel_name lib;
add_rule_sources r seq_order;
add_rule_targets r gen_cmi;
match needs_cmi with
None -> Some cmo_file
| Some _ -> None
end else None
in
let _needs_cmx =
if has_asm then begin
let cmx_basename = kernel_name ^ ".cmx" in
let cmx_file = add_dst_file lib dst_dir cmx_basename in
let ext_obj = BuildOCamlConfig.ocaml_config_ext_obj.get envs in
let o_basename = kernel_name ^ ext_obj in
let o_file = add_dst_file lib dst_dir o_basename in
let r = new_rule lib cmx_file before_cmd in
add_more_rule_sources lib r [ ocamlopt_deps] envs;
add_rule_sources r comp_deps;
        (*
          let temp_dir = BuildEngineRules.rule_temp_dir r in
          let o_temp = FileGen.add_basename temp_dir o_basename in
          let cmx_temp = FileGen.add_basename temp_dir cmx_basename in
          let cmi_temp = FileGen.add_basename temp_dir cmi_basename in
        *)
if pack_of = [] then begin
let cmd = new_command (ocamlopt_cmd.get envs ) [] in
add_bin_annot_argument cmd envs;
add_command_args cmd [S "-c"; S "-o"; T cmx_basename];
add_command_pack_args cmd pack_for;
add_command_strings cmd (command_includes lib pack_for);
add_command_strings cmd (comp_alias_options lib options);
add_command_args cmd (asmcompflags envs);
(* add_command_strings cmd (command_pp ptmp options); *)
if force = Force_IMPL || ml_file_option.get envs then
add_command_string cmd "-impl" ;
add_command_arg cmd temp_ml_file;
add_rule_command r (Execute cmd);
add_rule_source r ml_file;
end else begin
let cmd = new_command (ocamlopt_cmd.get envs ) [] in
add_bin_annot_argument cmd envs;
add_command_args cmd (asmcompflags envs);
add_command_args cmd [S "-pack"; S "-o"; T cmx_basename];
add_command_pack_args cmd pack_for;
let src_dir
= BuildEngineContext.add_directory
b (Filename.concat dst_dir.dir_fullname kernel_modname) in
let cmx_files = get_packed_objects
lib r src_dir pack_of "cmx" in
let cmd = add_files_to_link_to_command
lib "asm pack" cmd envs cmx_files in
add_rule_command r cmd
end;
cross_move r [ T cmx_basename, BF cmx_file;
T o_basename, BF o_file ];
begin match needs_cmi with
None ->
cross_update r [T cmi_basename, BF cmi_file]
| _ -> ();
end;
add_rule_sources r seq_order;
add_rule_targets r (o_file :: gen_cmi);
move_compilation_garbage r copy_dir (BuildEngineRules.rule_temp_dir r) kernel_name lib;
begin match needs_cmo with
Some cmo_file ->
              (* If both ocamlc and ocamlopt build the cmi file, they should
                 not execute concurrently. For that, we create an artificial
                 ordering between them, by requesting the cmo file before
                 the cmx file, if both have to be generated. *)
(* TODO: is this still useful ? Now that we build in a
temporary directory, there is no need for that, no ? *)
add_rule_time_dependency r cmo_file
| None -> ()
end;
if pack_for = [] then begin
ptmp.cmx_files := cmx_file :: !(ptmp.cmx_files);
ptmp.cmx_o_files := o_file :: !(ptmp.cmx_o_files);
end;
Some cmx_file
end else None
in
begin
match needs_cmi with
| Some _ -> ()
| None ->
if pack_of = [] then
ml2odoc lib ptmp kernel_name envs before_cmd pack_for force temp_ml_file ml_file seq_order
end;
if pack_for = [] then begin
if needs_cmi = None then
ptmp.cmi_files := cmi_file :: !(ptmp.cmi_files);
end
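(* .mll sources: the generated .ml (and a copy of a hand-written .mli, if
   any) is placed in the package's mutable directory via [add_mll2ml_rule] and
   [copy_mli_if_needed], and the result is then handled by [add_ml_source]. *)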
let add_mll_source w b lib ptmp mll_file options =
let envs = options :: lib.lib_opk.opk_options in
let basename = mll_file.file_basename in
let kernel_name = Filename.chop_suffix basename ".mll" in
if lib.lib_opk.opk_installed then
let ml_file = add_file lib lib.lib.lib_src_dir (kernel_name ^ ".ml") in
add_ml_source w b lib ptmp ml_file options
else
let copy_objects_from = get_copy_objects_from lib envs in
match copy_objects_from with
| Some src_lib ->
copy_ml_objects_from lib ptmp src_lib kernel_name
| None ->
        (*
          let tmp_dirname =
            Filename.concat
              (Filename.concat b.build_dir_filename "_temp_tree")
              (FileGen.to_string mll_file.file_dir.dir_file) in
          if not (Sys.file_exists tmp_dirname) then safe_mkdir tmp_dirname;
          let tmp_dir = add_directory b tmp_dirname in
        *)
let _ = () in
copy_mli_if_needed lib lib.lib.lib_mut_dir mll_file kernel_name;
let ml_file = add_file lib lib.lib.lib_mut_dir (kernel_name ^ ".ml") in
add_mll2ml_rule lib mll_file ml_file options;
add_ml_source w b lib ptmp ml_file options
let add_mly_source w b lib ptmp mly_file options =
let envs = options :: lib.lib_opk.opk_options in
let basename = mly_file.file_basename in
let kernel_name = Filename.chop_suffix basename ".mly" in
if lib.lib_opk.opk_installed then
let ml_file = add_file lib mly_file.file_dir (kernel_name ^ ".ml") in
add_ml_source w b lib ptmp ml_file options
else
let copy_objects_from = get_copy_objects_from lib envs in
match copy_objects_from with
| Some src_lib ->
copy_ml_objects_from lib ptmp src_lib kernel_name
| None ->
let _ = () in
let ml_file = add_file lib lib.lib.lib_mut_dir (kernel_name ^ ".ml") in
let mli_filename = kernel_name ^ ".mli" in
let mli_file = add_file lib lib.lib.lib_mut_dir mli_filename in
add_mli_source w b lib ptmp mli_file options;
add_mly2ml_rule lib mly_file ml_file mli_file options;
add_ml_source w b lib ptmp ml_file options
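(* [process_source] dispatches one entry of 'files' according to its extension
   (or, when the extension is omitted, to the first matching file found by
   [find_source_with_extension]); the 'ml_exts', 'impl_exts', 'mli_exts',
   'intf_exts', 'mll_exts' and 'mly_exts' options let custom extensions be
   routed to the right handler. *)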
let process_source w b lib ptmp src_dir (basename, options) =
let _bc = lib.lib.lib_builder_context in
let envs = options :: lib.lib_opk.opk_options in
let (kernel_name, last_extension) = OcpString.rcut_at basename '.' in
let (basename, last_extension) =
if last_extension = "" then
find_source_with_extension b lib src_dir kernel_name
standard_source_exts
else
(basename, last_extension)
in
let src_file = try
add_file lib src_dir basename
with MinUnix.Unix_error(MinUnix.ENOENT, _, _) ->
(* This actually only happens when the source file is located in a non-existing directory *)
Printf.eprintf "Error: missing source file %S for package %S\n%!"
(Filename.concat src_dir.dir_fullname basename) lib.lib.lib_name;
Printf.eprintf " (You may need to manually disable compilation of this package\n";
Printf.eprintf " with 'enabled = false')\n%!";
clean_exit 2
in
match last_extension with
"c" ->
add_c_source b lib ptmp src_file options
| " objects " - >
let obj_lib =
try
StringMap.find kernel_name bc.packages_by_name
with Not_found - >
Printf.eprintf " Package % s : Could not find % s.objects in:\n% ! "
lib.lib.lib_name kernel_name ;
StringMap.iter ( fun s _ - > Printf.eprintf " % s " s ) bc.packages_by_name ;
Printf.eprintf " \n% ! " ;
clean_exit 2
in
begin match BuildOCamlGlobals.get_by_id obj_lib with
| None - > ( )
| Some obj_lib - >
ptmp.cmo_files : = ( List.rev obj_lib.lib_cmo_objects ) @ ! ( ptmp.cmo_files ) ;
ptmp.cmx_files : = ( List.rev obj_lib.lib_cmx_objects ) @ ! ( ptmp.cmx_files ) ;
ptmp.cmx_o_files : = ( List.rev obj_lib.lib_cmx_o_objects ) @ ! ( ptmp.cmx_o_files ) ;
( )
end
| " files " - >
let obj_lib =
try
StringMap.find kernel_name bc.packages_by_name
with Not_found - >
Printf.eprintf " Package % s : Could not find % s.objects\n% ! "
lib.lib.lib_name kernel_name ;
clean_exit 2
in
begin match BuildOCamlGlobals.get_by_id obj_lib with
| None - > ( )
| Some obj_lib - >
let src_dir = obj_lib.lib.lib_src_dir in
List.iter ( process_source w b lib ptmp src_dir ) obj_lib.lib_sources
end
| "objects" ->
let obj_lib =
try
StringMap.find kernel_name bc.packages_by_name
with Not_found ->
Printf.eprintf "Package %s: Could not find %s.objects in:\n%!"
lib.lib.lib_name kernel_name;
StringMap.iter (fun s _ -> Printf.eprintf "%s " s) bc.packages_by_name;
Printf.eprintf "\n%!";
clean_exit 2
in
begin match BuildOCamlGlobals.get_by_id obj_lib with
| None -> ()
| Some obj_lib ->
ptmp.cmo_files := (List.rev obj_lib.lib_cmo_objects) @ !(ptmp.cmo_files);
ptmp.cmx_files := (List.rev obj_lib.lib_cmx_objects) @ !(ptmp.cmx_files);
ptmp.cmx_o_files := (List.rev obj_lib.lib_cmx_o_objects) @ !(ptmp.cmx_o_files);
()
end
| "files" ->
let obj_lib =
try
StringMap.find kernel_name bc.packages_by_name
with Not_found ->
Printf.eprintf "Package %s: Could not find %s.objects\n%!"
lib.lib.lib_name kernel_name;
clean_exit 2
in
begin match BuildOCamlGlobals.get_by_id obj_lib with
| None -> ()
| Some obj_lib ->
let src_dir = obj_lib.lib.lib_src_dir in
List.iter (process_source w b lib ptmp src_dir) obj_lib.lib_sources
end
*)
| "ml" ->
add_ml_source w b lib ptmp src_file options
| "mll" ->
add_mll_source w b lib ptmp src_file options
| "mly" ->
add_mly_source w b lib ptmp src_file options
| "mli" ->
add_mli_source w b lib ptmp src_file options
    (* other ones: .ml4, , .ml5, .mli5, .mly4, .mly5, .mll4, .mll5 *)
| ext ->
if ml_file_option.get envs
|| List.mem ext (BuildValue.get_strings_with_default envs "ml_exts" [])
|| List.mem ext (BuildValue.get_strings_with_default envs "impl_exts" [])
then
add_ml_source w b lib ptmp src_file options
else
if mli_file_option.get envs
|| List.mem ext (BuildValue.get_strings_with_default envs "mli_exts" [])
|| List.mem ext (BuildValue.get_strings_with_default envs "intf_exts" [])
then
add_mli_source w b lib ptmp src_file options
else
if
List.mem ext (BuildValue.get_strings_with_default envs "mll_exts" [])
then
add_mll_source w b lib ptmp src_file options
else
if
List.mem ext (BuildValue.get_strings_with_default envs "mly_exts" [])
then
add_mly_source w b lib ptmp src_file options
else
begin
Printf.eprintf "Don't know what to do with [%s] (extension %S)\n%!"
(String.escaped basename) ext;
Printf.eprintf "\tfrom project %s in dir %s\n%!"
lib.lib.lib_name src_dir.dir_fullname;
clean_exit 2;
end
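(* This wrapper resolves the optional 'package' and 'subdir' attributes of a
   source entry, so that a file can be taken from another package's source
   directory or from a subdirectory, before delegating to the dispatcher
   above. *)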
let process_source w b lib ptmp src_dir (basename, options) =
let bc = lib.lib.lib_builder_context in
let envs = options :: lib.lib_opk.opk_options in
let src_dir =
let package = package_option.get envs in
if package = "" then src_dir else
let obj_lib =
try
StringMap.find package bc.packages_by_name
with Not_found ->
Printf.eprintf "Package %s: Could not find package %s\n%!"
lib.lib.lib_name package;
clean_exit 2
in
let src_dir = obj_lib.lib_src_dir in
src_dir
in
let basename =
let subdir = subdir_option.get envs in
match subdir with
[] -> basename
| subdir ->
(* Since basename can be a relative filename, we use both
FileGen.t and strings. Clearly, it is not good, and we should
convert basename to FileGen.t earlier *)
let subdir = FileGen.add_basenames (FileGen.of_string "") subdir in
Filename.concat (FileGen.to_string subdir) basename
in
process_source w b lib ptmp src_dir (basename, options)
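(* [process_sources] iterates over the package sources, generating the module
   alias file first when 'lib_alias' is set, and returns the accumulated
   .cmi/.cmo/.cmx/.o/.odoc file lists in their original order. *)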
let process_sources w b lib =
let ptmp = new_package_temp_variables () in
begin
match lib.lib.lib_type with
| SyntaxPackage ->
if lib.lib_sources <> [] then begin
Printf.eprintf "Syntax %S: 'files' should be empty !\n" lib.lib.lib_name;
Printf.eprintf " If your syntax contains sources, you should build a library\n";
Printf.eprintf " and define the syntax to require this library.\n%!";
clean_exit 2
end
| RulesPackage -> assert false
| TestPackage
| LibraryPackage
| ProgramPackage
| ObjectsPackage ->
let src_dir = lib.lib.lib_src_dir in
let _dst_dir = lib.lib.lib_dst_dir in
begin
match lib.lib_alias with
| None -> ()
| Some alias ->
let alias_file = add_file lib lib.lib.lib_dst_dir (alias ^ ".ml") in
let r = new_rule lib alias_file [] in
let modnames =
List.fold_left (fun acc (filename, options) ->
let (is_ml, modname, _basename) =
BuildOCamldep.modname_of_file [options] Force_not filename
in
if is_ml then modname :: acc else acc
) [] lib.lib_sources
in
let mod_alias = String.capitalize alias in
add_rule_command r (
Function ("gen-alias",
(fun b ->
List.iter (fun s ->
Buffer.add_string b s;
Buffer.add_char b '|'
) modnames
),
content_generator alias_file
(fun b ->
List.iter (fun s ->
Printf.bprintf b
"module %s = %s__%s\n"
s
mod_alias
s
) modnames;
)
));
(* TODO: how to add specific options here ? *)
let options = BuildValue.empty_env in
let options = BuildValue.set_bool options open_aliases_flag false in
add_ml_source w b lib ptmp alias_file options
end;
List.iter (process_source w b lib ptmp src_dir) lib.lib_sources;
end;
ptmp.cmo_files := List.rev !(ptmp.cmo_files);
ptmp.odoc_files := List.rev !(ptmp.odoc_files);
lib.lib_doc_targets := !(ptmp.odoc_files) @ !(lib.lib_doc_targets);
ptmp.cmx_files := List.rev !(ptmp.cmx_files);
ptmp.cmx_o_files := List.rev !(ptmp.cmx_o_files);
ptmp.cmi_files := List.rev !(ptmp.cmi_files);
ptmp.o_files := List.rev !(ptmp.o_files);
ptmp
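(* [add_library] assembles a library package: it builds the C stub archive
   (or reuses the one designated by 'libstubs'), then the .cma and
   .cmxa/.cmxs archives according to 'has_byte'/'has_asm', and the HTML
   documentation when .odoc files were produced. *)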
let add_library w b lib =
let src_dir = lib.lib.lib_src_dir in
let dst_dir = lib.lib.lib_dst_dir in
let envs = lib.lib_opk.opk_options in
let ptmp = process_sources w b lib in
let cclib = cclib_option.get envs in
let cclib = String.concat " " cclib in
let (cclib, stubs_files) =
let a_file =
let ext_lib = BuildOCamlConfig.ocaml_config_ext_lib.get envs in
let libbasename =
Printf.sprintf "lib%s%s" lib.lib_stubarchive ext_lib in
if !(ptmp.o_files) <> [] then
let a_file = add_dst_file lib dst_dir libbasename in
add_os2a_rule lib !(ptmp.o_files) a_file;
Some a_file
else
try
let a_file = libstubs.get envs in
if a_file = "" then raise (Var_not_found "libstubs");
let a_file = BuildSubst.subst_global a_file in
if Filename.basename a_file <> libbasename then begin
Printf.eprintf "%s\nError: %s=%S basename differs from %S^%s^%S=\"%s\"\n%!"
(string_of_libloc lib)
"libstubs" a_file "lib" "stubarchive" ext_lib libbasename;
BuildMisc.clean_exit 2
end;
let a_file = add_package_file lib a_file in
Some a_file
with Var_not_found _ -> None
in
match a_file with
| None -> cclib, []
| Some a_file ->
lib.lib_stub_targets <- (a_file, STUB_A) :: lib.lib_stub_targets;
Printf.sprintf "-l%s %s" lib.lib_stubarchive cclib, [a_file]
in
if lib.lib_opk.opk_has_byte &&
(lib.lib_opk.opk_installed || !(ptmp.cmo_files) <> []) then begin
let cma_file = add_dst_file lib dst_dir (lib.lib_archive ^ ".cma") in
add_cmo2cma_rule lib ptmp cclib !(ptmp.cmo_files) cma_file;
lib.lib_intf_targets <-
(List.map (fun cmi -> cmi, CMI)
(!(ptmp.cmi_files))) @ lib.lib_intf_targets;
lib.lib_byte_targets <- (cma_file, CMA) :: lib.lib_byte_targets;
end;
if lib.lib_opk.opk_has_asm &&
(lib.lib_opk.opk_installed || !(ptmp.cmx_files) <> []) then begin
let (cmxa_file, a_file, cmxs_files) =
add_cmx2cmxa_rule lib cclib !(ptmp.cmi_files)
!(ptmp.cmx_files) !(ptmp.cmx_o_files) stubs_files in
lib.lib_intf_targets <-
(List.map (fun cmi -> cmi, CMI) (!(ptmp.cmi_files))) @
(List.map (fun cmx -> cmx, CMX) (!(ptmp.cmx_files))) @
lib.lib_intf_targets;
lib.lib_asm_targets <-
(cmxa_file, CMXA) ::
(a_file, CMXA_A) ::
cmxs_files @ lib.lib_asm_targets
end;
if !(ptmp.odoc_files) <> [] then begin
let doc_dirname = Filename.concat dst_dir.dir_fullname "_doc" in
safe_mkdir doc_dirname;
let docdir = BuildEngineContext.add_directory b doc_dirname in
let html_file = add_file lib dst_dir "_doc/index.html" in
add_odocs2html_rule lib !(ptmp.odoc_files) docdir html_file;
lib.lib_doc_targets := html_file :: !(lib.lib_doc_targets)
end;
()
let add_objects w b lib =
let ptmp = process_sources w b lib in
if lib.lib_opk.opk_has_byte then begin
lib.lib_intf_targets <-
(List.map (fun cmi -> cmi, CMI) (!(ptmp.cmi_files))) @
lib.lib_intf_targets;
lib.lib_byte_targets <-
(List.map (fun cmo -> cmo, CMO)
(!(ptmp.cmo_files))) @ lib.lib_byte_targets;
end;
if lib.lib_opk.opk_has_asm then begin
lib.lib_intf_targets <-
(List.map (fun cmi -> cmi, CMI) (!(ptmp.cmi_files))) @
(List.map (fun cmx -> cmx, CMX) (!(ptmp.cmx_files))) @
lib.lib_intf_targets;
lib.lib_asm_targets <-
(List.map (fun cmx -> cmx, CMX)
(!(ptmp.cmx_files)))
@ (List.map (fun o -> o, CMX_O)
(!(ptmp.cmx_o_files)))
@ lib.lib_asm_targets;
end;
()
let local_subst (file, env) s =
let s = BuildSubst.subst_global s in
let s = BuildSubst.apply_substituter
BuildOCP.filesubst s (file,env) in
s
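(* [add_extra_rules] implements the user-defined 'build_rules' and
   'build_targets' attributes: each rule may run plain commands (with optional
   'chdir', 'stdin', 'stdout' and 'stderr' redirections) or one of the
   primitive commands %%loaddeps, %%subst and %%config_make2ocp. *)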
let add_extra_rules bc lib target_name target_files =
let lib_options = lib.lib_opk.opk_options in
let _b = bc.build_context in
let dirname = lib.lib.lib_dirname in
let files = BuildValue.get_strings_with_default lib_options "source_files" [] in
List.iter (fun file ->
let (_: build_file) = add_file lib lib.lib.lib_src_dir file
in
()
) files;
let build_rules =
BuildValue.get_local_prop_list_with_default lib_options
(target_name ^ "_rules") [] in
let build_targets =
BuildValue.get_local_prop_list_with_default lib_options
(target_name ^ "_targets") [] in
List.iter (fun (file, _env) ->
let file = BuildSubst.subst_global file in
let target_file = add_package_file lib file in
target_files := target_file :: !target_files
) build_targets;
if build_rules <> [] then
List.iter (fun (file, env) ->
Printf.eprintf " Adding rule to build % s/%s\n% ! " ( ) file ;
Printf.eprintf "Adding rule to build %s/%s\n%!" (FileGen.to_string dirname) file;
*)
let envs = env :: lib.lib_opk.opk_options in
let uniq_rule = BuildValue.get_string_option_with_default envs "uniq_rule" None in
let file = BuildSubst.subst_global file in
let target_file = add_package_file lib file in
let to_build = BuildValue.get_bool_with_default envs "build_target" false in
if to_build then
target_files := target_file :: ! target_files;
try
match uniq_rule with
None -> raise Not_found
| Some uniq_rule ->
let r = Hashtbl.find bc.uniq_rules uniq_rule in
add_rule_target r target_file
with Not_found ->
(* let substituted_words = BuildValue.get_strings_with_default envs "subst" [] in *)
let local_subst = local_subst (file, envs) in
let targets = BuildValue.get_strings_with_default envs "more_targets" [] in
let targets = List.map local_subst targets in
let commands =
try
BuildValue.get_local_prop_list envs "commands"
with Var_not_found _ ->
Printf.eprintf "Error in package %S at %S:\n%!"
lib.lib.lib_name
(BuildEngineDisplay.string_of_loc lib.lib.lib_loc);
Printf.eprintf "\tRule for %S does not define 'commands'\n%!" file;
clean_exit 2
in
let sources = BuildValue.get_strings_with_default envs "sources" [] in
let sources = List.map local_subst sources in
let r = new_rule lib target_file [] in
begin match uniq_rule with
None -> () | Some uniq_rule ->
Hashtbl.add bc.uniq_rules uniq_rule r
end;
let sources = List.map (add_package_file lib) sources in
let dirname_s = FileGen.to_string dirname in
List.iter (fun (cmd_name, cmd_env) ->
let envs = cmd_env :: envs in
match cmd_name with
| "" ->
let cmd =
try
let cmd = BuildValue.get_strings envs "value" in
cmd
with Var_not_found _ -> assert false
in
let cmd = List.map local_subst cmd in
let cmd = new_command cmd [] in
begin
let dirname_s = try
let s = BuildValue.get_string envs "chdir" in
let s = local_subst s in
if Filename.is_relative s then
Filename.concat dirname_s s
else s
with Var_not_found _ -> dirname_s
in
cmd.cmd_move_to_dir <- Some dirname_s
end;
let get_pipe name =
try
let stdout = BuildValue.get_string envs name in
let stdout = local_subst stdout in
let stdout = if Filename.is_relative stdout then
Filename.concat dirname_s stdout
else stdout
in
Some stdout
with Var_not_found _ -> None
in
cmd.cmd_stdin_pipe <- get_pipe "stdin";
cmd.cmd_stdout_pipe <- get_pipe "stdout";
cmd.cmd_stderr_pipe <- get_pipe "stderr";
add_rule_command r (Execute cmd)
| "%%loaddeps" ->
make_virtual_file target_file;
let loader filename =
let dependencies =
BuildDepMisc.load_make_dependencies filename
in
List.map (fun (file, deps) ->
(Filename.concat dirname_s file,
List.map (fun file -> [
if Filename.is_relative file then
Filename.concat dirname_s file
else file ]) deps)
) dependencies
in
List.iter (fun source_file ->
add_rule_command r
(LoadDeps (loader, source_file, r))
) sources;
r.rule_forced <- true;
(* must be executed, even when no changes *)
| "%%subst" ->
let to_file = BuildValue.get_path_with_default envs "to_file" file in
let to_file = local_subst to_file in
let to_file = add_package_file lib to_file in
let from_file = BuildValue.get_path_with_default envs "from_file" (file ^ ".in") in
let from_file = local_subst from_file in
let from_file = add_package_file lib from_file in
let substitutions = BuildValue.prop_list (BuildValue.get envs "substitutions") in
let substitutions =
List.map (fun (string, string_env) ->
let envs = string_env :: envs in
let with_string =
try
local_subst (BuildValue.get_string envs "with_string")
with Var_not_found _ ->
failwith (Printf.sprintf "In command %%subst, string %s has no 'with_string'" string)
in
(string, with_string)
) substitutions in
let subst = List.fold_left (fun subst (string, with_string) ->
StringMap.add string with_string subst
) StringMap.empty substitutions
in
let printer b =
Printf.bprintf b "subst %S %S\n"
(file_filename from_file) (file_filename to_file);
List.iter (fun (string, with_string) ->
Printf.bprintf b "\t%S -> %S\n" string with_string
) substitutions;
in
let actor () =
let s = FileString.string_of_file (file_filename from_file) in
let s = BuildSubst.map_subst subst s in
FileString.file_of_string (file_filename to_file) s
in
add_rule_source r from_file;
add_rule_target r to_file;
add_rule_command r (Function (cmd_name, printer, actor))
| "%%config_make2ocp" ->
let to_file = BuildValue.get_path_with_default envs "dst" file in
let from_file = BuildValue.get_path envs "src" in
let from_file = local_subst from_file in
let to_file = local_subst to_file in
let from_file = add_package_file lib from_file in
let to_file = add_package_file lib to_file in
let printer b =
Printf.bprintf b "config_make2ocp %S -> %S\n"
(file_filename from_file) (file_filename to_file)
in
let actor () =
Printf.eprintf "Loading %S\n" (file_filename from_file);
let make_subst = OcpSubst.empty_subst () in
OcpSubst.add_to_subst make_subst "\\ " " ";
let vars = ref [] in
FileGen.iter_lines (fun line ->
let _, line = OcpSubst.iter_subst make_subst line in
if String.length line > 0 && line.[0] <> '#' then
let var, value = OcpString.cut_at line '=' in
OcpSubst.add_to_subst make_subst
(Printf.sprintf "$(%s)" var) value;
vars := (var, value) :: !vars
) from_file.file_file;
let vars = List.rev !vars in
Printf.eprintf "Writing %S\n" (file_filename to_file);
let oc = open_out (file_filename to_file) in
List.iter (fun (var, value) ->
Printf.fprintf oc "%s = %S\n" var value
) vars;
close_out oc;
()
in
add_rule_source r from_file;
add_rule_target r to_file;
add_rule_command r (Function (cmd_name, printer, actor));
| _ ->
Printf.eprintf "Error: Unknown primitive command %S in %s\n" cmd_name
(BuildEngineDisplay.string_of_loc lib.lib.lib_loc);
Printf.eprintf " Commands to execute should be between { ... }, while\n";
Printf.eprintf " primitive commands start by %% (for example %%loaddeps)\n%!";
clean_exit 2
) commands;
add_more_rule_sources lib r [] envs;
add_rule_sources r sources;
let targets = List.map (add_package_file lib) targets in
add_rule_targets r targets;
) build_rules;
()
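(* [add_program] creates the link rules for a program package: it first
   warns when two linked dependencies define the same module name, then
   emits the bytecode and native link rules and registers the resulting
   executables as targets. *)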
let add_program w b lib =
let lib_options = lib.lib_opk.opk_options in
let dst_dir = lib.lib.lib_dst_dir in
let ptmp = process_sources w b lib in
begin (* Fast check of libraries modules *)
let map = ref StringMap.empty in
List.iter (fun dep ->
if dep.dep_link then
let lib1 = dep.dep_project in
match lib1.lib.lib_type with
| TestPackage -> assert false
| ProgramPackage
(* | ProjectToplevel *)
| ObjectsPackage
-> ()
| RulesPackage
| LibraryPackage ->
StringsMap.iter (fun _ (_, modules) ->
StringMap.iter (fun modname (kind1, _) ->
try
let (kind2, lib2) = StringMap.find modname !map in
match kind1, kind2 with
| (ML | MLandMLI), (ML | MLandMLI) ->
Printf.eprintf
"Warning: program %s, requirements %s and %s\n"
lib.lib.lib_name lib2.lib.lib_name lib1.lib.lib_name;
Printf.eprintf "\tboth define a module name %s.\n" modname;
| _ -> ()
with Not_found ->
map := StringMap.add modname (kind1,lib1) !map
) !modules
) lib1.lib_internal_modules
| SyntaxPackage ->
(* Nothing to do ? *)
()
) lib.lib_requires
end;
let cclib = cclib_option.get lib_options in
let cclib = String.concat " " cclib in
let is_toplevel = is_toplevel.get lib_options in
let linkall = force_link_option.get lib_options || is_toplevel in
begin
let linkflags = bytelinkflags lib in
let linkflags =
if linkall || !(ptmp.cmo_files) = [] then
S "-linkall" :: linkflags
else linkflags
in
let byte_file = add_dst_file lib dst_dir (lib.lib_archive ^ byte_exe) in
add_cmo2byte_rule lib ptmp linkflags cclib !(ptmp.cmo_files)
!(ptmp.o_files) byte_file;
if lib.lib_opk.opk_has_byte then begin
lib.lib_byte_targets <- (byte_file, RUN_BYTE) :: lib.lib_byte_targets;
end
end;
if !(ptmp.cmx_files) <> [] then begin
let linkflags = asmlinkflags lib in
let linkflags =
if linkall || !(ptmp.cmx_files) = [] then S "-linkall" :: linkflags
else linkflags in
let asm_file = add_dst_file lib dst_dir (lib.lib_archive ^ asm_exe) in
add_cmx2asm_rule lib ptmp linkflags cclib
!(ptmp.cmx_files) !(ptmp.cmx_o_files) !(ptmp.o_files) asm_file;
if lib.lib_opk.opk_has_asm && not is_toplevel then begin
lib.lib_asm_targets <- (asm_file, RUN_ASM) :: lib.lib_asm_targets;
end
end;
()
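(* [fix_windows_directory] turns backslashes into slashes and strips any
   trailing slashes, returning "." for an empty result. *)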
let fix_windows_directory s =
let s = Bytes.of_string s in
let len = Bytes.length s in
for i = 0 to len - 1 do
if Bytes.get s i = '\\' then Bytes.set s i '/'
done;
let rec iter i =
if i = 0 then "." else
if Bytes.get s (i-1) = '/' then iter (i-1)
else
if i = len
then Bytes.to_string s
else Bytes.sub_string s 0 i
in
iter len
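(* [add_package] registers an OCaml package in the build context: it
   resolves the package source directory, creates the destination and
   temporary (mutable) directories, and records the *_SRC_DIR / *_DST_DIR
   global substitutions. The explicit [pk.BuildOCPTypes.*] field accesses
   below are there to avoid warning 40. *)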
let add_package bc opk =
let pk = opk.opk_package in
let b = bc.build_context in
let package_name = pk.BuildOCPTypes.package_name in
let package_dirname = pk.BuildOCPTypes.package_dirname in
let package_options = opk.opk_options in
try
if verbose 7 then Printf.eprintf "Adding %s\n" package_name;
let package_dirname =
try
let list =
  BuildValue.strings_of_plist
    (BuildValue.get_local package_options "dirname") in
BuildSubst.subst_global (String.concat Filename.dir_sep list)
with Var_not_found _ ->
package_dirname
in
let package_dirname = fix_windows_directory package_dirname in
if verbose 7 then Printf.eprintf "\tfrom %s\n" package_dirname;
let src_dir = BuildEngineContext.add_directory b (absolute_filename package_dirname) in
if verbose 7 then Printf.eprintf "\tfrom %s\n" src_dir.dir_fullname;
let already_installed = BuildValue.is_already_installed package_options
in
let dst_dir =
if already_installed then src_dir else
let dirname =
Filename.concat b.build_dir_filename package_name
(* Filename.concat src_dir.dir_fullname build_dir_basename *)
in
safe_mkdir dirname;
BuildEngineContext.add_directory b dirname
in
if verbose 7 then Printf.eprintf "\tto %s\n" dst_dir.dir_fullname;
let mut_dir =
if already_installed then src_dir else
let mut_dirname =
Filename.concat dst_dir.dir_fullname "_temp"
in
safe_mkdir mut_dirname;
BuildEngineContext.add_directory b mut_dirname
in
let lib = BuildGlobals.new_library bc pk
package_dirname src_dir dst_dir mut_dir in
let lib = BuildOCamlGlobals.create_package lib opk in
(* TODO: we should do that in one pass before *)
BuildSubst.add_to_global_subst
(Printf.sprintf "%s_SRC_DIR" package_name) src_dir.dir_fullname;
BuildSubst.add_to_global_subst
(Printf.sprintf "%s_DST_DIR" package_name) dst_dir.dir_fullname;
let full_src_dir = absolute_filename src_dir.dir_fullname in
let full_dst_dir = absolute_filename dst_dir.dir_fullname in
BuildSubst.add_to_global_subst
(Printf.sprintf "%s_FULL_SRC_DIR" package_name)
full_src_dir;
BuildSubst.add_to_global_subst
(Printf.sprintf "%s_FULL_DST_DIR" package_name)
full_dst_dir;
lib
with Failure s ->
Printf.eprintf "While preparing package %S:\n%!" package_name;
Printf.eprintf "Error: %s\n%!" s;
clean_exit 2
let plugin =
let module Plugin = struct
let name = "OCaml"
end in
(module Plugin : Plugin)
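(* [create] is the plugin entry point: it registers every sorted package,
   then generates the build/doc/test rules, link rules and target lists for
   each of them, and finally returns the array of BuildTypes.Package
   modules. *)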
let create w cin cout bc state =
BuildOCamlGlobals.reset ();
(* BuildOCPPrinter.eprint_project "BuildOCamlRules.create" ptmp; *)
let b = bc.build_context in
let libs =
Array.map (fun pk ->
match pk.BuildOCPTypes.package_plugin with
| OCamlPackage opk ->
add_package bc opk
| _ -> assert false
) state.BuildOCPTypes.project_sorted
in
Array.iter (fun lib ->
let ext_lib, ext_obj =
let envs = lib.lib_opk.opk_options in
BuildOCamlConfig.(ocaml_config_ext_lib.get envs, ocaml_config_ext_obj.get envs)
in
try
if not lib.lib_opk.opk_installed then
safe_mkdir lib.lib.lib_dst_dir.dir_fullname;
add_extra_rules bc lib "build" lib.lib_build_targets;
add_extra_rules bc lib "doc" lib.lib_doc_targets;
add_extra_rules bc lib "test" lib.lib_test_targets;
lib.lib_linkdeps <- get_link_order lib;
(*
Printf.eprintf "linkdeps for %S : %s\n%!" lib.lib.lib_name
  (String.concat " "
    (List.map (fun lib -> lib.lib.lib_name) lib.lib_linkdeps));
*)
begin
if not lib.lib_opk.opk_installed then
match lib.lib.lib_type with
LibraryPackage -> add_library w b lib
| ProgramPackage -> add_program w b lib
| TestPackage ->
if lib.lib_sources <> [] then add_program w b lib;
lib.lib_opk.opk_install <- false;
| ObjectsPackage -> add_objects w b lib
| SyntaxPackage -> ()
| RulesPackage -> ()
end;
let options = lib.lib_opk.opk_options in
let set_objects lib name kinds f =
let objs = BuildValue.get_strings_with_default options name [] in
if objs <> [] then
f (
List.flatten (
List.map (fun s0 ->
let s = BuildSubst.subst_global s0 in
let bf = add_package_file lib s in
let basename = bf.file_basename in
match List.rev (OcpString.split basename '.') with
| [] ->
Printf.eprintf
"Error: package %S, option %S contains a file %S\n"
lib.lib.lib_name name s0;
Printf.eprintf " with no extension\n%!";
exit 2
| ext :: _ ->
match ext with
| "asm" when List.mem RUN_ASM kinds -> [ bf, RUN_ASM ]
| "byte" when List.mem RUN_BYTE kinds -> [ bf, RUN_BYTE ]
| "cmxs" when List.mem CMXS kinds -> [ bf, CMXS ]
| "cmx" when List.mem CMX kinds ->
let s = Filename.chop_extension s ^ ext_obj in
let bf2 = add_package_file lib s in
[ bf, CMX; bf2, CMX_O ]
| "cmxa" ->
let s = Filename.chop_extension s ^ ext_lib in
let bf2 = add_package_file lib s in
[ bf, CMXA; bf2, CMXA_A ]
| "cmi" when List.mem CMI kinds -> [ bf, CMI ]
| "cmo" when List.mem CMO kinds -> [ bf, CMO ]
| "cma" when List.mem CMA kinds -> [ bf, CMA ]
| _ ->
let dot_ext = "." ^ ext in
if (dot_ext = ext_obj || dot_ext = ext_lib) && List.mem STUB_A kinds then
[ bf, STUB_A ]
else begin
Printf.eprintf
"Error: package %S, option %S contains a file %S\n"
lib.lib.lib_name name s0;
Printf.eprintf " with unexpected extension %S\n%!" ext;
exit 2
end
) objs))
in
set_objects lib "intf_targets" [CMI; CMX]
(fun objs -> lib.lib_intf_targets <- objs);
set_objects lib "byte_targets" [CMO;CMA;RUN_BYTE]
(fun objs ->
lib.lib_byte_targets <- objs);
set_objects lib "asm_targets" [CMX;CMXA;CMXS;RUN_ASM]
(fun objs ->
lib.lib_asm_targets <- objs);
set_objects lib "stub_targets" [STUB_A]
(fun objs -> lib.lib_stub_targets <- objs);
begin
try
lib.lib_modules <- [StringsMap.find [] lib.lib_internal_modules]
with Not_found ->
let objs =
let objs = BuildValue.get_strings_with_default
options "internal_targets" [] in
if objs = [] then
BuildValue.get_strings_with_default
options "intf_targets" []
else []
in
let dirs = ref [] in
List.iter (fun s0 ->
let s = BuildSubst.subst_global s0 in
let bf = add_package_file lib s in
let dst_dir = bf.file_dir in
let (is_ml, modname, basename) =
BuildOCamldep.modname_of_file options Force_not
bf.file_basename in
let map =
try
List.assq dst_dir !dirs
with Not_found ->
let map = ref StringMap.empty in
dirs := (dst_dir, map) :: !dirs;
map
in
try
match StringMap.find modname !map with
| (ML, _) when not is_ml ->
map := StringMap.add modname
(MLandMLI, DepBasename basename) !map
| (MLI, _) when is_ml ->
map := StringMap.add modname
(MLandMLI, DepBasename basename) !map
| (MLandMLI, _) -> ()
| _ -> raise Not_found
with Not_found ->
map := StringMap.add modname (
(if is_ml then ML else MLI), DepBasename basename) !map
) objs;
lib.lib_modules <- !dirs
end;
with Failure s ->
Printf.eprintf "While preparing package %S:\n%!" lib.lib.lib_name;
Printf.eprintf "Error: %s\n%!" s;
clean_exit 2
) libs;
if !BuildOCamlGlobals.list_byte_targets_arg then begin
Printf.eprintf "Bytecode targets:\n";
StringMap.iter (fun _ lib ->
match BuildOCamlGlobals.get_by_id lib with
| None -> ()
| Some lib ->
if lib.lib_byte_targets <> [] then begin
List.iter (fun (target, _kind) ->
Printf.eprintf "\t%s\t->\t%s\n" lib.lib.lib_name target.file_basename)
lib.lib_byte_targets;
end) bc.packages_by_name;
Printf.eprintf "%!"
end;
if !BuildOCamlGlobals.list_asm_targets_arg then begin
Printf.eprintf "Native targets:\n";
StringMap.iter (fun _ lib ->
match BuildOCamlGlobals.get_by_id lib with
| None -> ()
| Some lib ->
if lib.lib_asm_targets <> [] then begin
List.iter (fun (target, _kind) ->
Printf.eprintf "\t%s\t->\t%s\n" lib.lib.lib_name target.file_basename)
lib.lib_asm_targets;
end) bc.packages_by_name;
Printf.eprintf "%!"
end;
let install_where = BuildOCamlInstall.install_where cin cout in
let install_what = BuildOCamlInstall.install_what () in
let pks =
Array.map (fun lib ->
let module P = struct
let name = lib.lib.lib_name
let info = lib.lib
let plugin = plugin
let clean_targets () = assert false
let build_targets () =
(*
Printf.eprintf "  (pk %s)\n" lib.lib_opk.opk_name;
Printf.eprintf "  (dir %s)\n" lib.lib_opk.opk_dirname;
*)
if lib.lib_opk.opk_installed then begin
(* Printf.eprintf "%s is already installed\n%!" name; *)
{
targets = [];
depends = [];
} end
else
let targets = BuildOCamlGlobals.make_build_targets lib.lib cin in
(*
Printf.eprintf "%s.build_targets =\n * %s\nEnd\n"
  name
  (String.concat "\n * "
    (List.map (fun f -> file_filename f) targets));
*)
let depends =
let depends = ref [] in
List.iter (fun dep ->
if dep.dep_link || dep.dep_syntax then
depends := dep.dep_project.lib :: !depends
) lib.lib_requires;
!depends
in
{ targets; depends }
let test_targets () =
let targets = BuildOCamlGlobals.make_test_targets lib.lib cin in
let depends =
let depends = ref [] in
List.iter (fun dep ->
if dep.dep_link || dep.dep_syntax then
depends := dep.dep_project.lib :: !depends
) lib.lib_requires;
!depends
in
{ targets; depends }
let doc_targets () =
let targets = BuildOCamlGlobals.make_doc_targets lib.lib cin in
let depends =
let depends = ref [] in
List.iter (fun dep ->
if dep.dep_link || dep.dep_syntax then
depends := dep.dep_project.lib :: !depends
) lib.lib_requires;
!depends
in
{ targets; depends }
let conf_targets () =
let targets = BuildOCamlGlobals.make_build_targets lib.lib cin in
let depends =
let depends = ref [] in
List.iter (fun dep ->
if dep.dep_link || dep.dep_syntax then
depends := dep.dep_project.lib :: !depends
) lib.lib_requires;
!depends
in
{ targets; depends }
(* lazy because shared AND it can creates directories *)
let install_dir =
lazy (BuildOCamlInstall.find_installdir
install_where lib)
(* where to look for previously installed packages *)
let install_dirs () = install_where.install_libdirs
let test () = assert false (* TODO *)
let install_dir () =
match Lazy.force install_dir with
| None -> assert false
| Some install_dir -> install_dir
let pre_installed () = lib.lib_opk.opk_installed
let to_install () = lib.lib_opk.opk_install
let install () =
if lib.lib_opk.opk_install then
let installdir = install_dir () in
BuildOCamlInstall.install
install_where install_what
lib.lib installdir
end in
(module P : BuildTypes.Package)
) libs
in
pks
let () =
BuildOCamlOCP2.init ()
| null | https://raw.githubusercontent.com/OCamlPro/ocp-build/56aff560bb438c12b2929feaf8379bc6f31b9840/tools/ocp-build/ocaml/buildOCamlRules.ml | ocaml | ************************************************************************
(**************************************************************************)
(*                                                                        *)
(*                             Typerex Tools                              *)
(*                                                                        *)
(*                 Copyright 2011-2017 OCamlPro SAS                       *)
(*                                                                        *)
(*   All rights reserved. This file is distributed under the terms of     *)
(*   the GNU General Public License version 3 described in the file      *)
(*   LICENSE.                                                             *)
(*                                                                        *)
(**************************************************************************)

(* TODO:
   - Support for shared libraries. Currently, ocp-build ONLY supports building
     in custom mode. Shared mode is a bit more complex, as linking with a
     library would depend on different files, if a shared library or a static
     library is built.
*)
(* special attributes *)
let mli_file_attr = "mli_file"
let file2string_attr = "file2string"
let binannot_attr = "binannot"
open OcpCompat
open BuildMisc
open BuildEngineTypes
open BuildEngineGlobals
open BuildEngineContext
open BuildEngineRules
open BuildValue.TYPES
open BuildTypes
open BuildGlobals
open BuildOCamlConfig
open BuildOCamlTypes
open BuildOCamlVariables
open BuildOCamlMisc
open BuildOCamlInstall.TYPES
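(* [safe_mkdir] records the directory creation in the replay log
   (BuildEngineReport) before actually creating the directory. *)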
let safe_mkdir (dir : string) =
BuildEngineReport.cmd_mkdir dir;
BuildMisc.safe_mkdir dir
let add_file lib dir name =
BuildEngineContext.add_file lib.lib.lib_package dir name
let add_temp_file lib dir name =
BuildEngineContext.add_temp_file lib.lib.lib_package dir name
let add_virtual_file lib dir name =
BuildEngineContext.add_virtual_file lib.lib.lib_package dir name
let add_dst_file lib dir name =
BuildOCamlMisc.add_dst_file lib.lib.lib_package dir name
let comp_deps w lib options =
let options = options :: lib.lib_opk.opk_options in
let comp_requires = comp_requires_option.get options in
BuildOCamlSyntaxes.get_tool_requires w "comp" lib comp_requires
let string_of_libloc lib =
Printf.sprintf "File %S, line 0, characters 0-1:\nPackage %S:"
lib.lib.lib_filename lib.lib.lib_name
let open_aliases_flag = "open-aliases"
let is_aliased options =
BuildValue.get_bool_with_default [options] open_aliases_flag true
let comp_alias_options lib options =
match lib.lib_alias with
| None -> []
| Some alias ->
let args = ["-w"; "-49"; "-no-alias-deps" ] in
if is_aliased options then
args @ [ "-open"; String.capitalize alias ]
else
args
let dep_alias_options lib options =
match lib.lib_alias with
| None -> []
| Some alias ->
let args = [] in
if is_aliased options then
args @ [ "-open"; String.capitalize alias ]
else
args
let ocamlc_command options ocamlc_specific ocamlc_generic =
let ocamlc_command = ocamlc_specific.get options in
if ocamlc_command = [] then
ocamlc_generic.get options
else ocamlc_command
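(* [copy_dir] returns (creating it on demand) the sub-directory of
   "_mutable_tree" that mirrors the directory of [src_file]; compilation
   side-products are later moved there. *)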
let copy_dir lib src_file =
let b = lib.lib.lib_context in
let mut_dirname =
Filename.concat b.build_dir_filename "_mutable_tree" in
safe_mkdir mut_dirname;
let mut_dir = BuildEngineContext.add_directory b mut_dirname in
(* TODO: [mut_dir] does not work for source files beginning with ".."
   and for source files in other packages (package = "toto") *)
(*
let rec iter mut_dir file_dir =
  if file_dir.dir_parent == file_dir
  then mut_dir else
  let parent_dir = file_dir.dir_parent in
  (* Printf.eprintf "check parent\n"; *)
  let mut_dir = iter mut_dir parent_dir in
  let subdir = Filename.concat mut_dir.dir_fullname file_dir.dir_basename
  in
  safe_mkdir subdir;
  add_directory lib.lib.lib_context subdir
in
*)
try
let subdir = Filename.concat mut_dir.dir_fullname
src_file.file_dir.dir_basename in
safe_mkdir subdir;
let copy_dir = BuildEngineContext.add_directory
lib.lib.lib_context subdir in
(*
Printf.eprintf "COPY DIR of %S is %S\n%!"
  (FileGen.to_string src_file.file_file) copy_dir.dir_fullname; *)
copy_dir
with Stack_overflow ->
Printf.eprintf "Error: Stack_overflow while computing mut_dir\n";
Printf.eprintf " of source file %S of package %S \n%!"
(FileGen.to_string src_file.file_file)
lib.lib.lib_name;
clean_exit 2
let verbose = OcpDebug.verbose_function ["B"; "BuildOCamlRules"]
let chop_prefix s prefix =
let prefix_len = String.length prefix in
String.sub s prefix_len (String.length s - prefix_len)
type package_temp_variables = {
mutable src_files : build_file IntMap.t;
mutable dep_files : build_file IntMap.t;
cmi_files : build_file list ref;
cmo_files : build_file list ref;
odoc_files : build_file list ref;
cmx_files : build_file list ref;
cmx_o_files : build_file list ref;
o_files : build_file list ref;
}
let new_package_temp_variables () = {
src_files = IntMap.empty;
dep_files = IntMap.empty;
cmi_files = ref [];
cmo_files = ref [];
odoc_files = ref [];
cmx_files = ref [];
cmx_o_files = ref [];
o_files = ref [];
}
let ocaml_version_greater_than version options =
let ocaml_version = ocaml_config_version.get options in
ocaml_version >= version
let add_bin_annot_argument cmd options =
if ocaml_version_greater_than "4" options &&
BuildValue.get_bool_with_default options binannot_attr true
then
add_command_args cmd [S "-bin-annot" ]
let c_includes lib =
let added_dirs = ref IntMap.empty in
let includes = ref [] in
let add_include_dir dir =
if not (IntMap.mem dir.dir_id !added_dirs) then begin
added_dirs := IntMap.add dir.dir_id dir !added_dirs;
includes := !includes @ [S "-I"; S dir.dir_fullname];
end
in
add_include_dir lib.lib.lib_src_dir;
List.iter (fun dep ->
let lib = dep.dep_project in
match lib.lib.lib_type with
| TestPackage -> assert false
| LibraryPackage
| ObjectsPackage
| RulesPackage
->
if dep.dep_link || externals_only.get [dep.dep_options]
then begin
add_include_dir lib.lib.lib_src_dir;
end
| SyntaxPackage -> ()
) (List.rev lib.lib_requires);
!includes
let command_includes lib pack_for =
let includes =
match lib.lib_includes with
| Some includes -> includes
| None ->
let added_dirs = ref IntMap.empty in
let includes = ref [] in
let add_include_dir dir =
if not (IntMap.mem dir.dir_id !added_dirs) then begin
added_dirs := IntMap.add dir.dir_id dir !added_dirs;
includes := !includes @ ["-I"; dir.dir_fullname];
end
in
add_include_dir lib.lib.lib_dst_dir;
add_include_dir lib.lib.lib_src_dir;
List.iter (fun dep ->
let lib = dep.dep_project in
match lib.lib.lib_type with
| TestPackage -> assert false
| LibraryPackage
| ObjectsPackage
->
if dep.dep_link || externals_only.get [dep.dep_options] then begin
add_include_dir lib.lib.lib_dst_dir;
add_include_dir lib.lib.lib_src_dir;
end
| SyntaxPackage -> ()
| RulesPackage ->
add_include_dir lib.lib.lib_src_dir;
add_include_dir lib.lib.lib_dst_dir;
) (List.rev lib.lib_requires);
let includes = !includes in
lib.lib_includes <- Some includes;
includes
in
let rec add_internal_includes pack_for includes =
match pack_for with
[] -> includes
| _ :: tail ->
let includes = add_internal_includes tail includes in
"-I" :: (Filename.concat lib.lib.lib_dst_dir.dir_fullname
(String.concat "/" (List.rev pack_for))) ::
includes
in
add_internal_includes (List.rev pack_for) includes
let add_package_file lib filename =
let b = lib.lib.lib_context in
if Filename.is_relative filename then
add_file lib lib.lib.lib_src_dir filename
else
let dir =
let dirname = Filename.dirname filename in
try
find_directory b dirname
with Not_found ->
Printf.eprintf "Error: directory %S of %S is not a package directory\n%!" dirname lib.lib.lib_name;
exit 2
in
add_file lib dir (Filename.basename filename)
let add_more_rule_sources lib r deps options =
let more_rule_sources = rule_sources_option.get options
@ more_deps_option.get options in
List.iter (fun s ->
let s = BuildSubst.subst_global s in
let s = add_package_file lib s in
add_rule_source r s
) more_rule_sources;
List.iter (fun option ->
List.iter (fun s ->
let s = BuildSubst.subst_global s in
let s = add_package_file lib s in
add_rule_source r s
) (option.get options)
) deps
(* override [new_rule] to add [lib_ready] *)
let new_rule lib file cmds =
let r = new_rule lib.lib.lib_context lib.lib.lib_loc file cmds in
add_rule_sources r lib.lib_ready;
r
let add_c2o_rule b lib seq src_file target_file options =
let build_dir = BuildEngineContext.add_directory b (MinUnix.getcwd ()) in
let temp_file = add_temp_file lib build_dir target_file.file_basename in
let r = new_rule lib target_file
[Execute (new_command
( ocamlcc_cmd.get options
)
(c_includes lib @[
S "-ccopt"; S
(String.concat " " (cflags_option.get options));
S "-ccopt"; S (String.concat " " ( ccopt_option.get options));
S "-c"; S (file_filename src_file);
])
);
Move (false, F temp_file.file_file, F target_file.file_file)
]
in
add_more_rule_sources lib r [] options;
add_rule_source r src_file;
add_rule_sources r seq;
add_rule_temporary r temp_file
let add_mll2ml_rule lib src_file target_file options =
let envs = options :: lib.lib_opk.opk_options in
let r = new_rule lib target_file
[Execute (new_command (ocamllex_cmd.get envs )
[ S "-o"; BF target_file; BF src_file])
]
in
add_more_rule_sources lib r [ ocamllex_deps ] envs;
add_rule_source r src_file
let add_mly2ml_rule lib src_file ml_target_file mli_target_file options =
let envs = options :: lib.lib_opk.opk_options in
let src_dir = src_file.file_dir in
let temp_ml = add_temp_file lib src_dir ml_target_file.file_basename in
let temp_mli = add_temp_file lib src_dir mli_target_file.file_basename in
let r = new_rule lib ml_target_file
[Execute (new_command ( ocamlyacc_cmd.get envs) [BF src_file]);
Move (false, BF temp_ml, BF ml_target_file);
Move (false, BF temp_mli, BF mli_target_file);
]
in
add_more_rule_sources lib r [ ocamlyacc_deps ] envs;
add_rule_source r src_file;
add_rule_target r mli_target_file
(* Reading dependencies is a bit complicated, as the format of make
   dependencies is not OK on Windows (because : is used in
   filenames). We should fix filenames in those cases.

   Note that ocamldep will escape any space in a filename, so that
   spaces are the only significant characters.

   Read the full file. Convert \\\n sequences into spaces.
   Instead, we should have a special format, such as:

   CMO filename
   DEP dependency
   DEP dependency

   TODO: add a String.unescaped, the inverse of String.escaped.
*)
let add_flag option flag options flags =
if option.get options && not (List.mem (S flag) flags) then
(S flag) :: flags else flags
let add_nopervasives_flag = add_flag nopervasives "-nopervasives"
let add_asmdebug_flag = add_flag asmdebug_option "-g"
let add_bytedebug_flag = add_flag bytedebug_option "-g"
let add_debug_flag = add_flag debug_option "-g"
let bytelinkflags lib =
let options = lib.lib_opk.opk_options in
add_debug_flag options (
add_bytedebug_flag options (
add_nopervasives_flag options (
List.map argument_of_string (bytelink_option.get options)
)
)
)
let asmlinkflags lib =
let options = lib.lib_opk.opk_options in
add_debug_flag options (
add_asmdebug_flag options (
add_nopervasives_flag options (
List.map argument_of_string (asmlink_option.get options )
)
)
)
let depflags options =
List.map argument_of_string ( dep_option.get options)
let bytecompflags options =
add_debug_flag options (
add_bytedebug_flag options (
add_nopervasives_flag options (
List.map argument_of_string ( bytecomp_option.get options)
)))
let docflags options =
add_nopervasives_flag options (
List.map argument_of_string ( docflags_option.get options)
)
let asmcompflags options =
add_debug_flag options (
add_asmdebug_flag options (
add_nopervasives_flag options (
List.map argument_of_string (asmcomp_option.get options )
)))
let indocs envs = doc_option.get envs
let needs_odoc lib =
match lib.lib.lib_type with
LibraryPackage | ObjectsPackage -> true
| ProgramPackage | SyntaxPackage
| TestPackage | RulesPackage -> false
let add_ml2mldep_rule lib dst_dir pack_for force src_file target_file needs_odoc options =
let envs = options :: lib.lib_opk.opk_options in
let cmd = new_command (ocamldep_cmd.get envs)
(depflags envs) in
add_command_string cmd "-modules";
add_command_strings cmd (dep_alias_options lib options);
add_command_strings cmd (command_includes lib pack_for);
if force = Force_IMPL || ml_file_option.get envs then
add_command_strings cmd [ "-impl" ]
else
if force = Force_INTF || mli_file_option.get envs then
add_command_strings cmd [ "-intf" ]
;
add_command_strings cmd [file_filename src_file];
add_command_pipe cmd (file_filename target_file);
let r = new_rule lib target_file [Execute cmd] in
add_more_rule_sources lib r [ ocamldep_deps ] envs;
add_rule_source r src_file;
let mldep_file_loaded = add_virtual_file lib dst_dir
(target_file.file_basename ^ " loaded") in
(*
let mldep_file_ok = add_virtual_file b dst_dir
  (target_file.file_basename ^ " ok") in
let r_ok = new_rule b lib.lib_loc mldep_file_ok [] in
r_ok.rule_forced <- true; (* must be executed, even when no changes *)
add_rule_source r_ok mldep_file_loaded;
*)
let loader =
BuildOCamldep.load_modules_dependencies
lib options force dst_dir pack_for needs_odoc
in
let r_loaded = new_rule lib mldep_file_loaded [] in
add_rule_command r_loaded (LoadDeps (loader, target_file, r_loaded));
add_rule_source r_loaded target_file;
mldep_file_loaded
type 'a to_sort =
{
to_sort_value : 'a;
to_sort_node : OcpToposort.node;
mutable to_sort_deps : 'a to_sort list;
}
module FileSorter = OcpToposort.Make(struct
type t = build_file to_sort
let node to_sort = to_sort.to_sort_node
let iter_edges f to_sort = List.iter f to_sort.to_sort_deps
let name to_sort = file_filename to_sort.to_sort_value
let verbose = OcpDebug.verbose_function [ "BuildOCamlRules.FileSorter" ]
end)
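(* [sort_ocaml_files] sorts object files topologically, using the
   dependencies recorded by their build rules, so that they can be passed
   to the linker in a valid order; it aborts if a dependency cycle is
   detected. *)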
let sort_ocaml_files lib cmo_files =
if verbose 3 then begin
Printf.eprintf "Sorting:\n";
List.iter (fun file ->
Printf.eprintf "%s " file.file_basename;
) cmo_files;
end;
let map = ref StringMap.empty in
let list = ref [] in
let cmo_files = List.map (fun file ->
let modname = Filename.chop_extension file.file_basename in
let modname = String.capitalize modname in
let to_sort = {
to_sort_value = file;
to_sort_node = OcpToposort.new_node();
to_sort_deps = [];
} in
map := StringMap.add modname to_sort !map;
list := to_sort :: !list;
(file, to_sort)
) cmo_files in
let list = List.rev !list in
List.iter (fun (file, to_sort) ->
List.iter (fun r ->
if r.rule_state <> RULE_INACTIVE then
IntMap.iter (fun _ file2 ->
try
let modname =
try Filename.chop_extension file2.file_basename
with _ -> raise Not_found in
let modname = String.capitalize modname in
let to_sort2 = StringMap.find modname !map in
if to_sort2 != to_sort then
to_sort.to_sort_deps <- to_sort2 :: to_sort.to_sort_deps
with Not_found -> ()
) r.rule_sources
) file.file_target_of
) cmo_files;
let (sorted, cycle, _others) = FileSorter.sort list in
if cycle <> [] then begin
Printf.eprintf
"Error: There is a cycle in the inter-dependencies inside package %S.\n"
lib.lib.lib_name;
Printf.eprintf
" You MUST specify the link order of modules by ordering 'files'\n";
Printf.eprintf
" and using 'sort=false' in the package description.\n%!";
exit 2
end;
let cmo_files =
List.map (fun to_sort -> to_sort.to_sort_value) sorted in
if verbose 3 then begin
Printf.eprintf "\n";
Printf.eprintf "Sorted:\n";
List.iter (fun file ->
Printf.eprintf "%s " file.file_basename;
) cmo_files;
Printf.eprintf "\n";
end;
cmo_files
let add_files_to_link_to_command lib case cmd options cmx_files =
if sort_files_option.get options then begin
DynamicAction (
(Printf.sprintf "sort for %s" case),
lazy (
let cmx_files = sort_ocaml_files lib cmx_files in
List.iter (fun cmx_file ->
add_command_args cmd [BF cmx_file]) cmx_files;
[Execute cmd]
)
)
end else begin
List.iter (fun cmx_file ->
add_command_args cmd [BF cmx_file]) cmx_files;
Execute cmd
end
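(* [add_cmo2cma_rule] creates the rule linking the .cmo files of a library
   into its .cma archive. *)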
let add_cmo2cma_rule lib ptmp cclib cmo_files cma_file =
if not lib.lib_opk.opk_installed then
let options = lib.lib_opk.opk_options in
let cmd = new_command
(ocamlc_command options ocamlc2cma_cmd ocamlc_cmd
) (bytelinkflags lib) in
add_command_args cmd [S "-a"; S "-o"; BF cma_file];
if cclib <> "" then
add_command_strings cmd [ "-custom" ; "-cclib"; cclib ];
if force_link_option.get options then
add_command_strings cmd [ "-linkall" ];
let cmd = add_files_to_link_to_command lib "byte lib" cmd options cmo_files in
let r = new_rule lib cma_file [cmd] in
add_more_rule_sources lib r [ ocamlc_deps; bytelink_deps; link_deps ] options;
add_rule_sources r cmo_files;
add_rule_sources r !(ptmp.cmi_files)
let cross_move r list =
r.rule_commands <- r.rule_commands @
(List.map (fun (f1, f2) ->
Move (false, f1, f2)
) list)
let cross_update r list =
r.rule_commands <- r.rule_commands @
(List.map (fun (f1, f2) ->
Move (true, f1, f2)
) list)
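(* [add_cmx2cmxa_rule] creates the rules producing the .cmxa and .a
   archives of a library, plus the .cmxs plugin when "cmxs_plugin" is
   set. *)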
let add_cmx2cmxa_rule lib cclib cmi_files cmx_files cmx_o_files stubs_files =
let options = lib.lib_opk.opk_options in
let src_dir = lib.lib.lib_src_dir in
let dst_dir = lib.lib.lib_dst_dir in
let basename_cmxa = lib.lib_archive ^ ".cmxa" in
let basename_cmxs = lib.lib_archive ^ ".cmxs" in
let ext_lib = BuildOCamlConfig.ocaml_config_ext_lib.get options in
let basename_a = lib.lib_archive ^ ext_lib in
let cmxa_file = add_dst_file lib dst_dir basename_cmxa in
let a_file = add_dst_file lib dst_dir basename_a in
let cmxs_file = add_dst_file lib dst_dir basename_cmxs in
let has_cmxs = cmxs_plugin.get options in
let cmxs_files = if has_cmxs then [cmxs_file, CMXS] else [] in
if not lib.lib_opk.opk_installed then begin
let temp_cmxa = add_temp_file lib src_dir basename_cmxa in
let r = new_rule lib cmxa_file [] in
let temp_a = add_temp_file lib src_dir basename_a in
begin
let cmd = new_command (ocamlopt_cmd.get options ) (asmlinkflags lib) in
add_command_args cmd [S "-a"; S "-o"; BF temp_cmxa ];
if cclib <> "" then
add_command_strings cmd ["-cclib"; cclib];
if force_link_option.get options then
add_command_strings cmd [ "-linkall" ];
let cmd = add_files_to_link_to_command lib "asm lib" cmd options cmx_files in
add_rule_command r cmd;
add_rule_target r a_file;
add_rule_temporaries r [ temp_cmxa; temp_a ];
end;
add_more_rule_sources lib r
[ ocamlopt_deps; asmlink_deps; link_deps ] options;
add_rule_sources r cmx_files;
add_rule_sources r cmx_o_files;
add_rule_sources r cmi_files;
cross_move r [ F temp_cmxa.file_file, F cmxa_file.file_file;
F temp_a.file_file, F a_file.file_file;
];
if has_cmxs then begin
let temp_cmxs = add_temp_file lib src_dir basename_cmxs in
let asmlink_libs =
List.map (fun s ->
let s = BuildSubst.subst_global s in
add_package_file lib s
) (asmlink_libs.get options) in
let cmd = new_command
(ocamlopt_cmd.get options )
(
(asmlinkflags lib) @
[S "-shared"; S "-I";
S lib.lib.lib_dst_dir.dir_fullname;
S "-o"; BF temp_cmxs ] @
(if cclib = "" then [] else [S "-cclib"; S cclib]) @
(if force_link_option.get options then
[S "-linkall" ] else [] ) @
(List.map (fun f -> BF f) asmlink_libs)
)
in
let cmd = add_files_to_link_to_command lib "cmxs lib"
cmd options cmx_files in
add_rule_command r cmd;
add_rule_sources r asmlink_libs;
add_rule_target r cmxs_file;
add_rule_temporaries r [ temp_cmxs ];
add_more_rule_sources lib r
[ ocamlopt_deps; asmlink_deps; link_deps ] options;
add_rule_sources r stubs_files;
cross_move r [
F temp_cmxs.file_file, F cmxs_file.file_file;
];
end;
end;
(cmxa_file, a_file, cmxs_files)
let add_odocs2html_rule lib odoc_files docdir html_file =
if not lib.lib_opk.opk_installed then
let options = lib.lib_opk.opk_options in
let cmd = new_command (ocamldoc_cmd.get options ) [] in
List.iter (fun odoc_file ->
add_command_args cmd [S "-load"; BF odoc_file]
) odoc_files;
add_command_args cmd [S "-html";S "-d"; BD docdir];
let r = new_rule lib html_file [Execute cmd] in
add_more_rule_sources lib r [ ocamldoc_deps ] options;
add_rule_sources r odoc_files
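(* [get_link_order] returns the libraries to link with, either in the
   order of "requires" or following the optional "link_order" variable. *)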
let get_link_order lib =
let tolink =
List.fold_right (fun pd links ->
if pd.dep_link then
let lib2 = pd.dep_project in
lib2 :: links
else links)
lib.lib_requires []
in
let link_order = link_order.get lib.lib_opk.opk_options in
if link_order = [] then tolink else
let map = List.fold_left (fun map lib ->
StringMap.add lib.lib.lib_name (lib, ref false) map
) StringMap.empty tolink
in
let tolink =
List.map (fun name -> try
let (lib, used) = StringMap.find name map in
used := true;
lib
with Not_found ->
Printf.eprintf "Error with package %S: %S in 'link_order' is not specified in 'requires'\n%!" lib.lib.lib_name name;
exit 2
) link_order
in
StringMap.iter (fun name (_, used) ->
if not !used then
Printf.eprintf "Warning with package %S: required %S not specified in `link_order'\n%!" lib.lib.lib_name name
) map;
tolink
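(* [add_cmo2byte_rule] creates the rule linking a bytecode executable from
   its .cmo files, the byte targets and stubs of its dependencies, and the
   extra C object files. *)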
let add_cmo2byte_rule lib ptmp linkflags cclib cmo_files o_files byte_file =
if not lib.lib_opk.opk_installed then
let options = lib.lib_opk.opk_options in
let ocamlc_command =
if is_toplevel.get options then
ocamlmktop_cmd.get options
else
ocamlc_command options ocamlc2byte_cmd ocamlc_cmd
in
let cmd = new_command ocamlc_command linkflags in
add_command_args cmd [S "-o"; BF byte_file];
let custom = ref false in
List.iter (fun o_file ->
custom := true;
add_command_args cmd [BF o_file]) o_files;
if cclib <> "" then
add_command_args cmd [S "-cclib"; S cclib ];
add_command_strings cmd (command_includes lib []);
(* Printf.eprintf "to_link for %S\n%!" lib.lib_name; *)
List.iter (fun lib2 ->
match lib2.lib.lib_type with
| LibraryPackage
| RulesPackage
| ObjectsPackage
| ProgramPackage ->
add_command_args cmd (bytelinkflags lib2);
if not lib2.lib_meta then begin
let has_ocaml_modules = ref false in
List.iter (fun (obj, kind) ->
match kind with
| CMA
| CMO ->
has_ocaml_modules := true;
add_command_arg cmd (BF obj)
| _ -> ()
) lib2.lib_byte_targets;
if not (lib2.lib_autolink && !has_ocaml_modules) then
List.iter (fun (obj, kind) ->
match kind with
| STUB_A -> add_command_arg cmd (BF obj)
| _ -> ()
) lib2.lib_stub_targets;
end;
| SyntaxPackage -> ()
| TestPackage -> ()
) lib.lib_linkdeps;
if !custom then add_command_string cmd "-custom";
let bytelink_libs =
List.map (fun s ->
let s = BuildSubst.subst_global s in
add_package_file lib s
) (bytelink_libs.get options) in
List.iter (fun s -> add_command_arg cmd (BF s)) bytelink_libs;
let cmd = add_files_to_link_to_command lib "byte prog" cmd options cmo_files in
let r = new_rule lib byte_file [cmd] in
add_more_rule_sources lib r [ ocamlc_deps; bytelink_deps; link_deps ] options;
add_rule_sources r cmo_files;
add_rule_sources r !(ptmp.cmi_files);
add_rule_sources r o_files;
List.iter (fun lib2 ->
List.iter (fun (obj, kind) ->
match kind with
| CMA | CMO -> add_rule_source r obj
| _ -> ()
) lib2.lib_byte_targets;
List.iter (fun (obj, kind) ->
match kind with
| STUB_A -> add_rule_source r obj
| _ -> ()
) lib2.lib_stub_targets;
) lib.lib_linkdeps;
add_rule_sources r bytelink_libs
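(* [add_cmx2asm_rule] is the native-code counterpart of
   [add_cmo2byte_rule]: it links the .cmx files and stubs into a native
   executable. *)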
let add_cmx2asm_rule lib ptmp linkflags cclib cmx_files cmx_o_files o_files opt_file =
if not lib.lib_opk.opk_installed then
let options = lib.lib_opk.opk_options in
let cmd = new_command
(ocamlc_command options ocamlopt2asm_cmd ocamlopt_cmd)
linkflags in
add_command_args cmd [S "-o"; BF opt_file];
if cclib <> "" then
add_command_args cmd [S "-cclib"; S cclib];
List.iter (fun o_file ->
add_command_arg cmd (BF o_file)) o_files;
add_command_strings cmd (command_includes lib []);
(* Printf.eprintf "To link %S:\n%!" lib.lib_name; *)
List.iter (fun lib2 ->
(* Printf.eprintf "  Lib %S\n%!" lib2.lib_name; *)
match lib2.lib.lib_type with
| LibraryPackage
| RulesPackage
| ObjectsPackage
| ProgramPackage ->
add_command_args cmd (asmlinkflags lib2);
let has_ocaml_modules = ref false in
List.iter (fun (obj, kind) ->
match kind with
| CMXA | CMX ->
has_ocaml_modules := true;
add_command_arg cmd (BF obj)
| _ -> ()
) lib2.lib_asm_targets;
if not (!has_ocaml_modules && lib2.lib_autolink) then
List.iter (fun (obj, kind) ->
match kind with
| STUB_A -> add_command_arg cmd (BF obj)
| _ -> ()
) lib2.lib_stub_targets;
| SyntaxPackage -> ()
| TestPackage -> ()
) lib.lib_linkdeps;
let asmlink_libs =
List.map (fun s ->
let s = BuildSubst.subst_global s in
add_package_file lib s
) (asmlink_libs.get options) in
List.iter (fun s -> add_command_arg cmd (BF s)) asmlink_libs;
let cmd = add_files_to_link_to_command lib "asm prog" cmd options cmx_files in
let r = new_rule lib opt_file [cmd] in
add_more_rule_sources lib r [ ocamlopt_deps; asmlink_deps; link_deps ] options;
add_rule_sources r cmx_files;
add_rule_sources r cmx_o_files;
add_rule_sources r !(ptmp.cmi_files);
add_rule_sources r o_files;
List.iter (fun lib2 ->
List.iter (fun (obj, kind) ->
match kind with
| CMXA | CMXA_A
| CMX | CMX_O -> add_rule_source r obj
| _ -> ()
) lib2.lib_asm_targets;
List.iter (fun (obj, kind) ->
match kind with
| STUB_A -> add_rule_source r obj
| _ -> ()
) lib2.lib_stub_targets;
) lib.lib_linkdeps;
add_rule_sources r asmlink_libs;
()
let add_os2a_rule lib o_files a_file =
let envs = lib.lib_opk.opk_options in
if not lib.lib_opk.opk_installed then
let target = a_file.file_basename in
let ext_lib = BuildOCamlConfig.ocaml_config_ext_lib.get envs in
let target_without_ext = Filename.chop_suffix target ext_lib in
let target_without_prefix = chop_prefix target_without_ext "lib" in
let target = FileGen.add_basename a_file.file_dir.dir_file target_without_prefix in
let cmd = new_command (ocamlmklib_cmd.get envs)
[S "-custom"; S "-o"; F target] in
List.iter (add_command_string cmd)
(mklib_option.get lib.lib_opk.opk_options );
List.iter (fun o_file ->
add_command_arg cmd (BF o_file)) o_files;
let r = new_rule lib a_file
[Execute cmd] in
add_more_rule_sources lib r [ ocamlmklib_deps ] envs;
add_rule_sources r o_files;
()
let add_c_source b lib ptmp c_file options =
let envs = options :: lib.lib_opk.opk_options in
let dst_dir = lib.lib.lib_dst_dir in
let basename = c_file.file_basename in
let kernel_name = Filename.chop_suffix basename ".c" in
let ext_obj = BuildOCamlConfig.ocaml_config_ext_obj.get envs in
let o_file = add_dst_file lib dst_dir (kernel_name ^ ext_obj) in
if not lib.lib_opk.opk_installed then
add_c2o_rule b lib [] c_file o_file envs;
ptmp.o_files := o_file :: !(ptmp.o_files)
let add_command_pack_args cmd modnames =
if modnames <> [] then
add_command_args cmd [S "-for-pack";
S (String.concat "." modnames)]
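(* [move_compilation_garbage] moves compiler side-products (.annot, .s,
   .cmt, .cmti, .spit, .spot) out of the temporary directory, either back
   next to the sources or into the build directory. *)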
let move_compilation_garbage r copy_dir temp_dir kernel_name lib =
let move_to_sources dst_dir_virt exts =
let dst_dir = dst_dir_virt.dir_file in
List.iter (fun ext ->
let basename = kernel_name ^ ext in
let src_file = FileGen.add_basename temp_dir basename in
let dst_file = FileGen.add_basename dst_dir basename in
let _maybe_file = add_file lib lib.lib.lib_mut_dir basename in
add_rule_command r (MoveIfExists (F src_file, F dst_file, None))
) exts
in
move_to_sources lib.lib.lib_mut_dir [ ".annot"; ".s" ];
let move_to_build exts =
List.iter (fun ext ->
let basename = kernel_name ^ ext in
let src_file = FileGen.add_basename temp_dir basename in
let dst_file = add_file lib lib.lib.lib_dst_dir basename in
let link_file = add_file lib copy_dir (basename ^ ".link") in
add_rule_command r (MoveIfExists
(F src_file, BF dst_file, Some (BF link_file)))
) exts
in
move_to_build [ ".cmt"; ".cmti"; ".spit"; ".spot"; ]
let do_copy_objects_from lib src_lib kernel_name extension obj_files =
(*
Printf.eprintf "do_copy_objects_from %s:%s.%s -> %s:%s.%s\n%!"
  src_lib.lib_name kernel_name extension lib.lib_name kernel_name extension;
*)
let obj_basename = kernel_name ^ extension in
let obj_file_to_build =
try
let obj_file = find_dst_file lib.lib.lib_dst_dir obj_basename in
if obj_file.file_target_of = [] then Some obj_file else None
with NoSuchFileInDir _ ->
Some (add_dst_file lib lib.lib.lib_dst_dir obj_basename)
in
match obj_file_to_build with
| None -> ()
| Some dst_obj_file ->
try
let src_obj_file = find_dst_file src_lib.lib.lib_dst_dir obj_basename in
let r = new_rule lib dst_obj_file [] in
add_rule_command r (Copy (BF src_obj_file, BF dst_obj_file));
add_rule_source r src_obj_file;
obj_files := dst_obj_file :: !obj_files
with NoSuchFileInDir _ ->
Printf.eprintf "Error: %s:%s is supposed to be copied from %s:%s that does not exist\n%!"
lib.lib.lib_name obj_basename src_lib.lib.lib_name obj_basename;
clean_exit 2
let get_copy_objects_from lib envs =
match BuildValue.get_string_option_with_default envs "copy_objects_from" None with
None -> None
| Some name ->
let bc = lib.lib.lib_builder_context in
try
BuildOCamlGlobals.get_by_id (StringMap.find name bc.packages_by_name)
with Not_found ->
Printf.eprintf "Error: in package %S, copy_objects_from %S, no such package\n%!" lib.lib.lib_name name;
clean_exit 2
let copy_ml_objects_from lib ptmp src_lib kernel_name =
let envs = lib.lib_opk.opk_options in
do_copy_objects_from lib src_lib kernel_name ".cmi" ptmp.cmi_files;
if lib.lib_opk.opk_has_byte then
do_copy_objects_from lib src_lib kernel_name ".cmo" ptmp.cmo_files;
if lib.lib_opk.opk_has_asm then
let ext_obj = BuildOCamlConfig.ocaml_config_ext_obj.get envs in
do_copy_objects_from lib src_lib kernel_name ".cmx" ptmp.cmx_files;
do_copy_objects_from lib src_lib kernel_name ext_obj ptmp.cmx_o_files
let object_dst_dir b lib pack_for =
let dst_dir = lib.lib.lib_dst_dir in
match pack_for with
[] -> dst_dir
| modnames ->
let name = String.concat "/" modnames in
let full_dirname = Filename.concat dst_dir.dir_fullname name in
if not lib.lib_opk.opk_installed then
safe_mkdir full_dirname;
BuildEngineContext.add_directory b full_dirname
let ml2odoc lib ptmp kernel_name envs before_cmd pack_for force temp_ml_file ml_file seq_order =
if needs_odoc lib && indocs envs then
let b = lib.lib.lib_context in
let dst_dir = object_dst_dir b lib pack_for in
let odoc_basename = kernel_name ^ ".odoc" in
let odoc_file = add_dst_file lib dst_dir odoc_basename in
let cmd = new_command (ocamldoc_cmd.get envs ) (docflags envs) in
let r = new_rule lib odoc_file before_cmd in
add_more_rule_sources lib r [ ocamldoc_deps ] envs;
add_command_args cmd [S "-dump"; T odoc_basename];
add_command_strings cmd (command_includes lib pack_for);
if force = Force_IMPL || ml_file_option.get envs then
add_command_string cmd "-impl";
add_command_arg cmd temp_ml_file;
add_rule_command r (Execute cmd);
add_rule_source r ml_file;
cross_move r [ T odoc_basename, BF odoc_file ];
ptmp.odoc_files := odoc_file :: !(ptmp.odoc_files);
add_rule_sources r seq_order;
()
let mli2odoc lib ptmp kernel_name envs pack_for force mli_file seq_order =
if needs_odoc lib && indocs envs then
let b = lib.lib.lib_context in
let dst_dir = object_dst_dir b lib pack_for in
let odoc_basename = kernel_name ^ ".odoc" in
let odoc_file = add_dst_file lib dst_dir odoc_basename in
let cmd = new_command (ocamldoc_cmd.get envs ) (docflags envs) in
let r = new_rule lib odoc_file [] in
add_more_rule_sources lib r [ ocamldoc_deps ] envs;
add_command_args cmd [S "-dump"; BF odoc_file];
add_command_strings cmd (command_includes lib pack_for);
if force = Force_INTF || mli_file_option.get envs then
add_command_string cmd "-intf";
add_command_args cmd [ BF mli_file];
add_rule_command r (Execute cmd);
add_rule_source r mli_file;
ptmp.odoc_files := odoc_file :: !(ptmp.odoc_files);
add_rule_sources r seq_order;
()
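(* [add_mli_source] creates the rules for an interface file: optional
   preprocessing, dependency computation, .cmi compilation and ocamldoc
   dump, and records the module in the package module maps. *)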
let add_mli_source w b lib ptmp mli_file options =
let envs = options :: lib.lib_opk.opk_options in
if lib.lib_opk.opk_installed then () else
let _ = () in
let basename = mli_file.file_basename in
let kernel_name = Filename.chop_extension basename in
let kernel_modalias = String.capitalize kernel_name in
let kernel_name = match lib.lib_alias with
| None -> kernel_name
| Some alias -> Printf.sprintf "%s__%s" alias kernel_modalias
in
let kernel_modname = String.capitalize kernel_name in
let copy_objects_from = get_copy_objects_from lib envs in
match copy_objects_from with
| Some src_lib ->
do_copy_objects_from lib src_lib kernel_name ".cmi" ptmp.cmi_files;
| None ->
if IntMap.mem mli_file.file_id ptmp.src_files then begin
Printf.eprintf "Error: interface %s should be specified before its implementation in project %s\n%!"
(file_filename mli_file) lib.lib.lib_name;
clean_exit 2
end;
let pack_for = BuildValue.get_strings_with_default envs "packed" [] in
let dst_dir = object_dst_dir b lib pack_for in
ptmp.src_files <- IntMap.add mli_file.file_id mli_file ptmp.src_files;
let copy_dir = copy_dir lib mli_file in
let ppv = BuildOCamlSyntaxes.get_pp "mli" w lib basename options in
let comp_deps = comp_deps w lib options in
let mli_file, force =
match ppv.pp_option with
[] -> mli_file, Force_not
| pp ->
(* TODO: we should create the new_ml_file in the same subdirectory
   as the source file, not at the toplevel !! *)
let new_mli_file =
add_file lib lib.lib.lib_mut_dir (mli_file.file_basename ^ "pp")
in
let cmd = new_command pp (ppv.pp_flags @ [ BF mli_file ]) in
add_command_pipe cmd (FileGen.to_string new_mli_file.file_file);
let r = new_rule lib new_mli_file [] in
add_more_rule_sources lib r [] envs;
add_rule_command r (Execute cmd);
BuildOCamlSyntaxes.add_pp_requires r ppv;
add_more_rule_sources lib r [ pp_deps ] envs;
add_rule_source r mli_file;
add_rule_sources r comp_deps;
new_mli_file, Force_INTF
in
let mldep_file =
add_dst_file lib dst_dir (kernel_name ^ ".mlimods")
in
let needs_odoc = needs_odoc lib && indocs envs in
let mldep_file_ok =
add_ml2mldep_rule lib dst_dir pack_for force mli_file mldep_file needs_odoc options in
let seq_order = [mldep_file_ok] in
let cmi_basename = kernel_name ^ ".cmi" in
let cmi_temp = add_temp_file lib mli_file.file_dir cmi_basename in
let cmi_file = add_dst_file lib dst_dir cmi_basename in
let cmd, cmd_deps =
if lib.lib_opk.opk_has_byte then
let cmd = new_command (ocamlc_cmd.get envs ) (bytecompflags envs) in
add_bin_annot_argument cmd envs;
add_command_args cmd [S "-c"; S "-o"; BF cmi_temp];
add_command_strings cmd (comp_alias_options lib options);
add_command_strings cmd (command_includes lib pack_for);
if force = Force_INTF || mli_file_option.get envs then
add_command_args cmd [S "-intf" ];
add_command_args cmd [BF mli_file];
cmd, ocamlc_deps
else
let cmd = new_command (ocamlopt_cmd.get envs ) (asmcompflags envs) in
add_bin_annot_argument cmd envs;
add_command_args cmd [S "-c"; S "-o"; BF cmi_temp];
add_command_strings cmd (comp_alias_options lib options);
add_command_strings cmd (command_includes lib pack_for);
add_command_pack_args cmd pack_for;
(* add_command_strings cmd (command_pp lib options); *)
if force = Force_INTF || mli_file_option.get envs then
add_command_string cmd "-intf" ;
add_command_args cmd [BF mli_file];
cmd, ocamlopt_deps
in
let r = new_rule lib cmi_file [Execute cmd] in
add_more_rule_sources lib r [cmd_deps] envs;
add_rule_sources r comp_deps;
if cmi_temp != cmi_file then begin
cross_move r [ BF cmi_temp, BF cmi_file ];
add_rule_temporary r cmi_temp;
end;
move_compilation_garbage r copy_dir mli_file.file_dir.dir_file
kernel_name lib;
add_rule_source r mli_file;
add_rule_sources r seq_order;
(* TODO: we should actually rename all modules to fit
   their capitalized name in the _obuild directory *)
let lib_modules =
let pack_for = List.rev pack_for in
try
let (_, map) = StringsMap.find pack_for lib.lib_internal_modules in
map
with Not_found ->
let map = ref StringMap.empty in
lib.lib_internal_modules <-
StringsMap.add pack_for (dst_dir, map) lib.lib_internal_modules;
map
in
begin
let dep_info =
try
let (kind, basename) = StringMap.find kernel_modname !lib_modules in
match kind with
MLI
| MLandMLI -> None
| ML -> Some (MLandMLI, basename)
with Not_found ->
if verbose 5 then
Printf.eprintf "Adding MLI module %s to %s in %s\n"
kernel_modname kernel_name lib.lib.lib_name;
Some (MLI, DepBasename kernel_name)
in
match dep_info with
| None -> ()
| Some (kind, basename) ->
lib_modules :=
StringMap.add kernel_modname (kind, basename) !lib_modules;
if kernel_modname <> kernel_modalias then
lib.lib_aliases <-
StringMap.add kernel_modalias (kind, basename) lib.lib_aliases
end;
mli2odoc lib ptmp kernel_name
envs pack_for force mli_file seq_order;
if pack_for = [] then
ptmp.cmi_files := cmi_file :: !(ptmp.cmi_files)
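(* [find_capital s len] returns the index of the first character after the
   last slash or backslash in [s] (i.e. where the basename starts), or 0 if
   [s] contains no directory separator. *)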
let rec find_capital s len =
if len > 0 then
let pos = len-1 in
let c = s.[pos] in
if c = '/' || c = '\\' then len
else
find_capital s pos
else 0
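(* Toggle the case of the first character of the basename of [s], e.g.
   "dir/foo.ml" <-> "dir/Foo.ml". *)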
let invert_capital s =
let len = String.length s in
let pos = find_capital s len in
Printf.eprintf " invert_capital % S at pos % d\n% ! " s pos ;
if pos < len then
let s= Bytes.of_string s in
let c = Bytes.get s pos in
begin
match c with
| 'a'..'z' -> s.[pos] <- Char.uppercase c
| 'A'..'Z' -> s.[pos] <- Char.lowercase c
| _ -> ()
end;
Bytes.to_string s
else s
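(* Find a source file for [kernel_name] in [src_dir], trying every extension
   in [exts] and both capitalizations of the basename; exits with an error
   when nothing matches. *)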
let rec find_source_with_extension b lib src_dir kernel_name exts =
match exts with
| [] ->
Printf.eprintf "Error: package %S, module %S, could not find\n"
lib.lib.lib_name kernel_name;
Printf.eprintf " matching source in source directory\n";
Printf.eprintf " %S\n%!" src_dir.dir_fullname;
clean_exit 2
| ext :: rem_exts ->
let basename1 = kernel_name ^ "." ^ ext in
let test1 = FileGen.add_basename src_dir.dir_file basename1 in
if FileGen.exists test1 then
(basename1, ext)
else
let basename2 = invert_capital (kernel_name ^ "." ^ ext) in
let test2 = FileGen.add_basename src_dir.dir_file basename2 in
if FileGen.exists test2 then
(basename2, ext)
else
find_source_with_extension b lib src_dir kernel_name rem_exts
let standard_source_exts = [ "mly"; "mll"; "ml"; "mli"; "c" ]
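(* Collect the object files corresponding to the modules listed in [pack_of],
   register them as sources of rule [r], and return them in pack order. *)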
let get_packed_objects lib r src_dir pack_of obj_ext =
let options = lib.lib_opk.opk_options in
let packed_cmx_files = ref [] in
let b = r.rule_context in
List.iter (fun basename ->
let basename, extension = FileString.cut_at_last_extension basename in
let (filename, _obj_extension) =
if extension = "" then
find_source_with_extension b lib src_dir basename
[ obj_ext; "cmi" ]
else
let obj_extension = match String.lowercase extension with
"ml" | "mll" | "mly" -> obj_ext
| "mli" -> "cmi"
| ext ->
if List.mem ext (BuildValue.get_strings_with_default options
"impl_exts" []) then
obj_ext
else
if List.mem ext (BuildValue.get_strings_with_default options
"intf_exts" []) then
"cmi"
else
Printf.ksprintf failwith
"Bad extension [%s] for filename [%s]" extension basename
in
(basename ^ "." ^ obj_extension, obj_extension)
in
let object_file = add_file lib src_dir filename in
packed_cmx_files := object_file :: !packed_cmx_files;
add_rule_source r object_file;
) pack_of;
let packed_cmx_files = List.rev !packed_cmx_files in
packed_cmx_files
let bprintf_list b name list =
Printf.bprintf b "let %s = [\n" name;
List.iter (fun s -> Printf.bprintf b " %S;\n" s) list;
Printf.bprintf b " ]\n"
let (//) = Filename.concat
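(* Walk up the directory tree from [dir] looking for a .git directory and
   return the current commit (dereferencing HEAD when it is a symbolic ref). *)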
let rec find_git_commit dir =
let git_dir = dir // ".git" in
if Sys.file_exists git_dir then
let filename = git_dir // "HEAD" in
try
let ref =
let ic = open_in filename in
let line = input_line ic in
close_in ic;
line
in
let ref, file = OcpString.cut_at ref ' ' in
if ref = "ref:" then
let ic = open_in (git_dir // file) in
let line = input_line ic in
close_in ic;
line
else ref
with _ ->
let head = try FileString.string_of_file filename with _ -> "??" in
Printf.eprintf "Warning: unreadable-git-commit\nHEAD %S:\n%S\n%!"
filename head;
"unreadable-git-commit"
else
let new_dir = Filename.dirname dir in
if dir = new_dir then "no-git-commit"
else find_git_commit new_dir
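(* Print in buffer [b] a binding for [name]: build dates for "ocp::dates",
   the git commit for "ocp::commit", or the value of the corresponding
   package option otherwise. *)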
let add_info b lib options name =
match name with
| "ocp::dates" ->
let (date, en_date) =
try
BuildValue.get_string options "ocp_date",
BuildValue.get_string options "ocp_en_date"
          (* ignore (Sys.getenv "OCPBUILD_NODATE");
             "NODATE", "NODATE (option OCPBUILD_NODATE)" *)
with _ ->
          (* Tue Jan 20 17:48:12 CET 2015 *)
let tm = MinUnix.localtime (MinUnix.time()) in
let date =
Printf.sprintf "%04d-%02d-%02d %02d:%02d"
(1900+tm.MinUnix.tm_year) (1+tm.MinUnix.tm_mon)
tm.MinUnix.tm_mday tm.MinUnix.tm_hour tm.MinUnix.tm_min
in
let en_date =
try
let date = MinUnix.strftime "%a %b %d %T %Z %Y" tm in
if date = "" then failwith "strftime";
date
with _ -> date
in
(date, en_date)
in
Printf.bprintf b "let date = %S\n" date;
Printf.bprintf b "let en_date = %S\n" en_date;
| "ocp::commit" ->
Printf.bprintf b "let commit = %S\n"
(let commit = find_git_commit lib.lib.lib_src_dir.dir_fullname in
try String.sub commit 0 8
with _ -> commit
)
| name ->
Printf.bprintf b "let %s = %S\n" name
(BuildValue.get_string_with_default options name "")
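(* When the "ocp2ml" option is set, generate in the mutable directory an .ml
   file describing the package (name, version, authors, requires, and the
   variables listed in env_lists/env_strings/env_bools), rewriting it only
   when its content changed. *)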
let create_ml_file_if_needed lib mut_dir options ml_file =
if BuildValue.get_bool_with_default options "ocp2ml" false then begin
let tmp_ml = add_file lib mut_dir ml_file.file_basename in
let tmp_ml_file = tmp_ml.file_file in
let b = Buffer.create 1000 in
let opk = lib.lib_opk in
Printf.bprintf b "(* Generated by ocp-build *)\n";
Printf.bprintf b "let package = %S\n" lib.lib.lib_name;
Printf.bprintf b "let version = %S\n" opk.opk_version;
bprintf_list b "authors"
(
(BuildValue.get_strings_with_default options "author" [])
@
(BuildValue.get_strings_with_default options "authors" [])
);
List.iter (add_info b lib options) [
"copyright";
"license";
"description";
];
List.iter (fun variable ->
bprintf_list b variable ( BuildValue.get_strings_with_default options variable [] )
) (BuildValue.get_strings_with_default options "env_lists" []);
List.iter (add_info b lib options)
(BuildValue.get_strings_with_default options "env_strings" []);
List.iter (fun variable ->
Printf.bprintf b "let %s = %b\n" variable
(BuildValue.get_bool_with_default options variable false)
) (BuildValue.get_strings_with_default options "env_bools" []);
Printf.bprintf b "let requires = [\n";
List.iter (fun dep ->
let lib = dep.dep_project in
Printf.bprintf b " %S, %S;\n" lib.lib.lib_name
lib.lib_opk.opk_version;
) lib.lib_requires;
Printf.bprintf b " ]\n";
let ml_content = Buffer.contents b in
BuildEngineReport.cmd_file_from_content
(FileGen.to_string tmp_ml_file) ml_content;
if FileGen.exists tmp_ml_file then begin
let old_ml_content = FileGen.read_file tmp_ml_file in
if ml_content <> old_ml_content then begin
if verbose 2 then
Printf.fprintf stderr "create %s [outdated]\n%!"
(FileGen.to_string tmp_ml_file);
FileGen.write_file tmp_ml_file ml_content
end
end else begin
if verbose 2 then
Printf.fprintf stderr "create %s [unexisting] \n%!"
(FileGen.to_string tmp_ml_file);
FileGen.write_file tmp_ml_file ml_content;
end;
tmp_ml
end else ml_file
(* Instead of copy_mli_if_needed that copies the mli file during
   OCamlBuildRules, we should instead create a rule to generate this
   file, and makes the .ml rules depend on it.
*)
let copy_mli_if_needed lib mut_dir mll_file kernel_name =
try
let mli_file = FileGen.add_basename mll_file.file_dir.dir_file (kernel_name ^ ".mli") in
if FileGen.exists mli_file then begin
let mli_content = FileGen.read_file mli_file in
let tmp_mli = add_file lib mut_dir (kernel_name ^ ".mli") in
let tmp_mli_file = tmp_mli.file_file in
BuildEngineReport.cmd_copy (FileGen.to_string mli_file)
(FileGen.to_string tmp_mli_file);
if FileGen.exists tmp_mli_file then
let old_mli_content = FileGen.read_file tmp_mli_file in
if mli_content <> old_mli_content then begin
if verbose 2 then
Printf.fprintf stderr "cp %s %s [outdated]\n%!"
(FileGen.to_string mli_file) (FileGen.to_string tmp_mli_file);
FileGen.write_file tmp_mli_file mli_content
end else
()
else begin
if verbose 2 then
Printf.fprintf stderr "cp %s %s [unexisting] \n%!"
(FileGen.to_string mli_file) (FileGen.to_string tmp_mli_file);
FileGen.write_file tmp_mli_file mli_content;
end
else
Printf.eprintf " MLI FILE % S does not exist\n% ! "
( FileGen.to_string mli_file ) ;
Printf.eprintf "MLI FILE %S does not exist\n%!"
(FileGen.to_string mli_file); *)
with e ->
Printf.eprintf "copy_mli_if_needed error %s\n%!" (Printexc.to_string e);
clean_exit 2
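(* [content_generator new_file f] returns a thunk that builds the content of
   [new_file] with [f] and writes it to disk. *)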
let content_generator new_file f =
function () ->
let b = Buffer.create 10000 in
f b;
let content = Buffer.contents b in
let file = file_filename new_file in
BuildEngineReport.cmd_file_from_content file content;
FileString.file_of_string file content;
()
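(* Create all the rules needed to compile one .ml source of [lib]:
   preprocessing, file2string generation, dependency analysis, byte and
   native compilation (including -pack), module aliases and ocamldoc. *)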
let add_ml_source w b lib ptmp ml_file options =
let needs_odoc = needs_odoc lib in
let envs = options :: lib.lib_opk.opk_options in
let basename = ml_file.file_basename in
Printf.eprintf " basename = [ % s]\n " basename ;
let kernel_alias =
BuildValue.get_string_with_default envs "module"
(Filename.chop_extension basename) in
let kernel_modalias = String.capitalize kernel_alias in
let kernel_name = match lib.lib_alias with
| None -> kernel_alias
| Some alias ->
if is_aliased options then
Printf.sprintf "%s__%s" alias kernel_modalias
else
kernel_alias
in
let kernel_modname = String.capitalize kernel_name in
let has_byte = lib.lib_opk.opk_has_byte in
let has_asm = lib.lib_opk.opk_has_asm in
let orig_ml_file = ml_file in
let pack_for = BuildValue.get_strings_with_default envs "packed" [] in
if lib.lib_opk.opk_installed then begin
if pack_for = [] then begin
Printf.eprintf " add_ml_source : % s is already installed in % s\n% ! "
basename ( FileGen.to_string dst_dir.dir_file ) ;
Printf.eprintf " ml_file % s\n% ! " ( file_filename ml_file ) ;
Printf.eprintf "add_ml_source: %s is already installed in %s\n%!"
basename (FileGen.to_string dst_dir.dir_file);
Printf.eprintf "ml_file %s\n%!" (file_filename ml_file);
*)
let dst_dir = ml_file.file_dir in
let cmo_basename = kernel_name ^ ".cmo" in
let cmo_file = add_dst_file lib dst_dir cmo_basename in
let cmx_basename = kernel_name ^ ".cmx" in
let cmx_file = add_dst_file lib dst_dir cmx_basename in
let ext_obj = BuildOCamlConfig.ocaml_config_ext_obj.get envs in
let o_basename = kernel_name ^ ext_obj in
let o_file = add_dst_file lib dst_dir o_basename in
if has_byte then
ptmp.cmo_files := cmo_file :: !(ptmp.cmo_files);
if has_asm then begin
ptmp.cmx_files := cmx_file :: !(ptmp.cmx_files);
ptmp.cmx_o_files := o_file :: !(ptmp.cmx_o_files)
end
end
end else
let comp_deps = comp_deps w lib options in
let copy_objects_from = get_copy_objects_from lib envs in
match copy_objects_from with
| Some src_lib ->
copy_ml_objects_from lib ptmp src_lib kernel_name
| None ->
let copy_dir = copy_dir lib ml_file in
let old_ml_file = ml_file in
let ml_file = create_ml_file_if_needed lib lib.lib.lib_mut_dir envs ml_file in
let ppv = BuildOCamlSyntaxes.get_pp "ml" w lib basename options in
let _has_mli =
if no_mli_option.get envs then None else
try
let mli_file = BuildValue.get_string envs mli_file_attr in
Some (add_package_file lib mli_file, false)
with Var_not_found _ ->
let mli_name = kernel_alias ^ ".mli" in
let mli_file =
Filename.concat
orig_ml_file.file_dir.dir_fullname
mli_name
in
if Sys.file_exists mli_file then
Some (add_file lib orig_ml_file.file_dir mli_name, true)
else
try
Some (find_dst_file lib.lib.lib_src_dir (kernel_alias ^ ".mli"), true)
with NoSuchFileInDir _ -> None
in
let ml_file =
let file2string = BuildValue.get_strings_with_default envs
file2string_attr [] in
if file2string = [] then ml_file else
let new_ml_file = add_file lib lib.lib.lib_mut_dir ml_file.file_basename
in
let r = new_rule lib new_ml_file [] in
let sources = List.map (fun file ->
file, add_file lib orig_ml_file.file_dir file
) file2string in
add_rule_sources r (List.map snd sources);
add_rule_command r (
Function ("file2string",
(fun b ->
List.iter (fun (file, _) ->
Printf.bprintf b "%s\n" file
) sources
),
content_generator new_ml_file
(fun b ->
Printf.bprintf b "let files = [\n";
List.iter (fun (file, src_file) ->
Printf.bprintf b "%S, %S;"
file (FileString.string_of_file (file_filename src_file))
) sources;
Printf.bprintf b " ]\n";
)));
new_ml_file
in
let ml_file, force =
match ppv.pp_option with
[] -> ml_file, Force_not
| pp ->
        (* TODO: we should create the new_ml_file in the same subdirectory
           as the source file, not at the toplevel !! *)
let new_ml_file =
add_file lib lib.lib.lib_mut_dir (ml_file.file_basename ^ "pp")
in
let cmd = new_command pp (ppv.pp_flags @ [ BF ml_file ]) in
add_command_pipe cmd (FileGen.to_string new_ml_file.file_file);
let r = new_rule lib new_ml_file [] in
add_more_rule_sources lib r [] envs;
add_rule_command r (Execute cmd);
BuildOCamlSyntaxes.add_pp_requires r ppv;
add_more_rule_sources lib r [ pp_deps ] envs;
add_rule_source r ml_file;
add_rule_sources r comp_deps;
new_ml_file, Force_IMPL
in
if old_ml_file != ml_file then begin
copy_mli_if_needed lib lib.lib.lib_mut_dir old_ml_file kernel_alias;
end;
let dst_dir = object_dst_dir b lib pack_for in
let pack_of = pack_option.get envs in
      (*
      if pack_of <> [] then
        List.iter (fun pack -> Printf.eprintf "pack %s\n" pack) pack_of;
      *)
let cmi_name = kernel_name ^ ".cmi" in
let needs_cmi =
try
          (* This case corresponds to a .mli file present in "files"
             before the .ml *)
let cmi_file = find_dst_file dst_dir cmi_name in
Some cmi_file
with NoSuchFileInDir _ ->
let mli_name = kernel_alias ^ ".mli" in
let mli_file =
Filename.concat
orig_ml_file.file_dir.dir_fullname
mli_name
in
if not (no_mli_option.get envs ) then
let mli_file =
if Sys.file_exists mli_file then
Some (add_file lib orig_ml_file.file_dir mli_name)
else
try
Some (find_dst_file lib.lib.lib_src_dir (kernel_alias ^ ".mli"))
with NoSuchFileInDir _ -> None
in
match mli_file with
| Some mli_file ->
ignore (add_mli_source w b lib ptmp mli_file (BuildValue.set_bool options "ml" false) : unit);
let cmi_file = find_dst_file dst_dir cmi_name in
Some cmi_file
| None -> None
else
None
in
ptmp.src_files <- IntMap.add ml_file.file_id ml_file ptmp.src_files;
let seq_order =
if pack_of <> [] ||
          (lib.lib_alias <> None && not (is_aliased options)) then
          []
        else
let mldep_file =
add_dst_file lib dst_dir (kernel_name ^ ".mlmods")
in
let mldep_file_ok = add_ml2mldep_rule lib dst_dir pack_for force ml_file mldep_file
(needs_odoc && needs_cmi = None) options in
ptmp.dep_files <- IntMap.add mldep_file.file_id mldep_file ptmp.dep_files;
[mldep_file_ok]
in
let seq_order = match needs_cmi with
None -> seq_order
| Some cmi_file -> cmi_file :: seq_order in
let gen_cmi = match needs_cmi with
None -> [add_dst_file lib dst_dir cmi_name ]
| Some _ -> []
in
let lib_modules =
let pack_for = List.rev pack_for in
try
let (_, map) = StringsMap.find pack_for lib.lib_internal_modules in
map
with Not_found ->
let map = ref StringMap.empty in
lib.lib_internal_modules <- StringsMap.add pack_for (dst_dir, map) lib.lib_internal_modules;
map
in
begin
let dep =
try
let (kind, basename) = StringMap.find kernel_modname !lib_modules
in
match kind with
ML
| MLandMLI -> None
| MLI -> Some (MLandMLI, basename)
with Not_found ->
if verbose 5 then
Printf.eprintf "Adding ML module %s to %s.CMO in %s\n"
kernel_modname kernel_name lib.lib.lib_name;
match lib.lib_alias with
| Some _ when kernel_modname = kernel_modalias ->
Some (ML, DepAlias lib)
| _ ->
Some (ML, DepBasename kernel_name)
in
match dep with
| None ->
Printf.eprintf
"ERROR: The file(s) %s appears more than once in %s\n%!"
(kernel_name ^ ".ml")
lib.lib.lib_filename
| Some (kind, basename) ->
lib_modules := StringMap.add kernel_modname (kind, basename)
!lib_modules;
if kernel_modname <> kernel_modalias then
lib.lib_aliases <-
StringMap.add kernel_modalias (kind, basename) lib.lib_aliases
end;
let cmi_basename = kernel_name ^ ".cmi" in
let cmi_file = add_dst_file lib dst_dir cmi_basename in
let (before_cmd, temp_ml_file) =
if no_mli_option.get envs then
let temp_ml_file = T (kernel_name ^ ".ml") in
([ NeedTempDir; Copy (BF ml_file, temp_ml_file)], temp_ml_file)
else
([], BF ml_file)
in
let needs_cmo =
if has_byte then begin
let cmo_basename = kernel_name ^ ".cmo" in
let cmo_file = add_dst_file lib dst_dir cmo_basename in
let r = new_rule lib cmo_file before_cmd in
add_more_rule_sources lib r [ ocamlc_deps ] envs;
add_rule_sources r comp_deps;
if pack_of = [] then begin
let cmd = new_command (ocamlc_cmd.get envs ) [] in
add_bin_annot_argument cmd envs;
add_command_args cmd [S "-c"; S "-o"; T cmo_basename];
add_command_strings cmd (comp_alias_options lib options);
add_command_pack_args cmd pack_for;
add_command_strings cmd (command_includes lib pack_for);
add_command_args cmd (bytecompflags envs);
if force = Force_IMPL || ml_file_option.get envs then
add_command_string cmd "-impl";
add_command_arg cmd temp_ml_file;
add_rule_command r (Execute cmd);
add_rule_source r ml_file;
end else begin
let cmd = new_command (ocamlc_cmd.get envs ) [] in
add_bin_annot_argument cmd envs;
add_command_args cmd (bytecompflags envs);
add_command_args cmd [S "-pack"; S "-o"; T cmo_basename];
add_command_pack_args cmd pack_for;
let src_dir =
Filename.concat dst_dir.dir_fullname kernel_modname in
Printf.eprintf " Pack in % s [ % s]\n " src_dir modname ;
let src_dir = BuildEngineContext.add_directory b src_dir in
let cmo_files = get_packed_objects lib r src_dir pack_of "cmo" in
let cmd = add_files_to_link_to_command
lib "byte pack" cmd envs cmo_files in
add_rule_command r cmd
end;
cross_move r [ T cmo_basename, BF cmo_file ];
begin match needs_cmi with
None ->
cross_update r [T cmi_basename, BF cmi_file]
| _ -> ();
end;
if pack_for = [] then
ptmp.cmo_files := cmo_file :: !(ptmp.cmo_files);
move_compilation_garbage r copy_dir
(BuildEngineRules.rule_temp_dir r) kernel_name lib;
add_rule_sources r seq_order;
add_rule_targets r gen_cmi;
match needs_cmi with
None -> Some cmo_file
| Some _ -> None
end else None
in
let _needs_cmx =
if has_asm then begin
let cmx_basename = kernel_name ^ ".cmx" in
let cmx_file = add_dst_file lib dst_dir cmx_basename in
let ext_obj = BuildOCamlConfig.ocaml_config_ext_obj.get envs in
let o_basename = kernel_name ^ ext_obj in
let o_file = add_dst_file lib dst_dir o_basename in
let r = new_rule lib cmx_file before_cmd in
add_more_rule_sources lib r [ ocamlopt_deps] envs;
add_rule_sources r comp_deps;
        (*
        let temp_dir = BuildEngineRules.rule_temp_dir r in
        let o_temp = FileGen.add_basename temp_dir o_basename in
        let cmx_temp = FileGen.add_basename temp_dir cmx_basename in
        let cmi_temp = FileGen.add_basename temp_dir cmi_basename in
        *)
if pack_of = [] then begin
let cmd = new_command (ocamlopt_cmd.get envs ) [] in
add_bin_annot_argument cmd envs;
add_command_args cmd [S "-c"; S "-o"; T cmx_basename];
add_command_pack_args cmd pack_for;
add_command_strings cmd (command_includes lib pack_for);
add_command_strings cmd (comp_alias_options lib options);
add_command_args cmd (asmcompflags envs);
if force = Force_IMPL || ml_file_option.get envs then
add_command_string cmd "-impl" ;
add_command_arg cmd temp_ml_file;
add_rule_command r (Execute cmd);
add_rule_source r ml_file;
end else begin
let cmd = new_command (ocamlopt_cmd.get envs ) [] in
add_bin_annot_argument cmd envs;
add_command_args cmd (asmcompflags envs);
add_command_args cmd [S "-pack"; S "-o"; T cmx_basename];
add_command_pack_args cmd pack_for;
let src_dir
= BuildEngineContext.add_directory
b (Filename.concat dst_dir.dir_fullname kernel_modname) in
let cmx_files = get_packed_objects
lib r src_dir pack_of "cmx" in
let cmd = add_files_to_link_to_command
lib "asm pack" cmd envs cmx_files in
add_rule_command r cmd
end;
cross_move r [ T cmx_basename, BF cmx_file;
T o_basename, BF o_file ];
begin match needs_cmi with
None ->
cross_update r [T cmi_basename, BF cmi_file]
| _ -> ();
end;
add_rule_sources r seq_order;
add_rule_targets r (o_file :: gen_cmi);
move_compilation_garbage r copy_dir (BuildEngineRules.rule_temp_dir r) kernel_name lib;
begin match needs_cmo with
Some cmo_file ->
            (* If both ocamlc and ocamlopt build the cmi file, they should
               not execute concurrently. For that, we create an artificial
               ordering between them, by requesting the cmo file before
               the cmx file, if both have to be generated. *)
add_rule_time_dependency r cmo_file
| None -> ()
end;
if pack_for = [] then begin
ptmp.cmx_files := cmx_file :: !(ptmp.cmx_files);
ptmp.cmx_o_files := o_file :: !(ptmp.cmx_o_files);
end;
Some cmx_file
end else None
in
begin
match needs_cmi with
| Some _ -> ()
| None ->
if pack_of = [] then
ml2odoc lib ptmp kernel_name envs before_cmd pack_for force temp_ml_file ml_file seq_order
end;
if pack_for = [] then begin
if needs_cmi = None then
ptmp.cmi_files := cmi_file :: !(ptmp.cmi_files);
end
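(* Rules for a .mll source: copy the matching .mli (if any) to the mutable
   directory, generate the .ml from the .mll, then treat it as a plain ML
   source. *)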
let add_mll_source w b lib ptmp mll_file options =
let envs = options :: lib.lib_opk.opk_options in
let basename = mll_file.file_basename in
let kernel_name = Filename.chop_suffix basename ".mll" in
if lib.lib_opk.opk_installed then
let ml_file = add_file lib lib.lib.lib_src_dir (kernel_name ^ ".ml") in
add_ml_source w b lib ptmp ml_file options
else
let copy_objects_from = get_copy_objects_from lib envs in
match copy_objects_from with
| Some src_lib ->
copy_ml_objects_from lib ptmp src_lib kernel_name
| None ->
      (*
      let tmp_dirname =
        Filename.concat
          (Filename.concat b.build_dir_filename "_temp_tree")
          (FileGen.to_string mll_file.file_dir.dir_file) in
      if not (Sys.file_exists tmp_dirname) then safe_mkdir tmp_dirname;
      let tmp_dir = add_directory b tmp_dirname in
      *)
      let _ = () in
copy_mli_if_needed lib lib.lib.lib_mut_dir mll_file kernel_name;
let ml_file = add_file lib lib.lib.lib_mut_dir (kernel_name ^ ".ml") in
add_mll2ml_rule lib mll_file ml_file options;
add_ml_source w b lib ptmp ml_file options
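(* Rules for a .mly source: generate the .ml and .mli in the mutable
   directory, then treat them as plain sources. *)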
let add_mly_source w b lib ptmp mly_file options =
let envs = options :: lib.lib_opk.opk_options in
let basename = mly_file.file_basename in
let kernel_name = Filename.chop_suffix basename ".mly" in
if lib.lib_opk.opk_installed then
let ml_file = add_file lib mly_file.file_dir (kernel_name ^ ".ml") in
add_ml_source w b lib ptmp ml_file options
else
let copy_objects_from = get_copy_objects_from lib envs in
match copy_objects_from with
| Some src_lib ->
copy_ml_objects_from lib ptmp src_lib kernel_name
| None ->
      let _ = () in
let ml_file = add_file lib lib.lib.lib_mut_dir (kernel_name ^ ".ml") in
let mli_filename = kernel_name ^ ".mli" in
let mli_file = add_file lib lib.lib.lib_mut_dir mli_filename in
add_mli_source w b lib ptmp mli_file options;
add_mly2ml_rule lib mly_file ml_file mli_file options;
add_ml_source w b lib ptmp ml_file options
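(* Dispatch one source file of the package to the appropriate rule generator
   according to its (possibly user-defined) extension. *)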
let process_source w b lib ptmp src_dir (basename, options) =
let _bc = lib.lib.lib_builder_context in
let envs = options :: lib.lib_opk.opk_options in
let (kernel_name, last_extension) = OcpString.rcut_at basename '.' in
let (basename, last_extension) =
if last_extension = "" then
find_source_with_extension b lib src_dir kernel_name
standard_source_exts
else
(basename, last_extension)
in
let src_file = try
add_file lib src_dir basename
with MinUnix.Unix_error(MinUnix.ENOENT, _, _) ->
Printf.eprintf "Error: missing source file %S for package %S\n%!"
(Filename.concat src_dir.dir_fullname basename) lib.lib.lib_name;
Printf.eprintf " (You may need to manually disable compilation of this package\n";
Printf.eprintf " with 'enabled = false')\n%!";
clean_exit 2
in
match last_extension with
"c" ->
add_c_source b lib ptmp src_file options
| " objects " - >
let obj_lib =
try
StringMap.find kernel_name bc.packages_by_name
with Not_found - >
Printf.eprintf " Package % s : Could not find % s.objects in:\n% ! "
lib.lib.lib_name kernel_name ;
StringMap.iter ( fun s _ - > Printf.eprintf " % s " s ) bc.packages_by_name ;
Printf.eprintf " \n% ! " ;
clean_exit 2
in
begin match BuildOCamlGlobals.get_by_id obj_lib with
| None - > ( )
| Some obj_lib - >
ptmp.cmo_files : = ( List.rev obj_lib.lib_cmo_objects ) @ ! ( ptmp.cmo_files ) ;
ptmp.cmx_files : = ( List.rev obj_lib.lib_cmx_objects ) @ ! ( ptmp.cmx_files ) ;
ptmp.cmx_o_files : = ( List.rev obj_lib.lib_cmx_o_objects ) @ ! ( ptmp.cmx_o_files ) ;
( )
end
| " files " - >
let obj_lib =
try
StringMap.find kernel_name bc.packages_by_name
with Not_found - >
Printf.eprintf " Package % s : Could not find % s.objects\n% ! "
lib.lib.lib_name kernel_name ;
clean_exit 2
in
begin match BuildOCamlGlobals.get_by_id obj_lib with
| None - > ( )
| Some obj_lib - >
let src_dir = obj_lib.lib.lib_src_dir in
List.iter ( process_source w b lib ptmp src_dir ) obj_lib.lib_sources
end
| "objects" ->
let obj_lib =
try
StringMap.find kernel_name bc.packages_by_name
with Not_found ->
Printf.eprintf "Package %s: Could not find %s.objects in:\n%!"
lib.lib.lib_name kernel_name;
StringMap.iter (fun s _ -> Printf.eprintf "%s " s) bc.packages_by_name;
Printf.eprintf "\n%!";
clean_exit 2
in
begin match BuildOCamlGlobals.get_by_id obj_lib with
| None -> ()
| Some obj_lib ->
ptmp.cmo_files := (List.rev obj_lib.lib_cmo_objects) @ !(ptmp.cmo_files);
ptmp.cmx_files := (List.rev obj_lib.lib_cmx_objects) @ !(ptmp.cmx_files);
ptmp.cmx_o_files := (List.rev obj_lib.lib_cmx_o_objects) @ !(ptmp.cmx_o_files);
()
end
| "files" ->
let obj_lib =
try
StringMap.find kernel_name bc.packages_by_name
with Not_found ->
Printf.eprintf "Package %s: Could not find %s.objects\n%!"
lib.lib.lib_name kernel_name;
clean_exit 2
in
begin match BuildOCamlGlobals.get_by_id obj_lib with
| None -> ()
| Some obj_lib ->
let src_dir = obj_lib.lib.lib_src_dir in
List.iter (process_source w b lib ptmp src_dir) obj_lib.lib_sources
end
*)
| "ml" ->
add_ml_source w b lib ptmp src_file options
| "mll" ->
add_mll_source w b lib ptmp src_file options
| "mly" ->
add_mly_source w b lib ptmp src_file options
| "mli" ->
add_mli_source w b lib ptmp src_file options
  (* other ones: .ml4, .ml5, .mli5, .mly4, .mly5, .mll4, .mll5 *)
| ext ->
if ml_file_option.get envs
|| List.mem ext (BuildValue.get_strings_with_default envs "ml_exts" [])
|| List.mem ext (BuildValue.get_strings_with_default envs "impl_exts" [])
then
add_ml_source w b lib ptmp src_file options
else
if mli_file_option.get envs
|| List.mem ext (BuildValue.get_strings_with_default envs "mli_exts" [])
|| List.mem ext (BuildValue.get_strings_with_default envs "intf_exts" [])
then
add_mli_source w b lib ptmp src_file options
else
if
List.mem ext (BuildValue.get_strings_with_default envs "mll_exts" [])
then
add_mll_source w b lib ptmp src_file options
else
if
List.mem ext (BuildValue.get_strings_with_default envs "mly_exts" [])
then
add_mly_source w b lib ptmp src_file options
else
begin
Printf.eprintf "Don't know what to do with [%s] (extension %S)\n%!"
(String.escaped basename) ext;
Printf.eprintf "\tfrom project %s in dir %s\n%!"
lib.lib.lib_name src_dir.dir_fullname;
clean_exit 2;
end
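(* Same as above, after resolving the "package" and "subdir" options of the
   source. *)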
let process_source w b lib ptmp src_dir (basename, options) =
let bc = lib.lib.lib_builder_context in
let envs = options :: lib.lib_opk.opk_options in
let src_dir =
let package = package_option.get envs in
if package = "" then src_dir else
let obj_lib =
try
StringMap.find package bc.packages_by_name
with Not_found ->
Printf.eprintf "Package %s: Could not find package %s\n%!"
lib.lib.lib_name package;
clean_exit 2
in
let src_dir = obj_lib.lib_src_dir in
src_dir
in
let basename =
let subdir = subdir_option.get envs in
match subdir with
[] -> basename
| subdir ->
let subdir = FileGen.add_basenames (FileGen.of_string "") subdir in
Filename.concat (FileGen.to_string subdir) basename
in
process_source w b lib ptmp src_dir (basename, options)
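(* Process every source of [lib], generating the module-alias file when the
   package uses one, and return the collected lists of produced files. *)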
let process_sources w b lib =
let ptmp = new_package_temp_variables () in
begin
match lib.lib.lib_type with
| SyntaxPackage ->
if lib.lib_sources <> [] then begin
Printf.eprintf "Syntax %S: 'files' should be empty !\n" lib.lib.lib_name;
Printf.eprintf " If your syntax contains sources, you should build a library\n";
Printf.eprintf " and define the syntax to require this library.\n%!";
clean_exit 2
end
| RulesPackage -> assert false
| TestPackage
| LibraryPackage
| ProgramPackage
| ObjectsPackage ->
let src_dir = lib.lib.lib_src_dir in
let _dst_dir = lib.lib.lib_dst_dir in
begin
match lib.lib_alias with
| None -> ()
| Some alias ->
let alias_file = add_file lib lib.lib.lib_dst_dir (alias ^ ".ml") in
let r = new_rule lib alias_file [] in
let modnames =
List.fold_left (fun acc (filename, options) ->
let (is_ml, modname, _basename) =
BuildOCamldep.modname_of_file [options] Force_not filename
in
if is_ml then modname :: acc else acc
) [] lib.lib_sources
in
let mod_alias = String.capitalize alias in
add_rule_command r (
Function ("gen-alias",
(fun b ->
List.iter (fun s ->
Buffer.add_string b s;
Buffer.add_char b '|'
) modnames
),
content_generator alias_file
(fun b ->
List.iter (fun s ->
Printf.bprintf b
"module %s = %s__%s\n"
s
mod_alias
s
) modnames;
)
));
let options = BuildValue.empty_env in
let options = BuildValue.set_bool options open_aliases_flag false in
add_ml_source w b lib ptmp alias_file options
end;
List.iter (process_source w b lib ptmp src_dir) lib.lib_sources;
end;
ptmp.cmo_files := List.rev !(ptmp.cmo_files);
ptmp.odoc_files := List.rev !(ptmp.odoc_files);
lib.lib_doc_targets := !(ptmp.odoc_files) @ !(lib.lib_doc_targets);
ptmp.cmx_files := List.rev !(ptmp.cmx_files);
ptmp.cmx_o_files := List.rev !(ptmp.cmx_o_files);
ptmp.cmi_files := List.rev !(ptmp.cmi_files);
ptmp.o_files := List.rev !(ptmp.o_files);
ptmp
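(* Rules for a library package: stubs archive, .cma, .cmxa/.a/.cmxs and the
   HTML documentation target. *)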
let add_library w b lib =
let src_dir = lib.lib.lib_src_dir in
let dst_dir = lib.lib.lib_dst_dir in
let envs = lib.lib_opk.opk_options in
let ptmp = process_sources w b lib in
let cclib = cclib_option.get envs in
let cclib = String.concat " " cclib in
let (cclib, stubs_files) =
let a_file =
let ext_lib = BuildOCamlConfig.ocaml_config_ext_lib.get envs in
let libbasename =
Printf.sprintf "lib%s%s" lib.lib_stubarchive ext_lib in
if !(ptmp.o_files) <> [] then
let a_file = add_dst_file lib dst_dir libbasename in
add_os2a_rule lib !(ptmp.o_files) a_file;
Some a_file
else
try
let a_file = libstubs.get envs in
if a_file = "" then raise (Var_not_found "libstubs");
let a_file = BuildSubst.subst_global a_file in
if Filename.basename a_file <> libbasename then begin
Printf.eprintf "%s\nError: %s=%S basename differs from %S^%s^%S=\"%s\"\n%!"
(string_of_libloc lib)
"libstubs" a_file "lib" "stubarchive" ext_lib libbasename;
BuildMisc.clean_exit 2
end;
let a_file = add_package_file lib a_file in
Some a_file
with Var_not_found _ -> None
in
match a_file with
| None -> cclib, []
| Some a_file ->
lib.lib_stub_targets <- (a_file, STUB_A) :: lib.lib_stub_targets;
Printf.sprintf "-l%s %s" lib.lib_stubarchive cclib, [a_file]
in
if lib.lib_opk.opk_has_byte &&
(lib.lib_opk.opk_installed || !(ptmp.cmo_files) <> []) then begin
let cma_file = add_dst_file lib dst_dir (lib.lib_archive ^ ".cma") in
add_cmo2cma_rule lib ptmp cclib !(ptmp.cmo_files) cma_file;
lib.lib_intf_targets <-
(List.map (fun cmi -> cmi, CMI)
(!(ptmp.cmi_files))) @ lib.lib_intf_targets;
lib.lib_byte_targets <- (cma_file, CMA) :: lib.lib_byte_targets;
end;
if lib.lib_opk.opk_has_asm &&
(lib.lib_opk.opk_installed || !(ptmp.cmx_files) <> []) then begin
let (cmxa_file, a_file, cmxs_files) =
add_cmx2cmxa_rule lib cclib !(ptmp.cmi_files)
!(ptmp.cmx_files) !(ptmp.cmx_o_files) stubs_files in
lib.lib_intf_targets <-
(List.map (fun cmi -> cmi, CMI) (!(ptmp.cmi_files))) @
(List.map (fun cmx -> cmx, CMX) (!(ptmp.cmx_files))) @
lib.lib_intf_targets;
lib.lib_asm_targets <-
(cmxa_file, CMXA) ::
(a_file, CMXA_A) ::
cmxs_files @ lib.lib_asm_targets
end;
if !(ptmp.odoc_files) <> [] then begin
let doc_dirname = Filename.concat dst_dir.dir_fullname "_doc" in
safe_mkdir doc_dirname;
let docdir = BuildEngineContext.add_directory b doc_dirname in
let html_file = add_file lib dst_dir "_doc/index.html" in
add_odocs2html_rule lib !(ptmp.odoc_files) docdir html_file;
lib.lib_doc_targets := html_file :: !(lib.lib_doc_targets)
end;
()
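(* Rules for an "objects" package: only the individual .cmi/.cmo/.cmx/.o
   targets, no archive is built. *)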
let add_objects w b lib =
let ptmp = process_sources w b lib in
if lib.lib_opk.opk_has_byte then begin
lib.lib_intf_targets <-
(List.map (fun cmi -> cmi, CMI) (!(ptmp.cmi_files))) @
lib.lib_intf_targets;
lib.lib_byte_targets <-
(List.map (fun cmo -> cmo, CMO)
(!(ptmp.cmo_files))) @ lib.lib_byte_targets;
end;
if lib.lib_opk.opk_has_asm then begin
lib.lib_intf_targets <-
(List.map (fun cmi -> cmi, CMI) (!(ptmp.cmi_files))) @
(List.map (fun cmx -> cmx, CMX) (!(ptmp.cmx_files))) @
lib.lib_intf_targets;
lib.lib_asm_targets <-
(List.map (fun cmx -> cmx, CMX)
(!(ptmp.cmx_files)))
@ (List.map (fun o -> o, CMX_O)
(!(ptmp.cmx_o_files)))
@ lib.lib_asm_targets;
end;
()
let local_subst (file, env) s =
let s = BuildSubst.subst_global s in
let s = BuildSubst.apply_substituter
BuildOCP.filesubst s (file,env) in
s
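(* Add the user-defined rules of the package description
   ("<target>_rules" / "<target>_targets"), including the %%loaddeps,
   %%subst and %%config_make2ocp primitive commands. *)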
let add_extra_rules bc lib target_name target_files =
let lib_options = lib.lib_opk.opk_options in
let _b = bc.build_context in
let dirname = lib.lib.lib_dirname in
let files = BuildValue.get_strings_with_default lib_options "source_files" [] in
List.iter (fun file ->
let (_: build_file) = add_file lib lib.lib.lib_src_dir file
in
()
) files;
let build_rules =
BuildValue.get_local_prop_list_with_default lib_options
(target_name ^ "_rules") [] in
let build_targets =
BuildValue.get_local_prop_list_with_default lib_options
(target_name ^ "_targets") [] in
List.iter (fun (file, _env) ->
let file = BuildSubst.subst_global file in
let target_file = add_package_file lib file in
target_files := target_file :: !target_files
) build_targets;
if build_rules <> [] then
List.iter (fun (file, env) ->
Printf.eprintf " Adding rule to build % s/%s\n% ! " ( ) file ;
Printf.eprintf "Adding rule to build %s/%s\n%!" (FileGen.to_string dirname) file;
*)
let envs = env :: lib.lib_opk.opk_options in
let uniq_rule = BuildValue.get_string_option_with_default envs "uniq_rule" None in
let file = BuildSubst.subst_global file in
let target_file = add_package_file lib file in
let to_build = BuildValue.get_bool_with_default envs "build_target" false in
if to_build then
target_files := target_file :: ! target_files;
try
match uniq_rule with
None -> raise Not_found
| Some uniq_rule ->
let r = Hashtbl.find bc.uniq_rules uniq_rule in
add_rule_target r target_file
with Not_found ->
let local_subst = local_subst (file, envs) in
let targets = BuildValue.get_strings_with_default envs "more_targets" [] in
let targets = List.map local_subst targets in
let commands =
try
BuildValue.get_local_prop_list envs "commands"
with Var_not_found _ ->
Printf.eprintf "Error in package %S at %S:\n%!"
lib.lib.lib_name
(BuildEngineDisplay.string_of_loc lib.lib.lib_loc);
Printf.eprintf "\tRule for %S does not define 'commands'\n%!" file;
clean_exit 2
in
let sources = BuildValue.get_strings_with_default envs "sources" [] in
let sources = List.map local_subst sources in
let r = new_rule lib target_file [] in
begin match uniq_rule with
None -> () | Some uniq_rule ->
Hashtbl.add bc.uniq_rules uniq_rule r
end;
let sources = List.map (add_package_file lib) sources in
let dirname_s = FileGen.to_string dirname in
List.iter (fun (cmd_name, cmd_env) ->
let envs = cmd_env :: envs in
match cmd_name with
| "" ->
let cmd =
try
let cmd = BuildValue.get_strings envs "value" in
cmd
with Var_not_found _ -> assert false
in
let cmd = List.map local_subst cmd in
let cmd = new_command cmd [] in
begin
let dirname_s = try
let s = BuildValue.get_string envs "chdir" in
let s = local_subst s in
if Filename.is_relative s then
Filename.concat dirname_s s
else s
with Var_not_found _ -> dirname_s
in
cmd.cmd_move_to_dir <- Some dirname_s
end;
let get_pipe name =
try
let stdout = BuildValue.get_string envs name in
let stdout = local_subst stdout in
let stdout = if Filename.is_relative stdout then
Filename.concat dirname_s stdout
else stdout
in
Some stdout
with Var_not_found _ -> None
in
cmd.cmd_stdin_pipe <- get_pipe "stdin";
cmd.cmd_stdout_pipe <- get_pipe "stdout";
cmd.cmd_stderr_pipe <- get_pipe "stderr";
add_rule_command r (Execute cmd)
| "%%loaddeps" ->
make_virtual_file target_file;
let loader filename =
let dependencies =
BuildDepMisc.load_make_dependencies filename
in
List.map (fun (file, deps) ->
(Filename.concat dirname_s file,
List.map (fun file -> [
if Filename.is_relative file then
Filename.concat dirname_s file
else file ]) deps)
) dependencies
in
List.iter (fun source_file ->
add_rule_command r
(LoadDeps (loader, source_file, r))
) sources;
r.rule_forced <- true;
| "%%subst" ->
let to_file = BuildValue.get_path_with_default envs "to_file" file in
let to_file = local_subst to_file in
let to_file = add_package_file lib to_file in
let from_file = BuildValue.get_path_with_default envs "from_file" (file ^ ".in") in
let from_file = local_subst from_file in
let from_file = add_package_file lib from_file in
let substitutions = BuildValue.prop_list (BuildValue.get envs "substitutions") in
let substitutions =
List.map (fun (string, string_env) ->
let envs = string_env :: envs in
let with_string =
try
local_subst (BuildValue.get_string envs "with_string")
with Var_not_found _ ->
failwith (Printf.sprintf "In command %%subst, string %s has no 'with_string'" string)
in
(string, with_string)
) substitutions in
let subst = List.fold_left (fun subst (string, with_string) ->
StringMap.add string with_string subst
) StringMap.empty substitutions
in
let printer b =
Printf.bprintf b "subst %S %S\n"
(file_filename from_file) (file_filename to_file);
List.iter (fun (string, with_string) ->
Printf.bprintf b "\t%S -> %S\n" string with_string
) substitutions;
in
let actor () =
let s = FileString.string_of_file (file_filename from_file) in
let s = BuildSubst.map_subst subst s in
FileString.file_of_string (file_filename to_file) s
in
add_rule_source r from_file;
add_rule_target r to_file;
add_rule_command r (Function (cmd_name, printer, actor))
| "%%config_make2ocp" ->
let to_file = BuildValue.get_path_with_default envs "dst" file in
let from_file = BuildValue.get_path envs "src" in
let from_file = local_subst from_file in
let to_file = local_subst to_file in
let from_file = add_package_file lib from_file in
let to_file = add_package_file lib to_file in
let printer b =
Printf.bprintf b "config_make2ocp %S -> %S\n"
(file_filename from_file) (file_filename to_file)
in
let actor () =
Printf.eprintf "Loading %S\n" (file_filename from_file);
let make_subst = OcpSubst.empty_subst () in
OcpSubst.add_to_subst make_subst "\\ " " ";
let vars = ref [] in
FileGen.iter_lines (fun line ->
let _, line = OcpSubst.iter_subst make_subst line in
if String.length line > 0 && line.[0] <> '#' then
let var, value = OcpString.cut_at line '=' in
OcpSubst.add_to_subst make_subst
(Printf.sprintf "$(%s)" var) value;
vars := (var, value) :: !vars
) from_file.file_file;
let vars = List.rev !vars in
Printf.eprintf "Writing %S\n" (file_filename to_file);
let oc = open_out (file_filename to_file) in
List.iter (fun (var, value) ->
Printf.fprintf oc "%s = %S\n" var value
) vars;
close_out oc;
()
in
add_rule_source r from_file;
add_rule_target r to_file;
add_rule_command r (Function (cmd_name, printer, actor));
| _ ->
Printf.eprintf "Error: Unknown primitive command %S in %s\n" cmd_name
(BuildEngineDisplay.string_of_loc lib.lib.lib_loc);
Printf.eprintf " Commands to execute should be between { ... }, while\n";
Printf.eprintf " primitive commands start by %% (for example %%loaddeps)\n%!";
clean_exit 2
) commands;
add_more_rule_sources lib r [] envs;
add_rule_sources r sources;
let targets = List.map (add_package_file lib) targets in
add_rule_targets r targets;
) build_rules;
()
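(* Rules for a program package: warn about module name collisions between
   the required libraries, then link the bytecode executable and, when
   native objects are present, the native one. *)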
let add_program w b lib =
let lib_options = lib.lib_opk.opk_options in
let dst_dir = lib.lib.lib_dst_dir in
let ptmp = process_sources w b lib in
  begin (* detect module name collisions between the required libraries *)
    let map = ref StringMap.empty in
List.iter (fun dep ->
if dep.dep_link then
let lib1 = dep.dep_project in
match lib1.lib.lib_type with
| TestPackage -> assert false
| ProgramPackage
| ObjectsPackage
-> ()
| RulesPackage
| LibraryPackage ->
StringsMap.iter (fun _ (_, modules) ->
StringMap.iter (fun modname (kind1, _) ->
try
let (kind2, lib2) = StringMap.find modname !map in
match kind1, kind2 with
| (ML | MLandMLI), (ML | MLandMLI) ->
Printf.eprintf
"Warning: program %s, requirements %s and %s\n"
lib.lib.lib_name lib2.lib.lib_name lib1.lib.lib_name;
Printf.eprintf "\tboth define a module name %s.\n" modname;
| _ -> ()
with Not_found ->
map := StringMap.add modname (kind1,lib1) !map
) !modules
) lib1.lib_internal_modules
| SyntaxPackage ->
()
) lib.lib_requires
end;
let cclib = cclib_option.get lib_options in
let cclib = String.concat " " cclib in
let is_toplevel = is_toplevel.get lib_options in
let linkall = force_link_option.get lib_options || is_toplevel in
begin
let linkflags = bytelinkflags lib in
let linkflags =
if linkall || !(ptmp.cmo_files) = [] then
S "-linkall" :: linkflags
else linkflags
in
let byte_file = add_dst_file lib dst_dir (lib.lib_archive ^ byte_exe) in
add_cmo2byte_rule lib ptmp linkflags cclib !(ptmp.cmo_files)
!(ptmp.o_files) byte_file;
if lib.lib_opk.opk_has_byte then begin
lib.lib_byte_targets <- (byte_file, RUN_BYTE) :: lib.lib_byte_targets;
end
end;
if !(ptmp.cmx_files) <> [] then begin
let linkflags = asmlinkflags lib in
let linkflags =
if linkall || !(ptmp.cmx_files) = [] then S "-linkall" :: linkflags
else linkflags in
let asm_file = add_dst_file lib dst_dir (lib.lib_archive ^ asm_exe) in
add_cmx2asm_rule lib ptmp linkflags cclib
!(ptmp.cmx_files) !(ptmp.cmx_o_files) !(ptmp.o_files) asm_file;
if lib.lib_opk.opk_has_asm && not is_toplevel then begin
lib.lib_asm_targets <- (asm_file, RUN_ASM) :: lib.lib_asm_targets;
end
end;
()
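(* Replace backslashes by slashes and strip trailing slashes ("." when the
   result would be empty). *)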
let fix_windows_directory s =
let s = Bytes.of_string s in
let len = Bytes.length s in
for i = 0 to len - 1 do
if Bytes.get s i = '\\' then s.[i] <- '/'
done;
let rec iter i =
if i = 0 then "." else
if Bytes.get s (i-1) = '/' then iter (i-1)
else
if i = len
then Bytes.to_string s
else Bytes.sub_string s 0 i
in
iter len
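(* Register one OCaml package in the build context: create its source,
   destination and mutable directories and the associated global
   substitutions. *)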
let add_package bc opk =
let pk = opk.opk_package in
let b = bc.build_context in
let package_name = pk.BuildOCPTypes.package_name in
let package_dirname = pk.BuildOCPTypes.package_dirname in
let package_options = opk.opk_options in
try
if verbose 7 then Printf.eprintf "Adding %s\n" package_name;
let package_dirname =
      (* try
           let list = BuildValue.strings_of_plist (BuildValue.get_local package_options "dirname") in
           BuildSubst.subst_global (String.concat Filename.dir_sep list)
         with Var_not_found _ -> *)
package_dirname
in
let package_dirname = fix_windows_directory package_dirname in
if verbose 7 then Printf.eprintf "\tfrom %s\n" package_dirname;
let src_dir = BuildEngineContext.add_directory b (absolute_filename package_dirname) in
if verbose 7 then Printf.eprintf "\tfrom %s\n" src_dir.dir_fullname;
let already_installed = BuildValue.is_already_installed package_options
in
let dst_dir =
if already_installed then src_dir else
let dirname =
Filename.concat b.build_dir_filename package_name
in
safe_mkdir dirname;
BuildEngineContext.add_directory b dirname
in
if verbose 7 then Printf.eprintf "\tto %s\n" dst_dir.dir_fullname;
let mut_dir =
if already_installed then src_dir else
let mut_dirname =
Filename.concat dst_dir.dir_fullname "_temp"
in
safe_mkdir mut_dirname;
BuildEngineContext.add_directory b mut_dirname
in
let lib = BuildGlobals.new_library bc pk
package_dirname src_dir dst_dir mut_dir in
let lib = BuildOCamlGlobals.create_package lib opk in
    (* TODO: we should do that in one pass before *)
BuildSubst.add_to_global_subst
(Printf.sprintf "%s_SRC_DIR" package_name) src_dir.dir_fullname;
BuildSubst.add_to_global_subst
(Printf.sprintf "%s_DST_DIR" package_name) dst_dir.dir_fullname;
let full_src_dir = absolute_filename src_dir.dir_fullname in
let full_dst_dir = absolute_filename dst_dir.dir_fullname in
BuildSubst.add_to_global_subst
(Printf.sprintf "%s_FULL_SRC_DIR" package_name)
full_src_dir;
BuildSubst.add_to_global_subst
(Printf.sprintf "%s_FULL_DST_DIR" package_name)
full_dst_dir;
lib
with Failure s ->
Printf.eprintf "While preparing package %S:\n%!" package_name;
Printf.eprintf "Error: %s\n%!" s;
clean_exit 2
let plugin =
let module Plugin = struct
let name = "OCaml"
end in
(module Plugin : Plugin)
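(* Create the build rules of every OCaml package of the project and return
   them as an array of BuildTypes.Package modules. *)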
let create w cin cout bc state =
BuildOCamlGlobals.reset ();
let b = bc.build_context in
let libs =
Array.map (fun pk ->
match pk.BuildOCPTypes.package_plugin with
| OCamlPackage opk ->
add_package bc opk
| _ -> assert false
) state.BuildOCPTypes.project_sorted
in
Array.iter (fun lib ->
let ext_lib, ext_obj =
let envs = lib.lib_opk.opk_options in
BuildOCamlConfig.(ocaml_config_ext_lib.get envs, ocaml_config_ext_obj.get envs)
in
try
if not lib.lib_opk.opk_installed then
safe_mkdir lib.lib.lib_dst_dir.dir_fullname;
add_extra_rules bc lib "build" lib.lib_build_targets;
add_extra_rules bc lib "doc" lib.lib_doc_targets;
add_extra_rules bc lib "test" lib.lib_test_targets;
lib.lib_linkdeps <- get_link_order lib;
Printf.eprintf " linkdeps for % S : % s\n% ! " lib.lib.lib_name
( String.concat " "
( List.map ( fun lib - > lib.lib.lib_name ) lib.lib_linkdeps ) ) ;
Printf.eprintf "linkdeps for %S : %s\n%!" lib.lib.lib_name
(String.concat " "
(List.map (fun lib -> lib.lib.lib_name) lib.lib_linkdeps));
*)
begin
if not lib.lib_opk.opk_installed then
match lib.lib.lib_type with
LibraryPackage -> add_library w b lib
| ProgramPackage -> add_program w b lib
| TestPackage ->
if lib.lib_sources <> [] then add_program w b lib;
lib.lib_opk.opk_install <- false;
| ObjectsPackage -> add_objects w b lib
| SyntaxPackage -> ()
| RulesPackage -> ()
end;
let options = lib.lib_opk.opk_options in
let set_objects lib name kinds f =
let objs = BuildValue.get_strings_with_default options name [] in
if objs <> [] then
f (
List.flatten (
List.map (fun s0 ->
let s = BuildSubst.subst_global s0 in
let bf = add_package_file lib s in
let basename = bf.file_basename in
match List.rev (OcpString.split basename '.') with
| [] ->
Printf.eprintf
"Error: package %S, option %S contains a file %S\n"
lib.lib.lib_name name s0;
Printf.eprintf " with no extension\n%!";
exit 2
| ext :: _ ->
match ext with
| "asm" when List.mem RUN_ASM kinds -> [ bf, RUN_ASM ]
| "byte" when List.mem RUN_BYTE kinds -> [ bf, RUN_BYTE ]
| "cmxs" when List.mem CMXS kinds -> [ bf, CMXS ]
| "cmx" when List.mem CMX kinds ->
let s = Filename.chop_extension s ^ ext_obj in
let bf2 = add_package_file lib s in
[ bf, CMX; bf2, CMX_O ]
| "cmxa" ->
let s = Filename.chop_extension s ^ ext_lib in
let bf2 = add_package_file lib s in
[ bf, CMXA; bf2, CMXA_A ]
| "cmi" when List.mem CMI kinds -> [ bf, CMI ]
| "cmo" when List.mem CMO kinds -> [ bf, CMO ]
| "cma" when List.mem CMA kinds -> [ bf, CMA ]
| _ ->
let dot_ext = "." ^ ext in
if (dot_ext = ext_obj || dot_ext = ext_lib) && List.mem STUB_A kinds then
[ bf, STUB_A ]
else begin
Printf.eprintf
"Error: package %S, option %S contains a file %S\n"
lib.lib.lib_name name s0;
Printf.eprintf " with unexpected extension %S\n%!" ext;
exit 2
end
) objs))
in
set_objects lib "intf_targets" [CMI; CMX]
(fun objs -> lib.lib_intf_targets <- objs);
set_objects lib "byte_targets" [CMO;CMA;RUN_BYTE]
(fun objs ->
lib.lib_byte_targets <- objs);
set_objects lib "asm_targets" [CMX;CMXA;CMXS;RUN_ASM]
(fun objs ->
lib.lib_asm_targets <- objs);
set_objects lib "stub_targets" [STUB_A]
(fun objs -> lib.lib_stub_targets <- objs);
begin
try
lib.lib_modules <- [StringsMap.find [] lib.lib_internal_modules]
with Not_found ->
let objs =
let objs = BuildValue.get_strings_with_default
options "internal_targets" [] in
if objs = [] then
BuildValue.get_strings_with_default
options "intf_targets" []
else []
in
let dirs = ref [] in
List.iter (fun s0 ->
let s = BuildSubst.subst_global s0 in
let bf = add_package_file lib s in
let dst_dir = bf.file_dir in
let (is_ml, modname, basename) =
BuildOCamldep.modname_of_file options Force_not
bf.file_basename in
let map =
try
List.assq dst_dir !dirs
with Not_found ->
let map = ref StringMap.empty in
dirs := (dst_dir, map) :: !dirs;
map
in
try
match StringMap.find modname !map with
| (ML, _) when not is_ml ->
map := StringMap.add modname
(MLandMLI, DepBasename basename) !map
| (MLI, _) when is_ml ->
map := StringMap.add modname
(MLandMLI, DepBasename basename) !map
| (MLandMLI, _) -> ()
| _ -> raise Not_found
with Not_found ->
map := StringMap.add modname (
(if is_ml then ML else MLI), DepBasename basename) !map
) objs;
lib.lib_modules <- !dirs
end;
with Failure s ->
Printf.eprintf "While preparing package %S:\n%!" lib.lib.lib_name;
Printf.eprintf "Error: %s\n%!" s;
clean_exit 2
) libs;
if !BuildOCamlGlobals.list_byte_targets_arg then begin
Printf.eprintf "Bytecode targets:\n";
StringMap.iter (fun _ lib ->
match BuildOCamlGlobals.get_by_id lib with
| None -> ()
| Some lib ->
if lib.lib_byte_targets <> [] then begin
List.iter (fun (target, _kind) ->
Printf.eprintf "\t%s\t->\t%s\n" lib.lib.lib_name target.file_basename)
lib.lib_byte_targets;
end) bc.packages_by_name;
Printf.eprintf "%!"
end;
if !BuildOCamlGlobals.list_asm_targets_arg then begin
Printf.eprintf "Native targets:\n";
StringMap.iter (fun _ lib ->
match BuildOCamlGlobals.get_by_id lib with
| None -> ()
| Some lib ->
if lib.lib_asm_targets <> [] then begin
List.iter (fun (target, _kind) ->
Printf.eprintf "\t%s\t->\t%s\n" lib.lib.lib_name target.file_basename)
lib.lib_asm_targets;
end) bc.packages_by_name;
Printf.eprintf "%!"
end;
let install_where = BuildOCamlInstall.install_where cin cout in
let install_what = BuildOCamlInstall.install_what () in
let pks =
Array.map (fun lib ->
let module P = struct
let name = lib.lib.lib_name
let info = lib.lib
let plugin = plugin
let clean_targets () = assert false
let build_targets () =
Printf.eprintf " ( pk % s)\n " lib.lib_opk.opk_name ;
Printf.eprintf " ( dir % s)\n " lib.lib_opk.opk_dirname ;
Printf.eprintf " (pk %s)\n" lib.lib_opk.opk_name;
Printf.eprintf " (dir %s)\n" lib.lib_opk.opk_dirname;
*)
if lib.lib_opk.opk_installed then begin
Printf.eprintf " % s is already installed\n% ! " name ;
{
targets = [];
depends = [];
} end
else
let targets = BuildOCamlGlobals.make_build_targets lib.lib cin in
Printf.eprintf " % s.build_targets = \n * % s\n End\n "
name
( String.concat " \n * "
( List.map ( fun f - > file_filename f ) targets ) ) ;
name
(String.concat "\n * "
(List.map (fun f -> file_filename f) targets)); *)
let depends =
let depends = ref [] in
List.iter (fun dep ->
if dep.dep_link || dep.dep_syntax then
depends := dep.dep_project.lib :: !depends
) lib.lib_requires;
!depends
in
{ targets; depends }
let test_targets () =
let targets = BuildOCamlGlobals.make_test_targets lib.lib cin in
let depends =
let depends = ref [] in
List.iter (fun dep ->
if dep.dep_link || dep.dep_syntax then
depends := dep.dep_project.lib :: !depends
) lib.lib_requires;
!depends
in
{ targets; depends }
let doc_targets () =
let targets = BuildOCamlGlobals.make_doc_targets lib.lib cin in
let depends =
let depends = ref [] in
List.iter (fun dep ->
if dep.dep_link || dep.dep_syntax then
depends := dep.dep_project.lib :: !depends
) lib.lib_requires;
!depends
in
{ targets; depends }
let conf_targets () =
let targets = BuildOCamlGlobals.make_build_targets lib.lib cin in
let depends =
let depends = ref [] in
List.iter (fun dep ->
if dep.dep_link || dep.dep_syntax then
depends := dep.dep_project.lib :: !depends
) lib.lib_requires;
!depends
in
{ targets; depends }
let install_dir =
lazy (BuildOCamlInstall.find_installdir
install_where lib)
let install_dirs () = install_where.install_libdirs
let test () = assert false
      (* TODO *)
let install_dir () =
match Lazy.force install_dir with
| None -> assert false
| Some install_dir -> install_dir
let pre_installed () = lib.lib_opk.opk_installed
let to_install () = lib.lib_opk.opk_install
let install () =
if lib.lib_opk.opk_install then
let installdir = install_dir () in
BuildOCamlInstall.install
install_where install_what
lib.lib installdir
end in
(module P : BuildTypes.Package)
) libs
in
pks
let () =
BuildOCamlOCP2.init ()
|
423365c6ff0ea801fe13c8c1f650238ce84624f1bb77e8ded28195908a7889c8 | VisionsGlobalEmpowerment/webchange | views_scenes.cljs | (ns webchange.editor-v2.course-dashboard.views-scenes
(:require
[cljs-react-material-ui.icons :as ic]
[cljs-react-material-ui.reagent :as ui]
[clojure.string :as s]
[re-frame.core :as re-frame]
[reagent.core :as r]
[webchange.editor-v2.components.card.views :refer [list-card] :as card]
[webchange.routes :refer [redirect-to]]
[webchange.subs :as subs]
[webchange.ui-deprecated.theme :refer [get-in-theme]]
[webchange.editor-v2.course-dashboard.state :as state]
[webchange.ui-framework.components.index :refer [input]]))
(defn- get-styles
[]
{:skill-list {:color (get-in-theme [:palette :text :primary])}})
(defn- get-scenes-options
[scenes-list]
(->> scenes-list
(map (fn [scene-id]
{:value scene-id
:text (s/replace scene-id #"-" " ")}))
(sort-by :text)))
(defn- scene-info-data
[{:keys [scene-id data]}]
(let [scene-data @(re-frame/subscribe [::subs/scene scene-id])
scene-info @(re-frame/subscribe [::subs/scene-info scene-id])
styles (get-styles)]
[:div
[ui/typography {:variant "title"} "Skills:"]
[:ul {:style (:skill-list styles)}
(for [{:keys [id name abbr]} (:skills scene-data)]
^{:key id}
[:li [ui/typography (str "(" abbr ") " name)]])]
[ui/typography {:variant "title"} "Name:"]
[ui/text-field {:label "Name"
:full-width true
:default-value (:name scene-info)
:variant "outlined"
:on-change #(swap! data assoc :name (-> % .-target .-value))}]
[ui/typography {:variant "title"} "Archived:"]
[ui/checkbox {:label "Archived"
:variant "outlined"
:default-value (:archived scene-info)
:on-change #(swap! data assoc :archived (-> % .-target .-checked))}]]))
(defn- scene-info-window
[{:keys [scene-id on-close]}]
(let [data (atom {})
save #(do (re-frame/dispatch [::state/save-scene-info {:scene-id scene-id :data @data}])
(on-close))]
[ui/dialog
{:open (some? scene-id)
:on-close on-close}
[ui/dialog-title
"Scene Info"]
[ui/dialog-content
[scene-info-data {:scene-id scene-id :data data}]]
[ui/dialog-actions
[ui/button {:on-click save}
"Save"]
[ui/button {:on-click on-close}
"Cancel"]]]))
(defn- new-or-duplicate-window [{:keys [show on-new on-duplicate]}]
[ui/dialog
{:open show}
[ui/dialog-title "Create Activity"]
[ui/dialog-actions
[ui/button {:on-click on-new} "New"]
[ui/button {:on-click on-duplicate} "Duplicate"]]])
(defn- new-window [{:keys [show on-ok on-cancel name]}]
[ui/dialog
{:open show}
[ui/dialog-title "Choose name"]
[ui/text-field {:placeholder "New Activity"
:style {:padding "0px 20px"}
:on-click #(.stopPropagation %)
:on-change #(reset! name (->> % .-target .-value))}]
[ui/dialog-actions
[ui/button {:on-click on-ok} "Ok"]
[ui/button {:on-click on-cancel} "Cancel"]]])
(defn- duplicate-window [{:keys [show on-duplicate on-cancel scene-list old-name new-name]}]
[ui/dialog
{:open show}
[ui/dialog-title "Choose Activity To Duplicate"]
[ui/select {:value @old-name
:variant "outlined"
:on-change #(reset! old-name (->> % .-target .-value))
:style {:padding "10px 20px"}}
(for [{:keys [name scene-id]} scene-list]
^{:key scene-id}
[ui/menu-item {:value scene-id} name])]
[ui/text-field {:placeholder "Activity Name*"
:style {:padding "10px 20px"}
:on-click #(.stopPropagation %)
:on-change #(reset! new-name (->> % .-target .-value))}]
[ui/dialog-actions
[ui/button {:on-click on-duplicate} "Duplicate"]
[ui/button {:on-click on-cancel} "Cancel"]]])
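;; Course dashboard card listing all activities, with filtering, a per-scene
;; info dialog and the new/duplicate activity dialogs.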
(defn scenes-list
[{:keys [title]}]
(r/with-let [current-scene-info (r/atom nil)
handle-open-info #(reset! current-scene-info %)
handle-close-info #(reset! current-scene-info nil)
show-new-or-duplicate-window (r/atom false)
show-new-window (r/atom false)
show-duplicate-window (r/atom false)]
(let [course @(re-frame/subscribe [::subs/current-course])
scene-list @(re-frame/subscribe [::subs/scene-list-ordered])
list-styles (card/get-styles)
filter @(re-frame/subscribe [::subs/scene-list-filter])
set-filter #(re-frame/dispatch [::subs/set-scene-list-filter %])
new-activity-name (r/atom nil)
old-activity-name (r/atom (:scene-id (first scene-list)))]
[list-card {:title title
:title-action [input {:value filter
:on-change set-filter
:placeholder "Filter"
:on-esc-press #(set-filter "")}]
:full-height true
:on-add-click #(reset! show-new-or-duplicate-window true)}
[ui/list {:style (:list-full-height list-styles)}
(for [scene scene-list]
^{:key (:scene-id scene)}
[ui/list-item
[ui/list-item-text {:primary (:name scene)}]
[ui/list-item-secondary-action
[ui/icon-button {:aria-label "Info"
:on-click #(handle-open-info (:scene-id scene))}
[ic/info {:style (:action-icon list-styles)}]]
(if (:is-placeholder scene)
[ui/icon-button {:on-click #(redirect-to :wizard-configured :course-slug course :scene-slug (-> scene :scene-id name))}
[ic/warning]]
[ui/icon-button {:aria-label "Edit"
:on-click #(redirect-to :course-editor-scene :id course :scene-id (-> scene :scene-id name))}
[ic/edit {:style (:action-icon list-styles)}]])]])]
[scene-info-window {:scene-id @current-scene-info
:on-close handle-close-info}]
[new-or-duplicate-window
{:show @show-new-or-duplicate-window
:on-new #(do
(reset! show-new-or-duplicate-window false)
(reset! show-new-window true))
:on-duplicate #(do
(reset! show-new-or-duplicate-window false)
(reset! show-duplicate-window true))}]
[new-window
{:show @show-new-window
:on-ok #(do
(reset! show-new-window false)
(if (empty? @new-activity-name)
(print "Error creating activity")
(re-frame/dispatch [::state/create-new-activity @new-activity-name course])))
:on-cancel #(reset! show-new-window false)
:name new-activity-name}]
[duplicate-window
{:show @show-duplicate-window
:on-duplicate #(do
(reset! show-duplicate-window false)
(if (empty? @new-activity-name)
(print "Error duplicating activity")
(re-frame/dispatch [::state/duplicate-activity @old-activity-name @new-activity-name course])))
:on-cancel #(reset! show-duplicate-window false)
:scene-list scene-list
:old-name old-activity-name
:new-name new-activity-name}]])))
| null | https://raw.githubusercontent.com/VisionsGlobalEmpowerment/webchange/e5747e187937d85e9c92c728d52a704f323f00ef/src/cljs/webchange/editor_v2/course_dashboard/views_scenes.cljs | clojure |
|
1d0edd83a5eff1771eb404b11cbdc711940dfead0499a9658c76acc465a02d23 | xguerin/netml | NetML.ml | module Layer = NetML_Layer
module PCap = NetML_PCap
| null | https://raw.githubusercontent.com/xguerin/netml/de9d277d2f1ac055aea391b89391df6830f80eff/src/NetML.ml | ocaml |
|
1a2ddd83bb7d7f993d0a470233537cc17133969a8d511e92f2e4ddebe110d35d | yuanqing/code-problems | insertion_sort.ml | let insertion_sort (compare:'a -> 'a -> int) (xs:'a list) : 'a list =
let rec insert x ys =
match ys with
| [] -> [x]
| y::ys as zs when (compare x y < 0) ->
(* `compare` returning a negative value means that `x` is smaller than
`y`. So we place `x` in front of both `y` and `ys`. *)
x::zs
| y::ys ->
(* Otherwise, continue looking for a place to insert `x` in the
rest of the list. *)
y::(insert x ys) in
let rec aux xs =
match xs with
| [] -> []
| x::xs -> insert x (aux xs) in
aux xs
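(* Illustrative use with the standard [compare] on integers:
   insertion_sort compare [3; 1; 2] evaluates to [1; 2; 3]. *)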
| null | https://raw.githubusercontent.com/yuanqing/code-problems/30eb34ad616146306cddc50594a47deff111f341/src/insertion_sort/insertion_sort.ml | ocaml |
|
2812cecf6e0b8f32cefa522359c3116a91fcdbeb849afe7c0f4fc8da95e418ae | ghc/nofib | Main.hs | {- A kernel fragment from a program written by
   Ron Legere -- /~legere
   Caltech Quantum Optics
   It has the interesting property that Classic Hugs
   runs it 20x faster than GHC!
   Reason: runExperiment calls itself with identical parameters,
   and Hugs commons that up for some reason.
   (Even with that fixed, STG Hugs ran the program a lot
   slower than Classic Hugs.)
   So it seems like an interesting program. It appears here
   in the form with the silly self-call, because that makes
   it run a nice long time. It thrashes floating point
   multiplication and lists.
-}
module Main where
import System.Environment
infixl 9 .*
infix 9 <*>
main = do
[arg] <- getArgs
let n = read arg :: Int
putStr (show (take n test))
test :: StateStream
test = runExperiment testforce 0.02 [1.0] (State [1.0] [0.0])
testforce :: ForceLaw [Float]
testforce k [] = []
testforce k ( (State pos vel):atoms) = (-1.0) .* k * pos:
testforce k atoms
{- The test force: K is a list of spring
constants. (But here I am only doing one dimension for the purposes
of demonstrating the approach)
-}
{-
******************
******************
Module: Numerical classical atom (atom.lhs)
******************
******************
-}
-- We will want types for the whole simulation (where we can configure
-- the dimensions, etc), for the results (a state stream), and the force laws.
data AtomState = State Position Velocity
type Position = [Float]
type Velocity = [Float]
type Force = [Float]
type StateStream = [AtomState]
{- I made AtomState a data type, just so I could play with them. I think
I would prefer to keep it just a synonym, because that would
be simpler!
Now we need a function to write out the results to a file in a nice format.
I think I would prefer a simple x y z /n x y z /n etc
NOTE that show AtomState only shows the position!
-}
instance Show AtomState where
show (State pos vel) = concat [ (show component) ++ "\t" | component <- pos ]
showList states = showString (concat [(show state) ++ "\n" | state <- states])
{- Note that I used lists for the position and velocity to allow for
unknown number of dimensions. I suspect this will have to
be optimized into tuples at some point!
Ok, so how shall we define the ForceLaw type?
-}
type ForceLaw a = a -> StateStream -> [Force]
{- The force law maps a stream of states to a stream of forces so that time
dependant forces can be used. The parametric type 'a' is to allow the force law
to depend on some parameter, for example (the common case!) a seed for a random number
generater, and/or the timestep, or the spring constant
-}
runExperiment :: ForceLaw a -> Float -> a -> AtomState -> StateStream
{- In this form this program takes 1 min when compiled under ghc-4.05,
but takes 3 seconds under hugs....
-}
runExperiment law dt param init = init : zipWith (propagate dt)
(law param stream)
stream
where stream =
runExperiment law dt param init
{-
-- In this form GHC is (as expected) much faster
runExperiment law dt param init = stream
  where
    stream = init : zipWith (propagate dt)
                            (law param stream)
                            stream
-}
{- runExperiment forces timestep param initialcondition :: [AtomState]
is an infinite stream of atom states. We can then use this to
generate necessary averages, temperatures, allen variences, or wtf
you want.
We could for example, start the random number generator with seed param, if
the type is int.
It is an error to have the initial
atom state not have the correct number of dimensions.
-}
propagate :: Float -> Force -> AtomState -> AtomState
{- Ok, I see one problem with this, not general enough! Some better propagators
exist that can use previous atom states. Actually, by using previous atom states,
we will not even need to seperately track the velocities either. Oh well, for now
I will stick with that.
-}
propagate dt aforce (State pos vel) = State newpos newvel
where newpos = pos + (dt .* vel)
newvel = vel + (dt .* aforce)
-- Note assumes mass = 1
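-- Worked example of one step (illustrative): with dt = 0.02, k = [1.0],
-- pos = [1.0] and vel = [0.0], testforce yields [-1.0], so propagate returns
-- State [1.0] [-0.02]: the position moves with the old velocity and the
-- velocity with the force (unit mass).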
{-
********************************************************
********************************************************
Numerical Lists
********************************************************
********************************************************
-}
instance Num a => Num [a] where
negate (f:fs) = (negate f):(negate fs)
l + [] = l
[] + l = l
(f:fs) + (g:gs) = (f+g):fs+gs
_ * [] = []
[] * _ = []
(f:fs) * (g:gs) = (f*g):(gs*gs)
fromInteger c = fromInteger c : [0]
(.*):: Num a => a-> [a] -> [a]
c .* [] = []
c .* (f:fs) = c*f : c .* fs
(<*>):: Num a => [a] -> [a] -> a
f <*> g = sum (f*g)
| null | https://raw.githubusercontent.com/ghc/nofib/f34b90b5a6ce46284693119a06d1133908b11856/spectral/atom/Main.hs | haskell | /~legere
/~legere
******************
******************
Module: Numerical classical atom (atom.lhs)
******************
******************
We will want types for the whole simulation (where we can configure
the dimensions, etc), for the results (a state stream), and the force laws.
In this form GHC is ( as expected ) much faster
In this form GHC is (as expected) much faster
********************************************************
********************************************************
Numerical Lists
********************************************************
********************************************************
| A kernel fragment from a program written by
Caltech Quantum Optics
It has the interesting property that Classic Hugs
runs it 20x faster than GHC !
Reason : calls itself with identical parameters ,
and Hugs commons that up for some reason .
( Even with that fixed , STG Hugs ran the program a lot
slower than Classic Hugs . )
So it seems like an interesting program . It appears here
in the form with the silly self - call , because that makes
it run a nice long time . It thrashes floating point
multiplication and lists .
Caltech Quantum Optics
It has the interesting property that Classic Hugs
runs it 20x faster than GHC!
Reason: runExperiment calls itself with identical parameters,
and Hugs commons that up for some reason.
(Even with that fixed, STG Hugs ran the program a lot
slower than Classic Hugs.)
So it seems like an interesting program. It appears here
in the form with the silly self-call, because that makes
it run a nice long time. It thrashes floating point
multiplication and lists.
-}
module Main where
import System.Environment
infixl 9 .*
infix 9 <*>
main = do
[arg] <- getArgs
let n = read arg :: Int
putStr (show (take n test))
test :: StateStream
test = runExperiment testforce 0.02 [1.0] (State [1.0] [0.0])
testforce :: ForceLaw [Float]
testforce k [] = []
testforce k ( (State pos vel):atoms) = (-1.0) .* k * pos:
testforce k atoms
The test force : K is a list of spring
constants . ( But here I am only doing one dimension for the purposes
of demonstrating the approach )
The test force: K is a list of spring
constants. (But here I am only doing one dimension for the purposes
of demonstrating the approach)
-}
data AtomState = State Position Velocity
type Position = [Float]
type Velocity = [Float]
type Force = [Float]
type StateStream = [AtomState]
I made AtomState a data type , just so I could play with them . I think
I would prefer to keep it just a synonym , because that would
be simpler !
Now we need a function to write out the results to a file in a nice format .
I think I would prefer a simple x y z /n x y z /n etc
NOTE that show AtomState only shows the position !
I made AtomState a data type, just so I could play with them. I think
I would prefer to keep it just a synonym, because that would
be simpler!
Now we need a function to write out the results to a file in a nice format.
I think I would prefer a simple x y z /n x y z /n etc
NOTE that show AtomState only shows the position!
-}
instance Show AtomState where
show (State pos vel) = concat [ (show component) ++ "\t" | component <- pos ]
showList states = showString (concat [(show state) ++ "\n" | state <- states])
Note that I used lists for the position and velocity to allow for
unknown number of dimensions . I suspect this will have to
be optimized into tuples at some point !
Ok , so how shall we define the ForceLaw type ?
Note that I used lists for the position and velocity to allow for
unknown number of dimensions. I suspect this will have to
be optimized into tuples at some point!
Ok, so how shall we define the ForceLaw type?
-}
type ForceLaw a = a -> StateStream -> [Force]
The force law maps a stream of states to a stream of forces so that time
dependant forces can be used . The parametric type ' a ' is to allow the force law
to depend on some parameter , for example ( the common case ! ) a seed for a random number
generater , and/or the timestep , or the spring constant
The force law maps a stream of states to a stream of forces so that time
dependant forces can be used. The parametric type 'a' is to allow the force law
to depend on some parameter, for example (the common case!) a seed for a random number
generater, and/or the timestep, or the spring constant
-}
runExperiment :: ForceLaw a -> Float -> a -> AtomState -> StateStream
In this form this program takes 1 min when compiled under ghc-4.05 ,
but takes 3 seconds under hugs ....
In this form this program takes 1 min when compiled under ghc-4.05,
but takes 3 seconds under hugs....
-}
runExperiment law dt param init = init : zipWith (propagate dt)
(law param stream)
stream
where stream =
runExperiment law dt param init
runExperiment law dt param init = stream
where
stream = init : zipWith ( propagate dt )
( law param stream )
stream
runExperiment law dt param init = stream
where
stream = init : zipWith (propagate dt)
(law param stream)
stream
-}
runExperiment forces timestep param initialcondition : : [ AtomState ]
is an infinite stream of atom states . We can then use this to
generate necessary averages , temperatures , , or wtf
you want .
We could for example , start the random number generator with seed param , if
the type is int .
It is an error to have the initial
atom state not have the correct number of dimensions .
runExperiment forces timestep param initialcondition :: [AtomState]
is an infinite stream of atom states. We can then use this to
generate necessary averages, temperatures, allen variences , or wtf
you want.
We could for example, start the random number generator with seed param, if
the type is int .
It is an error to have the initial
atom state not have the correct number of dimensions.
-}
propagate :: Float -> Force -> AtomState -> AtomState
Ok , I see one problem with this , not general enough ! Some better propagators
exist that can use previous atom states . Actually , by using previous atom states ,
we will not even need to seperately track the velocities either . Oh well , for now
I will stick with that .
Ok, I see one problem with this, not general enough! Some better propagators
exist that can use previous atom states. Actually, by using previous atom states,
we will not even need to seperately track the velocities either. Oh well, for now
I will stick with that.
-}
propagate dt aforce (State pos vel) = State newpos newvel
where newpos = pos + (dt .* vel)
newvel = vel + (dt .* aforce)
Note assumes mass = 1
instance Num a => Num [a] where
negate (f:fs) = (negate f):(negate fs)
l + [] = l
[] + l = l
(f:fs) + (g:gs) = (f+g):fs+gs
_ * [] = []
[] * _ = []
(f:fs) * (g:gs) = (f*g):(gs*gs)
fromInteger c = fromInteger c : [0]
(.*):: Num a => a-> [a] -> [a]
c .* [] = []
c .* (f:fs) = c*f : c .* fs
(<*>):: Num a => [a] -> [a] -> a
f <*> g = sum (f*g)
|
b58eca60d1569954e271df441dd2b4d862ec8728d349f499f7fc54902e78c8f5 | mpaltun/istanbus | istanbus_web.erl | %% @author author < >
%% @copyright YYYY author.
%% @doc istanbus_web startup code
-module(istanbus_web).
-author('author <>').
-export([start/0, start_link/0, stop/0]).
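%% Start a dependency application unless it is already running.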
ensure_started(App) ->
case application:start(App) of
ok ->
ok;
{error, {already_started, App}} ->
ok
end.
%% @spec start_link() -> {ok, Pid::pid()}
%% @doc Starts the app for inclusion in a supervisor tree
start_link() ->
ensure_started(inets),
ensure_started(crypto),
ensure_started(mochiweb),
application:set_env(webmachine, webmachine_logger_module,
webmachine_logger),
ensure_started(webmachine),
istanbus_web_sup:start_link().
%% @spec start() -> ok
%% @doc Start the istanbus_web server.
start() ->
ensure_started(inets),
ensure_started(crypto),
ensure_started(mochiweb),
application:set_env(webmachine, webmachine_logger_module,
webmachine_logger),
ensure_started(webmachine),
application:start(istanbus_web).
%% @spec stop() -> ok
%% @doc Stop the istanbus_web server.
stop() ->
Res = application:stop(istanbus_web),
application:stop(webmachine),
application:stop(mochiweb),
application:stop(crypto),
application:stop(inets),
Res.
| null | https://raw.githubusercontent.com/mpaltun/istanbus/9f8edd1e092045d015615d29011481539ea23374/istanbus_web/src/istanbus_web.erl | erlang |
|
ef5aa7975271af29b078178f85a0473c197f257dd67da959a2c54a92d9f958bb | helium/blockchain-core | blockchain_txn_gen_gateway_v1.erl | %%%-------------------------------------------------------------------
%% @doc
%% == Blockchain Transaction Genesis Gateway ==
%% @end
%%%-------------------------------------------------------------------
-module(blockchain_txn_gen_gateway_v1).
-behavior(blockchain_txn).
-behavior(blockchain_json).
-include("blockchain_json.hrl").
-include("blockchain_utils.hrl").
-include_lib("helium_proto/include/blockchain_txn_gen_gateway_v1_pb.hrl").
-export([
new/4,
hash/1,
sign/2,
gateway/1,
owner/1,
location/1,
nonce/1,
fee/1,
fee_payer/2,
is_valid/2,
absorb/2,
print/1,
json_type/0,
to_json/2
]).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-type txn_genesis_gateway() :: #blockchain_txn_gen_gateway_v1_pb{}.
-export_type([txn_genesis_gateway/0]).
%%--------------------------------------------------------------------
%% @doc
%% @end
%%--------------------------------------------------------------------
-spec new(Gateway :: libp2p_crypto:pubkey_bin(),
Owner :: libp2p_crypto:pubkey_bin(),
Location :: undefined | h3:h3index(),
Nonce :: non_neg_integer()) -> txn_genesis_gateway().
new(Gateway, Owner, Location, Nonce) ->
L = case Location of
undefined -> undefined;
_ -> h3:to_string(Location)
end,
#blockchain_txn_gen_gateway_v1_pb{gateway=Gateway,
owner=Owner,
location=L,
nonce=Nonce}.
%%--------------------------------------------------------------------
%% @doc
%% @end
%%--------------------------------------------------------------------
-spec hash(txn_genesis_gateway()) -> blockchain_txn:hash().
hash(Txn) ->
EncodedTxn = blockchain_txn_gen_gateway_v1_pb:encode_msg(Txn),
crypto:hash(sha256, EncodedTxn).
%%--------------------------------------------------------------------
%% @doc
%% @end
%%--------------------------------------------------------------------
-spec sign(txn_genesis_gateway(), libp2p_crypto:sig_fun()) -> txn_genesis_gateway().
sign(Txn, _SigFun) ->
Txn.
%%--------------------------------------------------------------------
%% @doc
%% @end
%%--------------------------------------------------------------------
-spec gateway(txn_genesis_gateway()) -> libp2p_crypto:pubkey_bin().
gateway(Txn) ->
Txn#blockchain_txn_gen_gateway_v1_pb.gateway.
%%--------------------------------------------------------------------
%% @doc
%% @end
%%--------------------------------------------------------------------
-spec owner(txn_genesis_gateway()) -> libp2p_crypto:pubkey_bin().
owner(Txn) ->
Txn#blockchain_txn_gen_gateway_v1_pb.owner.
%%--------------------------------------------------------------------
%% @doc
%% @end
%%--------------------------------------------------------------------
-spec location(txn_genesis_gateway()) -> h3:h3index().
location(#blockchain_txn_gen_gateway_v1_pb{location=[]}) ->
undefined;
location(Txn) ->
h3:from_string(Txn#blockchain_txn_gen_gateway_v1_pb.location).
%%--------------------------------------------------------------------
%% @doc
%% @end
%%--------------------------------------------------------------------
-spec nonce(txn_genesis_gateway()) -> non_neg_integer().
nonce(Txn) ->
Txn#blockchain_txn_gen_gateway_v1_pb.nonce.
%%--------------------------------------------------------------------
%% @doc
%% @end
%%--------------------------------------------------------------------
-spec fee(txn_genesis_gateway()) -> non_neg_integer().
fee(_Txn) ->
0.
-spec fee_payer(txn_genesis_gateway(), blockchain_ledger_v1:ledger()) -> libp2p_crypto:pubkey_bin() | undefined.
fee_payer(_Txn, _Ledger) ->
undefined.
%%--------------------------------------------------------------------
%% @doc
%% This transaction should only be absorbed when it's in the genesis block
%% @end
%%--------------------------------------------------------------------
-spec is_valid(txn_genesis_gateway(), blockchain:blockchain()) -> ok | {error, atom()} | {error, {atom(), any()}}.
is_valid(_Txn, Chain) ->
Ledger = blockchain:ledger(Chain),
case blockchain_ledger_v1:current_height(Ledger) of
{ok, 0} ->
ok;
_ ->
{error, not_in_genesis_block}
end.
%%--------------------------------------------------------------------
%% @doc
%% @end
%%--------------------------------------------------------------------
-spec absorb(txn_genesis_gateway(), blockchain:blockchain()) -> ok | {error, atom()} | {error, {atom(), any()}}.
absorb(Txn, Chain) ->
Ledger = blockchain:ledger(Chain),
Gateway = ?MODULE:gateway(Txn),
Owner = ?MODULE:owner(Txn),
Location = ?MODULE:location(Txn),
Nonce = ?MODULE:nonce(Txn),
blockchain_ledger_v1:add_gateway(Owner,
Gateway,
Location,
Nonce,
full,
Ledger).
%%--------------------------------------------------------------------
%% @doc
%% @end
%%--------------------------------------------------------------------
-spec print(txn_genesis_gateway()) -> iodata().
print(undefined) -> <<"type=genesis_gateway, undefined">>;
print(#blockchain_txn_gen_gateway_v1_pb{
gateway=Gateway, owner=Owner,
location=L, nonce=Nonce}) ->
io_lib:format("type=genesis_gateway gateway=~p, owner=~p, location=~p, nonce=~p",
[?TO_ANIMAL_NAME(Gateway), ?TO_B58(Owner), L, Nonce]).
json_type() ->
<<"gen_gateway_v1">>.
-spec to_json(txn_genesis_gateway(), blockchain_json:opts()) -> blockchain_json:json_object().
to_json(Txn, _Opts) ->
#{
type => ?MODULE:json_type(),
hash => ?BIN_TO_B64(hash(Txn)),
gateway => ?BIN_TO_B58(gateway(Txn)),
owner => ?BIN_TO_B58(owner(Txn)),
location => ?MAYBE_H3(location(Txn)),
nonce => nonce(Txn)
}.
%% ------------------------------------------------------------------
%% EUNIT Tests
%% ------------------------------------------------------------------
-ifdef(TEST).
-define(TEST_LOCATION, 631210968840687103).
new_test() ->
Tx = #blockchain_txn_gen_gateway_v1_pb{gateway = <<"0">>,
owner = <<"1">>,
location = h3:to_string(?TEST_LOCATION),
nonce=10},
?assertEqual(Tx, new(<<"0">>, <<"1">>, ?TEST_LOCATION, 10)).
gateway_test() ->
Tx = new(<<"0">>, <<"1">>, ?TEST_LOCATION, 10),
?assertEqual(<<"0">>, gateway(Tx)).
owner_test() ->
Tx = new(<<"0">>, <<"1">>, ?TEST_LOCATION, 10),
?assertEqual(<<"1">>, owner(Tx)).
location_test() ->
Tx = new(<<"0">>, <<"1">>, ?TEST_LOCATION, 10),
?assertEqual(?TEST_LOCATION, location(Tx)).
nonce_test() ->
Tx = new(<<"0">>, <<"1">>, ?TEST_LOCATION, 10),
?assertEqual(10, nonce(Tx)).
json_test() ->
Tx = new(<<"0">>, <<"1">>, ?TEST_LOCATION, 10),
Json = to_json(Tx, []),
?assert(lists:all(fun(K) -> maps:is_key(K, Json) end,
[type, hash, gateway, owner, location, nonce])).
-endif.
| null | https://raw.githubusercontent.com/helium/blockchain-core/270f90544c870295d3d767771e59d8038535dbd5/src/transactions/v1/blockchain_txn_gen_gateway_v1.erl | erlang | -------------------------------------------------------------------
@doc
== Blockchain Transaction Genesis Gateway ==
@end
-------------------------------------------------------------------
--------------------------------------------------------------------
@doc
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
This transaction should only be absorbed when it's in the genesis block
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
@end
--------------------------------------------------------------------
------------------------------------------------------------------
------------------------------------------------------------------ | -module(blockchain_txn_gen_gateway_v1).
-behavior(blockchain_txn).
-behavior(blockchain_json).
-include("blockchain_json.hrl").
-include("blockchain_utils.hrl").
-include_lib("helium_proto/include/blockchain_txn_gen_gateway_v1_pb.hrl").
-export([
new/4,
hash/1,
sign/2,
gateway/1,
owner/1,
location/1,
nonce/1,
fee/1,
fee_payer/2,
is_valid/2,
absorb/2,
print/1,
json_type/0,
to_json/2
]).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-type txn_genesis_gateway() :: #blockchain_txn_gen_gateway_v1_pb{}.
-export_type([txn_genesis_gateway/0]).
-spec new(Gateway :: libp2p_crypto:pubkey_bin(),
Owner :: libp2p_crypto:pubkey_bin(),
Location :: undefined | h3:h3index(),
Nonce :: non_neg_integer()) -> txn_genesis_gateway().
new(Gateway, Owner, Location, Nonce) ->
L = case Location of
undefined -> undefined;
_ -> h3:to_string(Location)
end,
#blockchain_txn_gen_gateway_v1_pb{gateway=Gateway,
owner=Owner,
location=L,
nonce=Nonce}.
-spec hash(txn_genesis_gateway()) -> blockchain_txn:hash().
hash(Txn) ->
EncodedTxn = blockchain_txn_gen_gateway_v1_pb:encode_msg(Txn),
crypto:hash(sha256, EncodedTxn).
-spec sign(txn_genesis_gateway(), libp2p_crypto:sig_fun()) -> txn_genesis_gateway().
sign(Txn, _SigFun) ->
Txn.
-spec gateway(txn_genesis_gateway()) -> libp2p_crypto:pubkey_bin().
gateway(Txn) ->
Txn#blockchain_txn_gen_gateway_v1_pb.gateway.
-spec owner(txn_genesis_gateway()) -> libp2p_crypto:pubkey_bin().
owner(Txn) ->
Txn#blockchain_txn_gen_gateway_v1_pb.owner.
-spec location(txn_genesis_gateway()) -> h3:h3index().
location(#blockchain_txn_gen_gateway_v1_pb{location=[]}) ->
undefined;
location(Txn) ->
h3:from_string(Txn#blockchain_txn_gen_gateway_v1_pb.location).
-spec nonce(txn_genesis_gateway()) -> non_neg_integer().
nonce(Txn) ->
Txn#blockchain_txn_gen_gateway_v1_pb.nonce.
-spec fee(txn_genesis_gateway()) -> non_neg_integer().
fee(_Txn) ->
0.
-spec fee_payer(txn_genesis_gateway(), blockchain_ledger_v1:ledger()) -> libp2p_crypto:pubkey_bin() | undefined.
fee_payer(_Txn, _Ledger) ->
undefined.
-spec is_valid(txn_genesis_gateway(), blockchain:blockchain()) -> ok | {error, atom()} | {error, {atom(), any()}}.
is_valid(_Txn, Chain) ->
Ledger = blockchain:ledger(Chain),
case blockchain_ledger_v1:current_height(Ledger) of
{ok, 0} ->
ok;
_ ->
{error, not_in_genesis_block}
end.
-spec absorb(txn_genesis_gateway(), blockchain:blockchain()) -> ok | {error, atom()} | {error, {atom(), any()}}.
absorb(Txn, Chain) ->
Ledger = blockchain:ledger(Chain),
Gateway = ?MODULE:gateway(Txn),
Owner = ?MODULE:owner(Txn),
Location = ?MODULE:location(Txn),
Nonce = ?MODULE:nonce(Txn),
blockchain_ledger_v1:add_gateway(Owner,
Gateway,
Location,
Nonce,
full,
Ledger).
-spec print(txn_genesis_gateway()) -> iodata().
print(undefined) -> <<"type=genesis_gateway, undefined">>;
print(#blockchain_txn_gen_gateway_v1_pb{
gateway=Gateway, owner=Owner,
location=L, nonce=Nonce}) ->
io_lib:format("type=genesis_gateway gateway=~p, owner=~p, location=~p, nonce=~p",
[?TO_ANIMAL_NAME(Gateway), ?TO_B58(Owner), L, Nonce]).
json_type() ->
<<"gen_gateway_v1">>.
-spec to_json(txn_genesis_gateway(), blockchain_json:opts()) -> blockchain_json:json_object().
to_json(Txn, _Opts) ->
#{
type => ?MODULE:json_type(),
hash => ?BIN_TO_B64(hash(Txn)),
gateway => ?BIN_TO_B58(gateway(Txn)),
owner => ?BIN_TO_B58(owner(Txn)),
location => ?MAYBE_H3(location(Txn)),
nonce => nonce(Txn)
}.
EUNIT Tests
-ifdef(TEST).
-define(TEST_LOCATION, 631210968840687103).
new_test() ->
Tx = #blockchain_txn_gen_gateway_v1_pb{gateway = <<"0">>,
owner = <<"1">>,
location = h3:to_string(?TEST_LOCATION),
nonce=10},
?assertEqual(Tx, new(<<"0">>, <<"1">>, ?TEST_LOCATION, 10)).
gateway_test() ->
Tx = new(<<"0">>, <<"1">>, ?TEST_LOCATION, 10),
?assertEqual(<<"0">>, gateway(Tx)).
owner_test() ->
Tx = new(<<"0">>, <<"1">>, ?TEST_LOCATION, 10),
?assertEqual(<<"1">>, owner(Tx)).
location_test() ->
Tx = new(<<"0">>, <<"1">>, ?TEST_LOCATION, 10),
?assertEqual(?TEST_LOCATION, location(Tx)).
nonce_test() ->
Tx = new(<<"0">>, <<"1">>, ?TEST_LOCATION, 10),
?assertEqual(10, nonce(Tx)).
json_test() ->
Tx = new(<<"0">>, <<"1">>, ?TEST_LOCATION, 10),
Json = to_json(Tx, []),
?assert(lists:all(fun(K) -> maps:is_key(K, Json) end,
[type, hash, gateway, owner, location, nonce])).
-endif.
|
abe07eec323c167e8d856fbfa10c5899d743e109c7adeedb43275fe30809d94f | Kah0ona/re-dnd | views.cljs | (ns re-dnd-demo.views
(:require [fipp.clojure :refer [pprint]]
[re-dnd-demo.events :as h]
[re-dnd.events :as dnd]
[re-dnd.views :as dndv]
[re-frame.core :as rf]
[reagent.core :as r]
[taoensso.timbre :as timbre
:refer-macros (log trace debug info warn error fatal report
logf tracef debugf infof warnf errorf fatalf reportf
spy get-env log-env)]))
(rf/reg-sub :db
(fn [db] db))
(defn debug-panel
"pretty prints data in a nice box on the screen."
[s]
(let [collapsed (r/atom false)]
(fn [s]
[:div.debug-window-wrap
[:div {:on-click #(swap! collapsed not)
:style {:cursor :pointer
:padding "10px"
:border "1px solid #ccc"}}
[:div.clear
[:div.pull-right [:b "Debug window "]]
[:div (if @collapsed "▷ expand" "▽ collapse")]]]
(when-not @collapsed
[:pre (with-out-str (pprint s))])])))
;;this should have its own file, custom_events
(defmethod dndv/dropped-widget
:my-drop-marker
[{:keys [type id]}]
[:div.drop-marker])
(defmethod dndv/drag-handle
:my-drop-marker
[{:keys [type id]}]
[:div])
(defmethod dndv/dropped-widget
:bluebox
[{:keys [type id]}]
[:div.box.blue-box
(str type ", " id)])
(defmethod dndv/drag-handle
:bluebox
[{:keys [type id]}]
[:div "bluedraghandlee"])
(defmethod dndv/dropped-widget
:redbox
[{:keys [type id]}]
[:div.box.red-box
(str type ", " id)])
(defmethod dndv/drag-handle
:redbox
[{:keys [type id]}]
[:div "reddraghandle"])
(defn main-panel
[]
(let [drag-box-state (rf/subscribe [:dnd/drag-box])
db (rf/subscribe [:db])
last-id (r/atom 1)]
(rf/dispatch [:dnd/initialize-drop-zone
:drop-zone-1
{:drop-dispatch [:my-drop-dispatch]
:three-part-drag-handle true
:drop-marker :my-drop-marker}
;;initial elements can be put here
[{:type :bluebox
:id (keyword (str "drop-zone-element-" 0))}
{:type :redbox
:id (keyword (str "drop-zone-element-" 1))}]])
(fn []
[:div.container
{:style {:height "1400px"}}
[:div {:style {:position :absolute
:border "1px solid black"
:top "400px"}}
#_(when @drag-box-state
[dndv/drag-box]) ;;this thing follows the mouse, and takes over the draggable's size
[dndv/drag-box]
[:div
[:p "Drag draggables to the drop-zone to the right, or re-order dropped elements in the drop-zone"]
#_[debug-panel @db]
[:button.btn.btn-primary
{:on-click #(do
(swap! last-id inc)
(rf/dispatch [:dnd/add-drop-zone-element
:drop-zone-1
{:type (if (odd? @last-id) :redbox :bluebox)
:id (keyword (str "drop-zone-element-" @last-id))}]))}
"Add element to dropzone programmatically"]
[:div.clear]
[:div {:style {:float :left}}
[dndv/draggable :draggable1 [:span "draggable1"]]
[dndv/draggable :draggable2 [:span "draggable2"]]
[dndv/draggable :draggable3 [:span "draggable3"]]]
[:div {:style {:float :right}}
[dndv/drop-zone :drop-zone-1
[:div "Drop zone"]]]]]
[:div.clear]
#_[debug-panel @db]])))
| null | https://raw.githubusercontent.com/Kah0ona/re-dnd/32ecacc8c28a25aa854bb340a5d73cb03e5751cb/src/cljs/re_dnd_demo/views.cljs | clojure |
|
7abffe1135cbf3627dd9d7dd705c293f27be25c3fce4cc6363d5dbc59a848ac9 | ashinn/chibi-scheme | binary-record-chicken.scm | ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; binary records, simpler version with type-checking on set! removed
(define-syntax defrec
(syntax-rules (make: pred: read: write: block:)
((defrec () n m p r w
((field-tmp field-read field-read-expr field-write field-write-expr field-get) ...)
((field getter . s) ...))
(begin
(define-record-type n (m field ...) p
(field getter . s) ...)
(define n 'n) ; chicken define-record-type doesn't define the rtd
(define r
(let ((field-read field-read-expr) ...)
(lambda (in)
(let* ((field-tmp (field-read in)) ...)
(m field ...)))))
(define w
(let ((field-write field-write-expr) ...)
(lambda (x out)
(field-write (field-get x) out) ...)))))
((defrec ((make: x) . rest) n m p r w b f)
(defrec rest n x p r w b f))
((defrec ((pred: x) . rest) n m p r w b f)
(defrec rest n m x r w b f))
((defrec ((read: x) . rest) n m p r w b f)
(defrec rest n m p x w b f))
((defrec ((write: x) . rest) n m p r w b f)
(defrec rest n m p r x b f))
((defrec ((block: (field (type . args) getter . s) . fields) . rest) n m p r w
(b ...) (f ...))
(defrec ((block: . fields) . rest) n m p r w
(b ...
(field read-tmp (type read: args) write-tmp (type write: args) getter))
(f ...
(field getter . s))))
((defrec ((block: (field . x)) . rest) n m p r w b f)
(syntax-error "invalid field in block" (field . x)))
((defrec ((block: data . fields) . rest) n m p r w (b ...) f)
(defrec ((block: . fields) . rest) n m p r w
(b ...
(tmp-data read-tmp (read-literal 'data) write-tmp (write-literal 'data) (lambda (x) x)))
f))
((defrec ((block:) . rest) n m p r w b f)
(defrec rest n m p r w b f))
))
(define-syntax define-binary-record-type
(syntax-rules ()
((define-binary-record-type name x ...)
(defrec (x ...) name hidden-make hidden-pred hidden-read hidden-write
() ()))))
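;; Illustrative sketch only: `u8` stands in for whatever binary field-type
;; combinators are in scope, so this is not a verbatim, runnable example.
;; (define-binary-record-type point
;;   (make: make-point)
;;   (pred: point?)
;;   (read: read-point)
;;   (write: write-point)
;;   (block: (x (u8) point-x) (y (u8) point-y)))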
| null | https://raw.githubusercontent.com/ashinn/chibi-scheme/8b27ce97265e5028c61b2386a86a2c43c1cfba0d/lib/chibi/binary-record-chicken.scm | scheme |
|
9600999d20574a615715b6695eb7432d26530d9c47ae7b9cbef63906298e0cff | zotonic/zotonic | mod_oauth2.erl | %% @author Marc Worrell < >
%% @copyright 2019-2022 Marc Worrell
%% @doc OAuth2 (-ietf-oauth-v2-26)
%% Copyright 2019-2022 Marc Worrell
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(mod_oauth2).
-author("Marc Worrell <>").
-mod_title("OAuth2").
-mod_description("Provides authentication over OAuth2.").
-mod_prio(900).
-mod_schema(11).
-mod_depends([ authentication ]).
-export([
event/2,
observe_request_context/3,
observe_url_fetch_options/2,
observe_admin_menu/3,
observe_tick_3h/2,
manage_schema/2
]).
-include_lib("zotonic_core/include/zotonic.hrl").
-include_lib("zotonic_mod_admin/include/admin_menu.hrl").
event(#submit{ message={oauth2_authorize, Args}}, Context) ->
{client_id, ClientId} = proplists:lookup(client_id, Args),
{redirect_uri, RedirectUri} = proplists:lookup(redirect_uri, Args),
{state, State} = proplists:lookup(state, Args),
{response_type, _ResponseType} = proplists:lookup(response_type, Args),
{scope, Scope} = proplists:lookup(scope, Args),
Redirect = case z_context:get_q(<<"accept">>, Context) of
<<>> ->
oauth_authorize_accept(ClientId, RedirectUri, Scope, State, Context);
undefined ->
oauth_authorize_cancel(RedirectUri, State)
end,
z_render:wire({redirect, [ {location, Redirect} ]}, Context);
event(#submit{ message={oauth2_app_insert, []} }, Context) ->
App = #{
<<"user_id">> => z_acl:user(Context),
<<"description">> => z_string:trim(z_context:get_q_validated(<<"description">>, Context)),
<<"is_enabled">> => z_convert:to_bool(z_context:get_q(<<"is_enabled">>, Context)),
<<"redirect_urls">> => z_string:trim(z_context:get_q(<<"redirect_urls">>, Context))
},
case m_oauth2:insert_app(App, Context) of
{ok, _AppId} ->
z_render:wire({redirect, [ {dispatch, admin_oauth2_apps} ]}, Context);
{error, _} ->
z_render:growl_error(?__("Could not insert the App.", Context), Context)
end;
event(#submit{ message={oauth2_app_update, [ {app_id, AppId} ]} }, Context) ->
App = #{
<<"description">> => z_string:trim(z_context:get_q_validated(<<"description">>, Context)),
<<"is_enabled">> => z_convert:to_bool(z_context:get_q(<<"is_enabled">>, Context)),
<<"redirect_urls">> => z_string:trim(z_context:get_q(<<"redirect_urls">>, Context))
},
case m_oauth2:update_app(AppId, App, Context) of
ok ->
z_render:wire({redirect, [ {dispatch, admin_oauth2_apps} ]}, Context);
{error, _} ->
z_render:growl_error(?__("Could not insert the App.", Context), Context)
end;
event(#postback{ message={oauth2_app_delete, [ {app_id, AppId} ]} }, Context) ->
case m_oauth2:delete_app(AppId, Context) of
ok ->
z_render:wire({redirect, [ {dispatch, admin_oauth2_apps} ]}, Context);
{error, _} ->
z_render:growl_error(?__("Could not insert the App.", Context), Context)
end;
event(#postback{ message={oauth2_app_token_generate, [ {app_id, AppId} ]} }, Context) ->
TPs = #{
<<"is_read_only">> => false,
<<"is_full_access">> => true,
<<"note">> => ?__("Generated using the admin interface", Context)
},
case m_oauth2:insert_token(AppId, z_acl:user(Context), TPs, Context) of
{ok, TId} ->
{ok, Token} = m_oauth2:encode_bearer_token(TId, undefined, Context),
z_render:dialog(
?__("New access token", Context),
"_dialog_oauth2_app_token.tpl",
[
{app_id, AppId},
{token, Token}
],
Context);
{error, _} ->
z_render:growl_error(?__("Could not generate the access token.", Context), Context)
end;
event(#submit{ message={oauth2_consumer_insert, []} }, Context) ->
Consumer = #{
<<"name">> => z_string:trim(z_context:get_q_validated(<<"name">>, Context)),
<<"user_id">> => z_acl:user(Context),
<<"description">> => z_string:trim(z_context:get_q_validated(<<"description">>, Context)),
<<"domain">> => z_string:to_lower(z_string:trim(z_context:get_q_validated(<<"domain">>, Context))),
<<"app_code">> => z_string:trim(z_context:get_q_validated(<<"app_code">>, Context)),
<<"app_secret">> => z_string:trim(z_context:get_q_validated(<<"app_secret">>, Context)),
<<"is_use_auth">> => z_convert:to_bool(z_context:get_q(<<"is_use_auth">>, Context)),
<<"is_use_import">> => z_convert:to_bool(z_context:get_q(<<"is_use_import">>, Context)),
<<"authorize_url">> => z_string:trim(z_context:get_q(<<"authorize_url">>, Context)),
<<"access_token_url">> => z_string:trim(z_context:get_q(<<"access_token_url">>, Context)),
<<"grant_type">> => z_string:trim(z_context:get_q(<<"grant_type">>, Context)),
<<"is_extend_automatic">> => z_convert:to_bool(z_context:get_q(<<"is_extend_automatic">>, Context))
},
case m_oauth2_consumer:insert_consumer(Consumer, Context) of
{ok, _AppId} ->
z_render:wire({redirect, [ {dispatch, admin_oauth2_consumers} ]}, Context);
{error, duplicate_name} ->
z_render:growl_error(?__("An OAuth2 consumer with this name already exsists, please use another name.", Context), Context);
{error, _} ->
z_render:growl_error(?__("Could not insert the Consumer.", Context), Context)
end;
event(#submit{ message={oauth2_consumer_update, [ {app_id, AppId} ]} }, Context) ->
Consumer = #{
<<"description">> => z_string:trim(z_context:get_q_validated(<<"description">>, Context)),
<<"domain">> => z_string:to_lower(z_string:trim(z_context:get_q_validated(<<"domain">>, Context))),
<<"app_code">> => z_string:trim(z_context:get_q_validated(<<"app_code">>, Context)),
<<"app_secret">> => z_string:trim(z_context:get_q_validated(<<"app_secret">>, Context)),
<<"is_use_auth">> => z_convert:to_bool(z_context:get_q(<<"is_use_auth">>, Context)),
<<"is_use_import">> => z_convert:to_bool(z_context:get_q(<<"is_use_import">>, Context)),
<<"authorize_url">> => z_string:trim(z_context:get_q(<<"authorize_url">>, Context)),
<<"access_token_url">> => z_string:trim(z_context:get_q(<<"access_token_url">>, Context)),
<<"grant_type">> => z_string:trim(z_context:get_q(<<"grant_type">>, Context)),
<<"is_extend_automatic">> => z_convert:to_bool(z_context:get_q(<<"is_extend_automatic">>, Context))
},
case m_oauth2_consumer:update_consumer(AppId, Consumer, Context) of
ok ->
z_render:wire({redirect, [ {dispatch, admin_oauth2_consumers} ]}, Context);
{error, _} ->
z_render:growl_error(?__("Could not update the Consumer.", Context), Context)
end;
event(#postback{ message={oauth2_consumer_delete, [ {app_id, AppId} ]} }, Context) ->
case m_oauth2_consumer:delete_consumer(AppId, Context) of
ok ->
z_render:wire({redirect, [ {dispatch, admin_oauth2_consumers} ]}, Context);
{error, _} ->
z_render:growl_error(?__("Could not insert the Consumer.", Context), Context)
end;
event(#postback{ message={oauth2_fetch_consumer_token, [ {app_id, AppId} ]} }, Context) ->
case z_acl:is_admin(Context) of
true ->
case m_oauth2_consumer:fetch_token(AppId, z_acl:user(Context), Context) of
{ok, _AccessToken} ->
?LOG_INFO(#{
text => <<"Fetched new consumer token">>,
in => mod_oauth2,
result => ok,
app_id => AppId,
user_id => z_acl:user(Context)
}),
z_render:wire([
{alert, [
{title, ?__("Success", Context)},
{text, ?__("Fetched a new access token.", Context)},
{action, {reload, []}}
]}
], Context);
{error, Reason} ->
?LOG_ERROR(#{
text => <<"Could not fetch a new consumer token">>,
in => mod_oauth2,
result => error,
reason => Reason,
app_id => AppId,
user_id => z_acl:user(Context)
}),
ReasonText = iolist_to_binary(io_lib:format("~p", [ Reason ])),
z_render:wire([
{alert, [
{text, [
?__("Could not fetch a new access token.", Context),
" (", z_html:escape(ReasonText), ")"
]}
]}
], Context)
end;
false ->
z_render:growl_error(?__("You are not allowed to fetch a consumer token.", Context), Context)
end.
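%% Build the redirect back to the client's redirect_uri after the user accepted
%% the authorization request: encode an accept code and append it, together with
%% the opaque state value, to the query string of the redirect URL.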
oauth_authorize_accept(ClientId, RedirectUri, Scope, State, Context) ->
{ok, Code} = m_oauth2:encode_accept_code(ClientId, RedirectUri, Scope, Context),
Parsed = uri_string:parse(RedirectUri),
Qs = case maps:find(query, Parsed) of
{ok, Q} -> <<"?", Q/binary, $&>>;
error -> <<"?">>
end,
Qs1 = iolist_to_binary([
Qs, <<"&state=">>, cow_qs:urlencode(State),
<<"&code=">>, cow_qs:urlencode(Code)
]),
combine_url(Parsed#{ query => Qs1 }).
oauth_authorize_cancel(RedirectUri, State) ->
Parsed = uri_string:parse(RedirectUri),
Qs = case maps:find(query, Parsed) of
{ok, Q} -> <<"?", Q/binary, $&>>;
error -> <<"?">>
end,
Qs1 = iolist_to_binary([
Qs, <<"state=">>, cow_qs:urlencode(State),
<<"&error=access_denied">>,
<<"&error_reason=user_denied">>,
<<"&error_description=The+user+denied+your+request">>
]),
combine_url(Parsed#{ query => Qs1 }).
combine_url(#{
scheme := Scheme,
host := Host,
path := Path,
query := Qs
} = Parsed) ->
iolist_to_binary([
Scheme, "://", Host,
case maps:find(port, Parsed) of
{ok, Port} -> [ ":", integer_to_binary(Port) ];
error -> <<>>
end,
Path,
Qs,
case maps:find(fragment, Parsed) of
{ok, Frag} -> [ "#", Frag ];
error -> <<>>
end
]).
%% @doc Check if there is a valid Authorization header or 'access_token' argument.
-spec observe_request_context( #request_context{}, z:context(), z:context() ) -> z:context().
observe_request_context(#request_context{ phase = init }, Context, _Context) ->
case z_context:get(anonymous, Context, false) of
true ->
Context;
false ->
case z_auth:is_auth(Context) of
true ->
Context;
false ->
try_auth(Context)
end
end;
observe_request_context(#request_context{ phase = _Phase }, Context, _Context) ->
Context.
%% @doc Check if the current user has a token for the given host. If so then
%% add it to the headers for the fetch request.
observe_url_fetch_options(#url_fetch_options{
url = <<"https:", _/binary>>,
host = Host,
options = Options
}, Context) ->
case proplists:is_defined(authorization, Options) of
false ->
case z_acl:user(Context) of
UserId when is_integer(UserId) ->
case m_oauth2_consumer:find_token(UserId, Host, Context) of
{ok, AccessToken} ->
[
{authorization, <<"Bearer ", AccessToken/binary>>}
| Options
];
{error, _} ->
undefined
end;
_ ->
undefined
end;
true ->
undefined
end;
observe_url_fetch_options(_, _Context) ->
undefined.
%% @doc Periodically try to extend tokens that are expiring in the next 8 hours.
observe_tick_3h(tick_3h, Context) ->
Next8H = z_datetime:next_hour(calendar:universal_time(), 8),
Expiring = z_db:q("
select id, rsc_id, key
from identity
where expires > now()
and expires < $1
and type = 'mod_oauth2'
",
[ Next8H ],
Context),
lists:foreach(
fun({_IdnId, RscId, Key}) ->
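% The identity key has the form <<"ConsumerName:UserId">>; only refresh the
% token when the embedded id still resolves to this identity's resource id.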
case binary:split(Key, <<":">>) of
[Name, RId] ->
AppId = m_oauth2_consumer:name_to_id(Name, Context),
case m_rsc:rid(RId, Context) of
RscId ->
case m_oauth2_consumer:fetch_token(Name, RscId, z_acl:sudo(Context)) of
{ok, _} ->
?LOG_INFO(#{
text => <<"Fetched new consumer token">>,
in => mod_oauth2,
result => ok,
app_id => AppId,
user_id => RscId
});
{error, Reason} ->
?LOG_ERROR(#{
text => <<"Could not fetch a new consumer token">>,
in => mod_oauth2,
result => error,
reason => Reason,
app_id => AppId,
user_id => RscId
})
end;
_ ->
ok
end;
_ ->
ok
end
end,
Expiring).
observe_admin_menu(#admin_menu{}, Acc, Context) ->
[
#menu_item{id=admin_oauth2_apps,
parent=admin_auth,
label=?__("OAuth2 Applications", Context),
url={admin_oauth2_apps, []},
visiblecheck={acl, use, mod_admin_config}},
#menu_item{id=admin_oauth2_consumers,
parent=admin_auth,
label=?__("OAuth2 Consumer Tokens", Context),
url={admin_oauth2_consumers, []}}
| Acc ].
try_auth(Context) ->
case cowmachine_req:get_req_header(<<"authorization">>, Context) of
<<"Bearer ", Token/binary>> ->
try_bearer(Token, Context);
<<"bearer ", Token/binary>> ->
try_bearer(Token, Context);
_ ->
case z_context:get_q(<<"access_token">>, Context) of
undefined ->
Context;
Token when is_binary(Token) ->
try_bearer(Token, Context)
end
end.
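%% Strip optional leading whitespace from the bearer token and decode it;
%% tokens that cannot be decoded leave the request context unauthenticated.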
try_bearer(<<>>, Context) ->
Context;
try_bearer(<<" ", Token/binary>>, Context) ->
try_bearer(Token, Context);
try_bearer(Token, Context) ->
case m_oauth2:decode_bearer_token(Token, Context) of
{ok, TokenMap} ->
try_token(TokenMap, Context);
{error, unknown_token} ->
% Somebody else's token - ignore
Context;
{error, Reason} ->
% Illegal token, maybe throw a 400 here?
?LOG_NOTICE(#{
text => <<"Could not decode OAuth2 token">>,
in => zotonic_mod_oauth2,
result => error,
reason => Reason,
token => Token
}),
Context
end.
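%% Log the user on with the user-group and read-only restrictions that were
%% encoded in the bearer token payload.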
try_token(#{
<<"id">> := TokenId,
<<"user_id">> := UserId,
<<"user_groups">> := UserGroups,
<<"is_read_only">> := IsReadOnly,
<<"is_full_access">> := IsFullAccess
}, Context) ->
Options = case IsFullAccess of
true ->
% No restriction on user groups
#{
is_read_only => IsReadOnly
};
false ->
% Limited access, user groups will be filtered
#{
user_groups => UserGroups,
is_read_only => IsReadOnly
}
end,
case z_auth:is_enabled(UserId, Context) of
true ->
z_acl:logon(UserId, Options, Context);
false ->
% User is disabled, maybe throw a 403 here?
?LOG_NOTICE(#{
text => <<"Authenticated OAuth2 request for disabled user">>,
in => zotonic_mod_oauth2,
user_id => UserId,
result => error,
reason => disabled,
token_id => TokenId
}),
Context
end.
-spec manage_schema( z_module_manager:manage_schema(), z:context() ) -> ok.
manage_schema(Version, Context) ->
m_oauth2:manage_schema(Version, Context),
m_oauth2_consumer:manage_schema(Version, Context).
| null | https://raw.githubusercontent.com/zotonic/zotonic/3ac6d8ee63a7526f1d1cb9f001e255a75642eec8/apps/zotonic_mod_oauth2/src/mod_oauth2.erl | erlang | @doc OAuth2 (-ietf-oauth-v2-26)
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@doc Check if there is a valid Authorization header or 'access_token' argument.
@doc Check if the current user has a token for the given host. If so then
add it to the headers for the fetch request.
Somebody else's token - ignore
No restriction on user groups | @author < >
2019 - 2022
Copyright 2019 - 2022
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(mod_oauth2).
-author("Marc Worrell <>").
-mod_title("OAuth2").
-mod_description("Provides authentication over OAuth2.").
-mod_prio(900).
-mod_schema(11).
-mod_depends([ authentication ]).
-export([
event/2,
observe_request_context/3,
observe_url_fetch_options/2,
observe_admin_menu/3,
observe_tick_3h/2,
manage_schema/2
]).
-include_lib("zotonic_core/include/zotonic.hrl").
-include_lib("zotonic_mod_admin/include/admin_menu.hrl").
event(#submit{ message={oauth2_authorize, Args}}, Context) ->
{client_id, ClientId} = proplists:lookup(client_id, Args),
{redirect_uri, RedirectUri} = proplists:lookup(redirect_uri, Args),
{state, State} = proplists:lookup(state, Args),
{response_type, _ResponseType} = proplists:lookup(response_type, Args),
{scope, Scope} = proplists:lookup(scope, Args),
Redirect = case z_context:get_q(<<"accept">>, Context) of
<<>> ->
oauth_authorize_accept(ClientId, RedirectUri, Scope, State, Context);
undefined ->
oauth_authorize_cancel(RedirectUri, State)
end,
z_render:wire({redirect, [ {location, Redirect} ]}, Context);
event(#submit{ message={oauth2_app_insert, []} }, Context) ->
App = #{
<<"user_id">> => z_acl:user(Context),
<<"description">> => z_string:trim(z_context:get_q_validated(<<"description">>, Context)),
<<"is_enabled">> => z_convert:to_bool(z_context:get_q(<<"is_enabled">>, Context)),
<<"redirect_urls">> => z_string:trim(z_context:get_q(<<"redirect_urls">>, Context))
},
case m_oauth2:insert_app(App, Context) of
{ok, _AppId} ->
z_render:wire({redirect, [ {dispatch, admin_oauth2_apps} ]}, Context);
{error, _} ->
z_render:growl_error(?__("Could not insert the App.", Context), Context)
end;
event(#submit{ message={oauth2_app_update, [ {app_id, AppId} ]} }, Context) ->
App = #{
<<"description">> => z_string:trim(z_context:get_q_validated(<<"description">>, Context)),
<<"is_enabled">> => z_convert:to_bool(z_context:get_q(<<"is_enabled">>, Context)),
<<"redirect_urls">> => z_string:trim(z_context:get_q(<<"redirect_urls">>, Context))
},
case m_oauth2:update_app(AppId, App, Context) of
ok ->
z_render:wire({redirect, [ {dispatch, admin_oauth2_apps} ]}, Context);
{error, _} ->
z_render:growl_error(?__("Could not insert the App.", Context), Context)
end;
event(#postback{ message={oauth2_app_delete, [ {app_id, AppId} ]} }, Context) ->
case m_oauth2:delete_app(AppId, Context) of
ok ->
z_render:wire({redirect, [ {dispatch, admin_oauth2_apps} ]}, Context);
{error, _} ->
z_render:growl_error(?__("Could not insert the App.", Context), Context)
end;
event(#postback{ message={oauth2_app_token_generate, [ {app_id, AppId} ]} }, Context) ->
TPs = #{
<<"is_read_only">> => false,
<<"is_full_access">> => true,
<<"note">> => ?__("Generated using the admin interface", Context)
},
case m_oauth2:insert_token(AppId, z_acl:user(Context), TPs, Context) of
{ok, TId} ->
{ok, Token} = m_oauth2:encode_bearer_token(TId, undefined, Context),
z_render:dialog(
?__("New access token", Context),
"_dialog_oauth2_app_token.tpl",
[
{app_id, AppId},
{token, Token}
],
Context);
{error, _} ->
z_render:growl_error(?__("Could not generate the access token.", Context), Context)
end;
event(#submit{ message={oauth2_consumer_insert, []} }, Context) ->
Consumer = #{
<<"name">> => z_string:trim(z_context:get_q_validated(<<"name">>, Context)),
<<"user_id">> => z_acl:user(Context),
<<"description">> => z_string:trim(z_context:get_q_validated(<<"description">>, Context)),
<<"domain">> => z_string:to_lower(z_string:trim(z_context:get_q_validated(<<"domain">>, Context))),
<<"app_code">> => z_string:trim(z_context:get_q_validated(<<"app_code">>, Context)),
<<"app_secret">> => z_string:trim(z_context:get_q_validated(<<"app_secret">>, Context)),
<<"is_use_auth">> => z_convert:to_bool(z_context:get_q(<<"is_use_auth">>, Context)),
<<"is_use_import">> => z_convert:to_bool(z_context:get_q(<<"is_use_import">>, Context)),
<<"authorize_url">> => z_string:trim(z_context:get_q(<<"authorize_url">>, Context)),
<<"access_token_url">> => z_string:trim(z_context:get_q(<<"access_token_url">>, Context)),
<<"grant_type">> => z_string:trim(z_context:get_q(<<"grant_type">>, Context)),
<<"is_extend_automatic">> => z_convert:to_bool(z_context:get_q(<<"is_extend_automatic">>, Context))
},
case m_oauth2_consumer:insert_consumer(Consumer, Context) of
{ok, _AppId} ->
z_render:wire({redirect, [ {dispatch, admin_oauth2_consumers} ]}, Context);
{error, duplicate_name} ->
z_render:growl_error(?__("An OAuth2 consumer with this name already exsists, please use another name.", Context), Context);
{error, _} ->
z_render:growl_error(?__("Could not insert the Consumer.", Context), Context)
end;
event(#submit{ message={oauth2_consumer_update, [ {app_id, AppId} ]} }, Context) ->
Consumer = #{
<<"description">> => z_string:trim(z_context:get_q_validated(<<"description">>, Context)),
<<"domain">> => z_string:to_lower(z_string:trim(z_context:get_q_validated(<<"domain">>, Context))),
<<"app_code">> => z_string:trim(z_context:get_q_validated(<<"app_code">>, Context)),
<<"app_secret">> => z_string:trim(z_context:get_q_validated(<<"app_secret">>, Context)),
<<"is_use_auth">> => z_convert:to_bool(z_context:get_q(<<"is_use_auth">>, Context)),
<<"is_use_import">> => z_convert:to_bool(z_context:get_q(<<"is_use_import">>, Context)),
<<"authorize_url">> => z_string:trim(z_context:get_q(<<"authorize_url">>, Context)),
<<"access_token_url">> => z_string:trim(z_context:get_q(<<"access_token_url">>, Context)),
<<"grant_type">> => z_string:trim(z_context:get_q(<<"grant_type">>, Context)),
<<"is_extend_automatic">> => z_convert:to_bool(z_context:get_q(<<"is_extend_automatic">>, Context))
},
case m_oauth2_consumer:update_consumer(AppId, Consumer, Context) of
ok ->
z_render:wire({redirect, [ {dispatch, admin_oauth2_consumers} ]}, Context);
{error, _} ->
z_render:growl_error(?__("Could not update the Consumer.", Context), Context)
end;
event(#postback{ message={oauth2_consumer_delete, [ {app_id, AppId} ]} }, Context) ->
case m_oauth2_consumer:delete_consumer(AppId, Context) of
ok ->
z_render:wire({redirect, [ {dispatch, admin_oauth2_consumers} ]}, Context);
{error, _} ->
z_render:growl_error(?__("Could not insert the Consumer.", Context), Context)
end;
event(#postback{ message={oauth2_fetch_consumer_token, [ {app_id, AppId} ]} }, Context) ->
case z_acl:is_admin(Context) of
true ->
case m_oauth2_consumer:fetch_token(AppId, z_acl:user(Context), Context) of
{ok, _AccessToken} ->
?LOG_INFO(#{
text => <<"Fetched new consumer token">>,
in => mod_oauth2,
result => ok,
app_id => AppId,
user_id => z_acl:user(Context)
}),
z_render:wire([
{alert, [
{title, ?__("Success", Context)},
{text, ?__("Fetched a new access token.", Context)},
{action, {reload, []}}
]}
], Context);
{error, Reason} ->
?LOG_ERROR(#{
text => <<"Could not fetch a new consumer token">>,
in => mod_oauth2,
result => error,
reason => Reason,
app_id => AppId,
user_id => z_acl:user(Context)
}),
ReasonText = iolist_to_binary(io_lib:format("~p", [ Reason ])),
z_render:wire([
{alert, [
{text, [
?__("Could not fetch a new access token.", Context),
" (", z_html:escape(ReasonText), ")"
]}
]}
], Context)
end;
false ->
z_render:growl_error(?__("You are not allowed to fetch a consumer token.", Context), Context)
end.
oauth_authorize_accept(ClientId, RedirectUri, Scope, State, Context) ->
{ok, Code} = m_oauth2:encode_accept_code(ClientId, RedirectUri, Scope, Context),
Parsed = uri_string:parse(RedirectUri),
Qs = case maps:find(query, Parsed) of
{ok, Q} -> <<"?", Q/binary, $&>>;
error -> <<"?">>
end,
Qs1 = iolist_to_binary([
Qs, <<"&state=">>, cow_qs:urlencode(State),
<<"&code=">>, cow_qs:urlencode(Code)
]),
combine_url(Parsed#{ query => Qs1 }).
oauth_authorize_cancel(RedirectUri, State) ->
Parsed = uri_string:parse(RedirectUri),
Qs = case maps:find(query, Parsed) of
{ok, Q} -> <<"?", Q/binary, $&>>;
error -> <<"?">>
end,
Qs1 = iolist_to_binary([
Qs, <<"state=">>, cow_qs:urlencode(State),
<<"&error=access_denied">>,
<<"&error_reason=user_denied">>,
<<"&error_description=The+user+denied+your+request">>
]),
combine_url(Parsed#{ query => Qs1 }).
combine_url(#{
scheme := Scheme,
host := Host,
path := Path,
query := Qs
} = Parsed) ->
iolist_to_binary([
Scheme, "://", Host,
case maps:find(port, Parsed) of
{ok, Port} -> [ ":", integer_to_binary(Port) ];
error -> <<>>
end,
Path,
Qs,
case maps:find(fragment, Parsed) of
{ok, Frag} -> [ "#", Frag ];
error -> <<>>
end
]).
-spec observe_request_context( #request_context{}, z:context(), z:context() ) -> z:context().
observe_request_context(#request_context{ phase = init }, Context, _Context) ->
case z_context:get(anonymous, Context, false) of
true ->
Context;
false ->
case z_auth:is_auth(Context) of
true ->
Context;
false ->
try_auth(Context)
end
end;
observe_request_context(#request_context{ phase = _Phase }, Context, _Context) ->
Context.
observe_url_fetch_options(#url_fetch_options{
url = <<"https:", _/binary>>,
host = Host,
options = Options
}, Context) ->
case proplists:is_defined(authorization, Options) of
false ->
case z_acl:user(Context) of
UserId when is_integer(UserId) ->
case m_oauth2_consumer:find_token(UserId, Host, Context) of
{ok, AccessToken} ->
[
{authorization, <<"Bearer ", AccessToken/binary>>}
| Options
];
{error, _} ->
undefined
end;
_ ->
undefined
end;
true ->
undefined
end;
observe_url_fetch_options(_, _Context) ->
undefined.
%% @doc Periodically try to extend tokens that are expiring in the next 8 hours.
observe_tick_3h(tick_3h, Context) ->
Next8H = z_datetime:next_hour(calendar:universal_time(), 8),
Expiring = z_db:q("
select id, rsc_id, key
from identity
where expires > now()
and expires < $1
and type = 'mod_oauth2'
",
[ Next8H ],
Context),
lists:foreach(
fun({_IdnId, RscId, Key}) ->
case binary:split(Key, <<":">>) of
[Name, RId] ->
AppId = m_oauth2_consumer:name_to_id(Name, Context),
case m_rsc:rid(RId, Context) of
RscId ->
case m_oauth2_consumer:fetch_token(Name, RscId, z_acl:sudo(Context)) of
{ok, _} ->
?LOG_INFO(#{
text => <<"Fetched new consumer token">>,
in => mod_oauth2,
result => ok,
app_id => AppId,
user_id => RscId
});
{error, Reason} ->
?LOG_ERROR(#{
text => <<"Could not fetch a new consumer token">>,
in => mod_oauth2,
result => error,
reason => Reason,
app_id => AppId,
user_id => RscId
})
end;
_ ->
ok
end;
_ ->
ok
end
end,
Expiring).
observe_admin_menu(#admin_menu{}, Acc, Context) ->
[
#menu_item{id=admin_oauth2_apps,
parent=admin_auth,
label=?__("OAuth2 Applications", Context),
url={admin_oauth2_apps, []},
visiblecheck={acl, use, mod_admin_config}},
#menu_item{id=admin_oauth2_consumers,
parent=admin_auth,
label=?__("OAuth2 Consumer Tokens", Context),
url={admin_oauth2_consumers, []}}
| Acc ].
try_auth(Context) ->
case cowmachine_req:get_req_header(<<"authorization">>, Context) of
<<"Bearer ", Token/binary>> ->
try_bearer(Token, Context);
<<"bearer ", Token/binary>> ->
try_bearer(Token, Context);
_ ->
case z_context:get_q(<<"access_token">>, Context) of
undefined ->
Context;
Token when is_binary(Token) ->
try_bearer(Token, Context)
end
end.
try_bearer(<<>>, Context) ->
Context;
try_bearer(<<" ", Token/binary>>, Context) ->
try_bearer(Token, Context);
try_bearer(Token, Context) ->
case m_oauth2:decode_bearer_token(Token, Context) of
{ok, TokenMap} ->
try_token(TokenMap, Context);
{error, unknown_token} ->
Context;
{error, Reason} ->
% Illegal token, maybe throw a 400 here?
?LOG_NOTICE(#{
text => <<"Could not decode OAuth2 token">>,
in => zotonic_mod_oauth2,
result => error,
reason => Reason,
token => Token
}),
Context
end.
try_token(#{
<<"id">> := TokenId,
<<"user_id">> := UserId,
<<"user_groups">> := UserGroups,
<<"is_read_only">> := IsReadOnly,
<<"is_full_access">> := IsFullAccess
}, Context) ->
Options = case IsFullAccess of
true ->
#{
is_read_only => IsReadOnly
};
false ->
% Limited access, user groups will be filtered
#{
user_groups => UserGroups,
is_read_only => IsReadOnly
}
end,
case z_auth:is_enabled(UserId, Context) of
true ->
z_acl:logon(UserId, Options, Context);
false ->
% User is disabled, maybe throw a 403 here?
?LOG_NOTICE(#{
text => <<"Authenticated OAuth2 request for disabled user">>,
in => zotonic_mod_oauth2,
user_id => UserId,
result => error,
reason => disabled,
token_id => TokenId
}),
Context
end.
-spec manage_schema( z_module_manager:manage_schema(), z:context() ) -> ok.
manage_schema(Version, Context) ->
m_oauth2:manage_schema(Version, Context),
m_oauth2_consumer:manage_schema(Version, Context).
|
a5a3126019f1ce4e861381c466ca9ae215d75db8d491a9e00988f2df6aaef938 | mindpool/cs-termite | gen_event.scm | ;; Erlang/OTP-like behavior for "event handlers"
;;; "Types" for the functions in a EVENT-HANDLER
;;;
;;; INIT :: arg -> state
;;; NOTIFY :: event state -> state
;;; CALL :: args state -> reply state
;;; TERMINATE :: reason state -> void
(define-record event-handler
init
notify
call
terminate)
(define *event-manager-timeout* 1)
(define (event-manager)
(let loop ((handlers '()))
(recv
((from tag ('call handler args))
(match (assq handler handlers)
((handler . state)
(call-with-values
(lambda ()
((event-handler-call handler) args state))
(lambda (reply state)
(! from (list tag reply))
(loop (cons (cons handler state)
(remove (lambda (x)
(eq? (car x) handler))
handlers))))))
(#f (error "handler doesn't exists"))))
;; should check to avoid duplicates
(('add-handler handler args)
(loop (cons (cons handler
((event-handler-init handler) args))
handlers)))
(('notify event)
(loop (map
(lambda (pair)
(match pair
((handler . state)
(cons handler
((event-handler-notify handler) event state)))))
handlers)))
(('stop)
(for-each
(lambda (pair)
(match pair
((handler . state)
((event-handler-terminate handler) 'normal state))))
handlers)
(void)))))
(define (internal-event-manager-start spawner handlers name)
(let ((em (spawn event-manager name: name)))
(for-each
(lambda (handler)
(event-manager:add-handler em handler))
handlers)
em))
(define (event-manager:start
#!rest handlers
#!key (name 'anonymous-event-manager))
(internal-event-manager-start spawn handlers name))
(define (event-manager:start-link
#!rest handlers
#!key (name 'anonymous-linked-event-manager))
(internal-event-manager-start spawn-link handlers name))
(define (event-manager:add-handler event-manager handler . args)
(! event-manager (list 'add-handler handler args)))
(define (event-manager:notify event-manager event)
(! event-manager (list 'notify event)))
(define (event-manager:call event-manager handler args)
(!? event-manager (list 'call handler args) *event-manager-timeout*))
(define (event-manager:stop event-manager)
(! event-manager (list 'stop)))
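;; Usage sketch (handler and arguments below are illustrative):
;;   (define em (event-manager:start))
;;   (event-manager:add-handler em some-handler 'init-arg)
;;   (event-manager:notify em 'some-event)
;;   (event-manager:stop em)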
;; build a trivial event handler with no state, only invoking a
;; callback on any event
(define (make-simple-event-handler callback initial-state)
(make-event-handler
;; INIT
(lambda (args)
initial-state)
;; NOTIFY
(lambda (event state)
(callback event state))
;; CALL
(lambda (args state)
(values (void) state))
;; TERMINATE
(lambda (reason state)
(void))))
| null | https://raw.githubusercontent.com/mindpool/cs-termite/23df38627bfd4bd2257fb8d9f6c1812d2cd6bc04/otp/gen_event.scm | scheme | "Types" for the functions in a EVENT-HANDLER
NOTIFY :: event state -> state
CALL :: args state -> reply state
TERMINATE :: reason state -> void
should check to avoid duplicates
build a trivial event handler with no state, only invoking a
callback on any event
CALL
TERMINATE | Erlang / OTP - like behavior for " event handlers "
;;; INIT :: arg -> state
(define-record event-handler
init
notify
call
terminate)
(define *event-manager-timeout* 1)
(define (event-manager)
(let loop ((handlers '()))
(recv
((from tag ('call handler args))
(match (assq handler handlers)
((handler . state)
(call-with-values
(lambda ()
((event-handler-call handler) args state))
(lambda (reply state)
(! from (list tag reply))
(loop (cons (cons handler state)
(remove (lambda (x)
(eq? (car x) handler))
handlers))))))
(#f (error "handler doesn't exists"))))
(('add-handler handler args)
(loop (cons (cons handler
((event-handler-init handler) args))
handlers)))
(('notify event)
(loop (map
(lambda (pair)
(match pair
((handler . state)
(cons handler
((event-handler-notify handler) event state)))))
handlers)))
(('stop)
(for-each
(lambda (pair)
(match pair
((handler . state)
((event-handler-terminate handler) 'normal state))))
handlers)
(void)))))
(define (internal-event-manager-start spawner handlers name)
(let ((em (spawn event-manager name: name)))
(for-each
(lambda (handler)
(event-manager:add-handler em handler))
handlers)
em))
(define (event-manager:start
#!rest handlers
#!key (name 'anonymous-event-manager))
(internal-event-manager-start spawn handlers name))
(define (event-manager:start-link
#!rest handlers
#!key (name 'anonymous-linked-event-manager))
(internal-event-manager-start spawn-link handlers name))
(define (event-manager:add-handler event-manager handler . args)
(! event-manager (list 'add-handler handler args)))
(define (event-manager:notify event-manager event)
(! event-manager (list 'notify event)))
(define (event-manager:call event-manager handler args)
(!? event-manager (list 'call handler args) *event-manager-timeout*))
(define (event-manager:stop event-manager)
(! event-manager (list 'stop)))
(define (make-simple-event-handler callback initial-state)
(make-event-handler
;; INIT
(lambda (args)
initial-state)
;; NOTIFY
(lambda (event state)
(callback event state))
(lambda (args state)
(values (void) state))
(lambda (reason state)
(void))))
|
55468812a5f6e879df837a1ccba7df0a26ab5b5dbf45ad117a083a5dc193994e | nuprl/gradual-typing-performance | ukkonen-jump-to-suffix.rkt | #lang typed/racket/base
(provide jump-to-suffix)
;; -----------------------------------------------------------------------------
(require "data-node-adapted.rkt"
"data-label-adapted.rkt"
benchmark-util)
(require/typed/check "label-label-length.rkt"
[label-length (-> label Index)])
(require/typed/check "structs-node-root.rkt"
[node-root? (-> Node Boolean)])
;; =============================================================================
;; jump-to-suffix: node -> (values node (union boolean number))
;;
;; Given an internal node, jumps to the suffix from that node.
;; According to the theory of suffix trees, such a node will exist
;; in the tree if we follow the construction. If we had to
;; go up a few characters, returns the number of chars at the suffix
;; end that need to be compared to get the real suffix.
;; If we hit the root, that offset is #f to indicate that we have to
;; start searching the suffix from scratch.
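;; Concretely:
;;   root node           => (values node #f)
;;   suffix link present => (values (node-suffix-link node) 0)
;;   otherwise           => (values (node-suffix-link parent) (label-length (node-up-label node)))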
(: jump-to-suffix (-> Node (values Node (U Boolean Integer))))
(define (jump-to-suffix node)
(define PARENT (node-parent node))
(cond ((node-root? node)
(values node #f))
((node-suffix-link node)
(begin
(let ([node2 (node-suffix-link node)])
(unless node2 (error "jump to suffix"))
(values node2 0))))
((and PARENT (node-root? PARENT))
(values PARENT #f))
(else
(let* ([parent (node-parent node)]
[sl (begin (unless parent (error "j2s"))
(node-suffix-link parent))])
(unless sl (error "j2s whoahao"))
(values sl
(label-length (node-up-label node)))))))
| null | https://raw.githubusercontent.com/nuprl/gradual-typing-performance/35442b3221299a9cadba6810573007736b0d65d4/experimental/micro/suffixtree/typed/ukkonen-jump-to-suffix.rkt | racket | -----------------------------------------------------------------------------
=============================================================================
jump-to-suffix: node -> (values node (union boolean number))
Given an internal node, jumps to the suffix from that node.
According to the theory of suffix trees, such a node will exist
go up a few characters, returns the number of chars at the suffix
end that need to be compared to get the real suffix.
If we hit the root, that offset is #f to indicate that we have to
start searching the suffix from scratch. | #lang typed/racket/base
(provide jump-to-suffix)
(require "data-node-adapted.rkt"
"data-label-adapted.rkt"
benchmark-util)
(require/typed/check "label-label-length.rkt"
[label-length (-> label Index)])
(require/typed/check "structs-node-root.rkt"
[node-root? (-> Node Boolean)])
;; in the tree if we follow the construction. If we had to
(: jump-to-suffix (-> Node (values Node (U Boolean Integer))))
(define (jump-to-suffix node)
(define PARENT (node-parent node))
(cond ((node-root? node)
(values node #f))
((node-suffix-link node)
(begin
(let ([node2 (node-suffix-link node)])
(unless node2 (error "jump to suffix"))
(values node2 0))))
((and PARENT (node-root? PARENT))
(values PARENT #f))
(else
(let* ([parent (node-parent node)]
[sl (begin (unless parent (error "j2s"))
(node-suffix-link parent))])
(unless sl (error "j2s whoahao"))
(values sl
(label-length (node-up-label node)))))))
|
87d8627e59c07a1e6b668a9c20409aafba8c982d710552d877a3f3204ad20a19 | joewilliams/erl_geo_dns | lhttpc_sock.erl | %%% ----------------------------------------------------------------------------
%%% Copyright (c) 2009, Erlang Training and Consulting Ltd.
%%% All rights reserved.
%%%
%%% Redistribution and use in source and binary forms, with or without
%%% modification, are permitted provided that the following conditions are met:
%%% * Redistributions of source code must retain the above copyright
%%% notice, this list of conditions and the following disclaimer.
%%% * Redistributions in binary form must reproduce the above copyright
%%% notice, this list of conditions and the following disclaimer in the
%%% documentation and/or other materials provided with the distribution.
%%% * Neither the name of Erlang Training and Consulting Ltd. nor the
%%% names of its contributors may be used to endorse or promote products
%%% derived from this software without specific prior written permission.
%%%
%%% THIS SOFTWARE IS PROVIDED BY Erlang Training and Consulting Ltd. ''AS IS''
%%% AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
%%% IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
%%% ARE DISCLAIMED. IN NO EVENT SHALL Erlang Training and Consulting Ltd. BE
%%% LIABLE SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
%%% BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
%%% WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
%%% OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
%%% ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
%%% ----------------------------------------------------------------------------
%%% @private
%%% @author < >
%%% @doc
%%% This module implements wrappers for socket operations.
%%% Makes it possible to have the same interface to ssl and tcp sockets.
%%% @end
-module(lhttpc_sock).
-export([
connect/5,
recv/2,
recv/3,
send/3,
controlling_process/3,
setopts/3,
close/2
]).
-include("lhttpc_types.hrl").
%% @spec (Host, Port, Options, Timeout, SslFlag) -> {ok, Socket} | {error, Reason}
%% Host = string() | ip_address()
%% Port = integer()
%% Options = [{atom(), term()} | atom()]
%% Timeout = infinity | integer()
%% SslFlag = bool()
%% Socket = socket()
%% Reason = atom()
%% @doc
%% Connects to `Host' and `Port'.
%% Will use the `ssl' module if `SslFlag' is `true' and gen_tcp otherwise.
%% `Options' are the normal `gen_tcp' or `ssl' Options.
%% @end
-spec connect(host(), integer(), socket_options(), timeout(), bool()) ->
{ok, socket()} | {error, atom()}.
connect(Host, Port, Options, Timeout, true) ->
ssl:connect(Host, Port, Options, Timeout);
connect(Host, Port, Options, Timeout, false) ->
gen_tcp:connect(Host, Port, Options, Timeout).
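%% Example (sketch): connect("example.com", 443, [binary, {active, false}], 5000, true)
%% performs an SSL connect and returns {ok, Socket} on success.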
%% @spec (Socket, SslFlag) -> {ok, Data} | {error, Reason}
%% Socket = socket()
%% Length = integer()
%% SslFlag = bool()
%% Data = term()
%% Reason = atom()
%% @doc
%% Reads available bytes from `Socket'.
%% Will block until data is available on the socket and return the first
%% packet.
%% @end
-spec recv(socket(), bool()) ->
{ok, any()} | {error, atom()} | {error, {http_error, string()}}.
recv(Socket, true) ->
ssl:recv(Socket, 0);
recv(Socket, false) ->
gen_tcp:recv(Socket, 0).
%% @spec (Socket, Length, SslFlag) -> {ok, Data} | {error, Reason}
%% Socket = socket()
%% Length = integer()
%% SslFlag = bool()
%% Data = term()
%% Reason = atom()
%% @doc
%% Receives `Length' bytes from `Socket'.
%% Will block until `Length' bytes are available.
%% @end
-spec recv(socket(), integer(), bool()) -> {ok, any()} | {error, atom()}.
recv(_, 0, _) ->
{ok, <<>>};
recv(Socket, Length, true) ->
ssl:recv(Socket, Length);
recv(Socket, Length, false) ->
gen_tcp:recv(Socket, Length).
%% @spec (Socket, Data, SslFlag) -> ok | {error, Reason}
%% Socket = socket()
%% Data = iolist()
%% SslFlag = bool()
%% Reason = atom()
%% @doc
%% Sends data on a socket.
%% Will use the `ssl' module if `SslFlag' is set to `true', otherwise the
%% gen_tcp module.
%% @end
-spec send(socket(), iolist() | binary(), bool()) -> ok | {error, atom()}.
send(Socket, Request, true) ->
ssl:send(Socket, Request);
send(Socket, Request, false) ->
gen_tcp:send(Socket, Request).
%% @spec (Socket, Pid, SslFlag) -> ok | {error, Reason}
%% Socket = socket()
%% Pid = pid()
%% SslFlag = bool()
%% Reason = atom()
%% @doc
%% Sets the controlling process for the `Socket'.
%% @end
-spec controlling_process(socket(), pid(), bool()) ->
ok | {error, atom()}.
controlling_process(Socket, Pid, true) ->
ssl:controlling_process(Socket, Pid);
controlling_process(Socket, Pid, false) ->
gen_tcp:controlling_process(Socket, Pid).
%% @spec (Socket, Options, SslFlag) -> ok | {error, Reason}
%% Socket = socket()
%% Options = [atom() | {atom(), term()}]
%% SslFlag = bool()
%% Reason = atom()
%% @doc
%% Sets options for a socket. Look in `inet:setopts/2' for more info.
%% @end
-spec setopts(socket(), socket_options(), bool()) ->
ok | {error, atom()}.
setopts(Socket, Options, true) ->
ssl:setopts(Socket, Options);
setopts(Socket, Options, false) ->
inet:setopts(Socket, Options).
%% @spec (Socket, SslFlag) -> ok | {error, Reason}
%% Socket = socket()
%% SslFlag = bool()
%% Reason = atom()
%% @doc
%% Closes a socket.
%% @end
-spec close(socket(), bool()) -> ok | {error, atom()}.
close(Socket, true) ->
ssl:close(Socket);
close(Socket, false) ->
gen_tcp:close(Socket).
| null | https://raw.githubusercontent.com/joewilliams/erl_geo_dns/682c3925959db61ead99f13160ef8bd77486a871/apps/lhttpc/src/lhttpc_sock.erl | erlang | ----------------------------------------------------------------------------
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY Erlang Training and Consulting Ltd. ''AS IS''
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
LIABLE SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
----------------------------------------------------------------------------
@doc
This module implements wrappers for socket operations.
Makes it possible to have the same interface to ssl and tcp sockets.
@end
Host = string() | ip_address()
Port = integer()
Options = [{atom(), term()} | atom()]
Timeout = infinity | integer()
Socket = socket()
Reason = atom()
@doc
Connects to `Host' and `Port'.
`Options' are the normal `gen_tcp' or `ssl' Options.
@end
Socket = socket()
Length = integer()
Data = term()
Reason = atom()
@doc
Reads available bytes from `Socket'.
packet.
@end
Socket = socket()
Length = integer()
Data = term()
Reason = atom()
@doc
Receives `Length' bytes from `Socket'.
Will block untill `Length' bytes is available.
@end
Socket = socket()
Data = iolist()
Reason = atom()
@doc
Sends data on a socket.
gen_tcp module.
@end
Socket = socket()
Reason = atom()
@doc
Sets the controlling proces for the `Socket'.
@end
Socket = socket()
Options = [atom() | {atom(), term()}]
Reason = atom()
@doc
Sets options for a socket. Look in `inet:setopts/2' for more info.
@end
Socket = socket()
Reason = atom()
@doc
Closes a socket.
@end | Copyright ( c ) 2009 , Erlang Training and Consulting Ltd.
* Neither the name of Erlang Training and Consulting Ltd. nor the
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED . IN NO EVENT SHALL Erlang Training and Consulting Ltd. BE
@private
@author < >
-module(lhttpc_sock).
-export([
connect/5,
recv/2,
recv/3,
send/3,
controlling_process/3,
setopts/3,
close/2
]).
-include("lhttpc_types.hrl").
%% @spec (Host, Port, Options, Timeout, SslFlag) -> {ok, Socket} | {error, Reason}
%% SslFlag = bool()
%% Will use the `ssl' module if `SslFlag' is `true' and gen_tcp otherwise.
-spec connect(host(), integer(), socket_options(), timeout(), bool()) ->
{ok, socket()} | {error, atom()}.
connect(Host, Port, Options, Timeout, true) ->
ssl:connect(Host, Port, Options, Timeout);
connect(Host, Port, Options, Timeout, false) ->
gen_tcp:connect(Host, Port, Options, Timeout).
%% @spec (Socket, SslFlag) -> {ok, Data} | {error, Reason}
%% SslFlag = bool()
%% Will block until data is available on the socket and return the first packet.
-spec recv(socket(), bool()) ->
{ok, any()} | {error, atom()} | {error, {http_error, string()}}.
recv(Socket, true) ->
ssl:recv(Socket, 0);
recv(Socket, false) ->
gen_tcp:recv(Socket, 0).
%% @spec (Socket, Length, SslFlag) -> {ok, Data} | {error, Reason}
%% SslFlag = bool()
-spec recv(socket(), integer(), bool()) -> {ok, any()} | {error, atom()}.
recv(_, 0, _) ->
{ok, <<>>};
recv(Socket, Length, true) ->
ssl:recv(Socket, Length);
recv(Socket, Length, false) ->
gen_tcp:recv(Socket, Length).
%% @spec (Socket, Data, SslFlag) -> ok | {error, Reason}
%% SslFlag = bool()
%% Will use the `ssl' module if `SslFlag' is set to `true', otherwise the gen_tcp module.
-spec send(socket(), iolist() | binary(), bool()) -> ok | {error, atom()}.
send(Socket, Request, true) ->
ssl:send(Socket, Request);
send(Socket, Request, false) ->
gen_tcp:send(Socket, Request).
%% @spec (Socket, Pid, SslFlag) -> ok | {error, Reason}
%% Pid = pid()
%% SslFlag = bool()
-spec controlling_process(socket(), pid(), bool()) ->
ok | {error, atom()}.
controlling_process(Socket, Pid, true) ->
ssl:controlling_process(Socket, Pid);
controlling_process(Socket, Pid, false) ->
gen_tcp:controlling_process(Socket, Pid).
%% @spec (Socket, Options, SslFlag) -> ok | {error, Reason}
%% SslFlag = bool()
-spec setopts(socket(), socket_options(), bool()) ->
ok | {error, atom()}.
setopts(Socket, Options, true) ->
ssl:setopts(Socket, Options);
setopts(Socket, Options, false) ->
inet:setopts(Socket, Options).
%% @spec (Socket, SslFlag) -> ok | {error, Reason}
%% SslFlag = bool()
-spec close(socket(), bool()) -> ok | {error, atom()}.
close(Socket, true) ->
ssl:close(Socket);
close(Socket, false) ->
gen_tcp:close(Socket).
|
4f66f06a4d2c6822e296f11844645c5f3e0f0b4d1e971d1c10ea9f42227a5b46 | rrnewton/haskell-lockfree | ReactorDeque.hs | {-# LANGUAGE UndecidableInstances, FlexibleContexts, DeriveDataTypeable #-}
-- This is the Deque from the Reactor package written by Edward Kmett.
{-
Copyright 2011 Edward Kmett
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the author nor the names of his contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
-}
module Data.Concurrent.Deque.ReactorDeque (
Deque
-- * Local stack operations
, empty -- :: (MonadIO m, MArray a e IO) => IO (Deque a e)
, push -- :: (MonadIO m, MArray a e IO) => e -> Deque a e -> IO ()
, pop -- :: (MonadIO m, MArray a e IO) => Deque a e -> IO (Maybe e)
-- * Performance tuning
, withCapacity -- :: (MonadIO m, MArray a e IO) => Int -> IO (Deque a e)
, minimumCapacity -- :: Int
, defaultCapacity -- :: Int
-- * Work stealing
, steal -- :: (MonadIO m, MArray a e IO) => Deque a e -> IO (Stolen e)
, Stolen(..)
) where
-- | For an explanation of the implementation, see \"Dynamic Circular Work-Stealing Deque\"
-- by David Chase and Yossi Lev of Sun Microsystems.
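-- A typical single-owner usage (sketch): the owning thread calls 'push' and
-- 'pop' on its own deque while other threads call 'steal'; e.g. a
-- @Deque IOArray Int@ built with 'empty' or 'withCapacity'.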
import Prelude hiding (read)
import Control.Applicative hiding (empty)
import Data.Bits.Atomic
import Foreign.Ptr
import Foreign.ForeignPtr
import Foreign.Storable
import Data.IORef
import Data.Array.MArray
import Control.Monad
import Control.Monad.IO.Class
import Data.Data
import System.IO.Unsafe
data Buffer a e = Buffer {-# UNPACK #-} !Int !(a Int e)
instance Typeable2 a => Typeable1 (Buffer a) where
typeOf1 tae = mkTyConApp bufferTyCon [typeOf1 (aInte tae)]
where aInte :: t a e -> a Int e
aInte = undefined
bufferTyCon :: TyCon
bufferTyCon = mkTyCon "Reactor.Deque.Buffer"
size :: Buffer a e -> Int
size (Buffer i _) = i
data Deque a e = Deque
{ _tb :: ForeignPtr Int
, _content :: IORef (Buffer a e)
}
instance (MArray a e IO, Show e) => Show (Deque a e) where
showsPrec d (Deque tb content) = unsafePerformIO $ do
(t,b) <- withForeignPtr tb $ \p -> (,) <$> peekTop p <*> peekBottom p
buffer <- readIORef content
contents <- forM [t..b-1] (read buffer)
return $ showParen (d > 10) $
showString "Deque (ptr " . showsPrec 11 t . showChar ' ' . showsPrec 11 b . showString ") (buffer " . showsPrec 11 contents . showChar ')'
instance Typeable2 a => Typeable1 (Deque a) where
typeOf1 dae = mkTyConApp dequeTyCon [typeOf1 (aInte dae)]
where aInte :: t a e -> a Int e
aInte = undefined
dequeTyCon :: TyCon
dequeTyCon = mkTyCon "Reactor.Deque.Deque"
ptr :: Storable a => a -> a -> IO (ForeignPtr a)
ptr a b = do
p <- mallocForeignPtrArray 2
withForeignPtr p $ \q -> do
poke q a
pokeElemOff q 1 b
return p
minimumCapacity :: Int
minimumCapacity = 16
defaultCapacity :: Int
defaultCapacity = 32
bufferWithCapacity :: MArray a e IO => Int -> IO (Buffer a e)
bufferWithCapacity i =
Buffer i <$> newArray_ (0, (minimumCapacity `max` i) - 1)
withCapacity :: (MonadIO m, MArray a e IO) => Int -> m (Deque a e)
withCapacity i = liftIO (Deque <$> ptr 0 0 <*> (bufferWithCapacity i >>= newIORef))
empty :: (MonadIO m, MArray a e IO) => m (Deque a e)
empty = withCapacity defaultCapacity
{-# INLINE empty #-}
-- unsafeRead
read :: MArray a e IO => Buffer a e -> Int -> IO e
read (Buffer s c) i = do
readArray c (i `mod` s)
{-# INLINE read #-}
-- unsafeWrite
write :: MArray a e IO => Buffer a e -> Int -> e -> IO ()
write (Buffer s c) i e = do
writeArray c (i `mod` s) e
{-# INLINE write #-}
grow :: MArray a e IO => Buffer a e -> Int -> Int -> IO (Buffer a e)
grow c b t = do
c' <- bufferWithCapacity (size c * 2)
forM_ [t..b-1] $ \i -> read c i >>= write c' i
return c'
{-# INLINE grow #-}
peekBottom :: Ptr Int -> IO Int
peekBottom p = peekElemOff p 1
peekTop :: Ptr Int -> IO Int
peekTop p = peek p
pokeBottom :: Ptr Int -> Int -> IO ()
pokeBottom p = pokeElemOff p 1
push :: (MonadIO m, MArray a e IO) => e -> Deque a e -> m ()
push o (Deque tb content) = liftIO $ withForeignPtr tb $ \p -> do
b <- peekBottom p
t <- peekTop p
a <- readIORef content
let size' = b - t
if size' >= size a
then do
a' <- grow a b t
writeIORef content a'
go p a' b
else go p a b
where
go p arr b = do
write arr b o
pokeBottom p (b + 1)
data Stolen e
= Empty
| Abort
| Stolen e
deriving (Data,Typeable,Eq,Ord,Show,Read)
steal :: (MonadIO m, MArray a e IO) => Deque a e -> m (Stolen e)
steal (Deque tb content) = liftIO $ withForeignPtr tb $ \p -> do
t <- peekTop p
b <- peekBottom p
a <- readIORef content
let size' = b - t
if size' <= 0
then return Empty
else do
o <- read a t
result <- compareAndSwapBool p t (t + 1)
return $ if result then Stolen o else Abort
{-
steal' :: MArray a e IO => Deque a e -> IO (Maybe e)
steal' deque = do
s <- steal deque
case s of
Stolen e -> return (Just e)
Empty -> return Nothing
Abort -> steal' deque
-}
pop :: (MonadIO m, MArray a e IO) => Deque a e -> m (Maybe e)
pop (Deque tb content) = liftIO $ withForeignPtr tb $ \p -> do
b <- peekBottom p
a <- readIORef content
let b' = b - 1
pokeBottom p b'
t <- peekTop p
let size' = b' - t
if size' < 0
then do
pokeBottom p t
return Nothing
else do
o <- read a b'
if size' > 0
then return (Just o)
else do
result <- compareAndSwapBool p t (t + 1)
if result
then do
pokeBottom p (t + 1)
return (Just o)
else do
pokeBottom p (t + 1)
return Nothing
| null | https://raw.githubusercontent.com/rrnewton/haskell-lockfree/87122157cbbc96954fcc575b4b110003d3e5c2f8/chaselev-deque/Data/Concurrent/Deque/ReactorDeque.hs | haskell | * Local stack operations
* Performance tuning
:: Int
:: Int
* Work stealing
| For an explanation of the implementation, see \"Dynamic Circular Work-Stealing Deque\"
# UNPACK #
# INLINE empty #
unsafeRead
unsafeWrite | # LANGUAGE UndecidableInstances , FlexibleContexts , DeriveDataTypeable #
This is the from the Reactor package written by .
Copyright 2011
All rights reserved .
Redistribution and use in source and binary forms , with or without
modification , are permitted provided that the following conditions
are met :
1 . Redistributions of source code must retain the above copyright
notice , this list of conditions and the following disclaimer .
2 . Redistributions in binary form must reproduce the above copyright
notice , this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution .
3 . Neither the name of the author nor the names of his contributors
may be used to endorse or promote products derived from this software
without specific prior written permission .
THIS SOFTWARE IS PROVIDED BY THE AUTHORS ` ` AS IS '' AND ANY EXPRESS OR
IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED TO , THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED . IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL
DAMAGES ( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS
OR SERVICES ; LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS INTERRUPTION )
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT ,
STRICT LIABILITY , OR TORT ( INCLUDING NEGLIGENCE OR OTHERWISE ) ARISING IN
ANY WAY OUT OF THE USE OF THIS SOFTWARE , EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE .
Copyright 2011 Edward Kmett
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the author nor the names of his contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
-}
module Data.Concurrent.Deque.ReactorDeque (
Deque
: : ( MonadIO m , MArray a e IO ) = > IO ( Deque a e )
: : ( MonadIO m , MArray a e IO ) = > e - > Deque a e - > IO ( )
: : ( MonadIO m , MArray a e IO ) = > a e - > IO ( Maybe e )
: : ( MonadIO m , MArray a e IO ) = > Int - > IO ( Deque a e )
: : ( MonadIO m , MArray a e IO ) = > a e - > IO ( Stolen e )
, Stolen(..)
) where
by and of Sun Microsystems .
import Prelude hiding (read)
import Control.Applicative hiding (empty)
import Data.Bits.Atomic
import Foreign.Ptr
import Foreign.ForeignPtr
import Foreign.Storable
import Data.IORef
import Data.Array.MArray
import Control.Monad
import Control.Monad.IO.Class
import Data.Data
import System.IO.Unsafe
instance Typeable2 a => Typeable1 (Buffer a) where
typeOf1 tae = mkTyConApp bufferTyCon [typeOf1 (aInte tae)]
where aInte :: t a e -> a Int e
aInte = undefined
bufferTyCon :: TyCon
bufferTyCon = mkTyCon "Reactor.Deque.Buffer"
size :: Buffer a e -> Int
size (Buffer i _) = i
data Deque a e = Deque
{ _tb :: ForeignPtr Int
, _content :: IORef (Buffer a e)
}
instance (MArray a e IO, Show e) => Show (Deque a e) where
showsPrec d (Deque tb content) = unsafePerformIO $ do
(t,b) <- withForeignPtr tb $ \p -> (,) <$> peekTop p <*> peekBottom p
buffer <- readIORef content
contents <- forM [t..b-1] (read buffer)
return $ showParen (d > 10) $
showString "Deque (ptr " . showsPrec 11 t . showChar ' ' . showsPrec 11 b . showString ") (buffer " . showsPrec 11 contents . showChar ')'
instance Typeable2 a => Typeable1 (Deque a) where
typeOf1 dae = mkTyConApp dequeTyCon [typeOf1 (aInte dae)]
where aInte :: t a e -> a Int e
aInte = undefined
dequeTyCon :: TyCon
dequeTyCon = mkTyCon "Reactor.Deque.Deque"
ptr :: Storable a => a -> a -> IO (ForeignPtr a)
ptr a b = do
p <- mallocForeignPtrArray 2
withForeignPtr p $ \q -> do
poke q a
pokeElemOff q 1 b
return p
minimumCapacity :: Int
minimumCapacity = 16
defaultCapacity :: Int
defaultCapacity = 32
bufferWithCapacity :: MArray a e IO => Int -> IO (Buffer a e)
bufferWithCapacity i =
Buffer i <$> newArray_ (0, (minimumCapacity `max` i) - 1)
withCapacity :: (MonadIO m, MArray a e IO) => Int -> m (Deque a e)
withCapacity i = liftIO (Deque <$> ptr 0 0 <*> (bufferWithCapacity i >>= newIORef))
empty :: (MonadIO m, MArray a e IO) => m (Deque a e)
empty = withCapacity defaultCapacity
read :: MArray a e IO => Buffer a e -> Int -> IO e
read (Buffer s c) i = do
readArray c (i `mod` s)
# INLINE read #
write :: MArray a e IO => Buffer a e -> Int -> e -> IO ()
write (Buffer s c) i e = do
writeArray c (i `mod` s) e
# INLINE write #
grow :: MArray a e IO => Buffer a e -> Int -> Int -> IO (Buffer a e)
grow c b t = do
c' <- bufferWithCapacity (size c * 2)
forM_ [t..b-1] $ \i -> read c i >>= write c' i
return c'
# INLINE grow #
peekBottom :: Ptr Int -> IO Int
peekBottom p = peekElemOff p 1
peekTop :: Ptr Int -> IO Int
peekTop p = peek p
pokeBottom :: Ptr Int -> Int -> IO ()
pokeBottom p = pokeElemOff p 1
push :: (MonadIO m, MArray a e IO) => e -> Deque a e -> m ()
push o (Deque tb content) = liftIO $ withForeignPtr tb $ \p -> do
b <- peekBottom p
t <- peekTop p
a <- readIORef content
let size' = b - t
if size' >= size a
then do
a' <- grow a b t
writeIORef content a'
go p a' b
else go p a b
where
go p arr b = do
write arr b o
pokeBottom p (b + 1)
data Stolen e
= Empty
| Abort
| Stolen e
deriving (Data,Typeable,Eq,Ord,Show,Read)
steal :: (MonadIO m, MArray a e IO) => Deque a e -> m (Stolen e)
steal (Deque tb content) = liftIO $ withForeignPtr tb $ \p -> do
t <- peekTop p
b <- peekBottom p
a <- readIORef content
let size' = b - t
if size' <= 0
then return Empty
else do
o <- read a t
result <- compareAndSwapBool p t (t + 1)
return $ if result then Stolen o else Abort
steal ' : : MArray a e IO = > a e - > IO ( Maybe e )
steal ' deque = do
s < - steal deque
case s of
Stolen e - > return ( Just e )
Empty - > return Nothing
Abort - > steal ' deque
steal' :: MArray a e IO => Deque a e -> IO (Maybe e)
steal' deque = do
s <- steal deque
case s of
Stolen e -> return (Just e)
Empty -> return Nothing
Abort -> steal' deque
-}
pop :: (MonadIO m, MArray a e IO) => Deque a e -> m (Maybe e)
pop (Deque tb content) = liftIO $ withForeignPtr tb $ \p -> do
b <- peekBottom p
a <- readIORef content
let b' = b - 1
pokeBottom p b'
t <- peekTop p
let size' = b' - t
if size' < 0
then do
pokeBottom p t
return Nothing
else do
o <- read a b'
if size' > 0
then return (Just o)
else do
result <- compareAndSwapBool p t (t + 1)
if result
then do
pokeBottom p (t + 1)
return (Just o)
else do
pokeBottom p (t + 1)
return Nothing
|
b30f98f9c4a26352d7bca3ecbac6dc6dc64987ab196a87dc066ca431d383a16d | tonymorris/geo-osm | TypeL.hs | -- | Values with a @type@ accessor.
module Data.Geo.OSM.Lens.TypeL where
import Data.Geo.OSM.MemberType
import Control.Lens.Lens
class TypeL a where
typeL ::
Lens' a MemberType
| null | https://raw.githubusercontent.com/tonymorris/geo-osm/776542be2fd30a05f0f9e867128eca5ad5d66bec/src/Data/Geo/OSM/Lens/TypeL.hs | haskell | | Values with a @type@ accessor. | module Data.Geo.OSM.Lens.TypeL where
import Data.Geo.OSM.MemberType
import Control.Lens.Lens
class TypeL a where
typeL ::
Lens' a MemberType
|
b621f145fcaadb568451ecec32af217ba7072d7220f83ee384af35c0b53887cf | OCamlPro/typerex-lint | common.mli | val id : string -> string
val cstr : string -> string
val print_longident : Longident.t -> string
val upprint : Parsetree.core_type -> string option
val mk_exploded : string -> Longident.t
val mk_aut : string -> Longident.t
val mk_aut_cstr : string -> Longident.t
val mk_match : string -> Longident.t
val warn : ('a, unit, string, unit) format4 -> 'a
val debug : ('a, unit, string, unit) format4 -> 'a
val raise_errorf : ?loc:Location.t -> ('a, unit, string, 'b) format4 -> 'a
(** [instantiate_type_decl replacements typ] replaces all the free variables of
typ present in the association list replacements by its corresponding value
*)
val instantiate_type_decl : (string * Parsetree.core_type) list
-> Parsetree.type_declaration
-> Parsetree.type_declaration
(** Returns the list of toplevel value-bindings in the given structure *)
val get_val_decls : Parsetree.structure -> Parsetree.value_binding list
(** [filter_decls type_decls] returns the list of monomorphic type
    declarations in type_decls *)
val filter_decls : Parsetree.type_declaration list
-> Parsetree.type_declaration list
val stdlib : Parsetree.type_declaration list
val concrete_stdlib : Parsetree.type_declaration list
| null | https://raw.githubusercontent.com/OCamlPro/typerex-lint/6d9e994c8278fb65e1f7de91d74876531691120c/libs/ocplib-sempatch/lib/automaton/generator/common.mli | ocaml | * [instantiate_type_decl replacements typ] replaces all the free variables of
typ present in the association list replacements by its corresponding value
* Returns the list of toplevel value-bindings in the given structure | val id : string -> string
val cstr : string -> string
val print_longident : Longident.t -> string
val upprint : Parsetree.core_type -> string option
val mk_exploded : string -> Longident.t
val mk_aut : string -> Longident.t
val mk_aut_cstr : string -> Longident.t
val mk_match : string -> Longident.t
val warn : ('a, unit, string, unit) format4 -> 'a
val debug : ('a, unit, string, unit) format4 -> 'a
val raise_errorf : ?loc:Location.t -> ('a, unit, string, 'b) format4 -> 'a
val instantiate_type_decl : (string * Parsetree.core_type) list
-> Parsetree.type_declaration
-> Parsetree.type_declaration
val get_val_decls : Parsetree.structure -> Parsetree.value_binding list
(** [filter_decls type_decls] returns the list of monomorphic type
    declarations in type_decls *)
val filter_decls : Parsetree.type_declaration list
-> Parsetree.type_declaration list
val stdlib : Parsetree.type_declaration list
val concrete_stdlib : Parsetree.type_declaration list
|
9d3bc5c7d8055622f7eea0612d974e430c22f6a0916684e7e39cf82c162b6360 | static-analysis-engineering/codehawk | jCHDumpBasicTypes.mli | (* =============================================================================
CodeHawk Java Analyzer
Author: Arnaud Venet
------------------------------------------------------------------------------
The MIT License (MIT)
Copyright (c) 2005-2020 Kestrel Technology LLC
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
============================================================================= *)
(* jchlib *)
open JCHBasicTypesAPI
val value_signature : value_type_t -> string
val type2shortstring : value_type_t -> string
val method_signature :
string ->
< arguments : value_type_t list; return_value : value_type_t option; .. > ->
string
val signature : string -> descriptor_t -> string
val jvm_basic_type : java_basic_type_t -> char
val java_basic_type : java_basic_type_t -> char
val dump_constant_value : 'a IO.output -> constant_value_t -> unit
val dump_constant : 'a IO.output -> constant_t -> unit
val dump_verification_type : verification_type_t -> string
val dump_stackmap :
'a IO.output -> int * verification_type_t list * verification_type_t list -> unit
val dump_exc :
'a IO.output ->
'b ->
< catch_type : < name : string; .. > option; h_end : int; h_start :
int; handler : int; .. > ->
unit
| null | https://raw.githubusercontent.com/static-analysis-engineering/codehawk/98ced4d5e6d7989575092df232759afc2cb851f6/CodeHawk/CHJ/jchlib/jCHDumpBasicTypes.mli | ocaml | jchlib | = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Java Analyzer
Author : ------------------------------------------------------------------------------
The MIT License ( MIT )
Copyright ( c ) 2005 - 2020 Kestrel Technology LLC
Permission is hereby granted , free of charge , to any person obtaining a copy
of this software and associated documentation files ( the " Software " ) , to deal
in the Software without restriction , including without limitation the rights
to use , copy , modify , merge , publish , distribute , sublicense , and/or sell
copies of the Software , and to permit persons to whom the Software is
furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE .
= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Java Analyzer
Author: Arnaud Venet
------------------------------------------------------------------------------
The MIT License (MIT)
Copyright (c) 2005-2020 Kestrel Technology LLC
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
============================================================================= *)
open JCHBasicTypesAPI
val value_signature : value_type_t -> string
val type2shortstring : value_type_t -> string
val method_signature :
string ->
< arguments : value_type_t list; return_value : value_type_t option; .. > ->
string
val signature : string -> descriptor_t -> string
val jvm_basic_type : java_basic_type_t -> char
val java_basic_type : java_basic_type_t -> char
val dump_constant_value : 'a IO.output -> constant_value_t -> unit
val dump_constant : 'a IO.output -> constant_t -> unit
val dump_verification_type : verification_type_t -> string
val dump_stackmap :
'a IO.output -> int * verification_type_t list * verification_type_t list -> unit
val dump_exc :
'a IO.output ->
'b ->
< catch_type : < name : string; .. > option; h_end : int; h_start :
int; handler : int; .. > ->
unit
|
4318c141d9bf4747051902aa19cfb55e190a0a09a202cdd8a5fc9b03df37b411 | nixeagle/cl-github | json.lisp | This file modifies the basic behavior of CL - JSON . All of the
;;; functions that directly modify and manipulate how cl-json reads
;;; input are derived from the way CL-JSON does the default handling.
;;;
;;; My modifications are pretty extensive, but for completeness:
;;;
;;; Specifically
;;; - beginning-of-object
;;; - key-add-or-set
;;; - value-add-or-set
;;; - accumulator-get-object
;;; - accumulator-add-preserved-key
;;; Are especially derived from CL-JSON.
;;;
;;; CL-JSON's license is included here for completeness.
;;;
;;; (This is the MIT / X Consortium license as taken from
;;; -license.html)
;;;
;;; Copyright (c) 2006-2008 Henrik Hjelte
;;; Copyright (c) 2008 (code from the program)
;;;
;;; Permission is hereby granted, free of charge, to any person obtaining
;;; a copy of this software and associated documentation files (the
;;; "Software"), to deal in the Software without restriction, including
;;; without limitation the rights to use, copy, modify, merge, publish,
;;; distribute, sublicense, and/or sell copies of the Software, and to
;;; permit persons to whom the Software is furnished to do so, subject to
;;; the following conditions:
;;;
;;; The above copyright notice and this permission notice shall be
;;; included in all copies or substantial portions of the Software.
;;;
;;; THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
;;; EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
;;; MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
;;; NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
;;; LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
;;; OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
;;; WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
(in-package :cl-github)
From Alexandria
(defun alist-hash-table (alist &rest hash-table-initargs)
"Returns a hash table containing the keys and values of the association list
ALIST. Hash table is initialized using the HASH-TABLE-INITARGS."
(let ((table (apply #'make-hash-table hash-table-initargs)))
(dolist (cons alist)
(setf (gethash (car cons) table) (cdr cons)))
table))
(defparameter +github-class-map+
(alist-hash-table '(("USER" . "USER") ("PLAN" . "PLAN") ("AUTHOR" . "SIMPLE-USER")
("PARENTS" . "PARENT") ("COMMIT" . "COMMIT")
("MODIFIED" . "FILE-DIFF") ("COMMITTER" . "SIMPLE-USER")
("DELETE-TOKEN" . "DELETE-TOKEN") ("TREE" . "TREEISH")
("BLOB" . "BLOB") ("BLOCKS" . "COMMIT-RANGE")
("HEADS" . "HEAD") ("COMMITS" . "COMMITS")
("REPOSITORY" . "REPOSITORY")
("PUBLIC-KEYS" . "PUBLIC-KEY")
("REPOSITORIES" . "REPOSITORIES")
("NETWORK" . "NETWORK") ("USERS" . "USERS")
("ISSUES" . "ISSUE") ("ISSUE" . "ISSUE")
("COMMENT" . "COMMENT"))
:test #'equal)
"mapping of class strings to real classes.")
(defvar *current-prototype* nil
"Stores the key of an object until its stored in `*PREVIOUS-PROTOTYPE*'.")
(defvar *previous-prototype* nil
"Stores the prototype of the json class above the current one.
For example: {\"user\":{\"plan\":{\"name\":....}}}
When parsing the plan json object, this will be set to \"USER\".")
(defun beginning-of-object ()
"Do more at prototype init"
(setq *previous-prototype* *current-prototype*)
(setq *current-prototype* nil)
(json::init-accumulator-and-prototype))
(defun camel-case-to-lisp (string)
(declare (type string string))
(string-upcase (iter (for char :in-string string)
(if (char= #\_ char)
(collect #\- :result-type string)
(collect char :result-type string)))))
(defgeneric key-add-or-set (key)
(:documentation "Mark KEY a prototype if it is, and add it to the accumulator."))
(defmethod key-add-or-set (key)
(let ((key (funcall #'camel-case-to-lisp key)))
(let ((class-key (gethash key +github-class-map+ nil)))
(if (and (not *current-prototype*)
class-key)
(progn (setq json::*accumulator-last*
(setf (cdr json::*accumulator-last*) (cons (cons key nil) nil)))
(setq *current-prototype* class-key)
#+ () (pushnew (cons "PROTOTYPE" key) (cddr json::*accumulator*))
(setq json::*prototype* class-key))
(setq json::*accumulator-last*
(setf (cdr json::*accumulator-last*) (cons (cons key nil) nil)))))
json::*accumulator*))
(defgeneric value-add-or-set (value)
(:documentation "If VALUE (in a JSON Object being decoded)
corresponds to a key which matches *PROTOTYPE-NAME*,
set VALUE to be the prototype of the Object.
Otherwise, do the same as ACCUMULATOR-ADD-VALUE."))
(defmethod value-add-or-set (value)
(if (eql json::*prototype* t)
(progn
(check-type value (or json::prototype string)
(format nil "Invalid prototype: ~S." value))
(setq json::*prototype* *current-prototype*)
json::*accumulator*)
(json::accumulator-add-value value)))
(defmethod value-add-or-set :after (value)
(setq *current-prototype* nil))
(defgeneric as-symbol (object)
(:method ((object string))
"Change OBJECT to a symbol by interning it."
(intern object))
(:method ((object symbol))
"Return OBJECT as is."
object)
(:documentation "Get the symbolic representation of object."))
(defgeneric accumulator-get-object ()
(:documentation
"Return a CLOS object, using keys and values accumulated so far in
the list accumulator as slot names and values, respectively. If the
JSON Object had a prototype field infer the class of the object and
the package wherein to intern slot names from the prototype.
Otherwise, create a FLUID-OBJECT with slots interned in
*JSON-SYMBOLS-PACKAGE*."))
;;; Modified from cl-json
(defmethod accumulator-get-object ()
(flet ((intern-keys (bindings)
(loop for (key . value) in bindings
collect (cons (json:json-intern key) value))))
(if (typep *previous-prototype* 'json::prototype)
(with-slots (lisp-class lisp-superclasses lisp-package)
*previous-prototype*
(let* ((package-name (as-symbol lisp-package))
(json:*json-symbols-package*
(if package-name
(or (find-package package-name)
(error 'package-error :package package-name))
json::*json-symbols-package*))
(class (as-symbol lisp-class))
(superclasses (mapcar #'as-symbol lisp-superclasses)))
(json::maybe-add-prototype
(json:make-object (intern-keys (cdr json::*accumulator*))
class superclasses)
*previous-prototype*)))
(let ((bindings (intern-keys (cdr json::*accumulator*)))
(class (if (stringp *previous-prototype*) (as-symbol *previous-prototype*))))
(when (and *previous-prototype* (not class))
(push (cons json::*prototype-name* *previous-prototype*) bindings))
(if (and (not class) (listp bindings) (not (consp (cdr bindings))))
(cdar bindings)
(json:make-object bindings class))))))
(defmacro with-github-decoder (&body body)
"Execute BODY with decoder bindings appropriate for github's api."
`(json:bind-custom-vars
(:beginning-of-object #'beginning-of-object
:object-key #'key-add-or-set
:object-value #'value-add-or-set
:end-of-object #'accumulator-get-object
:object-scope '(json:*INTERNAL-DECODER*
*current-prototype*
*previous-prototype*))
,@body))
(defgeneric accumulator-add-preserved-keyword-key (key))
(defmethod accumulator-add-preserved-keyword-key (key)
(let ((*package* (find-package :keyword))
(*read-eval* nil)
(*readtable* (copy-readtable nil)))
(setf (readtable-case *readtable*) :preserve)
(setq json::*accumulator-last*
(setf (cdr json::*accumulator-last*)
(cons (cons (read-from-string key nil nil :preserve-whitespace t)
nil) nil)))))
(defgeneric accumulator-add-preserved-key (key))
(defmethod accumulator-add-preserved-key (key)
(setq json::*accumulator-last*
(setf (cdr json::*accumulator-last*) (cons (cons key nil) nil))))
(defmacro with-simple-alist-decoder (&body body)
"Execute body with decoder bindings set to return preserved alists."
`(json:bind-custom-vars
(:object-key #'accumulator-add-preserved-key)
,@body))
(defgeneric to-json (object)
(:method :around (obj)
(let ((json:*json-symbols-package* :cl-github)
(*package* (find-package :cl-github)))
(with-local-class-registry (:inherit nil)
(call-next-method)))))
(defmethod to-json ((obj string))
(with-github-decoder
(json:decode-json-from-string obj)))
(defmethod to-json ((obj stream))
"Read directly from a stream and close the stream when done."
(prog1 (with-github-decoder
(json:decode-json obj))
(close obj)))
(defgeneric json->alist (object))
(defmethod json->alist ((object stream))
(with-decoder-simple-list-semantics
(decode-json object)))
(defmethod json->alist :after ((object stream))
(close object))
(defun ensure-list (object)
"Ensure OBJECT is a list."
(the list (if (listp object)
object
(list object))))
(defgeneric json->list (object))
(defmethod json->list ((object stream))
(ensure-list (cdar (with-simple-alist-decoder
(decode-json object)))))
(defmethod json->list :after ((object stream))
(close object))
(defgeneric json->element (object)
(:documentation "Return first element of OBJECT's json conversion result."))
(defmethod json->element ((object stream))
(car (json->list object)))
(defgeneric json->class (object class)
(:documentation "Store json in OBJECT to CLASS"))
(defmethod json->class ((object stream)
(class symbol))
"Store json from STREAM in an instance of CLASS."
(make-object (with-decoder-simple-list-semantics
(decode-json object))
class))
(defmethod json->class :around (object class)
"Set package to cl-github and use local class registry."
(let ((json:*json-symbols-package* :cl-github))
(with-local-class-registry (:inherit nil)
(call-next-method))))
(defmethod json->class :after ((object stream) class)
"Close STREAM after we are done with it."
(close object))
| null | https://raw.githubusercontent.com/nixeagle/cl-github/19ba2477ea65e52e74e166482407ea96bee8e395/json.lisp | lisp | functions that directly modify and manipulate how cl-json reads
My modifications are pretty extensive, but for completeness:
Specifically
- beginning-of-object
- key-add-or-set
- value-add-or-set
- accumulator-get-object
- accumulator-add-preserved-key
-license.html)
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
without limitation the rights to use, copy, modify, merge, publish,
the following conditions:
The above copyright notice and this permission notice shall be
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Modified from cl-json | This file modifies the basic behavior of CL - JSON . All of the
input are derived from the way CL - JSON does the default handling .
Are especially derived from CL - JSON .
CL - JSON 's license is included here for completeness .
( This is the MIT / X Consortium license as taken from
Copyright ( c ) 2006 - 2008 Henrik Hjelte
Copyright ( c ) 2008 ( code from the program )
" Software " ) , to deal in the Software without restriction , including
distribute , sublicense , and/or sell copies of the Software , and to
permit persons to whom the Software is furnished to do so , subject to
included in all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND ,
LIABLE FOR ANY CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION
(in-package :cl-github)
From Alexandria
(defun alist-hash-table (alist &rest hash-table-initargs)
"Returns a hash table containing the keys and values of the association list
ALIST. Hash table is initialized using the HASH-TABLE-INITARGS."
(let ((table (apply #'make-hash-table hash-table-initargs)))
(dolist (cons alist)
(setf (gethash (car cons) table) (cdr cons)))
table))
(defparameter +github-class-map+
(alist-hash-table '(("USER" . "USER") ("PLAN" . "PLAN") ("AUTHOR" . "SIMPLE-USER")
("PARENTS" . "PARENT") ("COMMIT" . "COMMIT")
("MODIFIED" . "FILE-DIFF") ("COMMITTER" . "SIMPLE-USER")
("DELETE-TOKEN" . "DELETE-TOKEN") ("TREE" . "TREEISH")
("BLOB" . "BLOB") ("BLOCKS" . "COMMIT-RANGE")
("HEADS" . "HEAD") ("COMMITS" . "COMMITS")
("REPOSITORY" . "REPOSITORY")
("PUBLIC-KEYS" . "PUBLIC-KEY")
("REPOSITORIES" . "REPOSITORIES")
("NETWORK" . "NETWORK") ("USERS" . "USERS")
("ISSUES" . "ISSUE") ("ISSUE" . "ISSUE")
("COMMENT" . "COMMENT"))
:test #'equal)
"mapping of class strings to real classes.")
(defvar *current-prototype* nil
"Stores the key of an object until its stored in `*PREVIOUS-PROTOTYPE*'.")
(defvar *previous-prototype* nil
"Stores the prototype of the json class above the current one.
For example: {\"user\":{\"plan\":{\"name\":....}}}
When parsing the plan json object, this will be set to \"USER\".")
(defun beginning-of-object ()
"Do more at prototype init"
(setq *previous-prototype* *current-prototype*)
(setq *current-prototype* nil)
(json::init-accumulator-and-prototype))
(defun camel-case-to-lisp (string)
(declare (type string string))
(string-upcase (iter (for char :in-string string)
(if (char= #\_ char)
(collect #\- :result-type string)
(collect char :result-type string)))))
(defgeneric key-add-or-set (key)
(:documentation "Mark KEY a prototype if it is, and add it to the accumulator."))
(defmethod key-add-or-set (key)
(let ((key (funcall #'camel-case-to-lisp key)))
(let ((class-key (gethash key +github-class-map+ nil)))
(if (and (not *current-prototype*)
class-key)
(progn (setq json::*accumulator-last*
(setf (cdr json::*accumulator-last*) (cons (cons key nil) nil)))
(setq *current-prototype* class-key)
#+ () (pushnew (cons "PROTOTYPE" key) (cddr json::*accumulator*))
(setq json::*prototype* class-key))
(setq json::*accumulator-last*
(setf (cdr json::*accumulator-last*) (cons (cons key nil) nil)))))
json::*accumulator*))
(defgeneric value-add-or-set (value)
(:documentation "If VALUE (in a JSON Object being decoded)
corresponds to a key which matches *PROTOTYPE-NAME*,
set VALUE to be the prototype of the Object.
Otherwise, do the same as ACCUMULATOR-ADD-VALUE."))
(defmethod value-add-or-set (value)
(if (eql json::*prototype* t)
(progn
(check-type value (or json::prototype string)
(format nil "Invalid prototype: ~S." value))
(setq json::*prototype* *current-prototype*)
json::*accumulator*)
(json::accumulator-add-value value)))
(defmethod value-add-or-set :after (value)
(setq *current-prototype* nil))
(defgeneric as-symbol (object)
(:method ((object string))
"Change OBJECT to a symbol by interning it."
(intern object))
(:method ((object symbol))
"Return OBJECT as is."
object)
(:documentation "Get the symbolic representation of object."))
(defgeneric accumulator-get-object ()
(:documentation
"Return a CLOS object, using keys and values accumulated so far in
the list accumulator as slot names and values, respectively. If the
JSON Object had a prototype field infer the class of the object and
the package wherein to intern slot names from the prototype.
Otherwise, create a FLUID-OBJECT with slots interned in
*JSON-SYMBOLS-PACKAGE*."))
(defmethod accumulator-get-object ()
(flet ((intern-keys (bindings)
(loop for (key . value) in bindings
collect (cons (json:json-intern key) value))))
(if (typep *previous-prototype* 'json::prototype)
(with-slots (lisp-class lisp-superclasses lisp-package)
*previous-prototype*
(let* ((package-name (as-symbol lisp-package))
(json:*json-symbols-package*
(if package-name
(or (find-package package-name)
(error 'package-error :package package-name))
json::*json-symbols-package*))
(class (as-symbol lisp-class))
(superclasses (mapcar #'as-symbol lisp-superclasses)))
(json::maybe-add-prototype
(json:make-object (intern-keys (cdr json::*accumulator*))
class superclasses)
*previous-prototype*)))
(let ((bindings (intern-keys (cdr json::*accumulator*)))
(class (if (stringp *previous-prototype*) (as-symbol *previous-prototype*))))
(when (and *previous-prototype* (not class))
(push (cons json::*prototype-name* *previous-prototype*) bindings))
(if (and (not class) (listp bindings) (not (consp (cdr bindings))))
(cdar bindings)
(json:make-object bindings class))))))
(defmacro with-github-decoder (&body body)
"Execute BODY with decoder bindings appropriate for github's api."
`(json:bind-custom-vars
(:beginning-of-object #'beginning-of-object
:object-key #'key-add-or-set
:object-value #'value-add-or-set
:end-of-object #'accumulator-get-object
:object-scope '(json:*INTERNAL-DECODER*
*current-prototype*
*previous-prototype*))
,@body))
(defgeneric accumulator-add-preserved-keyword-key (key))
(defmethod accumulator-add-preserved-keyword-key (key)
(let ((*package* (find-package :keyword))
(*read-eval* nil)
(*readtable* (copy-readtable nil)))
(setf (readtable-case *readtable*) :preserve)
(setq json::*accumulator-last*
(setf (cdr json::*accumulator-last*)
(cons (cons (read-from-string key nil nil :preserve-whitespace t)
nil) nil)))))
(defgeneric accumulator-add-preserved-key (key))
(defmethod accumulator-add-preserved-key (key)
(setq json::*accumulator-last*
(setf (cdr json::*accumulator-last*) (cons (cons key nil) nil))))
(defmacro with-simple-alist-decoder (&body body)
"Execute body with decoder bindings set to return preserved alists."
`(json:bind-custom-vars
(:object-key #'accumulator-add-preserved-key)
,@body))
(defgeneric to-json (object)
(:method :around (obj)
(let ((json:*json-symbols-package* :cl-github)
(*package* (find-package :cl-github)))
(with-local-class-registry (:inherit nil)
(call-next-method)))))
(defmethod to-json ((obj string))
(with-github-decoder
(json:decode-json-from-string obj)))
(defmethod to-json ((obj stream))
"Read directly from a stream and close the stream when done."
(prog1 (with-github-decoder
(json:decode-json obj))
(close obj)))
(defgeneric json->alist (object))
(defmethod json->alist ((object stream))
(with-decoder-simple-list-semantics
(decode-json object)))
(defmethod json->alist :after ((object stream))
(close object))
(defun ensure-list (object)
"Ensure OBJECT is a list."
(the list (if (listp object)
object
(list object))))
(defgeneric json->list (object))
(defmethod json->list ((object stream))
(ensure-list (cdar (with-simple-alist-decoder
(decode-json object)))))
(defmethod json->list :after ((object stream))
(close object))
(defgeneric json->element (object)
(:documentation "Return first element of OBJECT's json conversion result."))
(defmethod json->element ((object stream))
(car (json->list object)))
(defgeneric json->class (object class)
(:documentation "Store json in OBJECT to CLASS"))
(defmethod json->class ((object stream)
(class symbol))
"Store json from STREAM in an instance of CLASS."
(make-object (with-decoder-simple-list-semantics
(decode-json object))
class))
(defmethod json->class :around (object class)
"Set package to cl-github and use local class registry."
(let ((json:*json-symbols-package* :cl-github))
(with-local-class-registry (:inherit nil)
(call-next-method))))
(defmethod json->class :after ((object stream) class)
"Close STREAM after we are done with it."
(close object))
|
0a64c0178d933d98f65728a603346de6f4030b373dffaf572cee0b63a1207d51 | Octachron/tensority | small_unified.ml | module V = Small_vec
module M = Small_matrix
module T = Tensor
type _ t =
| Scalar: float ref -> < contr:Shape.empty; cov: Shape.empty > t
| Vec: 'a V.t -> < contr: 'a Shape.single; cov: Shape.empty > t
| Matrix: ('a * 'b) M.t -> < contr:'a Shape.single; cov:'b Shape.single > t
let scalar f = Scalar (ref f)
let vector n array = Vec(V.create n array)
let matrix n m array = Matrix(M.create n m array)
module Operators = struct
let (+) (type a) (x:a t) (y:a t): a t = match x, y with
| Scalar x, Scalar y -> Scalar ( ref @@ !x +. !y )
| Vec x, Vec y -> Vec V.(x + y)
| Matrix x, Matrix y -> Matrix M.( x + y )
let (-) (type a) (x: a t)(y: a t): a t = match x, y with
| Scalar x, Scalar y -> Scalar ( ref @@ !x -. !y )
| Vec x, Vec y -> Vec V.(x - y)
| Matrix x, Matrix y -> Matrix M.( x - y )
let (~-) (type a) (t:a t) : a t = match t with
| Scalar f -> Scalar ( ref @@ -. !f)
| Vec v -> Vec V.( - v)
| Matrix m -> Matrix M.( - m )
let ( |*| ) (type a) (t: a t) (u: a t) = match t, u with
| Scalar x, Scalar y -> !x *. !y
| Vec u, Vec v -> V.( u |*| v )
| Matrix m, Matrix n -> M.( m |*| n )
let ( * ) (type a) (type b) (type c)
(x: <contr:a; cov:b> t)(y: <contr:b;cov:c> t): <contr:a;cov:c> t =
match x, y with
| Scalar x, Scalar y -> Scalar ( ref @@ !x *. !y )
| Matrix m, Matrix n -> Matrix M.( m * n)
| Matrix m, Vec v -> Vec M.( m @ v )
| Vec v, Scalar f -> Vec V.( !f *. v )
let one (type a): <contr:a;cov:a> t -> <contr:a;cov:a> t = function
| Scalar _ -> Scalar(ref 1.)
| Matrix m -> Matrix M.(id @@ fst @@ typed_dims m)
let ( **^ ) (type a) (t: <contr:a; cov:a> t) k =
let rec aux: type a.
acc:(<contr:a; cov:a> t as 'te) -> t:'te -> int -> 'te =
fun ~acc ~t k ->
match k with
| 0 -> acc
| 1 -> acc * t
| k when k land 1 = 1 -> aux ~acc:( acc * t ) ~t:(t * t) (k lsr 1)
| k -> aux ~acc ~t:(t*t) (k lsr 1) in
aux ~acc:(one t) ~t k
let ( *. ) (type a) s (t : a t) : a t = match t with
| Scalar x -> Scalar ( ref @@ s *. !x )
| Vec v -> Vec V.( s *. v )
| Matrix m -> Matrix M.( s *. m )
let ( /. ) (type a) (t : a t) s : a t = match t with
| Scalar x -> Scalar ( ref @@ !x /. s )
| Vec v -> Vec V.( v /. s )
| Matrix m -> Matrix M.( m /. s)
end
let (.%()): type a b. <contr:a; cov:b> t -> (a Shape.lt * b Shape.lt)
-> float = fun t (contr,cov) ->
let open Shape in
match[@warning "-4"] t, contr, cov with
| Scalar f, [] , [] -> !f
| Vec v, [a], [] -> v.V.%(a)
| Matrix m, [i], [j] -> m.M.%(i,j)
| _ -> .
and (.%()<-): type a b.
<contr:a; cov:b> t -> (a Shape.lt * b Shape.lt) -> float -> unit
= fun t (contr,cov) x ->
let open Shape in
match[@warning "-4"] t, contr, cov with
| Scalar f, [] , [] -> f := x
| Vec v, [a] , [] -> v.V.%(a) <- x
| Matrix m, [i], [j] -> m.M.%(i,j) <- x
| _ -> .
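(* Editorial aside, not part of the repository: ( **^ ) above uses
   exponentiation by squaring; the same scheme specialised to plain
   integers looks like this. *)
let pow_int base k =
  let rec aux ~acc ~t k =
    match k with
    | 0 -> acc
    | 1 -> acc * t
    | k when k land 1 = 1 -> aux ~acc:(acc * t) ~t:(t * t) (k lsr 1)
    | k -> aux ~acc ~t:(t * t) (k lsr 1)
  in
  aux ~acc:1 ~t:base k

let () = assert (pow_int 3 5 = 243)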
| null | https://raw.githubusercontent.com/Octachron/tensority/2689fba0bb9c693ef51bebe9cf92c37ab30ca17e/lib/small_unified.ml | ocaml | module V = Small_vec
module M = Small_matrix
module T = Tensor
type _ t =
| Scalar: float ref -> < contr:Shape.empty; cov: Shape.empty > t
| Vec: 'a V.t -> < contr: 'a Shape.single; cov: Shape.empty > t
| Matrix: ('a * 'b) M.t -> < contr:'a Shape.single; cov:'b Shape.single > t
let scalar f = Scalar (ref f)
let vector n array = Vec(V.create n array)
let matrix n m array = Matrix(M.create n m array)
module Operators = struct
let (+) (type a) (x:a t) (y:a t): a t = match x, y with
| Scalar x, Scalar y -> Scalar ( ref @@ !x +. !y )
| Vec x, Vec y -> Vec V.(x + y)
| Matrix x, Matrix y -> Matrix M.( x + y )
let (-) (type a) (x: a t)(y: a t): a t = match x, y with
| Scalar x, Scalar y -> Scalar ( ref @@ !x -. !y )
| Vec x, Vec y -> Vec V.(x - y)
| Matrix x, Matrix y -> Matrix M.( x - y )
let (~-) (type a) (t:a t) : a t = match t with
| Scalar f -> Scalar ( ref @@ -. !f)
| Vec v -> Vec V.( - v)
| Matrix m -> Matrix M.( - m )
let ( |*| ) (type a) (t: a t) (u: a t) = match t, u with
| Scalar x, Scalar y -> !x *. !y
| Vec u, Vec v -> V.( u |*| v )
| Matrix m, Matrix n -> M.( m |*| n )
let ( * ) (type a) (type b) (type c)
(x: <contr:a; cov:b> t)(y: <contr:b;cov:c> t): <contr:a;cov:c> t =
match x, y with
| Scalar x, Scalar y -> Scalar ( ref @@ !x *. !y )
| Matrix m, Matrix n -> Matrix M.( m * n)
| Matrix m, Vec v -> Vec M.( m @ v )
| Vec v, Scalar f -> Vec V.( !f *. v )
let one (type a): <contr:a;cov:a> t -> <contr:a;cov:a> t = function
| Scalar _ -> Scalar(ref 1.)
| Matrix m -> Matrix M.(id @@ fst @@ typed_dims m)
let ( **^ ) (type a) (t: <contr:a; cov:a> t) k =
let rec aux: type a.
acc:(<contr:a; cov:a> t as 'te) -> t:'te -> int -> 'te =
fun ~acc ~t k ->
match k with
| 0 -> acc
| 1 -> acc * t
| k when k land 1 = 1 -> aux ~acc:( acc * t ) ~t:(t * t) (k lsr 1)
| k -> aux ~acc ~t:(t*t) (k lsr 1) in
aux ~acc:(one t) ~t k
let ( *. ) (type a) s (t : a t) : a t = match t with
| Scalar x -> Scalar ( ref @@ s *. !x )
| Vec v -> Vec V.( s *. v )
| Matrix m -> Matrix M.( s *. m )
let ( /. ) (type a) (t : a t) s : a t = match t with
| Scalar x -> Scalar ( ref @@ !x /. s )
| Vec v -> Vec V.( v /. s )
| Matrix m -> Matrix M.( m /. s)
end
let (.%()): type a b. <contr:a; cov:b> t -> (a Shape.lt * b Shape.lt)
-> float = fun t (contr,cov) ->
let open Shape in
match[@warning "-4"] t, contr, cov with
| Scalar f, [] , [] -> !f
| Vec v, [a], [] -> v.V.%(a)
| Matrix m, [i], [j] -> m.M.%(i,j)
| _ -> .
and (.%()<-): type a b.
<contr:a; cov:b> t -> (a Shape.lt * b Shape.lt) -> float -> unit
= fun t (contr,cov) x ->
let open Shape in
match[@warning "-4"] t, contr, cov with
| Scalar f, [] , [] -> f := x
| Vec v, [a] , [] -> v.V.%(a) <- x
| Matrix m, [i], [j] -> m.M.%(i,j) <- x
| _ -> .
|
|
42166a810479c7d12a097414198feff077d131850564e9ccc38094bc7d7739d8 | haskell-lisp/yale-haskell | default.scm | ;;; This handles the default rule.
(define (maybe-default-ambiguous-tyvar type def module)
(let ((classes (ntyvar-context type)))
(and (not (null? classes)) ; this happens only during cleanup after an error
(let ((non-standard? '#f)
(numeric? '#f))
(dolist (class classes)
(cond ((eq? (class-kind class) 'numeric)
(setf numeric? '#t))
((not (eq? (class-kind class) 'standard))
(setf non-standard? '#t))))
(cond ((or non-standard? (not numeric?))
(remember-context def
(phase-error 'Non-defaultable-ambiguous-context
"An ambiguous context, ~A, cannot be defaulted.~%Ambiguity in call to ~A~%"
classes def))
'#f)
(else
(find-default-type type classes classes
(tuple-2-2 (assq module *default-decls*)))))))))
(define (find-default-type tyvar classes all-classes defaults)
(cond ((null? defaults)
(phase-error 'no-default-applies
"Ambiguous context: ~A~%No default applies.~%"
all-classes)
'#f)
((null? classes)
(instantiate-tyvar tyvar (car defaults))
'#t)
((type-in-class? (car defaults) (car classes))
(find-default-type tyvar (cdr classes) all-classes defaults))
(else
(find-default-type tyvar all-classes all-classes (cdr defaults)))))
(define (type-in-class? ntype class)
(let* ((ntype (expand-ntype-synonym ntype))
(alg (ntycon-tycon ntype))
(inst (lookup-instance alg class)))
(if (eq? inst '#f)
'#f
(let ((res '#t))
(do-contexts (c (instance-context inst)) (ty (ntycon-args ntype))
(when (not (type-in-class? ty c))
(setf res '#f)))
res))))
| null | https://raw.githubusercontent.com/haskell-lisp/yale-haskell/4e987026148fe65c323afbc93cd560c07bf06b3f/type/default.scm | scheme | This handles the default rule.
this happens only during cleanup after an error |
(define (maybe-default-ambiguous-tyvar type def module)
(let ((classes (ntyvar-context type)))
(let ((non-standard? '#f)
(numeric? '#f))
(dolist (class classes)
(cond ((eq? (class-kind class) 'numeric)
(setf numeric? '#t))
((not (eq? (class-kind class) 'standard))
(setf non-standard? '#t))))
(cond ((or non-standard? (not numeric?))
(remember-context def
(phase-error 'Non-defaultable-ambiguous-context
"An ambiguous context, ~A, cannot be defaulted.~%Ambiguity in call to ~A~%"
classes def))
'#f)
(else
(find-default-type type classes classes
(tuple-2-2 (assq module *default-decls*)))))))))
(define (find-default-type tyvar classes all-classes defaults)
(cond ((null? defaults)
(phase-error 'no-default-applies
"Ambiguous context: ~A~%No default applies.~%"
all-classes)
'#f)
((null? classes)
(instantiate-tyvar tyvar (car defaults))
'#t)
((type-in-class? (car defaults) (car classes))
(find-default-type tyvar (cdr classes) all-classes defaults))
(else
(find-default-type tyvar all-classes all-classes (cdr defaults)))))
(define (type-in-class? ntype class)
(let* ((ntype (expand-ntype-synonym ntype))
(alg (ntycon-tycon ntype))
(inst (lookup-instance alg class)))
(if (eq? inst '#f)
'#f
(let ((res '#t))
(do-contexts (c (instance-context inst)) (ty (ntycon-args ntype))
(when (not (type-in-class? ty c))
(setf res '#f)))
res))))
|
9c7e7697117ae51101591804fbb2cb5594ea44079e7962965df07be3912571a1 | johnlawrenceaspden/hobby-code | efficiencyandprogress1.clj | ;; Efficiency and Progress
;; Are ours once again
;; Now that we have the neut-ron bomb
It 's nice and quick and clean and ge - ets things done ...
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
When you program in Clojure , you get the raw speed of assembler .
Unfortunately , that is , assembler on a ZX81 , running a Z80 processor at 4MHz in 1981 .
If anything , that comparison is unfair to my old ZX81 . Does anyone
remember ' 3D Invaders ' , a fast and exciting first person shooter /
flight simulator that ran in 1 K of RAM * including memory for the
;; screen*?
;; Once upon a time, I had the knack of making clojure run at the same
speed as Java , which is not far off the same speed as C , which is
;; not far off the speed of the sort of hand-crafted machine code which
;; no-one in their right mind ever writes, in these degenerate latter
;; days which we must reluctantly learn to call the future.
;; But I seem to have lost the knack. Can anyone show me what I am doing wrong?
;; At any rate, it isn't too hard to get it to run at something like
;; the real speed of the machine, as long as you're prepared to write
code that is more like Java or C than Clojure .
;; So here are some thoughts about how to do this.
;; Which I offer up only as a basis for discussion, and not in any way
;; meaning to stir up controversy, or as flame-bait or ammunition for
;; trolls or anything of that sort.
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Clojure is very slow:
(time (reduce + (map + (range 1000000) (range 1000000))))
"Elapsed time: 5316.638869 msecs"
;-> 999999000000
;; The greater part of its slowness seems to be do with lazy sequences
(time (def seqa (doall (range 1000000))))
"Elapsed time: 3119.468963 msecs"
(time (def seqb (doall (range 1000000))))
"Elapsed time: 2839.593429 msecs"
(time (reduce + (map + seqa seqb)))
"Elapsed time: 3558.975552 msecs"
;-> 999999000000
;; It looks as though making a new sequence is the expensive bit
(time (doall (map + seqa seqb)))
"Elapsed time: 3612.553803 msecs"
;-> (0 2 4 6 8 10 12 14 16 18 20 22 24 26 28 30 32 34 36 38 40 42 44 46 48 50 52 ...)
;; Just adding things up is way faster
(time (reduce + seqa))
"Elapsed time: 315.717033 msecs"
499999500000
;; I wondered if there was a way of avoiding lazy-seqs
(time (def veca (vec seqa)))
"Elapsed time: 470.512696 msecs"
(time (def vecb (vec seqb)))
"Elapsed time: 374.796054 msecs"
;; After all, 'use the right data structure for the problem' is pretty much lesson 1, and if vectors are not a good data structure
;; for this problem, then what is?
;; But it seems that despite the speed of making the vectors, it doesn't help much when we do our thing.
;; In fact it's a bit slower
(time (reduce + (mapv + veca vecb)))
"Elapsed time: 4329.070268 msecs"
999999000000
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; So let's say 3600ms to add together two arrays of 1000000 elements and sum the result.
;; In C on the same machine (my little netbook with its 1.66GHz Atom
;; and 512 kb cache) this seems to take 16 ms, being 8ms for the map
;; and 8 ms for the reduce. I'm assuming that that time is mostly
;; spent waiting on the main memory, but I may be wrong. Who knows how
;; these things are done?
;-> 225
;; So shall we call this a 225x slowdown for the natural expression in
;; the two languages of mapping and reducing?
(time (reduce + seqa))
"Elapsed time: 358.152249 msecs"
499999500000
;; If we just look at the reduction, then that's
44.75
;; So around 50x
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
| null | https://raw.githubusercontent.com/johnlawrenceaspden/hobby-code/48e2a89d28557994c72299962cd8e3ace6a75b2d/efficiencyandprogress1.clj | clojure | Efficiency and Progress
Are ours once again
Now that we have the neut-ron bomb
screen*?
Once upon a time, I had the knack of making clojure run at the same
not far off the speed of the sort of hand-crafted machine code which
no-one in their right mind ever writes, in these degenerate latter
days which we must reluctantly learn to call the future.
But I seem to have lost the knack. Can anyone show me what I am doing wrong?
At any rate, it isn't too hard to get it to run at something like
the real speed of the machine, as long as you're prepared to write
So here are some thoughts about how to do this.
Which I offer up only as a basis for discussion, and not in any way
meaning to stir up controversy, or as flame-bait or ammunition for
trolls or anything of that sort.
-> 999999000000
The greater part of its slowness seems to be do with lazy sequences
-> 999999000000
It looks as though making a new sequence is the expensive bit
-> (0 2 4 6 8 10 12 14 16 18 20 22 24 26 28 30 32 34 36 38 40 42 44 46 48 50 52 ...)
Just adding things up is way faster
I wondered if there was a way of avoiding lazy-seqs
for this problem, then what is?
But it seems that despite the speed of making the vectors, it doesn't help much when we do our thing.
In fact it's a bit slower
spent waiting on the main memory, but I may be wrong. Who knows how
these things are done?
So shall we call this a 225x slowdown for the natural expression in
If we just look at the reduction, then that's
| It 's nice and quick and clean and ge - ets things done ...
When you program in Clojure , you get the raw speed of assembler .
Unfortunately , that is , assembler on a ZX81 , running a Z80 processor at 4MHz in 1981 .
If anything , that comparison is unfair to my old ZX81 . Does anyone
remember ' 3D Invaders ' , a fast and exciting first person shooter /
flight simulator that ran in 1 K of RAM * including memory for the
speed as Java , which is not far off the same speed as C , which is
code that is more like Java or C than Clojure .
Clojure is very slow :
(time (reduce + (map + (range 1000000) (range 1000000))))
"Elapsed time: 5316.638869 msecs"
(time (def seqa (doall (range 1000000))))
"Elapsed time: 3119.468963 msecs"
(time (def seqb (doall (range 1000000))))
"Elapsed time: 2839.593429 msecs"
(time (reduce + (map + seqa seqb)))
"Elapsed time: 3558.975552 msecs"
(time (doall (map + seqa seqb)))
"Elapsed time: 3612.553803 msecs"
(time (reduce + seqa))
"Elapsed time: 315.717033 msecs"
499999500000
(time (def veca (vec seqa)))
"Elapsed time: 470.512696 msecs"
(time (def vecb (vec seqb)))
"Elapsed time: 374.796054 msecs"
After all , ' use the right data structure for the problem ' is pretty much lesson 1 , and if vectors are not a good data structure
(time (reduce + (mapv + veca vecb)))
"Elapsed time: 4329.070268 msecs"
999999000000
So lets say 3600ms to add together two arrays of 1000000 elements and sum the result .
In C on the same machine ( my little netbook with its 1.66GHz Atom
and 512 kb cache ) this seems to take 16 ms , being 8ms for the map
and 8 ms for the reduce . I 'm assuming that that time is mostly
- > 225
the two languages of mapping and reducing ?
(time (reduce + seqa))
"Elapsed time: 358.152249 msecs"
499999500000
44.75
So around 50x
|
87af97ab04bbf1f57783a6434592f7e3b7549563963404a7b71edb2a0255f8b7 | binsec/binsec | sse_symbolic.mli | (**************************************************************************)
(*  This file is part of BINSEC.                                          *)
(* *)
(*  Copyright (C) 2016-2022                                               *)
(*    CEA (Commissariat à l'énergie atomique et aux énergies              *)
(* alternatives) *)
(* *)
(* you can redistribute it and/or modify it under the terms of the GNU *)
(*  Lesser General Public License as published by the Free Software       *)
(*  Foundation, version 2.1.                                               *)
(* *)
(* It is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Lesser General Public License for more details. *)
(* *)
(*  See the GNU Lesser General Public License version 2.1                 *)
(*  for more details (enclosed in the file licenses/LGPLv2.1).             *)
(* *)
(**************************************************************************)
module State (S : Smt_sig.Solver) : Types.STATE_FACTORY
| null | https://raw.githubusercontent.com/binsec/binsec/22ee39aad58219e8837b6ba15f150ba04a498b63/src/sse/formula/sse_symbolic.mli | ocaml | ************************************************************************
alternatives)
you can redistribute it and/or modify it under the terms of the GNU
It is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
************************************************************************ | This file is part of BINSEC .
Copyright ( C ) 2016 - 2022
CEA ( Commissariat à l'énergie atomique et aux énergies
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
module State (S : Smt_sig.Solver) : Types.STATE_FACTORY
|
e9078a9e2d03c091195926583957c98a842a13ca666f8890b848cce35b0a00ed | yurug/ocaml4.04.0-copatterns | buffer.ml | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
(*   Pierre Weis and Xavier Leroy, projet Cristal, INRIA Rocquencourt     *)
(* *)
(*   Copyright 1999 Institut National de Recherche en Informatique et     *)
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
(*   the GNU Lesser General Public License version 2.1, with the          *)
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
(* Extensible buffers *)
type t =
{mutable buffer : bytes;
mutable position : int;
mutable length : int;
initial_buffer : bytes}
let create n =
let n = if n < 1 then 1 else n in
let n = if n > Sys.max_string_length then Sys.max_string_length else n in
let s = Bytes.create n in
{buffer = s; position = 0; length = n; initial_buffer = s}
let contents b = Bytes.sub_string b.buffer 0 b.position
let to_bytes b = Bytes.sub b.buffer 0 b.position
let sub b ofs len =
if ofs < 0 || len < 0 || ofs > b.position - len
then invalid_arg "Buffer.sub"
else Bytes.sub_string b.buffer ofs len
let blit src srcoff dst dstoff len =
if len < 0 || srcoff < 0 || srcoff > src.position - len
|| dstoff < 0 || dstoff > (Bytes.length dst) - len
then invalid_arg "Buffer.blit"
else
Bytes.unsafe_blit src.buffer srcoff dst dstoff len
let nth b ofs =
if ofs < 0 || ofs >= b.position then
invalid_arg "Buffer.nth"
else Bytes.unsafe_get b.buffer ofs
let length b = b.position
let clear b = b.position <- 0
let reset b =
b.position <- 0; b.buffer <- b.initial_buffer;
b.length <- Bytes.length b.buffer
let resize b more =
let len = b.length in
let new_len = ref len in
while b.position + more > !new_len do new_len := 2 * !new_len done;
if !new_len > Sys.max_string_length then begin
if b.position + more <= Sys.max_string_length
then new_len := Sys.max_string_length
else failwith "Buffer.add: cannot grow buffer"
end;
let new_buffer = Bytes.create !new_len in
  (* PR#6148: let's keep using [blit] rather than [unsafe_blit] in
     this tricky function that is slow anyway. *)
Bytes.blit b.buffer 0 new_buffer 0 b.position;
b.buffer <- new_buffer;
b.length <- !new_len
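(* Editorial note, not part of the original file: because [resize] at least
   doubles the underlying byte sequence, a run of [add_char] calls costs
   amortised O(1) per character. A standalone client-side illustration: *)
let _build_string n =
  let b = Buffer.create 1 in
  for _ = 1 to n do Buffer.add_char b 'x' done;
  Buffer.contents b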
let add_char b c =
let pos = b.position in
if pos >= b.length then resize b 1;
Bytes.unsafe_set b.buffer pos c;
b.position <- pos + 1
let add_substring b s offset len =
if offset < 0 || len < 0 || offset > String.length s - len
then invalid_arg "Buffer.add_substring/add_subbytes";
let new_position = b.position + len in
if new_position > b.length then resize b len;
Bytes.blit_string s offset b.buffer b.position len;
b.position <- new_position
let add_subbytes b s offset len =
add_substring b (Bytes.unsafe_to_string s) offset len
let add_string b s =
let len = String.length s in
let new_position = b.position + len in
if new_position > b.length then resize b len;
Bytes.blit_string s 0 b.buffer b.position len;
b.position <- new_position
let add_bytes b s = add_string b (Bytes.unsafe_to_string s)
let add_buffer b bs =
add_subbytes b bs.buffer 0 bs.position
(* read up to [len] bytes from [ic] into [b]. *)
let rec add_channel_rec b ic len =
if len > 0 then (
let n = input ic b.buffer b.position len in
b.position <- b.position + n;
if n = 0 then raise End_of_file
else add_channel_rec b ic (len-n) (* n <= len *)
)
let add_channel b ic len =
if len < 0 || len > Sys.max_string_length then (* PR#5004 *)
invalid_arg "Buffer.add_channel";
if b.position + len > b.length then resize b len;
add_channel_rec b ic len
let output_buffer oc b =
output oc b.buffer 0 b.position
let closing = function
| '(' -> ')'
| '{' -> '}'
| _ -> assert false
(* opening and closing: open and close characters, typically ( and )
k: balance of opening and closing chars
s: the string where we are searching
start: the index where we start the search. *)
let advance_to_closing opening closing k s start =
let rec advance k i lim =
if i >= lim then raise Not_found else
if s.[i] = opening then advance (k + 1) (i + 1) lim else
if s.[i] = closing then
if k = 0 then i else advance (k - 1) (i + 1) lim
else advance k (i + 1) lim in
advance k start (String.length s)
let advance_to_non_alpha s start =
let rec advance i lim =
if i >= lim then lim else
match s.[i] with
| 'a' .. 'z' | 'A' .. 'Z' | '0' .. '9' | '_' -> advance (i + 1) lim
| _ -> i in
advance start (String.length s)
(* We are just at the beginning of an ident in s, starting at start. *)
let find_ident s start lim =
if start >= lim then raise Not_found else
match s.[start] with
(* Parenthesized ident ? *)
| '(' | '{' as c ->
let new_start = start + 1 in
let stop = advance_to_closing c (closing c) 0 s new_start in
String.sub s new_start (stop - start - 1), stop + 1
(* Regular ident *)
| _ ->
let stop = advance_to_non_alpha s (start + 1) in
String.sub s start (stop - start), stop
(* Substitute $ident, $(ident), or ${ident} in s,
according to the function mapping f. *)
let add_substitute b f s =
let lim = String.length s in
let rec subst previous i =
if i < lim then begin
match s.[i] with
| '$' as current when previous = '\\' ->
add_char b current;
subst ' ' (i + 1)
| '$' ->
let j = i + 1 in
let ident, next_i = find_ident s j lim in
add_string b (f ident);
subst ' ' next_i
| current when previous == '\\' ->
add_char b '\\';
add_char b current;
subst ' ' (i + 1)
| '\\' as current ->
subst current (i + 1)
| current ->
add_char b current;
subst current (i + 1)
end else
if previous = '\\' then add_char b previous in
subst ' ' 0
| null | https://raw.githubusercontent.com/yurug/ocaml4.04.0-copatterns/b3ec6a3cc203bd2cde3b618546d29e10f1102323/stdlib/buffer.ml | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
Extensible buffers
read up to [len] bytes from [ic] into [b].
n <= len
PR#5004
opening and closing: open and close characters, typically ( and )
k: balance of opening and closing chars
s: the string where we are searching
start: the index where we start the search.
We are just at the beginning of an ident in s, starting at start.
Parenthesized ident ?
Regular ident
Substitute $ident, $(ident), or ${ident} in s,
according to the function mapping f. | and , projet Cristal , INRIA Rocquencourt
Copyright 1999 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
type t =
{mutable buffer : bytes;
mutable position : int;
mutable length : int;
initial_buffer : bytes}
let create n =
let n = if n < 1 then 1 else n in
let n = if n > Sys.max_string_length then Sys.max_string_length else n in
let s = Bytes.create n in
{buffer = s; position = 0; length = n; initial_buffer = s}
let contents b = Bytes.sub_string b.buffer 0 b.position
let to_bytes b = Bytes.sub b.buffer 0 b.position
let sub b ofs len =
if ofs < 0 || len < 0 || ofs > b.position - len
then invalid_arg "Buffer.sub"
else Bytes.sub_string b.buffer ofs len
let blit src srcoff dst dstoff len =
if len < 0 || srcoff < 0 || srcoff > src.position - len
|| dstoff < 0 || dstoff > (Bytes.length dst) - len
then invalid_arg "Buffer.blit"
else
Bytes.unsafe_blit src.buffer srcoff dst dstoff len
let nth b ofs =
if ofs < 0 || ofs >= b.position then
invalid_arg "Buffer.nth"
else Bytes.unsafe_get b.buffer ofs
let length b = b.position
let clear b = b.position <- 0
let reset b =
b.position <- 0; b.buffer <- b.initial_buffer;
b.length <- Bytes.length b.buffer
let resize b more =
let len = b.length in
let new_len = ref len in
while b.position + more > !new_len do new_len := 2 * !new_len done;
if !new_len > Sys.max_string_length then begin
if b.position + more <= Sys.max_string_length
then new_len := Sys.max_string_length
else failwith "Buffer.add: cannot grow buffer"
end;
let new_buffer = Bytes.create !new_len in
  (* PR#6148: let's keep using [blit] rather than [unsafe_blit] in
     this tricky function that is slow anyway. *)
Bytes.blit b.buffer 0 new_buffer 0 b.position;
b.buffer <- new_buffer;
b.length <- !new_len
let add_char b c =
let pos = b.position in
if pos >= b.length then resize b 1;
Bytes.unsafe_set b.buffer pos c;
b.position <- pos + 1
let add_substring b s offset len =
if offset < 0 || len < 0 || offset > String.length s - len
then invalid_arg "Buffer.add_substring/add_subbytes";
let new_position = b.position + len in
if new_position > b.length then resize b len;
Bytes.blit_string s offset b.buffer b.position len;
b.position <- new_position
let add_subbytes b s offset len =
add_substring b (Bytes.unsafe_to_string s) offset len
let add_string b s =
let len = String.length s in
let new_position = b.position + len in
if new_position > b.length then resize b len;
Bytes.blit_string s 0 b.buffer b.position len;
b.position <- new_position
let add_bytes b s = add_string b (Bytes.unsafe_to_string s)
let add_buffer b bs =
add_subbytes b bs.buffer 0 bs.position
let rec add_channel_rec b ic len =
if len > 0 then (
let n = input ic b.buffer b.position len in
b.position <- b.position + n;
if n = 0 then raise End_of_file
)
let add_channel b ic len =
invalid_arg "Buffer.add_channel";
if b.position + len > b.length then resize b len;
add_channel_rec b ic len
let output_buffer oc b =
output oc b.buffer 0 b.position
let closing = function
| '(' -> ')'
| '{' -> '}'
| _ -> assert false
let advance_to_closing opening closing k s start =
let rec advance k i lim =
if i >= lim then raise Not_found else
if s.[i] = opening then advance (k + 1) (i + 1) lim else
if s.[i] = closing then
if k = 0 then i else advance (k - 1) (i + 1) lim
else advance k (i + 1) lim in
advance k start (String.length s)
let advance_to_non_alpha s start =
let rec advance i lim =
if i >= lim then lim else
match s.[i] with
| 'a' .. 'z' | 'A' .. 'Z' | '0' .. '9' | '_' -> advance (i + 1) lim
| _ -> i in
advance start (String.length s)
let find_ident s start lim =
if start >= lim then raise Not_found else
match s.[start] with
| '(' | '{' as c ->
let new_start = start + 1 in
let stop = advance_to_closing c (closing c) 0 s new_start in
String.sub s new_start (stop - start - 1), stop + 1
| _ ->
let stop = advance_to_non_alpha s (start + 1) in
String.sub s start (stop - start), stop
let add_substitute b f s =
let lim = String.length s in
let rec subst previous i =
if i < lim then begin
match s.[i] with
| '$' as current when previous = '\\' ->
add_char b current;
subst ' ' (i + 1)
| '$' ->
let j = i + 1 in
let ident, next_i = find_ident s j lim in
add_string b (f ident);
subst ' ' next_i
| current when previous == '\\' ->
add_char b '\\';
add_char b current;
subst ' ' (i + 1)
| '\\' as current ->
subst current (i + 1)
| current ->
add_char b current;
subst current (i + 1)
end else
if previous = '\\' then add_char b previous in
subst ' ' 0
|
f44eee9dbd049dbf8023a06d4bfda573037541ec601591c70b324bdcc2f6a6ee | kmi/irs | load.lisp | Mode : Lisp ; Package :
File created in WebOnto
(in-package "OCML")
(def-ontology time-ontology
:includes (units-manipulation xsd-types)
:type :domain
;;:namespace-uri "-super.org/ontologies/time-ontology/20080612#"
:namespace-uri "-super.org/ontologies/process/time/v1.2.0#"
:namespaces (("TIME" time-ontology)
("UNITS" units-manipulation)
("SI" international-system-units)
("PHYS-Q" physical-quantities)
("XSD" xsd-types))
:files ("time-ontology"
"new" )
:author "carlos" :allowed-editors ("nil")) | null | https://raw.githubusercontent.com/kmi/irs/e1b8d696f61c6b6878c0e92d993ed549fee6e7dd/ontologies/domains/time-ontology/load.lisp | lisp | Package :
:namespace-uri "-super.org/ontologies/time-ontology/20080612#" |
File created in WebOnto
(in-package "OCML")
(def-ontology time-ontology
:includes (units-manipulation xsd-types)
:type :domain
:namespace-uri "-super.org/ontologies/process/time/v1.2.0#"
:namespaces (("TIME" time-ontology)
("UNITS" units-manipulation)
("SI" international-system-units)
("PHYS-Q" physical-quantities)
("XSD" xsd-types))
:files ("time-ontology"
"new" )
:author "carlos" :allowed-editors ("nil")) |
6582e727a8a736bab237e9903e4c42b701173ea0d6eb8587ec04a46e040ac78b | dbuenzli/uunf | unftrip.ml | ---------------------------------------------------------------------------
   Copyright (c) 2012 The uunf programmers. All rights reserved.
   Distributed under the ISC license, see terms at the end of the file.
  ---------------------------------------------------------------------------*)
let strf = Printf.sprintf
let pp = Format.fprintf
let pp_pos ppf d = pp ppf "%d.%d:(%d,%06X) "
(Uutf.decoder_line d) (Uutf.decoder_col d) (Uutf.decoder_count d)
(Uutf.decoder_byte_count d)
let pp_malformed ppf bs =
let l = String.length bs in
pp ppf "@[malformed bytes @[(";
if l > 0 then pp ppf "%02X" (Char.code (bs.[0]));
for i = 1 to l - 1 do pp ppf "@ %02X" (Char.code (bs.[i])) done;
pp ppf ")@]@]"
let pp_dump_uchar ppf u = Format.fprintf ppf "U+%04X" (Uchar.to_int u)
let exec = Filename.basename Sys.executable_name
let log f = Format.eprintf ("%s: " ^^ f ^^ "@?") exec
let input_malformed = ref false
let log_malformed inf d bs =
input_malformed := true;
log "%s:%a: %a@." inf pp_pos d pp_malformed bs
(* Output *)
let uchar_dump ppf = function
| `End -> () | `Uchar u -> pp ppf "%a@\n" pp_dump_uchar u
let uchar_encoder enc =
let enc = match enc with `ISO_8859_1 | `US_ASCII -> `UTF_8
| #Uutf.encoding as enc -> enc
in
let e = Uutf.encoder enc (`Channel stdout) in
fun v -> ignore (Uutf.encode e v)
let out_fun ascii oe =
if ascii then uchar_dump Format.std_formatter else uchar_encoder oe
(* Trip *)
let u_rep = `Uchar Uutf.u_rep
let id inf d first_dec out = (* no normalization. *)
let rec loop d = function
| `Uchar _ as v -> out v; loop d (Uutf.decode d)
| `End as v -> out v
| `Malformed bs -> log_malformed inf d bs; out u_rep; loop d (Uutf.decode d)
| `Await -> assert false
in
if Uutf.decoder_removed_bom d then out (`Uchar Uutf.u_bom);
loop d first_dec
let normalize nf inf d first_dec out = (* normalize to nf. *)
let n = Uunf.create nf in
let rec add v = match Uunf.add n v with
| `Uchar cp as u -> out u; add `Await
| `Await | `End -> ()
in
let rec loop d = function
| `Uchar _ as v -> add v; loop d (Uutf.decode d)
| `End as v -> add v; out `End
| `Malformed bs -> log_malformed inf d bs; add u_rep; loop d (Uutf.decode d)
| `Await -> assert false
in
if Uutf.decoder_removed_bom d then add (`Uchar Uutf.u_bom);
loop d first_dec
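(* Illustrative note on the [Uunf.add] protocol used above (not part of the
   original file): after feeding a [`Uchar] the normalizer is drained with
   [`Await] until it stops emitting characters, and [`End] is fed once to
   flush any buffered sequence, e.g.:
     let n = Uunf.create `NFC in
     let rec drain v = match Uunf.add n v with
       | `Uchar _ -> drain `Await
       | `Await | `End -> ()
     in
     drain (`Uchar (Uchar.of_int 0x00E9)); drain `End
*)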
let trip nf inf enc ascii =
try
let ic = if inf = "-" then stdin else open_in inf in
let d = Uutf.decoder ?encoding:enc (`Channel ic) in
let first_dec = Uutf.decode d in (* guess encoding if needed. *)
let out = out_fun ascii (Uutf.decoder_encoding d) in
begin match nf with
| None -> id inf d first_dec out
| Some nf -> normalize nf inf d first_dec out
end;
if inf <> "-" then close_in ic;
flush stdout;
with Sys_error e -> log "%s@." e; exit 1
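(* Example invocations (illustrative, not part of the original file); both
   flags are defined by the Cmdliner terms below:
     unftrip --nfc input.txt          normalize input.txt to NFC on stdout
     unftrip --nfd -a < input.txt     dump the NFD scalar values in US-ASCII *)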
(* Version *)
let unicode_version () = Format.printf "%s@." Uunf.unicode_version
(* Cmd *)
let do_cmd cmd nf inf enc ascii = match cmd with
| `Unicode_version -> unicode_version ()
| `Trip -> trip nf inf enc ascii
(* Cmdline interface *)
open Cmdliner
let cmd =
let doc = "Output supported Unicode version." in
let unicode_version = `Unicode_version, Arg.info ["unicode-version"] ~doc in
Arg.(value & vflag `Trip [unicode_version])
let nf_doc = "NORMALIZATION"
let nf =
let docs = nf_doc in
let doc = "Normalization Form D (NFD), canonical decomposition." in
let nfd = Some `NFD, Arg.info ["nfd"] ~doc ~docs in
let doc = "Normalization Form C (NFC), canonical decomposition followed by \
canonical composition." in
let nfc = Some `NFC, Arg.info ["nfc"] ~doc ~docs in
let doc = "Normalization form KD (NFKD), compatibility decomposition." in
let nfkd = Some `NFKD, Arg.info ["nfkd"] ~doc ~docs in
let doc = "Normalization form KC (NFKC), compatibility decomposition \
followed by canonical composition." in
let nfkc = Some `NFKC, Arg.info ["nfkc"] ~doc ~docs in
Arg.(value & vflag None [nfd; nfc; nfkd; nfkc])
let file =
let doc = "The input file. Reads from stdin if unspecified." in
Arg.(value & pos 0 string "-" & info [] ~doc ~docv:"FILE")
let enc =
let enc = [ "UTF-8", `UTF_8; "UTF-16", `UTF_16; "UTF-16LE", `UTF_16LE;
"UTF-16BE", `UTF_16BE; "ASCII", `US_ASCII; "latin1", `ISO_8859_1 ]
in
let doc = strf "Input encoding, must %s. If unspecified the encoding is \
guessed. The output encoding is the same as the input \
encoding except for ASCII and latin1 where UTF-8 is output."
(Arg.doc_alts_enum enc)
in
Arg.(value & opt (some (enum enc)) None & info [ "e"; "encoding" ] ~doc)
let ascii =
let doc = "Output the input text as newline (U+000A) separated Unicode
scalar values written in the US-ASCII charset."
in
Arg.(value & flag & info ["a"; "ascii"] ~doc)
let cmd =
let doc = "normalize Unicode text" in
let man = [
`S "DESCRIPTION";
`P "$(tname) inputs Unicode text from stdin and rewrites it to stdout
according to a specified Unicode normalization form (see UAX 15).";
`P "If no normalization form is specified the character stream is left
intact.";
`P "Invalid byte sequences in the input are reported on stderr and
replaced by the Unicode replacement character (U+FFFD) in the output.";
`S nf_doc;
`S "OPTIONS";
`S "EXIT STATUS";
`P "$(tname) exits with one of the following values:";
`I ("0", "no error occurred");
`I ("1", "a command line parsing error occurred");
`I ("2", "the input text was malformed");
`S "BUGS";
`P "This program is distributed with the Uunf OCaml library.
See for contact information." ]
in
Cmd.v (Cmd.info "unftrip" ~version:"%%VERSION%%" ~doc ~man)
Term.(const do_cmd $ cmd $ nf $ file $ enc $ ascii)
let main () = match Cmd.eval cmd with
| 0 -> if !input_malformed then exit 2 else exit 0
| c when c = Cmd.Exit.cli_error -> exit 1
| c -> exit c
let () = if !Sys.interactive then () else main ()
---------------------------------------------------------------------------
Copyright (c) 2012 The uunf programmers
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
---------------------------------------------------------------------------*)
| null | https://raw.githubusercontent.com/dbuenzli/uunf/afd25d9f5348a441e2ff69d077647e56f5de572f/test/unftrip.ml | ocaml | Output
Trip
no normalization.
normalize to nf.
guess encoding if needed.
Version
Cmd
Cmdline interface | ---------------------------------------------------------------------------
Copyright ( c ) 2012 The uunf programmers . All rights reserved .
Distributed under the ISC license , see terms at the end of the file .
---------------------------------------------------------------------------
Copyright (c) 2012 The uunf programmers. All rights reserved.
Distributed under the ISC license, see terms at the end of the file.
---------------------------------------------------------------------------*)
let strf = Printf.sprintf
let pp = Format.fprintf
let pp_pos ppf d = pp ppf "%d.%d:(%d,%06X) "
(Uutf.decoder_line d) (Uutf.decoder_col d) (Uutf.decoder_count d)
(Uutf.decoder_byte_count d)
let pp_malformed ppf bs =
let l = String.length bs in
pp ppf "@[malformed bytes @[(";
if l > 0 then pp ppf "%02X" (Char.code (bs.[0]));
for i = 1 to l - 1 do pp ppf "@ %02X" (Char.code (bs.[i])) done;
pp ppf ")@]@]"
let pp_dump_uchar ppf u = Format.fprintf ppf "U+%04X" (Uchar.to_int u)
let exec = Filename.basename Sys.executable_name
let log f = Format.eprintf ("%s: " ^^ f ^^ "@?") exec
let input_malformed = ref false
let log_malformed inf d bs =
input_malformed := true;
log "%s:%a: %a@." inf pp_pos d pp_malformed bs
let uchar_dump ppf = function
| `End -> () | `Uchar u -> pp ppf "%a@\n" pp_dump_uchar u
let uchar_encoder enc =
let enc = match enc with `ISO_8859_1 | `US_ASCII -> `UTF_8
| #Uutf.encoding as enc -> enc
in
let e = Uutf.encoder enc (`Channel stdout) in
fun v -> ignore (Uutf.encode e v)
let out_fun ascii oe =
if ascii then uchar_dump Format.std_formatter else uchar_encoder oe
let u_rep = `Uchar Uutf.u_rep
let rec loop d = function
| `Uchar _ as v -> out v; loop d (Uutf.decode d)
| `End as v -> out v
| `Malformed bs -> log_malformed inf d bs; out u_rep; loop d (Uutf.decode d)
| `Await -> assert false
in
if Uutf.decoder_removed_bom d then out (`Uchar Uutf.u_bom);
loop d first_dec
let n = Uunf.create nf in
let rec add v = match Uunf.add n v with
| `Uchar cp as u -> out u; add `Await
| `Await | `End -> ()
in
let rec loop d = function
| `Uchar _ as v -> add v; loop d (Uutf.decode d)
| `End as v -> add v; out `End
| `Malformed bs -> log_malformed inf d bs; add u_rep; loop d (Uutf.decode d)
| `Await -> assert false
in
if Uutf.decoder_removed_bom d then add (`Uchar Uutf.u_bom);
loop d first_dec
let trip nf inf enc ascii =
try
let ic = if inf = "-" then stdin else open_in inf in
let d = Uutf.decoder ?encoding:enc (`Channel ic) in
let out = out_fun ascii (Uutf.decoder_encoding d) in
begin match nf with
| None -> id inf d first_dec out
| Some nf -> normalize nf inf d first_dec out
end;
if inf <> "-" then close_in ic;
flush stdout;
with Sys_error e -> log "%s@." e; exit 1
let unicode_version () = Format.printf "%s@." Uunf.unicode_version
let do_cmd cmd nf inf enc ascii = match cmd with
| `Unicode_version -> unicode_version ()
| `Trip -> trip nf inf enc ascii
open Cmdliner
let cmd =
let doc = "Output supported Unicode version." in
let unicode_version = `Unicode_version, Arg.info ["unicode-version"] ~doc in
Arg.(value & vflag `Trip [unicode_version])
let nf_doc = "NORMALIZATION"
let nf =
let docs = nf_doc in
let doc = "Normalization Form D (NFD), canonical decomposition." in
let nfd = Some `NFD, Arg.info ["nfd"] ~doc ~docs in
let doc = "Normalization Form C (NFC), canonical decomposition followed by \
canonical composition." in
let nfc = Some `NFC, Arg.info ["nfc"] ~doc ~docs in
let doc = "Normalization form KD (NFKD), compatibility decomposition." in
let nfkd = Some `NFKD, Arg.info ["nfkd"] ~doc ~docs in
let doc = "Normalization form KC (NFKC), compatibility decomposition \
followed by canonical composition." in
let nfkc = Some `NFKC, Arg.info ["nfkc"] ~doc ~docs in
Arg.(value & vflag None [nfd; nfc; nfkd; nfkc])
let file =
let doc = "The input file. Reads from stdin if unspecified." in
Arg.(value & pos 0 string "-" & info [] ~doc ~docv:"FILE")
let enc =
let enc = [ "UTF-8", `UTF_8; "UTF-16", `UTF_16; "UTF-16LE", `UTF_16LE;
"UTF-16BE", `UTF_16BE; "ASCII", `US_ASCII; "latin1", `ISO_8859_1 ]
in
let doc = strf "Input encoding, must %s. If unspecified the encoding is \
guessed. The output encoding is the same as the input \
encoding except for ASCII and latin1 where UTF-8 is output."
(Arg.doc_alts_enum enc)
in
Arg.(value & opt (some (enum enc)) None & info [ "e"; "encoding" ] ~doc)
let ascii =
let doc = "Output the input text as newline (U+000A) separated Unicode
scalar values written in the US-ASCII charset."
in
Arg.(value & flag & info ["a"; "ascii"] ~doc)
let cmd =
let doc = "normalize Unicode text" in
let man = [
`S "DESCRIPTION";
`P "$(tname) inputs Unicode text from stdin and rewrites it to stdout
according to a specified Unicode normalization form (see UAX 15).";
`P "If no normalization form is specified the character stream is left
intact.";
`P "Invalid byte sequences in the input are reported on stderr and
replaced by the Unicode replacement character (U+FFFD) in the output.";
`S nf_doc;
`S "OPTIONS";
`S "EXIT STATUS";
`P "$(tname) exits with one of the following values:";
`I ("0", "no error occurred");
`I ("1", "a command line parsing error occurred");
`I ("2", "the input text was malformed");
`S "BUGS";
`P "This program is distributed with the Uunf OCaml library.
See for contact information." ]
in
Cmd.v (Cmd.info "unftrip" ~version:"%%VERSION%%" ~doc ~man)
Term.(const do_cmd $ cmd $ nf $ file $ enc $ ascii)
let main () = match Cmd.eval cmd with
| 0 -> if !input_malformed then exit 2 else exit 0
| c when c = Cmd.Exit.cli_error -> exit 1
| c -> exit c
let () = if !Sys.interactive then () else main ()
---------------------------------------------------------------------------
Copyright ( c ) 2012 The uunf programmers
Permission to use , copy , modify , and/or distribute this software for any
purpose with or without fee is hereby granted , provided that the above
copyright notice and this permission notice appear in all copies .
THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
---------------------------------------------------------------------------
Copyright (c) 2012 The uunf programmers
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
---------------------------------------------------------------------------*)
|
f7fb21f6a609410bbde3e36e4f72dfbf3468ec8ca6314c7433ceb102dbbf9b94 | rpt/lager_logstash | lager_logstash_json_formatter.erl | Copyright ( c ) 2014
%%
%% Permission is hereby granted, free of charge, to any person obtaining a
%% copy of this software and associated documentation files (the "Software"),
%% to deal in the Software without restriction, including without limitation
%% the rights to use, copy, modify, merge, publish, distribute, sublicense,
%% and/or sell copies of the Software, and to permit persons to whom the
%% Software is furnished to do so, subject to the following conditions:
%%
%% The above copyright notice and this permission notice shall be included in
%% all copies or substantial portions of the Software.
%%
%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
%% IN THE SOFTWARE.
%% @author < >
-module(lager_logstash_json_formatter).
-export([format/2]).
-export([format/3]).
-define(DEFAULT_JSON_FORMATTER, jsx).
format(LagerMsg, Config) ->
Encoder = value(json_encoder, Config, ?DEFAULT_JSON_FORMATTER),
Level = lager_msg:severity(LagerMsg),
Timestamp = timestamp(lager_msg:datetime(LagerMsg)),
Message = lager_msg:message(LagerMsg),
Metadata = lager_msg:metadata(LagerMsg),
Data = [{type, lager_logstash},
{level, Level},
{'@timestamp', Timestamp},
{message, Message} | Metadata],
[encode(Encoder, convert(Data)), $\n].
format(Message, Config, _) ->
format(Message, Config).
value(Name, Config, Default) ->
case lists:keyfind(Name, 1, Config) of
{Name, Value} -> Value;
false -> Default
end.
timestamp({Date, Time}) -> [Date, $T, Time].
convert(Data) -> lists:foldl(fun convert/2, [], Data).
convert({_, undefined}, Acc) -> Acc;
convert({pid, Pid}, Acc) when is_pid(Pid) ->
[{pid, list_to_binary(pid_to_list(Pid))} | Acc];
convert({K, List}, Acc) when is_list(List) ->
[{K, iolist_to_binary(List)} | Acc];
convert({K, Atom}, Acc) when is_atom(Atom) ->
[{K, atom_to_binary(Atom, latin1)} | Acc];
convert(Else, Acc) -> [Else | Acc].
encode(jsx, Data) -> jsx:encode(Data);
encode(jiffy, Data) -> jiffy:encode({Data}).
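%% Usage sketch (illustrative comment, not part of the original module): a
%% lager handler selects this formatter through its configuration; the handler
%% module name below is a placeholder, and only the json_encoder option (jsx
%% or jiffy) is read by format/2 above.
%%
%% {handlers, [
%%   {some_logstash_handler, [
%%     {formatter, lager_logstash_json_formatter},
%%     {formatter_config, [{json_encoder, jsx}]}
%%   ]}
%% ]}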
| null | https://raw.githubusercontent.com/rpt/lager_logstash/001838211299cf5608694b63d5811ae1b4e6bb9a/src/lager_logstash_json_formatter.erl | erlang |
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
the rights to use, copy, modify, merge, publish, distribute, sublicense,
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE. | Copyright ( c ) 2014
to deal in the Software without restriction , including without limitation
and/or sell copies of the Software , and to permit persons to whom the
all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
@author < >
-module(lager_logstash_json_formatter).
-export([format/2]).
-export([format/3]).
-define(DEFAULT_JSON_FORMATTER, jsx).
format(LagerMsg, Config) ->
Encoder = value(json_encoder, Config, ?DEFAULT_JSON_FORMATTER),
Level = lager_msg:severity(LagerMsg),
Timestamp = timestamp(lager_msg:datetime(LagerMsg)),
Message = lager_msg:message(LagerMsg),
Metadata = lager_msg:metadata(LagerMsg),
Data = [{type, lager_logstash},
{level, Level},
{'@timestamp', Timestamp},
{message, Message} | Metadata],
[encode(Encoder, convert(Data)), $\n].
format(Message, Config, _) ->
format(Message, Config).
value(Name, Config, Default) ->
case lists:keyfind(Name, 1, Config) of
{Name, Value} -> Value;
false -> Default
end.
timestamp({Date, Time}) -> [Date, $T, Time].
convert(Data) -> lists:foldl(fun convert/2, [], Data).
convert({_, undefined}, Acc) -> Acc;
convert({pid, Pid}, Acc) when is_pid(Pid) ->
[{pid, list_to_binary(pid_to_list(Pid))} | Acc];
convert({K, List}, Acc) when is_list(List) ->
[{K, iolist_to_binary(List)} | Acc];
convert({K, Atom}, Acc) when is_atom(Atom) ->
[{K, atom_to_binary(Atom, latin1)} | Acc];
convert(Else, Acc) -> [Else | Acc].
encode(jsx, Data) -> jsx:encode(Data);
encode(jiffy, Data) -> jiffy:encode({Data}).
|
f84767623221778e5da99dca049915616422b202854ff029d2b75546f0a07cd0 | leopiney/tensor-safe | Examples.hs | | This module implements the examples command for TensorSafe .
module TensorSafe.Commands.Examples (examples) where
import TensorSafe.Examples.Examples
( mnistConcatenateComplexExample,
mnistConcatenateExample,
mnistExample,
mnistExampleDense,
simpleExample,
)
-- | Outputs to stdout the results of the examples
examples :: IO ()
examples = do
simpleExample
putStrLn "\n\n"
mnistExample
putStrLn "\n\n"
mnistExampleDense
putStrLn "\n\n"
mnistConcatenateExample
putStrLn "\n\n"
mnistConcatenateComplexExample
| null | https://raw.githubusercontent.com/leopiney/tensor-safe/cdf611dbbe68f6f6cb0b44fe31d86b28a547a3f2/src/TensorSafe/Commands/Examples.hs | haskell | | Outputs to stdout the results of the examples | | This module implements the examples command for TensorSafe .
module TensorSafe.Commands.Examples (examples) where
import TensorSafe.Examples.Examples
( mnistConcatenateComplexExample,
mnistConcatenateExample,
mnistExample,
mnistExampleDense,
simpleExample,
)
examples :: IO ()
examples = do
simpleExample
putStrLn "\n\n"
mnistExample
putStrLn "\n\n"
mnistExampleDense
putStrLn "\n\n"
mnistConcatenateExample
putStrLn "\n\n"
mnistConcatenateComplexExample
|
d2ec0bf75b0a117f82ea312eb323b9a9c4d3d32ab6979adcd047e4abeae1853b | Ramarren/cl-parser-combinators | cache.lisp | (in-package :parser-combinators)
(defvar *parser-cache* (make-hash-table))
(defun drop-parser-cache ()
(clrhash *parser-cache*))
;; parser caching
(defmacro cached? (parser label)
"Parser modifier macro: cache parser as label in global cache."
(with-unique-names (inp cache)
`#'(lambda (,inp)
(if-let ((,cache (gethash ',label *parser-cache*)))
(funcall ,cache ,inp)
(funcall (setf (gethash ',label *parser-cache*) ,parser) ,inp)))))
(defmacro def-cached-parser (name &body body)
"Define cached parser of no arguments."
(multiple-value-bind (forms declarations docstring) (parse-body body :documentation t)
`(defun ,name ()
,docstring
,@declarations
(cached? ,@forms ,(gensym)))))
(defmacro cached-arguments? (parser label &rest arguments)
"Parser modifier macro: cache parser as label with argument list equal under equal in global cache."
(with-unique-names (inp cache subcache args)
(let ((filtered-arguments (mapcar #'ensure-car
(remove-if (rcurry #'member '(&optional &key &rest)) arguments))))
`(let ((,args ,(cons 'list filtered-arguments)))
#'(lambda (,inp)
(unless (gethash ',label *parser-cache*)
(setf (gethash ',label *parser-cache*) (make-hash-table :test 'equal)))
(let ((,cache (gethash ',label *parser-cache*)))
(if-let ((,subcache (gethash ,args ,cache)))
(funcall ,subcache ,inp)
(funcall (setf (gethash ,args ,cache) ,parser) ,inp))))))))
(defmacro def-cached-arg-parser (name arguments &body body)
"Define cached parser with arguments."
(multiple-value-bind (forms declarations docstring) (parse-body body :documentation t)
`(defun ,name ,arguments
,docstring
,@declarations
(cached-arguments? ,@forms ,(gensym) ,@arguments))))
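;; Usage sketch (illustrative, not part of the original file): DEF-CACHED-PARSER
;; memoizes the parser closure itself, so repeated calls reuse one cached
;; instance until DROP-PARSER-CACHE is called. <parser-expression> stands for
;; any ordinary parser-combinators expression:
;;
;; (def-cached-parser whitespace?
;;   "Parser: skip whitespace."
;;   <parser-expression>)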
| null | https://raw.githubusercontent.com/Ramarren/cl-parser-combinators/9c7569a4f6af5e60c0d3a51d9c15c16d1714c845/cache.lisp | lisp | parser caching | (in-package :parser-combinators)
(defvar *parser-cache* (make-hash-table))
(defun drop-parser-cache ()
(clrhash *parser-cache*))
(defmacro cached? (parser label)
"Parser modifier macro: cache parser as label in global cache."
(with-unique-names (inp cache)
`#'(lambda (,inp)
(if-let ((,cache (gethash ',label *parser-cache*)))
(funcall ,cache ,inp)
(funcall (setf (gethash ',label *parser-cache*) ,parser) ,inp)))))
(defmacro def-cached-parser (name &body body)
"Define cached parser of no arguments."
(multiple-value-bind (forms declarations docstring) (parse-body body :documentation t)
`(defun ,name ()
,docstring
,@declarations
(cached? ,@forms ,(gensym)))))
(defmacro cached-arguments? (parser label &rest arguments)
"Parser modifier macro: cache parser as label with argument list equal under equal in global cache."
(with-unique-names (inp cache subcache args)
(let ((filtered-arguments (mapcar #'ensure-car
(remove-if (rcurry #'member '(&optional &key &rest)) arguments))))
`(let ((,args ,(cons 'list filtered-arguments)))
#'(lambda (,inp)
(unless (gethash ',label *parser-cache*)
(setf (gethash ',label *parser-cache*) (make-hash-table :test 'equal)))
(let ((,cache (gethash ',label *parser-cache*)))
(if-let ((,subcache (gethash ,args ,cache)))
(funcall ,subcache ,inp)
(funcall (setf (gethash ,args ,cache) ,parser) ,inp))))))))
(defmacro def-cached-arg-parser (name arguments &body body)
"Define cached parser with arguments."
(multiple-value-bind (forms declarations docstring) (parse-body body :documentation t)
`(defun ,name ,arguments
,docstring
,@declarations
(cached-arguments? ,@forms ,(gensym) ,@arguments))))
|
f9bcc5b492cf074fa384af399821b5c7b7eba1f4f66ce01a02dbbb8ab8c02165 | geophf/1HaskellADay | Solution.hs | # LANGUAGE OverloadedStrings , QuasiQuotes #
module Y2017.M12.D27.Solution where
-
Yesterday , we stored the unparsed article set into the database . We also were
able to parse the HTML content of articles stored in JSON the day prior .
Today , we continue parsing article information and then storing those parsed
data into our database .
From the article , we need to store in the database some stuff . These stuff are :
uuid
title
url
starttime
lastupdated
keywords
sections
authors
prologue
content
Now , DON'T PANIC ! ( tm )
We 'll take parsing these fields one day at a time . The simple parsing fields
we 'll do today ( the transliteration from a simple field in JSON to a simple
field in Haskell ) , the more complex fields we 'll handle a day at a time in turn .
-
Yesterday, we stored the unparsed article set into the database. We also were
able to parse the HTML content of articles stored in JSON the day prior.
Today, we continue parsing article information and then storing those parsed
data into our database.
From the article, we need to store in the database some stuff. These stuff are:
uuid
title
url
starttime
lastupdated
keywords
sections
authors
prologue
content
Now, DON'T PANIC! (tm)
We'll take parsing these fields one day at a time. The simple parsing fields
we'll do today (the transliteration from a simple field in JSON to a simple
field in Haskell), the more complex fields we'll handle a day at a time in turn.
--}
import Control.Monad (zipWithM)
import Data.Aeson
import Data.Aeson.Types
import Data.ByteString.Lazy.Char8 (ByteString)
import qualified Data.ByteString.Lazy.Char8 as BL
import Data.Maybe (fromJust, catMaybes)
import qualified Data.Text as T
import Text.HTML.TagSoup
-- below import available via 1HaskellADay git repository
import Control.List (weave)
import Control.Logic.Frege ((<<-))
import Data.HTML
import Store.SQL.Connection (withConnection, Database(PILOT))
import Store.SQL.Util.Indexed
import Y2017.M12.D20.Solution (Packet, readSample, rows)
import Y2017.M12.D26.Solution (insertStagedArt)
{--
You see from the above imported exercise that we have already scanned and parsed
the uuid, the title, and the content of each article. We have three more tricky
parsing exercises: starttime and lastupdated for time, authors, which is already
parsed out a bit for us, and sections.
Let's flesh out our article structure from 'yesterday,' leaving the authors
and sections unparsed for now, but figure out how to ingest the times, then,
with the parsed information, save those articles to our PostgreSQL data store.
--}
import Data.Time
import Database.PostgreSQL.Simple
import Database.PostgreSQL.Simple.SqlQQ
import Database.PostgreSQL.Simple.ToField
import Database.PostgreSQL.Simple.ToRow
data DatedArticle a =
Carbon { uuid, title, url :: String,
prologue :: Maybe String,
authors :: a,
starttime, lastupdated :: Maybe ZonedTime,
sections :: [String],
keywords :: [Value],
content :: [String],
byline :: Maybe String }
deriving Show
-- so, but how do we get from that wild and wonderful structure in the JSON
-- for dates to a Haskell Day value?
iso8601like :: String
iso8601like = "%FT%T%z"
parseDate :: Value -> Parser (Maybe ZonedTime)
parseDate (Object o) =
o.:? "iso8601" >>= \mbstr -> return (case mbstr of
Nothing -> Nothing
Just t -> parseTimeM True defaultTimeLocale iso8601like t)
showDate :: ZonedTime -> String
showDate = formatTime defaultTimeLocale iso8601like
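-- A quick check of the two helpers above (illustrative; it relies on the time
-- package accepting the "-05:00" offset spelling for %z, which the sample
-- below and the doctest further down both use):
--
-- >>> parseTimeM True defaultTimeLocale iso8601like "2017-12-12T22:00:00-05:00" :: Maybe ZonedTime
-- Just 2017-12-12 22:00:00 -0500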
sampleDate :: ByteString
sampleDate = BL.unlines ["{",
"\"rfc2822\": \"Tue, 12 Dec 2017 22:00:00 -0500\",",
"\"utc\": \"1513134000000\",",
"\"iso8601\": \"2017-12-12T22:00:00-05:00\"",
"}"]
-- note that changing -05:00 to -04:00 does NOT change the time zone
{--
readArticles :: FilePath -> IO (Packet (DatedArticle Value))
readArticles json = fromJust . decode <$> BL.readFile json
>>> arts <- readArticles "Y2017/M12/D27/one-article.json"
>>> showDate (starttime . head $ rows arts)
"2017-12-12T22:00:00-0500"
--}
instance FromJSON a => FromJSON (DatedArticle a) where
parseJSON (Object o) =
Carbon <$> o .: "uuid" <*> o .: "title" <*> o .: "url"
<*> o .: "prologue" <*> o .: "authors"
<*> (o .: "starttime" >>= parseDate)
<*> (o .: "lastupdated" >>= parseDate)
<*> o .: "sections" <*> o .: "keywords"
<*> o .: "content" <*> o .: "byline"
-- we also need to make DatedArticle an HTML instance
instance HTML (DatedArticle a) where
body = content
-- Now, with that parsed structure, save the Article set to the database
instance ToField a => ToRow (DatedArticle a) where
toRow art@(Carbon uu ti ur pr au st la se ke co _) =
[toField la, toField st, toField uu, toField ur, toField (demark <$> pr),
toField (unlines $ plainText art), toField (htmlBlock art),
toField (weave se), toField ti, toField au,
toField (weave (map showVal ke))]
showVal :: Value -> String
showVal (String str) = T.unpack str
showVal (Number n) = show n
-- The insert statement gives the Article structure
-- (also image attached from the Entity-relation diagram)
insertArticleStmt :: Query
insertArticleStmt =
[sql|INSERT INTO article (src_id,update_dt,publish_dt,article_id,url,
abstract,full_text,rendered_text,sections,title,
authors,keywords)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) returning id|]
ixArt2ixArt :: Index -> (DatedArticle a) -> IxValue (DatedArticle a)
ixArt2ixArt (Idx x) art = IxV x art
insertArts :: ToField a => Connection -> [Index] -> [DatedArticle a] -> IO [Index]
insertArts conn = returning conn insertArticleStmt <<- zipWith ixArt2ixArt
-- from the source article ids in the article_stg table and the parsed articles,
-- store the articles in the database
{-- BONUS -----------------------------------------------------------------
Write an ETL process that reads in the JSON, stores the raw article information
in article_stg (hint: Y2017.M12.D26.Exercise), then stores the parsed article
information with the source article id in the database as well.
--}
parseArticle :: Int -> Value -> IO (Maybe (DatedArticle Value))
parseArticle idx = pa idx . fromJSON
pa :: Int -> Result (DatedArticle Value) -> IO (Maybe (DatedArticle Value))
pa idx (Success art) =
putStrLn ("Parsed " ++ uuid art) >> return (Just art)
pa idx (Error err) =
putStrLn ("Could not parse article " ++ show idx ++ ", error: " ++ err) >>
return Nothing
main' :: [String] -> IO ()
main' [jsonFilePath] =
readSample jsonFilePath >>= \pac ->
let blocks = rows pac in
zipWithM parseArticle [1..] blocks >>= \arts ->
withConnection PILOT (\conn -> do
ixs <- insertStagedArt conn blocks
insertArts conn ixs (catMaybes arts)) >>
putStrLn ("Wrote " ++ (show $ length blocks) ++ " articles to the database.")
main' _ =
putStrLn (unlines ["","pilot-etl <json-file-name>", "",
"\tLoads json-file-name into the Pilot database", ""])
| null | https://raw.githubusercontent.com/geophf/1HaskellADay/514792071226cd1e2ba7640af942667b85601006/exercises/HAD/Y2017/M12/D27/Solution.hs | haskell | }
}
so, but how do we get from that wild and wonderful structure in the JSON
note that changing -05:00 to -04:00 does NOT change the time zone
}
(also image attached from the Entity-relation diagram)
from the source article ids in the article_stg table and the parsed articles,
store the articles in the database
- BONUS -----------------------------------------------------------------
Write an ETL process that reads in the JSON, stores the raw article information
in article_stg (hint: Y2017.M12.D26.Exercise), then stores the parsed article
information with the source article id in the database as well.
- | # LANGUAGE OverloadedStrings , QuasiQuotes #
module Y2017.M12.D27.Solution where
-
Yesterday , we stored the unparsed article set into the database . We also were
able to parse the HTML content of articles stored in JSON the day prior .
Today , we continue parsing article information and then storing those parsed
data into our database .
From the article , we need to store in the database some stuff . These stuff are :
uuid
title
url
starttime
lastupdated
keywords
sections
authors
prologue
content
Now , DON'T PANIC ! ( tm )
We 'll take parsing these fields one day at a time . The simple parsing fields
we 'll do today ( the transliteration from a simple field in JSON to a simple
field in Haskell ) , the more complex fields we 'll handle a day at a time in turn .
-
Yesterday, we stored the unparsed article set into the database. We also were
able to parse the HTML content of articles stored in JSON the day prior.
Today, we continue parsing article information and then storing those parsed
data into our database.
From the article, we need to store in the database some stuff. These stuff are:
uuid
title
url
starttime
lastupdated
keywords
sections
authors
prologue
content
Now, DON'T PANIC! (tm)
We'll take parsing these fields one day at a time. The simple parsing fields
we'll do today (the transliteration from a simple field in JSON to a simple
field in Haskell), the more complex fields we'll handle a day at a time in turn.
import Control.Monad (zipWithM)
import Data.Aeson
import Data.Aeson.Types
import Data.ByteString.Lazy.Char8 (ByteString)
import qualified Data.ByteString.Lazy.Char8 as BL
import Data.Maybe (fromJust, catMaybes)
import qualified Data.Text as T
import Text.HTML.TagSoup
below import available via 1HaskellADay git repository
import Control.List (weave)
import Control.Logic.Frege ((<<-))
import Data.HTML
import Store.SQL.Connection (withConnection, Database(PILOT))
import Store.SQL.Util.Indexed
import Y2017.M12.D20.Solution (Packet, readSample, rows)
import Y2017.M12.D26.Solution (insertStagedArt)
-
You see from the above imported exercise that we have already scanned and parsed
the uuid , the title , and the content of each article . We have three more tricky
parsing exercises : starttime and lastupdated for time , authors , which is already
parsed out a bit for us , and sections .
Let 's flesh out our article structure from ' yesterday , ' leaving the authors
and sections unparsed for now , but figure out how to ingest the times , then ,
with the parsed information , save those articles to our PostgreSQL data store .
-
You see from the above imported exercise that we have already scanned and parsed
the uuid, the title, and the content of each article. We have three more tricky
parsing exercises: starttime and lastupdated for time, authors, which is already
parsed out a bit for us, and sections.
Let's flesh out our article structure from 'yesterday,' leaving the authors
and sections unparsed for now, but figure out how to ingest the times, then,
with the parsed information, save those articles to our PostgreSQL data store.
import Data.Time
import Database.PostgreSQL.Simple
import Database.PostgreSQL.Simple.SqlQQ
import Database.PostgreSQL.Simple.ToField
import Database.PostgreSQL.Simple.ToRow
data DatedArticle a =
Carbon { uuid, title, url :: String,
prologue :: Maybe String,
authors :: a,
starttime, lastupdated :: Maybe ZonedTime,
sections :: [String],
keywords :: [Value],
content :: [String],
byline :: Maybe String }
deriving Show
for dates to a Haskell Day value ?
iso8601like :: String
iso8601like = "%FT%T%z"
parseDate :: Value -> Parser (Maybe ZonedTime)
parseDate (Object o) =
o.:? "iso8601" >>= \mbstr -> return (case mbstr of
Nothing -> Nothing
Just t -> parseTimeM True defaultTimeLocale iso8601like t)
showDate :: ZonedTime -> String
showDate = formatTime defaultTimeLocale iso8601like
sampleDate :: ByteString
sampleDate = BL.unlines ["{",
"\"rfc2822\": \"Tue, 12 Dec 2017 22:00:00 -0500\",",
"\"utc\": \"1513134000000\",",
"\"iso8601\": \"2017-12-12T22:00:00-05:00\"",
"}"]
-
readArticles : : FilePath - > IO ( Packet ( DatedArticle Value ) )
fromJust . decode < $ > BL.readFile json
> > > arts < - readArticles " Y2017 / M12 / D27 / one - article.json "
> > > showDate ( starttime . head $ rows arts )
" 2017 - 12 - 12T22:00:00 - 0500 "
-
readArticles :: FilePath -> IO (Packet (DatedArticle Value))
readArticles json = fromJust . decode <$> BL.readFile json
>>> arts <- readArticles "Y2017/M12/D27/one-article.json"
>>> showDate (starttime . head $ rows arts)
"2017-12-12T22:00:00-0500"
instance FromJSON a => FromJSON (DatedArticle a) where
parseJSON (Object o) =
Carbon <$> o .: "uuid" <*> o .: "title" <*> o .: "url"
<*> o .: "prologue" <*> o .: "authors"
<*> (o .: "starttime" >>= parseDate)
<*> (o .: "lastupdated" >>= parseDate)
<*> o .: "sections" <*> o .: "keywords"
<*> o .: "content" <*> o .: "byline"
we also need to make DatedArticle an HTML instance
instance HTML (DatedArticle a) where
body = content
Now , with that parsed structure , save the Article set to the database
instance ToField a => ToRow (DatedArticle a) where
toRow art@(Carbon uu ti ur pr au st la se ke co _) =
[toField la, toField st, toField uu, toField ur, toField (demark <$> pr),
toField (unlines $ plainText art), toField (htmlBlock art),
toField (weave se), toField ti, toField au,
toField (weave (map showVal ke))]
showVal :: Value -> String
showVal (String str) = T.unpack str
showVal (Number n) = show n
The insert statement gives the Article structure
insertArticleStmt :: Query
insertArticleStmt =
[sql|INSERT INTO article (src_id,update_dt,publish_dt,article_id,url,
abstract,full_text,rendered_text,sections,title,
authors,keywords)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) returning id|]
ixArt2ixArt :: Index -> (DatedArticle a) -> IxValue (DatedArticle a)
ixArt2ixArt (Idx x) art = IxV x art
insertArts :: ToField a => Connection -> [Index] -> [DatedArticle a] -> IO [Index]
insertArts conn = returning conn insertArticleStmt <<- zipWith ixArt2ixArt
parseArticle :: Int -> Value -> IO (Maybe (DatedArticle Value))
parseArticle idx = pa idx . fromJSON
pa :: Int -> Result (DatedArticle Value) -> IO (Maybe (DatedArticle Value))
pa idx (Success art) =
putStrLn ("Parsed " ++ uuid art) >> return (Just art)
pa idx (Error err) =
putStrLn ("Could not parse article " ++ show idx ++ ", error: " ++ err) >>
return Nothing
main' :: [String] -> IO ()
main' [jsonFilePath] =
readSample jsonFilePath >>= \pac ->
let blocks = rows pac in
zipWithM parseArticle [1..] blocks >>= \arts ->
withConnection PILOT (\conn -> do
ixs <- insertStagedArt conn blocks
insertArts conn ixs (catMaybes arts)) >>
putStrLn ("Wrote " ++ (show $ length blocks) ++ " articles to the database.")
main' _ =
putStrLn (unlines ["","pilot-etl <json-file-name>", "",
"\tLoads json-file-name into the Pilot database", ""])
|
e4e686ccbbe9ab2f834b3ffd7119b225451b5e78c7197ccdcea53de30a8e83cc | input-output-hk/cardano-base | Strict.hs | {-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveTraversable #-}
-- | Strict version of the 'Maybe' type.
module Data.Maybe.Strict
( StrictMaybe (SNothing, SJust),
-- * Conversion: StrictMaybe <--> Maybe
strictMaybeToMaybe,
maybeToStrictMaybe,
-- * Accessing the underlying value
fromSMaybe,
isSNothing,
isSJust,
strictMaybe,
)
where
import Cardano.Binary
( FromCBOR (fromCBOR),
ToCBOR (toCBOR),
decodeBreakOr,
decodeListLenOrIndef,
encodeListLen,
)
import Control.Applicative (Alternative(..))
import Control.DeepSeq (NFData)
import Data.Aeson (FromJSON (..), ToJSON (..))
import Data.Default.Class (Default (..))
import GHC.Generics (Generic)
import NoThunks.Class (NoThunks (..))
data StrictMaybe a
= SNothing
| SJust !a
deriving
( Eq,
Ord,
Show,
Generic,
Functor,
Foldable,
Traversable,
NoThunks,
NFData
)
instance Applicative StrictMaybe where
pure = SJust
SJust f <*> m = fmap f m
SNothing <*> _m = SNothing
SJust _m1 *> m2 = m2
SNothing *> _m2 = SNothing
instance Monad StrictMaybe where
SJust x >>= k = k x
SNothing >>= _ = SNothing
(>>) = (*>)
return = pure
instance MonadFail StrictMaybe where
fail _ = SNothing
instance ToCBOR a => ToCBOR (StrictMaybe a) where
toCBOR SNothing = encodeListLen 0
toCBOR (SJust x) = encodeListLen 1 <> toCBOR x
instance FromCBOR a => FromCBOR (StrictMaybe a) where
fromCBOR = do
maybeN <- decodeListLenOrIndef
case maybeN of
Just 0 -> pure SNothing
Just 1 -> SJust <$> fromCBOR
Just _ -> fail "too many elements in length-style decoding of StrictMaybe."
Nothing -> do
isBreak <- decodeBreakOr
if isBreak
then pure SNothing
else do
x <- fromCBOR
isBreak2 <- decodeBreakOr
if isBreak2
then pure (SJust x)
else fail "too many elements in break-style decoding of StrictMaybe."
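-- Note on the wire format above (descriptive comment, not in the original
-- module): 'SNothing' is written as a zero-length list and 'SJust x' as a
-- one-element list, so the decoder accepts both the definite-length and the
-- break-terminated (indefinite-length) encoding of that same shape.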
instance ToJSON a => ToJSON (StrictMaybe a) where
toJSON = toJSON . strictMaybeToMaybe
toEncoding = toEncoding . strictMaybeToMaybe
instance FromJSON a => FromJSON (StrictMaybe a) where
parseJSON v = maybeToStrictMaybe <$> parseJSON v
strictMaybeToMaybe :: StrictMaybe a -> Maybe a
strictMaybeToMaybe SNothing = Nothing
strictMaybeToMaybe (SJust x) = Just x
maybeToStrictMaybe :: Maybe a -> StrictMaybe a
maybeToStrictMaybe Nothing = SNothing
maybeToStrictMaybe (Just x) = SJust x
-- | Same as `Data.Maybe.fromMaybe`
fromSMaybe :: a -> StrictMaybe a -> a
fromSMaybe d SNothing = d
fromSMaybe _ (SJust x) = x
-- | Same as `Data.Maybe.isNothing`
isSNothing :: StrictMaybe a -> Bool
isSNothing SNothing = True
isSNothing _ = False
-- | Same as `Data.Maybe.isJust`
isSJust :: StrictMaybe a -> Bool
isSJust = not . isSNothing
-- | Same as `Data.Maybe.maybe`
strictMaybe :: a -> (b -> a) -> StrictMaybe b -> a
strictMaybe x _ SNothing = x
strictMaybe _ f (SJust y) = f y
instance Default (StrictMaybe t) where
def = SNothing
instance Semigroup a => Semigroup (StrictMaybe a) where
SNothing <> x = x
x <> SNothing = x
SJust x <> SJust y = SJust (x <> y)
instance Semigroup a => Monoid (StrictMaybe a) where
mempty = SNothing
instance Alternative StrictMaybe where
empty = SNothing
SNothing <|> r = r
l <|> _ = l
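-- Small usage sketch (illustrative, not part of the original module):
--
-- >>> fromSMaybe 0 (SJust 5)
-- 5
-- >>> strictMaybe "none" show (SNothing :: StrictMaybe Int)
-- "none"
-- >>> maybeToStrictMaybe (Just 'x') <|> SJust 'y'
-- SJust 'x'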
| null | https://raw.githubusercontent.com/input-output-hk/cardano-base/9989b5571db1636bf6afd71a8f93d2c989d68d74/cardano-strict-containers/src/Data/Maybe/Strict.hs | haskell | # LANGUAGE DeriveAnyClass #
# LANGUAGE DeriveTraversable #
| Strict version of the 'Maybe' type.
* Conversion: StrictMaybe <--> Maybe
* Accessing the underlying value
| Same as `Data.Maybe.fromMaybe`
| Same as `Data.Maybe.isNothing`
| Same as `Data.Maybe.isJust`
| Same as `Data.Maybe.maybe` | # LANGUAGE DeriveGeneric #
module Data.Maybe.Strict
( StrictMaybe (SNothing, SJust),
strictMaybeToMaybe,
maybeToStrictMaybe,
fromSMaybe,
isSNothing,
isSJust,
strictMaybe,
)
where
import Cardano.Binary
( FromCBOR (fromCBOR),
ToCBOR (toCBOR),
decodeBreakOr,
decodeListLenOrIndef,
encodeListLen,
)
import Control.Applicative (Alternative(..))
import Control.DeepSeq (NFData)
import Data.Aeson (FromJSON (..), ToJSON (..))
import Data.Default.Class (Default (..))
import GHC.Generics (Generic)
import NoThunks.Class (NoThunks (..))
data StrictMaybe a
= SNothing
| SJust !a
deriving
( Eq,
Ord,
Show,
Generic,
Functor,
Foldable,
Traversable,
NoThunks,
NFData
)
instance Applicative StrictMaybe where
pure = SJust
SJust f <*> m = fmap f m
SNothing <*> _m = SNothing
SJust _m1 *> m2 = m2
SNothing *> _m2 = SNothing
instance Monad StrictMaybe where
SJust x >>= k = k x
SNothing >>= _ = SNothing
(>>) = (*>)
return = pure
instance MonadFail StrictMaybe where
fail _ = SNothing
instance ToCBOR a => ToCBOR (StrictMaybe a) where
toCBOR SNothing = encodeListLen 0
toCBOR (SJust x) = encodeListLen 1 <> toCBOR x
instance FromCBOR a => FromCBOR (StrictMaybe a) where
fromCBOR = do
maybeN <- decodeListLenOrIndef
case maybeN of
Just 0 -> pure SNothing
Just 1 -> SJust <$> fromCBOR
Just _ -> fail "too many elements in length-style decoding of StrictMaybe."
Nothing -> do
isBreak <- decodeBreakOr
if isBreak
then pure SNothing
else do
x <- fromCBOR
isBreak2 <- decodeBreakOr
if isBreak2
then pure (SJust x)
else fail "too many elements in break-style decoding of StrictMaybe."
instance ToJSON a => ToJSON (StrictMaybe a) where
toJSON = toJSON . strictMaybeToMaybe
toEncoding = toEncoding . strictMaybeToMaybe
instance FromJSON a => FromJSON (StrictMaybe a) where
parseJSON v = maybeToStrictMaybe <$> parseJSON v
strictMaybeToMaybe :: StrictMaybe a -> Maybe a
strictMaybeToMaybe SNothing = Nothing
strictMaybeToMaybe (SJust x) = Just x
maybeToStrictMaybe :: Maybe a -> StrictMaybe a
maybeToStrictMaybe Nothing = SNothing
maybeToStrictMaybe (Just x) = SJust x
fromSMaybe :: a -> StrictMaybe a -> a
fromSMaybe d SNothing = d
fromSMaybe _ (SJust x) = x
isSNothing :: StrictMaybe a -> Bool
isSNothing SNothing = True
isSNothing _ = False
isSJust :: StrictMaybe a -> Bool
isSJust = not . isSNothing
strictMaybe :: a -> (b -> a) -> StrictMaybe b -> a
strictMaybe x _ SNothing = x
strictMaybe _ f (SJust y) = f y
instance Default (StrictMaybe t) where
def = SNothing
instance Semigroup a => Semigroup (StrictMaybe a) where
SNothing <> x = x
x <> SNothing = x
SJust x <> SJust y = SJust (x <> y)
instance Semigroup a => Monoid (StrictMaybe a) where
mempty = SNothing
instance Alternative StrictMaybe where
empty = SNothing
SNothing <|> r = r
l <|> _ = l
|
753b543905d460186cc0a720e15358c85ddab6bb8af2215a086bd87b45394bcd | fmthoma/vgrep | Type.hs | | The ' VgrepT ' monad transformer allows reading from the ' Environment '
-- and changing the state of the 'Vgrep.App.App' or a 'Vgrep.Widget.Widget'.
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
module Vgrep.Type
* The ' VgrepT ' monad transformer
VgrepT ()
, Vgrep
, mkVgrepT
, runVgrepT
-- ** Modifying the environment
, modifyEnvironment
-- ** Utilities
, vgrepBracket
-- * Re-exports
, lift
, hoist
, module Vgrep.Environment
, module Export
) where
import qualified Control.Exception as E
import Control.Lens.Compat
import Control.Monad.Identity
import Control.Monad.Morph
import Control.Monad.Reader
import qualified Control.Monad.Reader as Export
( MonadReader
, ask
, local
)
import Control.Monad.State.Extended
import qualified Control.Monad.State.Extended as Export
( MonadState
, get
, modify
, put
)
import Lens.Micro.Mtl.Internal
import Vgrep.Environment
-- | The 'VgrepT' monad transformer is parameterized over the state @s@ of
-- a 'Vgrep.Widget.Widget' or an 'Vgrep.App.App'.
newtype VgrepT s m a = VgrepT (StateT s (StateT Environment m) a)
deriving ( Functor
, Applicative
, Monad
, MonadIO )
-- | 'VgrepT' can read from the 'Environment'. Modifications to the
-- enviromnent are only possible globally (see 'modifyEnvironment'), the
-- 'local' environment is pure.
instance Monad m => MonadReader Environment (VgrepT s m) where
ask = VgrepT (lift get)
local f action = mkVgrepT $ \s env -> runVgrepT action s (f env)
instance Monad m => MonadState s (VgrepT s m) where
get = VgrepT get
put = VgrepT . put
instance MonadTrans (VgrepT s) where
lift = VgrepT . lift . lift
instance MFunctor (VgrepT s) where
hoist f (VgrepT action) = VgrepT (hoist (hoist f) action)
type instance Zoomed (VgrepT s m) = Zoomed (StateT s (StateT Environment m))
instance Monad m => Zoom (VgrepT s m) (VgrepT t m) s t where
zoom l (VgrepT m) = VgrepT (zoom l m)
-- | Lift a monadic action to 'VgrepT'.
mkVgrepT
:: Monad m
=> (s -> Environment -> m (a, s))
-> VgrepT s m a
mkVgrepT action =
let action' s env = fmap (, env) (action s env)
in VgrepT (StateT (StateT . action'))
-- | Pass an initial state and an 'Environment' and reduce a 'VgrepT'
-- action to an action in the base monad.
runVgrepT
:: Monad m
=> VgrepT s m a
-> s
-> Environment
-> m (a, s)
runVgrepT (VgrepT action) s env = do
((a, s'), _env') <- runStateT (runStateT action s) env
pure (a, s')
type Vgrep s = VgrepT s Identity
-- | A version of 'E.bracket' where the action is lifted to 'VgrepT'.
vgrepBracket
:: IO a
-> (a -> IO c)
-> (a -> VgrepT s IO b)
-> VgrepT s IO b
vgrepBracket before after action = mkVgrepT $ \s env ->
let baseAction a = runVgrepT (action a) s env
in E.bracket before after baseAction
-- | The 'Environment' of 'VgrepT' is not stateful, however it can be
-- modified globally. An example is resizing the application by changing
-- the display bounds.
modifyEnvironment :: Monad m => (Environment -> Environment) -> VgrepT s m ()
modifyEnvironment = VgrepT . lift . modify
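-- A usage sketch (illustrative, not part of the original module): reacting to
-- a terminal resize by updating the global environment, as the comment above
-- suggests. The 'viewportSize' record field is an assumption; substitute
-- whatever accessor the actual 'Environment' type provides.
--
-- onResize :: Monad m => (Int, Int) -> VgrepT s m ()
-- onResize newBounds = modifyEnvironment (\env -> env { viewportSize = newBounds })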
| null | https://raw.githubusercontent.com/fmthoma/vgrep/f3b140bf3150a3699234469c34ff8c13a298998e/src/Vgrep/Type.hs | haskell | and changing the state of the 'Vgrep.App.App' or a 'Vgrep.Widget.Widget'.
# LANGUAGE UndecidableInstances #
** Modifying the environment
** Utilities
* Re-exports
enviromnent are only possible globally (see 'modifyEnvironment'), the
'local' environment is pure.
action to an action in the base monad.
modified globally. An example is resizing the application by changing
the display bounds. | | The ' VgrepT ' monad transformer allows reading from the ' Environment '
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TupleSections #
# LANGUAGE TypeFamilies #
module Vgrep.Type
* The ' VgrepT ' monad transformer
VgrepT ()
, Vgrep
, mkVgrepT
, runVgrepT
, modifyEnvironment
, vgrepBracket
, lift
, hoist
, module Vgrep.Environment
, module Export
) where
import qualified Control.Exception as E
import Control.Lens.Compat
import Control.Monad.Identity
import Control.Monad.Morph
import Control.Monad.Reader
import qualified Control.Monad.Reader as Export
( MonadReader
, ask
, local
)
import Control.Monad.State.Extended
import qualified Control.Monad.State.Extended as Export
( MonadState
, get
, modify
, put
)
import Lens.Micro.Mtl.Internal
import Vgrep.Environment
| The ' VgrepT ' monad transformer is parameterized over the state @s@ of
a ' Vgrep . Widget . Widget ' or an ' Vgepr . App . App ' .
newtype VgrepT s m a = VgrepT (StateT s (StateT Environment m) a)
deriving ( Functor
, Applicative
, Monad
, MonadIO )
| ' VgrepT ' can read from the ' Environment ' . Modifications to the
instance Monad m => MonadReader Environment (VgrepT s m) where
ask = VgrepT (lift get)
local f action = mkVgrepT $ \s env -> runVgrepT action s (f env)
instance Monad m => MonadState s (VgrepT s m) where
get = VgrepT get
put = VgrepT . put
instance MonadTrans (VgrepT s) where
lift = VgrepT . lift . lift
instance MFunctor (VgrepT s) where
hoist f (VgrepT action) = VgrepT (hoist (hoist f) action)
type instance Zoomed (VgrepT s m) = Zoomed (StateT s (StateT Environment m))
instance Monad m => Zoom (VgrepT s m) (VgrepT t m) s t where
zoom l (VgrepT m) = VgrepT (zoom l m)
| Lift a monadic action to ' VgrepT ' .
mkVgrepT
:: Monad m
=> (s -> Environment -> m (a, s))
-> VgrepT s m a
mkVgrepT action =
let action' s env = fmap (, env) (action s env)
in VgrepT (StateT (StateT . action'))
| Pass an initial state and an ' Environment ' and reduce a ' VgrepT '
runVgrepT
:: Monad m
=> VgrepT s m a
-> s
-> Environment
-> m (a, s)
runVgrepT (VgrepT action) s env = do
((a, s'), _env') <- runStateT (runStateT action s) env
pure (a, s')
type Vgrep s = VgrepT s Identity
| A version of ' E.bracket ' where the action is lifted to ' VgrepT ' .
vgrepBracket
:: IO a
-> (a -> IO c)
-> (a -> VgrepT s IO b)
-> VgrepT s IO b
vgrepBracket before after action = mkVgrepT $ \s env ->
let baseAction a = runVgrepT (action a) s env
in E.bracket before after baseAction
| The ' Environment ' of ' VgrepT ' is not stateful , however it can be
modifyEnvironment :: Monad m => (Environment -> Environment) -> VgrepT s m ()
modifyEnvironment = VgrepT . lift . modify
|
07a7fa858f951a5abca2202d89a8e940363652857a5ad79b9b9879de5968de68 | lspitzner/brittany | Test486.hs | -- brittany { lconfig_columnAlignMode: { tag: ColumnAlignModeDisabled }, lconfig_indentPolicy: IndentPolicyLeft }
import Test (longbindingNameThatoverflowsColum)
import Test (Long(List, Of, Things))
| null | https://raw.githubusercontent.com/lspitzner/brittany/a15eed5f3608bf1fa7084fcf008c6ecb79542562/data/Test486.hs | haskell | brittany { lconfig_columnAlignMode: { tag: ColumnAlignModeDisabled }, lconfig_indentPolicy: IndentPolicyLeft } | import Test (longbindingNameThatoverflowsColum)
import Test (Long(List, Of, Things))
|